"""
Test functions for models.regression
"""
# TODO: Test for LM
from statsmodels.compat.python import long, lrange
import warnings
import pandas
import numpy as np
from numpy.testing import (assert_almost_equal, assert_approx_equal,
assert_raises, assert_equal, assert_allclose)
from scipy.linalg import toeplitz
from statsmodels.tools.tools import add_constant, categorical
from statsmodels.compat.numpy import np_matrix_rank
from statsmodels.regression.linear_model import OLS, WLS, GLS, yule_walker
from statsmodels.datasets import longley
from scipy.stats import t as student_t
DECIMAL_4 = 4
DECIMAL_3 = 3
DECIMAL_2 = 2
DECIMAL_1 = 1
DECIMAL_7 = 7
DECIMAL_0 = 0
class CheckRegressionResults(object):
"""
    res2 contains results from Rmodelwrap or obtained from statistical
    packages such as R, Stata, or SAS, and written to model_results
"""
decimal_params = DECIMAL_4
def test_params(self):
assert_almost_equal(self.res1.params, self.res2.params,
self.decimal_params)
decimal_standarderrors = DECIMAL_4
def test_standarderrors(self):
assert_almost_equal(self.res1.bse,self.res2.bse,
self.decimal_standarderrors)
decimal_confidenceintervals = DECIMAL_4
def test_confidenceintervals(self):
        # NOTE: Stata rounds residuals (at least) to significant digits,
        # so use approx_equal
conf1 = self.res1.conf_int()
conf2 = self.res2.conf_int()
for i in range(len(conf1)):
assert_approx_equal(conf1[i][0], conf2[i][0],
self.decimal_confidenceintervals)
assert_approx_equal(conf1[i][1], conf2[i][1],
self.decimal_confidenceintervals)
decimal_conf_int_subset = DECIMAL_4
def test_conf_int_subset(self):
if len(self.res1.params) > 1:
ci1 = self.res1.conf_int(cols=(1,2))
ci2 = self.res1.conf_int()[1:3]
assert_almost_equal(ci1, ci2, self.decimal_conf_int_subset)
else:
pass
decimal_scale = DECIMAL_4
def test_scale(self):
assert_almost_equal(self.res1.scale, self.res2.scale,
self.decimal_scale)
decimal_rsquared = DECIMAL_4
def test_rsquared(self):
assert_almost_equal(self.res1.rsquared, self.res2.rsquared,
self.decimal_rsquared)
decimal_rsquared_adj = DECIMAL_4
def test_rsquared_adj(self):
assert_almost_equal(self.res1.rsquared_adj, self.res2.rsquared_adj,
self.decimal_rsquared_adj)
def test_degrees(self):
assert_equal(self.res1.model.df_model, self.res2.df_model)
assert_equal(self.res1.model.df_resid, self.res2.df_resid)
decimal_ess = DECIMAL_4
def test_ess(self):
#Explained Sum of Squares
assert_almost_equal(self.res1.ess, self.res2.ess,
self.decimal_ess)
decimal_ssr = DECIMAL_4
def test_sumof_squaredresids(self):
assert_almost_equal(self.res1.ssr, self.res2.ssr, self.decimal_ssr)
decimal_mse_resid = DECIMAL_4
    def test_mse_resid(self):
        # Mean squared error of residuals
        assert_almost_equal(self.res1.mse_resid, self.res2.mse_resid,
                            self.decimal_mse_resid)
    decimal_mse_model = DECIMAL_4
    def test_mse_model(self):
        assert_almost_equal(self.res1.mse_model, self.res2.mse_model,
                            self.decimal_mse_model)
decimal_mse_total = DECIMAL_4
def test_mse_total(self):
assert_almost_equal(self.res1.mse_total, self.res2.mse_total,
self.decimal_mse_total, err_msg="Test class %s" % self)
decimal_fvalue = DECIMAL_4
def test_fvalue(self):
        # didn't change this; it is unclear whether the comparison should
        # complain that -inf != -inf
#if not (np.isinf(self.res1.fvalue) and np.isinf(self.res2.fvalue)):
assert_almost_equal(self.res1.fvalue, self.res2.fvalue,
self.decimal_fvalue)
decimal_loglike = DECIMAL_4
def test_loglike(self):
assert_almost_equal(self.res1.llf, self.res2.llf, self.decimal_loglike)
decimal_aic = DECIMAL_4
def test_aic(self):
assert_almost_equal(self.res1.aic, self.res2.aic, self.decimal_aic)
decimal_bic = DECIMAL_4
def test_bic(self):
assert_almost_equal(self.res1.bic, self.res2.bic, self.decimal_bic)
decimal_pvalues = DECIMAL_4
def test_pvalues(self):
assert_almost_equal(self.res1.pvalues, self.res2.pvalues,
self.decimal_pvalues)
decimal_wresid = DECIMAL_4
def test_wresid(self):
assert_almost_equal(self.res1.wresid, self.res2.wresid,
self.decimal_wresid)
decimal_resids = DECIMAL_4
def test_resids(self):
assert_almost_equal(self.res1.resid, self.res2.resid,
self.decimal_resids)
decimal_norm_resids = DECIMAL_4
def test_norm_resids(self):
assert_almost_equal(self.res1.resid_pearson, self.res2.resid_pearson,
self.decimal_norm_resids)
#TODO: test fittedvalues and what else?
class TestOLS(CheckRegressionResults):
@classmethod
def setupClass(cls):
from .results.results_regression import Longley
data = longley.load()
data.exog = add_constant(data.exog, prepend=False)
res1 = OLS(data.endog, data.exog).fit()
res2 = Longley()
res2.wresid = res1.wresid # workaround hack
cls.res1 = res1
cls.res2 = res2
res_qr = OLS(data.endog, data.exog).fit(method="qr")
model_qr = OLS(data.endog, data.exog)
Q, R = np.linalg.qr(data.exog)
model_qr.exog_Q, model_qr.exog_R = Q, R
model_qr.normalized_cov_params = np.linalg.inv(np.dot(R.T, R))
model_qr.rank = np_matrix_rank(R)
res_qr2 = model_qr.fit(method="qr")
cls.res_qr = res_qr
cls.res_qr_manual = res_qr2
def test_eigenvalues(self):
eigenval_perc_diff = (self.res_qr.eigenvals - self.res_qr_manual.eigenvals)
eigenval_perc_diff /= self.res_qr.eigenvals
zeros = np.zeros_like(eigenval_perc_diff)
assert_almost_equal(eigenval_perc_diff, zeros, DECIMAL_7)
# Robust error tests. Compare values computed with SAS
def test_HC0_errors(self):
        # Split up because the copied results do not have DECIMAL_4 digits
        # of precision for the last coefficient.
assert_almost_equal(self.res1.HC0_se[:-1],
self.res2.HC0_se[:-1], DECIMAL_4)
assert_approx_equal(np.round(self.res1.HC0_se[-1]), self.res2.HC0_se[-1])
def test_HC1_errors(self):
assert_almost_equal(self.res1.HC1_se[:-1],
self.res2.HC1_se[:-1], DECIMAL_4)
assert_approx_equal(self.res1.HC1_se[-1], self.res2.HC1_se[-1])
def test_HC2_errors(self):
assert_almost_equal(self.res1.HC2_se[:-1],
self.res2.HC2_se[:-1], DECIMAL_4)
assert_approx_equal(self.res1.HC2_se[-1], self.res2.HC2_se[-1])
def test_HC3_errors(self):
assert_almost_equal(self.res1.HC3_se[:-1],
self.res2.HC3_se[:-1], DECIMAL_4)
assert_approx_equal(self.res1.HC3_se[-1], self.res2.HC3_se[-1])
def test_qr_params(self):
assert_almost_equal(self.res1.params,
self.res_qr.params, 6)
def test_qr_normalized_cov_params(self):
#todo: need assert_close
assert_almost_equal(np.ones_like(self.res1.normalized_cov_params),
self.res1.normalized_cov_params /
self.res_qr.normalized_cov_params, 5)
def test_missing(self):
data = longley.load()
data.exog = add_constant(data.exog, prepend=False)
data.endog[[3, 7, 14]] = np.nan
mod = OLS(data.endog, data.exog, missing='drop')
assert_equal(mod.endog.shape[0], 13)
assert_equal(mod.exog.shape[0], 13)
def test_rsquared_adj_overfit(self):
        # Test that if df_resid = 0, rsquared_adj is np.nan.
# This is a regression test for user issue:
# https://github.com/statsmodels/statsmodels/issues/868
with warnings.catch_warnings(record=True):
x = np.random.randn(5)
y = np.random.randn(5, 6)
results = OLS(x, y).fit()
rsquared_adj = results.rsquared_adj
assert_equal(rsquared_adj, np.nan)
def test_qr_alternatives(self):
assert_allclose(self.res_qr.params, self.res_qr_manual.params,
rtol=5e-12)
def test_norm_resid(self):
resid = self.res1.wresid
norm_resid = resid / np.sqrt(np.sum(resid**2.0) / self.res1.df_resid)
model_norm_resid = self.res1.resid_pearson
assert_almost_equal(model_norm_resid, norm_resid, DECIMAL_7)
def test_norm_resid_zero_variance(self):
with warnings.catch_warnings(record=True):
y = self.res1.model.endog
res = OLS(y,y).fit()
assert_allclose(res.scale, 0, atol=1e-20)
assert_allclose(res.wresid, res.resid_pearson, atol=5e-11)
class TestRTO(CheckRegressionResults):
@classmethod
def setupClass(cls):
from .results.results_regression import LongleyRTO
data = longley.load()
res1 = OLS(data.endog, data.exog).fit()
res2 = LongleyRTO()
res2.wresid = res1.wresid # workaround hack
cls.res1 = res1
cls.res2 = res2
res_qr = OLS(data.endog, data.exog).fit(method="qr")
cls.res_qr = res_qr
class TestFtest(object):
"""
Tests f_test vs. RegressionResults
"""
@classmethod
def setupClass(cls):
data = longley.load()
data.exog = add_constant(data.exog, prepend=False)
cls.res1 = OLS(data.endog, data.exog).fit()
R = np.identity(7)[:-1,:]
cls.Ftest = cls.res1.f_test(R)
def test_F(self):
assert_almost_equal(self.Ftest.fvalue, self.res1.fvalue, DECIMAL_4)
def test_p(self):
assert_almost_equal(self.Ftest.pvalue, self.res1.f_pvalue, DECIMAL_4)
def test_Df_denom(self):
assert_equal(self.Ftest.df_denom, self.res1.model.df_resid)
def test_Df_num(self):
assert_equal(self.Ftest.df_num, 6)
class TestFTest2(object):
"""
A joint test that the coefficient on
GNP = the coefficient on UNEMP and that the coefficient on
POP = the coefficient on YEAR for the Longley dataset.
Ftest1 is from statsmodels. Results are from Rpy using R's car library.
"""
@classmethod
def setupClass(cls):
data = longley.load()
data.exog = add_constant(data.exog, prepend=False)
res1 = OLS(data.endog, data.exog).fit()
R2 = [[0,1,-1,0,0,0,0],[0, 0, 0, 0, 1, -1, 0]]
cls.Ftest1 = res1.f_test(R2)
hyp = 'x2 = x3, x5 = x6'
cls.NewFtest1 = res1.f_test(hyp)
def test_new_ftest(self):
assert_equal(self.NewFtest1.fvalue, self.Ftest1.fvalue)
def test_fvalue(self):
assert_almost_equal(self.Ftest1.fvalue, 9.7404618732968196, DECIMAL_4)
def test_pvalue(self):
assert_almost_equal(self.Ftest1.pvalue, 0.0056052885317493459,
DECIMAL_4)
def test_df_denom(self):
assert_equal(self.Ftest1.df_denom, 9)
def test_df_num(self):
assert_equal(self.Ftest1.df_num, 2)
class TestFtestQ(object):
"""
A joint hypothesis test that Rb = q. Coefficient tests are essentially
made up. Test values taken from Stata.
"""
@classmethod
def setupClass(cls):
data = longley.load()
data.exog = add_constant(data.exog, prepend=False)
res1 = OLS(data.endog, data.exog).fit()
R = np.array([[0,1,1,0,0,0,0],
[0,1,0,1,0,0,0],
[0,1,0,0,0,0,0],
[0,0,0,0,1,0,0],
[0,0,0,0,0,1,0]])
q = np.array([0,0,0,1,0])
cls.Ftest1 = res1.f_test((R,q))
def test_fvalue(self):
assert_almost_equal(self.Ftest1.fvalue, 70.115557, 5)
def test_pvalue(self):
assert_almost_equal(self.Ftest1.pvalue, 6.229e-07, 10)
def test_df_denom(self):
assert_equal(self.Ftest1.df_denom, 9)
def test_df_num(self):
assert_equal(self.Ftest1.df_num, 5)
class TestTtest(object):
"""
    Test individual t-tests, i.e., whether the coefficients are
    significantly different from zero.
"""
@classmethod
def setupClass(cls):
data = longley.load()
data.exog = add_constant(data.exog, prepend=False)
cls.res1 = OLS(data.endog, data.exog).fit()
R = np.identity(7)
cls.Ttest = cls.res1.t_test(R)
hyp = 'x1 = 0, x2 = 0, x3 = 0, x4 = 0, x5 = 0, x6 = 0, const = 0'
cls.NewTTest = cls.res1.t_test(hyp)
def test_new_tvalue(self):
assert_equal(self.NewTTest.tvalue, self.Ttest.tvalue)
def test_tvalue(self):
assert_almost_equal(self.Ttest.tvalue, self.res1.tvalues, DECIMAL_4)
def test_sd(self):
assert_almost_equal(self.Ttest.sd, self.res1.bse, DECIMAL_4)
def test_pvalue(self):
assert_almost_equal(self.Ttest.pvalue, student_t.sf(
np.abs(self.res1.tvalues), self.res1.model.df_resid)*2,
DECIMAL_4)
def test_df_denom(self):
assert_equal(self.Ttest.df_denom, self.res1.model.df_resid)
def test_effect(self):
assert_almost_equal(self.Ttest.effect, self.res1.params)
class TestTtest2(object):
"""
Tests the hypothesis that the coefficients on POP and YEAR
are equal.
Results from RPy using 'car' package.
"""
@classmethod
def setupClass(cls):
R = np.zeros(7)
R[4:6] = [1,-1]
data = longley.load()
data.exog = add_constant(data.exog, prepend=False)
res1 = OLS(data.endog, data.exog).fit()
cls.Ttest1 = res1.t_test(R)
def test_tvalue(self):
assert_almost_equal(self.Ttest1.tvalue, -4.0167754636397284,
DECIMAL_4)
def test_sd(self):
assert_almost_equal(self.Ttest1.sd, 455.39079425195314, DECIMAL_4)
def test_pvalue(self):
assert_almost_equal(self.Ttest1.pvalue, 2*0.0015163772380932246,
DECIMAL_4)
def test_df_denom(self):
assert_equal(self.Ttest1.df_denom, 9)
def test_effect(self):
assert_almost_equal(self.Ttest1.effect, -1829.2025687186533, DECIMAL_4)
class TestGLS(object):
"""
These test results were obtained by replication with R.
"""
@classmethod
def setupClass(cls):
from .results.results_regression import LongleyGls
data = longley.load()
exog = add_constant(np.column_stack((data.exog[:,1],
data.exog[:,4])), prepend=False)
tmp_results = OLS(data.endog, exog).fit()
rho = np.corrcoef(tmp_results.resid[1:],
tmp_results.resid[:-1])[0][1] # by assumption
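        # Build an AR(1) error covariance from the estimated lag-1
        # autocorrelation: sigma[i, j] = rho**|i - j|, with the lag
        # distances laid out by a Toeplitz matrix.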
order = toeplitz(np.arange(16))
sigma = rho**order
GLS_results = GLS(data.endog, exog, sigma=sigma).fit()
cls.res1 = GLS_results
cls.res2 = LongleyGls()
# attach for test_missing
cls.sigma = sigma
cls.exog = exog
cls.endog = data.endog
def test_aic(self):
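        # R's logLik counts the error variance as an extra parameter, so its
        # AIC is larger by 2; add the offset before comparing.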
assert_approx_equal(self.res1.aic+2, self.res2.aic, 3)
def test_bic(self):
assert_approx_equal(self.res1.bic, self.res2.bic, 2)
def test_loglike(self):
assert_almost_equal(self.res1.llf, self.res2.llf, DECIMAL_0)
def test_params(self):
assert_almost_equal(self.res1.params, self.res2.params, DECIMAL_1)
def test_resid(self):
assert_almost_equal(self.res1.resid, self.res2.resid, DECIMAL_4)
def test_scale(self):
assert_almost_equal(self.res1.scale, self.res2.scale, DECIMAL_4)
def test_tvalues(self):
assert_almost_equal(self.res1.tvalues, self.res2.tvalues, DECIMAL_4)
def test_standarderrors(self):
assert_almost_equal(self.res1.bse, self.res2.bse, DECIMAL_4)
def test_fittedvalues(self):
assert_almost_equal(self.res1.fittedvalues, self.res2.fittedvalues,
DECIMAL_4)
def test_pvalues(self):
assert_almost_equal(self.res1.pvalues, self.res2.pvalues, DECIMAL_4)
def test_missing(self):
endog = self.endog.copy() # copy or changes endog for other methods
endog[[4,7,14]] = np.nan
mod = GLS(endog, self.exog, sigma=self.sigma, missing='drop')
assert_equal(mod.endog.shape[0], 13)
assert_equal(mod.exog.shape[0], 13)
assert_equal(mod.sigma.shape, (13,13))
class TestGLS_alt_sigma(CheckRegressionResults):
"""
Test that GLS with no argument is equivalent to OLS.
"""
@classmethod
def setupClass(cls):
data = longley.load()
data.exog = add_constant(data.exog, prepend=False)
ols_res = OLS(data.endog, data.exog).fit()
gls_res = GLS(data.endog, data.exog).fit()
gls_res_scalar = GLS(data.endog, data.exog, sigma=1)
cls.endog = data.endog
cls.exog = data.exog
cls.res1 = gls_res
cls.res2 = ols_res
cls.res3 = gls_res_scalar
# self.res2.conf_int = self.res2.conf_int()
def test_wrong_size_sigma_1d(self):
n = len(self.endog)
assert_raises(ValueError, GLS, self.endog, self.exog, sigma=np.ones(n-1))
def test_wrong_size_sigma_2d(self):
n = len(self.endog)
assert_raises(ValueError, GLS, self.endog, self.exog, sigma=np.ones((n-1,n-1)))
# def check_confidenceintervals(self, conf1, conf2):
# assert_almost_equal(conf1, conf2, DECIMAL_4)
class TestLM(object):
@classmethod
def setupClass(cls):
# TODO: Test HAC method
X = np.random.randn(100,3)
b = np.ones((3,1))
e = np.random.randn(100,1)
y = np.dot(X,b) + e
# Cases?
# Homoskedastic
# HC0
cls.res1_full = OLS(y,X).fit()
cls.res1_restricted = OLS(y,X[:,0]).fit()
cls.res2_full = cls.res1_full.get_robustcov_results('HC0')
cls.res2_restricted = cls.res1_restricted.get_robustcov_results('HC0')
cls.X = X
cls.Y = y
def test_LM_homoskedastic(self):
resid = self.res1_restricted.wresid
n = resid.shape[0]
X = self.X
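        # Homoskedastic score (LM) test: LM = n * s' S^{-1} s, where s is the
        # mean score of the restricted model and S = (e'e / n) * (X'X / n).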
S = np.dot(resid,resid) / n * np.dot(X.T,X) / n
Sinv = np.linalg.inv(S)
s = np.mean(X * resid[:,None], 0)
LMstat = n * np.dot(np.dot(s,Sinv),s.T)
LMstat_OLS = self.res1_full.compare_lm_test(self.res1_restricted)
LMstat2 = LMstat_OLS[0]
assert_almost_equal(LMstat, LMstat2, DECIMAL_7)
def test_LM_heteroskedastic_nodemean(self):
resid = self.res1_restricted.wresid
n = resid.shape[0]
X = self.X
scores = X * resid[:,None]
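        # Heteroskedasticity-robust variant: estimate S by the outer product
        # of the per-observation scores instead of assuming a common sigma^2.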
S = np.dot(scores.T,scores) / n
Sinv = np.linalg.inv(S)
s = np.mean(scores, 0)
LMstat = n * np.dot(np.dot(s,Sinv),s.T)
LMstat_OLS = self.res2_full.compare_lm_test(self.res2_restricted, demean=False)
LMstat2 = LMstat_OLS[0]
assert_almost_equal(LMstat, LMstat2, DECIMAL_7)
def test_LM_heteroskedastic_demean(self):
resid = self.res1_restricted.wresid
n = resid.shape[0]
X = self.X
scores = X * resid[:,None]
scores_demean = scores - scores.mean(0)
S = np.dot(scores_demean.T,scores_demean) / n
Sinv = np.linalg.inv(S)
s = np.mean(scores, 0)
LMstat = n * np.dot(np.dot(s,Sinv),s.T)
LMstat_OLS = self.res2_full.compare_lm_test(self.res2_restricted)
LMstat2 = LMstat_OLS[0]
assert_almost_equal(LMstat, LMstat2, DECIMAL_7)
def test_LM_heteroskedastic_LRversion(self):
resid = self.res1_restricted.wresid
resid_full = self.res1_full.wresid
n = resid.shape[0]
X = self.X
scores = X * resid[:,None]
s = np.mean(scores, 0)
scores = X * resid_full[:,None]
S = np.dot(scores.T,scores) / n
Sinv = np.linalg.inv(S)
LMstat = n * np.dot(np.dot(s,Sinv),s.T)
LMstat_OLS = self.res2_full.compare_lm_test(self.res2_restricted, use_lr = True)
LMstat2 = LMstat_OLS[0]
assert_almost_equal(LMstat, LMstat2, DECIMAL_7)
def test_LM_nonnested(self):
assert_raises(ValueError, self.res2_restricted.compare_lm_test, self.res2_full)
class TestOLS_GLS_WLS_equivalence(object):
@classmethod
def setupClass(cls):
data = longley.load()
data.exog = add_constant(data.exog, prepend=False)
y = data.endog
X = data.exog
n = y.shape[0]
w = np.ones(n)
cls.results = []
cls.results.append(OLS(y, X).fit())
cls.results.append(WLS(y, X, w).fit())
cls.results.append(GLS(y, X, 100*w).fit())
cls.results.append(GLS(y, X, np.diag(0.1*w)).fit())
def test_ll(self):
llf = np.array([r.llf for r in self.results])
llf_1 = np.ones_like(llf) * self.results[0].llf
assert_almost_equal(llf, llf_1, DECIMAL_7)
ic = np.array([r.aic for r in self.results])
ic_1 = np.ones_like(ic) * self.results[0].aic
assert_almost_equal(ic, ic_1, DECIMAL_7)
ic = np.array([r.bic for r in self.results])
ic_1 = np.ones_like(ic) * self.results[0].bic
assert_almost_equal(ic, ic_1, DECIMAL_7)
def test_params(self):
params = np.array([r.params for r in self.results])
params_1 = np.array([self.results[0].params] * len(self.results))
assert_allclose(params, params_1)
def test_ss(self):
bse = np.array([r.bse for r in self.results])
bse_1 = np.array([self.results[0].bse] * len(self.results))
assert_allclose(bse, bse_1)
def test_rsquared(self):
rsquared = np.array([r.rsquared for r in self.results])
rsquared_1 = np.array([self.results[0].rsquared] * len(self.results))
assert_almost_equal(rsquared, rsquared_1, DECIMAL_7)
class TestGLS_WLS_equivalence(TestOLS_GLS_WLS_equivalence):
# reuse test methods
@classmethod
def setupClass(cls):
data = longley.load()
data.exog = add_constant(data.exog, prepend=False)
y = data.endog
X = data.exog
n = y.shape[0]
np.random.seed(5)
w = np.random.uniform(0.5, 1, n)
w_inv = 1. / w
cls.results = []
cls.results.append(WLS(y, X, w).fit())
cls.results.append(WLS(y, X, 0.01 * w).fit())
cls.results.append(GLS(y, X, 100 * w_inv).fit())
cls.results.append(GLS(y, X, np.diag(0.1 * w_inv)).fit())
def test_rsquared(self):
# TODO: WLS rsquared is ok, GLS might have wrong centered_tss
# We only check that WLS and GLS rsquared is invariant to scaling
# WLS and GLS have different rsquared
assert_almost_equal(self.results[1].rsquared, self.results[0].rsquared,
DECIMAL_7)
assert_almost_equal(self.results[3].rsquared, self.results[2].rsquared,
DECIMAL_7)
class TestNonFit(object):
@classmethod
def setupClass(cls):
data = longley.load()
data.exog = add_constant(data.exog, prepend=False)
cls.endog = data.endog
cls.exog = data.exog
cls.ols_model = OLS(data.endog, data.exog)
    def test_df_resid(self):
        df_resid = self.endog.shape[0] - self.exog.shape[1]
        assert_equal(self.ols_model.df_resid, long(df_resid))
class TestWLS_CornerCases(object):
@classmethod
def setupClass(cls):
cls.exog = np.ones((1,))
cls.endog = np.ones((1,))
weights = 1
cls.wls_res = WLS(cls.endog, cls.exog, weights=weights).fit()
def test_wrong_size_weights(self):
weights = np.ones((10,10))
assert_raises(ValueError, WLS, self.endog, self.exog, weights=weights)
class TestWLSExogWeights(CheckRegressionResults):
#Test WLS with Greene's credit card data
#reg avgexp age income incomesq ownrent [aw=1/incomesq]
def __init__(self):
from .results.results_regression import CCardWLS
from statsmodels.datasets.ccard import load
dta = load()
dta.exog = add_constant(dta.exog, prepend=False)
nobs = 72.
weights = 1/dta.exog[:,2]
# for comparison with stata analytic weights
scaled_weights = ((weights * nobs)/weights.sum())
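        # Stata's analytic weights are rescaled to sum to the number of
        # observations, so apply the same normalization before fitting.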
self.res1 = WLS(dta.endog, dta.exog, weights=scaled_weights).fit()
self.res2 = CCardWLS()
self.res2.wresid = scaled_weights ** .5 * self.res2.resid
# correction because we use different definition for loglike/llf
corr_ic = 2 * (self.res1.llf - self.res2.llf)
self.res2.aic -= corr_ic
self.res2.bic -= corr_ic
self.res2.llf += 0.5 * np.sum(np.log(self.res1.model.weights))
def test_wls_example():
    # Example from the WLS docstring; there was a note about a bug that
    # should be fixed now.
Y = [1,3,4,5,2,3,4]
X = lrange(1,8)
X = add_constant(X, prepend=False)
wls_model = WLS(Y,X, weights=lrange(1,8)).fit()
#taken from R lm.summary
assert_almost_equal(wls_model.fvalue, 0.127337843215, 6)
assert_almost_equal(wls_model.scale, 2.44608530786**2, 6)
def test_wls_tss():
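    # WLS on the aggregated data, with observation counts as weights, should
    # reproduce the centered TSS of OLS on the fully expanded data.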
y = np.array([22, 22, 22, 23, 23, 23])
X = [[1, 0], [1, 0], [1, 1], [0, 1], [0, 1], [0, 1]]
ols_mod = OLS(y, add_constant(X, prepend=False)).fit()
yw = np.array([22, 22, 23.])
Xw = [[1,0],[1,1],[0,1]]
w = np.array([2, 1, 3.])
wls_mod = WLS(yw, add_constant(Xw, prepend=False), weights=w).fit()
assert_equal(ols_mod.centered_tss, wls_mod.centered_tss)
class TestWLSScalarVsArray(CheckRegressionResults):
@classmethod
def setupClass(cls):
from statsmodels.datasets.longley import load
dta = load()
dta.exog = add_constant(dta.exog, prepend=True)
wls_scalar = WLS(dta.endog, dta.exog, weights=1./3).fit()
weights = [1/3.] * len(dta.endog)
wls_array = WLS(dta.endog, dta.exog, weights=weights).fit()
cls.res1 = wls_scalar
cls.res2 = wls_array
#class TestWLS_GLS(CheckRegressionResults):
# @classmethod
# def setupClass(cls):
# from statsmodels.datasets.ccard import load
# data = load()
# cls.res1 = WLS(data.endog, data.exog, weights = 1/data.exog[:,2]).fit()
# cls.res2 = GLS(data.endog, data.exog, sigma = data.exog[:,2]).fit()
#
# def check_confidenceintervals(self, conf1, conf2):
# assert_almost_equal(conf1, conf2(), DECIMAL_4)
def test_wls_missing():
from statsmodels.datasets.ccard import load
data = load()
endog = data.endog
endog[[10, 25]] = np.nan
mod = WLS(data.endog, data.exog, weights = 1/data.exog[:,2], missing='drop')
assert_equal(mod.endog.shape[0], 70)
assert_equal(mod.exog.shape[0], 70)
assert_equal(mod.weights.shape[0], 70)
class TestWLS_OLS(CheckRegressionResults):
@classmethod
def setupClass(cls):
data = longley.load()
data.exog = add_constant(data.exog, prepend=False)
cls.res1 = OLS(data.endog, data.exog).fit()
cls.res2 = WLS(data.endog, data.exog).fit()
def check_confidenceintervals(self, conf1, conf2):
assert_almost_equal(conf1, conf2(), DECIMAL_4)
class TestGLS_OLS(CheckRegressionResults):
@classmethod
def setupClass(cls):
data = longley.load()
data.exog = add_constant(data.exog, prepend=False)
cls.res1 = GLS(data.endog, data.exog).fit()
cls.res2 = OLS(data.endog, data.exog).fit()
def check_confidenceintervals(self, conf1, conf2):
assert_almost_equal(conf1, conf2(), DECIMAL_4)
#TODO: test AR
# why the two-stage in AR?
#class test_ar(object):
# from statsmodels.datasets.sunspots import load
# data = load()
# model = AR(data.endog, rho=4).fit()
# R_res = RModel(data.endog, aic="FALSE", order_max=4)
# def test_params(self):
# assert_almost_equal(self.model.rho,
# pass
# def test_order(self):
# In R this can be defined or chosen by minimizing the AIC if aic=True
# pass
class TestYuleWalker(object):
@classmethod
def setupClass(cls):
from statsmodels.datasets.sunspots import load
data = load()
cls.rho, cls.sigma = yule_walker(data.endog, order=4,
method="mle")
cls.R_params = [1.2831003105694765, -0.45240924374091945,
-0.20770298557575195, 0.047943648089542337]
def test_params(self):
assert_almost_equal(self.rho, self.R_params, DECIMAL_4)
class TestDataDimensions(CheckRegressionResults):
@classmethod
def setupClass(cls):
np.random.seed(54321)
cls.endog_n_ = np.random.uniform(0,20,size=30)
cls.endog_n_one = cls.endog_n_[:,None]
cls.exog_n_ = np.random.uniform(0,20,size=30)
cls.exog_n_one = cls.exog_n_[:,None]
cls.degen_exog = cls.exog_n_one[:-1]
cls.mod1 = OLS(cls.endog_n_one, cls.exog_n_one)
cls.mod1.df_model += 1
cls.res1 = cls.mod1.fit()
        # Note that these are created for every subclass, which adds a
        # little extra overhead.
cls.mod2 = OLS(cls.endog_n_one, cls.exog_n_one)
cls.mod2.df_model += 1
cls.res2 = cls.mod2.fit()
def check_confidenceintervals(self, conf1, conf2):
assert_almost_equal(conf1, conf2(), DECIMAL_4)
class TestGLS_large_data(TestDataDimensions):
@classmethod
def setupClass(cls):
nobs = 1000
y = np.random.randn(nobs,1)
X = np.random.randn(nobs,20)
sigma = np.ones_like(y)
cls.gls_res = GLS(y, X, sigma=sigma).fit()
cls.gls_res_scalar = GLS(y, X, sigma=1).fit()
cls.gls_res_none= GLS(y, X).fit()
cls.ols_res = OLS(y, X).fit()
def test_large_equal_params(self):
assert_almost_equal(self.ols_res.params, self.gls_res.params, DECIMAL_7)
def test_large_equal_loglike(self):
assert_almost_equal(self.ols_res.llf, self.gls_res.llf, DECIMAL_7)
def test_large_equal_params_none(self):
assert_almost_equal(self.gls_res.params, self.gls_res_none.params,
DECIMAL_7)
class TestNxNx(TestDataDimensions):
@classmethod
def setupClass(cls):
super(TestNxNx, cls).setupClass()
cls.mod2 = OLS(cls.endog_n_, cls.exog_n_)
cls.mod2.df_model += 1
cls.res2 = cls.mod2.fit()
class TestNxOneNx(TestDataDimensions):
@classmethod
def setupClass(cls):
super(TestNxOneNx, cls).setupClass()
cls.mod2 = OLS(cls.endog_n_one, cls.exog_n_)
cls.mod2.df_model += 1
cls.res2 = cls.mod2.fit()
class TestNxNxOne(TestDataDimensions):
@classmethod
def setupClass(cls):
super(TestNxNxOne, cls).setupClass()
cls.mod2 = OLS(cls.endog_n_, cls.exog_n_one)
cls.mod2.df_model += 1
cls.res2 = cls.mod2.fit()
def test_bad_size():
np.random.seed(54321)
data = np.random.uniform(0,20,31)
assert_raises(ValueError, OLS, data, data[1:])
def test_const_indicator():
np.random.seed(12345)
X = np.random.randint(0, 3, size=30)
X = categorical(X, drop=True)
y = np.dot(X, [1., 2., 3.]) + np.random.normal(size=30)
modc = OLS(y, add_constant(X[:,1:], prepend=True)).fit()
mod = OLS(y, X, hasconst=True).fit()
assert_almost_equal(modc.rsquared, mod.rsquared, 12)
def test_706():
# make sure one regressor pandas Series gets passed to DataFrame
# for conf_int.
y = pandas.Series(np.random.randn(10))
x = pandas.Series(np.ones(10))
res = OLS(y,x).fit()
conf_int = res.conf_int()
np.testing.assert_equal(conf_int.shape, (1, 2))
np.testing.assert_(isinstance(conf_int, pandas.DataFrame))
def test_summary():
# test 734
import re
dta = longley.load_pandas()
X = dta.exog
X["constant"] = 1
y = dta.endog
with warnings.catch_warnings(record=True):
res = OLS(y, X).fit()
table = res.summary().as_latex()
# replace the date and time
table = re.sub("(?<=\n\\\\textbf\{Date:\} &).+?&",
" Sun, 07 Apr 2013 &", table)
table = re.sub("(?<=\n\\\\textbf\{Time:\} &).+?&",
" 13:46:07 &", table)
expected = """\\begin{center}
\\begin{tabular}{lclc}
\\toprule
\\textbf{Dep. Variable:} & TOTEMP & \\textbf{ R-squared: } & 0.995 \\\\
\\textbf{Model:} & OLS & \\textbf{ Adj. R-squared: } & 0.992 \\\\
\\textbf{Method:} & Least Squares & \\textbf{ F-statistic: } & 330.3 \\\\
\\textbf{Date:} & Sun, 07 Apr 2013 & \\textbf{ Prob (F-statistic):} & 4.98e-10 \\\\
\\textbf{Time:} & 13:46:07 & \\textbf{ Log-Likelihood: } & -109.62 \\\\
\\textbf{No. Observations:} & 16 & \\textbf{ AIC: } & 233.2 \\\\
\\textbf{Df Residuals:} & 9 & \\textbf{ BIC: } & 238.6 \\\\
\\textbf{Df Model:} & 6 & \\textbf{ } & \\\\
\\bottomrule
\\end{tabular}
\\begin{tabular}{lccccc}
& \\textbf{coef} & \\textbf{std err} & \\textbf{t} & \\textbf{P$>$$|$t$|$} & \\textbf{[95.0\\% Conf. Int.]} \\\\
\\midrule
\\textbf{GNPDEFL} & 15.0619 & 84.915 & 0.177 & 0.863 & -177.029 207.153 \\\\
\\textbf{GNP} & -0.0358 & 0.033 & -1.070 & 0.313 & -0.112 0.040 \\\\
\\textbf{UNEMP} & -2.0202 & 0.488 & -4.136 & 0.003 & -3.125 -0.915 \\\\
\\textbf{ARMED} & -1.0332 & 0.214 & -4.822 & 0.001 & -1.518 -0.549 \\\\
\\textbf{POP} & -0.0511 & 0.226 & -0.226 & 0.826 & -0.563 0.460 \\\\
\\textbf{YEAR} & 1829.1515 & 455.478 & 4.016 & 0.003 & 798.788 2859.515 \\\\
\\textbf{constant} & -3.482e+06 & 8.9e+05 & -3.911 & 0.004 & -5.5e+06 -1.47e+06 \\\\
\\bottomrule
\\end{tabular}
\\begin{tabular}{lclc}
\\textbf{Omnibus:} & 0.749 & \\textbf{ Durbin-Watson: } & 2.559 \\\\
\\textbf{Prob(Omnibus):} & 0.688 & \\textbf{ Jarque-Bera (JB): } & 0.684 \\\\
\\textbf{Skew:} & 0.420 & \\textbf{ Prob(JB): } & 0.710 \\\\
\\textbf{Kurtosis:} & 2.434 & \\textbf{ Cond. No. } & 4.86e+09 \\\\
\\bottomrule
\\end{tabular}
%\\caption{OLS Regression Results}
\\end{center}"""
assert_equal(table, expected)
class TestRegularizedFit(object):
# Make sure there are no issues when there are no selected
# variables.
def test_empty_model(self):
np.random.seed(742)
n = 100
endog = np.random.normal(size=n)
exog = np.random.normal(size=(n, 3))
model = OLS(endog, exog)
result = model.fit_regularized(alpha=1000)
assert_equal(result.params, 0.)
assert_equal(result.bse, 0.)
def test_regularized(self):
import os
from . import glmnet_r_results
cur_dir = os.path.dirname(os.path.abspath(__file__))
data = np.loadtxt(os.path.join(cur_dir, "results", "lasso_data.csv"),
delimiter=",")
tests = [x for x in dir(glmnet_r_results) if x.startswith("rslt_")]
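        # Each stored glmnet result vector is packed as
        # [n, p, L1_wt, lambda, coefficients...]; unpack accordingly.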
for test in tests:
vec = getattr(glmnet_r_results, test)
n = vec[0]
p = vec[1]
L1_wt = float(vec[2])
lam = float(vec[3])
params = vec[4:].astype(np.float64)
endog = data[0:n, 0]
exog = data[0:n, 1:(p+1)]
endog = endog - endog.mean()
endog /= endog.std(ddof=1)
exog = exog - exog.mean(0)
exog /= exog.std(0, ddof=1)
mod = OLS(endog, exog)
rslt = mod.fit_regularized(L1_wt=L1_wt, alpha=lam)
assert_almost_equal(rslt.params, params, decimal=3)
# Smoke test for summary
smry = rslt.summary()
def test_formula_missing_cat():
# gh-805
import statsmodels.api as sm
from statsmodels.formula.api import ols
from patsy import PatsyError
dta = sm.datasets.grunfeld.load_pandas().data
dta.ix[0, 'firm'] = np.nan
mod = ols(formula='value ~ invest + capital + firm + year',
data=dta.dropna())
res = mod.fit()
mod2 = ols(formula='value ~ invest + capital + firm + year',
data=dta)
res2 = mod2.fit()
assert_almost_equal(res.params.values, res2.params.values)
assert_raises(PatsyError, ols, 'value ~ invest + capital + firm + year',
data=dta, missing='raise')
if __name__=="__main__":
import nose
# run_module_suite()
nose.runmodule(argv=[__file__,'-vvs','-x','--pdb', '--pdb-failure'],
exit=False)
# nose.runmodule(argv=[__file__,'-vvs','-x'], exit=False) #, '--pdb'
# [source: detrout/debian-statsmodels / statsmodels/regression/tests/test_regression.py — license: bsd-3-clause]
from unittest import mock
import functools
from metafunctions.tests.util import BaseTestCase
from metafunctions.tests.simple_nodes import *
from metafunctions.api import store, recall, node, bind_call_state
from metafunctions.core import SimpleFunction, CallState
class TestUnit(BaseTestCase):
def test_bind_call_state(self):
"""
        If decorated with bind_call_state, the function receives the CallState
        object as its first argument.
"""
@node
@bind_call_state
def a_(call_state, x):
self.assertIsInstance(call_state, CallState)
call_state.data["a"] = "b"
return x + "a"
@node
@bind_call_state
def f(call_state, x):
return x + call_state.data.get("a", "f")
self.assertEqual(a("_"), "_a")
self.assertEqual(f("_"), "_f")
cmp = a_ | f
self.assertEqual(cmp("_"), "_ab")
cmp = f | a_ | a_ | f + f
self.assertEqual(cmp("_"), "_faab_faab")
def test_node_bracketless(self):
"""
I'm allowing the node decorator to be applied without calling because this is how both
celery and function_pipes work.
"""
@node
def a(x):
return x + "a"
@node()
def b(x):
return x + "b"
self.assertIsInstance(a, SimpleFunction)
self.assertIsInstance(b, SimpleFunction)
self.assertEqual((b | a)("_"), "_ba")
def test_store(self):
state = CallState()
abc = a | b | store("output") | c
big = (
a
| b
| c + store("ab") + store("ab2")
| store("abc")
| recall("ab") + recall("ab2")
| c + recall("abc")
)
self.assertEqual(abc("_", call_state=state), "_abc")
self.assertEqual(state.data["output"], "_ab")
self.assertEqual(big("_"), "_ab_abc_abc_ab_ab")
def test_recall(self):
state = a.new_call_state()
state.data["k"] = "secret"
cmp = a + b | store("k") | c + recall("k")
self.assertEqual(cmp("_"), "_a_bc_a_b")
cmp = a + b | store("k") | c + recall("k") | recall("k", from_call_state=state)
self.assertEqual(cmp("_"), "secret")
def test_str_store(self):
# this should be possible
self.assertEqual(str(store("key")), "store('key')")
def test_str_recall(self):
self.assertEqual(str(recall("key")), "recall('key')")
# [source: ForeverWintr/metafunctions / metafunctions/tests/test_api.py — license: mit]
from django import template
import json
register = template.Library()
@register.filter
def decodejson(data):
return json.loads(data)
@register.filter
def encode2json(data):
return json.dumps(data)
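# Illustrative template usage (a sketch; the variable names are hypothetical):
#
#     {% load mytags %}
#     {{ json_string|decodejson }}   {# parse a JSON string into a Python value #}
#     {{ some_value|encode2json }}   {# serialize a value back to a JSON string #}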
# [source: Pazitos10/TNT / webapp/app/tntapp/templatetags/mytags.py — license: mit]
"""Support for HomematicIP Cloud binary sensor."""
import logging
from typing import Any, Dict
from homematicip.aio.device import (
AsyncAccelerationSensor,
AsyncContactInterface,
AsyncDevice,
AsyncFullFlushContactInterface,
AsyncMotionDetectorIndoor,
AsyncMotionDetectorOutdoor,
AsyncMotionDetectorPushButton,
AsyncPluggableMainsFailureSurveillance,
AsyncPresenceDetectorIndoor,
AsyncRotaryHandleSensor,
AsyncShutterContact,
AsyncShutterContactMagnetic,
AsyncSmokeDetector,
AsyncWaterSensor,
AsyncWeatherSensor,
AsyncWeatherSensorPlus,
AsyncWeatherSensorPro,
)
from homematicip.aio.group import AsyncSecurityGroup, AsyncSecurityZoneGroup
from homematicip.base.enums import SmokeDetectorAlarmType, WindowState
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_DOOR,
DEVICE_CLASS_LIGHT,
DEVICE_CLASS_MOISTURE,
DEVICE_CLASS_MOTION,
DEVICE_CLASS_MOVING,
DEVICE_CLASS_OPENING,
DEVICE_CLASS_POWER,
DEVICE_CLASS_PRESENCE,
DEVICE_CLASS_SAFETY,
DEVICE_CLASS_SMOKE,
BinarySensorEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
from . import DOMAIN as HMIPC_DOMAIN, HomematicipGenericEntity
from .hap import HomematicipHAP
_LOGGER = logging.getLogger(__name__)
ATTR_ACCELERATION_SENSOR_MODE = "acceleration_sensor_mode"
ATTR_ACCELERATION_SENSOR_NEUTRAL_POSITION = "acceleration_sensor_neutral_position"
ATTR_ACCELERATION_SENSOR_SENSITIVITY = "acceleration_sensor_sensitivity"
ATTR_ACCELERATION_SENSOR_TRIGGER_ANGLE = "acceleration_sensor_trigger_angle"
ATTR_INTRUSION_ALARM = "intrusion_alarm"
ATTR_MOISTURE_DETECTED = "moisture_detected"
ATTR_MOTION_DETECTED = "motion_detected"
ATTR_POWER_MAINS_FAILURE = "power_mains_failure"
ATTR_PRESENCE_DETECTED = "presence_detected"
ATTR_SMOKE_DETECTOR_ALARM = "smoke_detector_alarm"
ATTR_TODAY_SUNSHINE_DURATION = "today_sunshine_duration_in_minutes"
ATTR_WATER_LEVEL_DETECTED = "water_level_detected"
ATTR_WINDOW_STATE = "window_state"
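# Map attribute names exposed by the homematicip library to the Home
# Assistant state attribute keys used by the sensors below.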
GROUP_ATTRIBUTES = {
"moistureDetected": ATTR_MOISTURE_DETECTED,
"motionDetected": ATTR_MOTION_DETECTED,
"powerMainsFailure": ATTR_POWER_MAINS_FAILURE,
"presenceDetected": ATTR_PRESENCE_DETECTED,
"waterlevelDetected": ATTR_WATER_LEVEL_DETECTED,
}
SAM_DEVICE_ATTRIBUTES = {
"accelerationSensorNeutralPosition": ATTR_ACCELERATION_SENSOR_NEUTRAL_POSITION,
"accelerationSensorMode": ATTR_ACCELERATION_SENSOR_MODE,
"accelerationSensorSensitivity": ATTR_ACCELERATION_SENSOR_SENSITIVITY,
"accelerationSensorTriggerAngle": ATTR_ACCELERATION_SENSOR_TRIGGER_ANGLE,
}
async def async_setup_entry(
hass: HomeAssistantType, config_entry: ConfigEntry, async_add_entities
) -> None:
"""Set up the HomematicIP Cloud binary sensor from a config entry."""
hap = hass.data[HMIPC_DOMAIN][config_entry.unique_id]
entities = []
for device in hap.home.devices:
if isinstance(device, AsyncAccelerationSensor):
entities.append(HomematicipAccelerationSensor(hap, device))
if isinstance(device, (AsyncContactInterface, AsyncFullFlushContactInterface)):
entities.append(HomematicipContactInterface(hap, device))
if isinstance(
device,
(AsyncShutterContact, AsyncShutterContactMagnetic, AsyncRotaryHandleSensor),
):
entities.append(HomematicipShutterContact(hap, device))
if isinstance(
device,
(
AsyncMotionDetectorIndoor,
AsyncMotionDetectorOutdoor,
AsyncMotionDetectorPushButton,
),
):
entities.append(HomematicipMotionDetector(hap, device))
if isinstance(device, AsyncPluggableMainsFailureSurveillance):
entities.append(
HomematicipPluggableMainsFailureSurveillanceSensor(hap, device)
)
if isinstance(device, AsyncPresenceDetectorIndoor):
entities.append(HomematicipPresenceDetector(hap, device))
if isinstance(device, AsyncSmokeDetector):
entities.append(HomematicipSmokeDetector(hap, device))
if isinstance(device, AsyncWaterSensor):
entities.append(HomematicipWaterDetector(hap, device))
if isinstance(device, (AsyncWeatherSensorPlus, AsyncWeatherSensorPro)):
entities.append(HomematicipRainSensor(hap, device))
if isinstance(
device, (AsyncWeatherSensor, AsyncWeatherSensorPlus, AsyncWeatherSensorPro)
):
entities.append(HomematicipStormSensor(hap, device))
entities.append(HomematicipSunshineSensor(hap, device))
if isinstance(device, AsyncDevice) and device.lowBat is not None:
entities.append(HomematicipBatterySensor(hap, device))
for group in hap.home.groups:
if isinstance(group, AsyncSecurityGroup):
entities.append(HomematicipSecuritySensorGroup(hap, group))
elif isinstance(group, AsyncSecurityZoneGroup):
entities.append(HomematicipSecurityZoneSensorGroup(hap, group))
if entities:
async_add_entities(entities)
class HomematicipAccelerationSensor(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP acceleration sensor."""
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_MOVING
@property
def is_on(self) -> bool:
"""Return true if acceleration is detected."""
return self._device.accelerationSensorTriggered
@property
def device_state_attributes(self) -> Dict[str, Any]:
"""Return the state attributes of the acceleration sensor."""
state_attr = super().device_state_attributes
for attr, attr_key in SAM_DEVICE_ATTRIBUTES.items():
attr_value = getattr(self._device, attr, None)
if attr_value:
state_attr[attr_key] = attr_value
return state_attr
class HomematicipContactInterface(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP contact interface."""
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_OPENING
@property
def is_on(self) -> bool:
"""Return true if the contact interface is on/open."""
if self._device.windowState is None:
return None
return self._device.windowState != WindowState.CLOSED
class HomematicipShutterContact(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP shutter contact."""
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_DOOR
@property
def is_on(self) -> bool:
"""Return true if the shutter contact is on/open."""
if self._device.windowState is None:
return None
return self._device.windowState != WindowState.CLOSED
class HomematicipMotionDetector(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP motion detector."""
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_MOTION
@property
def is_on(self) -> bool:
"""Return true if motion is detected."""
return self._device.motionDetected
class HomematicipPresenceDetector(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP presence detector."""
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_PRESENCE
@property
def is_on(self) -> bool:
"""Return true if presence is detected."""
return self._device.presenceDetected
class HomematicipSmokeDetector(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP smoke detector."""
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_SMOKE
@property
def is_on(self) -> bool:
"""Return true if smoke is detected."""
if self._device.smokeDetectorAlarmType:
return (
self._device.smokeDetectorAlarmType
== SmokeDetectorAlarmType.PRIMARY_ALARM
)
return False
class HomematicipWaterDetector(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP water detector."""
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_MOISTURE
@property
def is_on(self) -> bool:
"""Return true, if moisture or waterlevel is detected."""
return self._device.moistureDetected or self._device.waterlevelDetected
class HomematicipStormSensor(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP storm sensor."""
def __init__(self, hap: HomematicipHAP, device) -> None:
"""Initialize storm sensor."""
super().__init__(hap, device, "Storm")
@property
def icon(self) -> str:
"""Return the icon."""
return "mdi:weather-windy" if self.is_on else "mdi:pinwheel-outline"
@property
def is_on(self) -> bool:
"""Return true, if storm is detected."""
return self._device.storm
class HomematicipRainSensor(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP rain sensor."""
def __init__(self, hap: HomematicipHAP, device) -> None:
"""Initialize rain sensor."""
super().__init__(hap, device, "Raining")
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_MOISTURE
@property
def is_on(self) -> bool:
"""Return true, if it is raining."""
return self._device.raining
class HomematicipSunshineSensor(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP sunshine sensor."""
def __init__(self, hap: HomematicipHAP, device) -> None:
"""Initialize sunshine sensor."""
super().__init__(hap, device, "Sunshine")
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_LIGHT
@property
def is_on(self) -> bool:
"""Return true if sun is shining."""
return self._device.sunshine
@property
def device_state_attributes(self) -> Dict[str, Any]:
"""Return the state attributes of the illuminance sensor."""
state_attr = super().device_state_attributes
today_sunshine_duration = getattr(self._device, "todaySunshineDuration", None)
if today_sunshine_duration:
state_attr[ATTR_TODAY_SUNSHINE_DURATION] = today_sunshine_duration
return state_attr
class HomematicipBatterySensor(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP low battery sensor."""
def __init__(self, hap: HomematicipHAP, device) -> None:
"""Initialize battery sensor."""
super().__init__(hap, device, "Battery")
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_BATTERY
@property
def is_on(self) -> bool:
"""Return true if battery is low."""
return self._device.lowBat
class HomematicipPluggableMainsFailureSurveillanceSensor(
HomematicipGenericEntity, BinarySensorEntity
):
"""Representation of the HomematicIP pluggable mains failure surveillance sensor."""
def __init__(self, hap: HomematicipHAP, device) -> None:
"""Initialize pluggable mains failure surveillance sensor."""
super().__init__(hap, device)
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_POWER
@property
def is_on(self) -> bool:
"""Return true if power mains fails."""
return not self._device.powerMainsFailure
class HomematicipSecurityZoneSensorGroup(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP security zone sensor group."""
def __init__(self, hap: HomematicipHAP, device, post: str = "SecurityZone") -> None:
"""Initialize security zone group."""
device.modelType = f"HmIP-{post}"
super().__init__(hap, device, post)
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_SAFETY
@property
def available(self) -> bool:
"""Security-Group available."""
# A security-group must be available, and should not be affected by
# the individual availability of group members.
return True
@property
def device_state_attributes(self) -> Dict[str, Any]:
"""Return the state attributes of the security zone group."""
state_attr = super().device_state_attributes
for attr, attr_key in GROUP_ATTRIBUTES.items():
attr_value = getattr(self._device, attr, None)
if attr_value:
state_attr[attr_key] = attr_value
window_state = getattr(self._device, "windowState", None)
if window_state and window_state != WindowState.CLOSED:
state_attr[ATTR_WINDOW_STATE] = str(window_state)
return state_attr
@property
def is_on(self) -> bool:
"""Return true if security issue detected."""
if (
self._device.motionDetected
or self._device.presenceDetected
or self._device.unreach
or self._device.sabotage
):
return True
if (
self._device.windowState is not None
and self._device.windowState != WindowState.CLOSED
):
return True
return False
class HomematicipSecuritySensorGroup(
HomematicipSecurityZoneSensorGroup, BinarySensorEntity
):
"""Representation of the HomematicIP security group."""
def __init__(self, hap: HomematicipHAP, device) -> None:
"""Initialize security group."""
super().__init__(hap, device, "Sensors")
@property
def device_state_attributes(self) -> Dict[str, Any]:
"""Return the state attributes of the security group."""
state_attr = super().device_state_attributes
smoke_detector_at = getattr(self._device, "smokeDetectorAlarmType", None)
if smoke_detector_at:
if smoke_detector_at == SmokeDetectorAlarmType.PRIMARY_ALARM:
state_attr[ATTR_SMOKE_DETECTOR_ALARM] = str(smoke_detector_at)
if smoke_detector_at == SmokeDetectorAlarmType.INTRUSION_ALARM:
state_attr[ATTR_INTRUSION_ALARM] = str(smoke_detector_at)
return state_attr
@property
def is_on(self) -> bool:
"""Return true if safety issue detected."""
parent_is_on = super().is_on
if parent_is_on:
return True
if (
self._device.powerMainsFailure
or self._device.moistureDetected
or self._device.waterlevelDetected
or self._device.lowBat
or self._device.dutyCycle
):
return True
if (
self._device.smokeDetectorAlarmType is not None
and self._device.smokeDetectorAlarmType != SmokeDetectorAlarmType.IDLE_OFF
):
return True
return False
# [source: titilambert/home-assistant / homeassistant/components/homematicip_cloud/binary_sensor.py — license: apache-2.0]
import unittest
import sys
import os
PROJECT_PATH = os.path.sep.join(os.path.abspath(__file__).split(os.path.sep)[:-2])
ROOT_PATH = os.path.dirname(__file__)
def main():
path_setup()
tests = unittest.TestLoader().discover(ROOT_PATH, "*.py")
result = unittest.TextTestRunner(verbosity=2).run(tests)
if not result.wasSuccessful():
sys.exit(1)
def path_setup():
print(PROJECT_PATH)
sys.path.append(PROJECT_PATH)
if __name__ == '__main__':
main()
# [source: viollarr/cursoaulahu / test/testeloadr.py — license: mit]
from ..mpi import MPI
from .mpi import MPITestCase
import sys
import os
import re
import shutil
import unittest
import numpy as np
import numpy.testing as nt
from ..dist import *
from .. import qarray as qa
from ..tod import Interval, TODCache, TODGround
from ._helpers import (create_outdir, create_distdata, boresight_focalplane,
uniform_chunks)
# This file will only be imported if SPT3G is already available
from spt3g import core as c3g
from ..tod import spt3g_utils as s3utils
from ..tod import spt3g as s3g
class Spt3gTest(MPITestCase):
def setUp(self):
fixture_name = os.path.splitext(os.path.basename(__file__))[0]
self.outdir = create_outdir(self.comm, fixture_name)
self.datawrite = os.path.join(self.outdir, "test_3g")
self.dataexport = os.path.join(self.outdir, "export_3g")
self.groundexport = os.path.join(self.outdir, "export_3g_ground")
# Reset the frame file size for these tests, so that we can test
# boundary effects.
self._original_framefile = s3g.TARGET_FRAMEFILE_SIZE
# Create observations, divided evenly between groups
self.nobs = 4
opg = self.nobs
if self.comm.size >= 2:
opg = self.nobs // 2
self.data = create_distdata(self.comm, obs_per_group=opg)
# Create a set of boresight detectors
self.ndet = 4
self.rate = 20.0
self.dnames, self.dquat, depsilon, drate, dnet, dfmin, dfknee, dalpha \
= boresight_focalplane(self.ndet, samplerate=self.rate)
# Properties of each observation
self.obslen = 90.0
self.obsgap = 10.0
self.obstotalsamp = int((self.obslen + self.obsgap)
* self.rate)
self.obssamp = int(self.obslen * self.rate)
self.obsgapsamp = self.obstotalsamp - self.obssamp
self.obstotal = self.obstotalsamp / self.rate
self.obslen = self.obssamp / self.rate
        self.obsgap = self.obstotal - self.obslen
# Properties of the intervals within an observation
self.nsub = 12
self.subtotsamp = self.obssamp // self.nsub
self.subgapsamp = 0
self.subsamp = self.subtotsamp - self.subgapsamp
self.frames = list()
for i in range(self.nsub):
self.frames.append(self.subtotsamp)
if self.obssamp > self.subtotsamp * self.nsub:
self.frames.append(self.obssamp - self.subtotsamp * self.nsub)
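        # Any remainder after the equal-length frames goes into one final,
        # shorter frame so that the frame sizes sum to the observation length.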
# Ground scan properties
self.site_lon = '-67:47:10'
self.site_lat = '-22:57:30'
self.site_alt = 5200.
self.coord = 'C'
self.azmin=45
self.azmax=55
self.el=60
self.scanrate = 5.0
self.scan_accel = 100.0
self.CES_start = None
def tearDown(self):
# Reset the frame file size.
s3g.TARGET_FRAMEFILE_SIZE = self._original_framefile
return
def meta_setup(self):
ret = dict()
ret["string"] = "blahblahblah"
ret["double"] = -1.234567890123e9
ret["int64"] = -100000000000
return ret
def meta_verify(self, dct):
nt.assert_equal(dct["string"], "blahblahblah")
nt.assert_equal(dct["int64"], -100000000000)
nt.assert_almost_equal(dct["double"], -1.234567890123e9)
return
def create_bore(self, total, local):
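        # Fake spiral boresight: theta sweeps 0..pi/2 and phi sweeps 0..2*pi
        # over the full observation; each process fills its local slice.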
theta_incr = (0.5*np.pi) / (total - 1)
phi_incr = (2.0*np.pi) / (total - 1)
theta_start = local[0] * theta_incr
phi_start = local[0] * phi_incr
theta_stop = theta_start + (local[1] - 1) * theta_incr
phi_stop = phi_start + (local[1] - 1) * phi_incr
theta = np.linspace(theta_start, theta_stop, num=local[1],
endpoint=True, dtype=np.float64)
phi = np.linspace(phi_start, phi_stop, num=local[1],
endpoint=True, dtype=np.float64)
pa = np.zeros(local[1], dtype=np.float64)
return qa.from_angles(theta, phi, pa)
def obs_create(self, comm, name):
# fake metadata
props = self.meta_setup()
tod = s3g.TOD3G(comm, comm.size, detectors=self.dquat,
samples=self.obssamp, framesizes=self.frames,
azel=True, meta=props)
obs = dict()
obs["name"] = name
obs["tod"] = tod
return obs
def obs_init(self, obs, start, off):
tod = obs["tod"]
# Now write the data. For this test, we simply write the detector
# index (as a float) to the detector timestream. We also flag every
# other sample. For the boresight pointing, we create a fake spiral
# pattern.
detranks, sampranks = tod.grid_size
rankdet, ranksamp = tod.grid_ranks
off = tod.local_samples[0]
n = tod.local_samples[1]
# We use this for both the common and all the detector
# flags just to check write/read roundtrip.
flags = np.zeros(n, dtype=np.uint8)
flags[::2] = 1
# Everyone writes their timestamps
incr = 1.0 / self.rate
stamps = np.arange(n, dtype=np.float64)
stamps *= incr
stamps += start + (off * incr)
tod.write_times(stamps=stamps)
# Same with the boresight
boresight = self.create_bore(tod.total_samples, tod.local_samples)
tod.write_boresight(data=boresight)
# Just duplicate the RA/DEC quaternions to AZ/EL. We are just
# checking read / write integrity.
tod.write_boresight_azel(data=boresight)
# Fake velocity / position data
posvec = np.zeros((n, 3), dtype=np.float64)
posvec[:,2] = 1.0
tod.write_velocity(vel=posvec)
tod.write_position(pos=posvec)
# Now the common flags
tod.write_common_flags(flags=flags)
# Detector data
fakedata = np.empty(n, dtype=np.float64)
for d in tod.local_dets:
# get unique detector index and convert to float
indx = float(tod.detindx[d])
# write data based on this to all local elements
fakedata[:] = np.arange(n)
fakedata += off
fakedata *= indx
tod.write(detector=d, data=fakedata)
# write detector flags
tod.write_flags(detector=d, flags=flags)
return
def obs_zero(self, obs, start, off):
tod = obs["tod"]
# Write empty data to all fields
detranks, sampranks = tod.grid_size
rankdet, ranksamp = tod.grid_ranks
off = tod.local_samples[0]
n = tod.local_samples[1]
# We use this for both the common and all the detector
# flags just to check write/read roundtrip.
flags = np.zeros(n, dtype=np.uint8)
# Everyone writes their timestamps
stamps = np.zeros(n, dtype=np.float64)
tod.write_times(stamps=stamps)
# Same with the boresight
boresight = np.zeros((n, 4), dtype=np.float64)
tod.write_boresight(data=boresight)
tod.write_boresight_azel(data=boresight)
# Fake velocity / position data
posvec = np.zeros((n, 3), dtype=np.float64)
tod.write_velocity(vel=posvec)
tod.write_position(pos=posvec)
# Now the common flags
tod.write_common_flags(flags=flags)
# Detector data
fakedata = np.zeros(n, dtype=np.float64)
for d in tod.local_dets:
tod.write(detector=d, data=fakedata)
tod.write_flags(detector=d, flags=flags)
return
def obs_verify(self, tod, start, off):
nlocal = tod.local_samples[1]
odd = False
if tod.local_samples[0] % 2 != 0:
odd = True
# Verify metadata
self.meta_verify(tod.meta())
# Read and compare timestamps
compstamps = np.arange(nlocal, dtype=np.float64)
compstamps /= self.rate
compstamps += start + (tod.local_samples[0] / self.rate)
stamps = tod.read_times()
nt.assert_almost_equal(stamps, compstamps)
del stamps
# Read and compare boresight
compbore = self.create_bore(tod.total_samples, tod.local_samples)
boresight = tod.read_boresight()
nt.assert_almost_equal(boresight, compbore)
del boresight
boresight = tod.read_boresight_azel()
nt.assert_almost_equal(boresight, compbore)
del boresight
# flags. We use this for both the common and all the detector
# flags just to check write/read roundtrip.
compflags = np.zeros(nlocal, dtype=np.uint8)
if odd:
compflags[1::2] = 1
else:
compflags[::2] = 1
flags = tod.read_common_flags()
nt.assert_equal(flags, compflags)
del flags
# detector data
compdata = np.empty(nlocal, dtype=np.float64)
for d in tod.local_dets:
# get unique detector index and convert to float
indx = float(tod.detindx[d])
# comparison values
compdata[:] = np.arange(tod.local_samples[1])
compdata += tod.local_samples[0]
compdata *= indx
# read and check
data = tod.read(detector=d)
nt.assert_almost_equal(data, compdata)
del data
# check detector flags
flags = tod.read_flags(detector=d)
nt.assert_equal(flags, compflags)
del flags
return
def data_init(self):
data = Data(self.data.comm)
for ob in range(len(self.data.obs)):
obsname = "obs_{}_{:02d}".format(self.data.comm.group, ob)
obsstart = ob * self.obstotal
obsoff = ob * self.obstotalsamp
obs = self.obs_create(self.data.comm.comm_group, obsname)
self.obs_init(obs, obsstart, obsoff)
data.obs.append(obs)
return data
def init_ground(self):
# Create the simulated TODs
for ob in range(len(self.data.obs)):
obsname = "obs_{}_{:02d}".format(self.data.comm.group, ob)
start = ob * self.obstotal
first = ob * self.obstotalsamp
tod = TODGround(
self.data.comm.comm_group,
self.dquat,
self.obstotalsamp,
detranks=self.data.comm.group_size,
firsttime=start,
rate=self.rate,
site_lon=self.site_lon,
site_lat=self.site_lat,
site_alt=self.site_alt,
azmin=self.azmin,
azmax=self.azmax,
el=self.el,
coord=self.coord,
scanrate=self.scanrate,
scan_accel=self.scan_accel,
CES_start=self.CES_start)
self.data.obs[ob]["tod"] = tod
return
def data_verify(self, path, prefix):
for ob in range(len(self.data.obs)):
obsname = "obs_{}_{:02d}".format(self.data.comm.group, ob)
obsdir = os.path.join(path, obsname)
obsstart = ob * self.obstotal
obsoff = ob * self.obstotalsamp
tod = s3g.TOD3G(self.data.comm.comm_group,
self.data.comm.comm_group.size, path=obsdir, prefix=prefix)
self.obs_verify(tod, obsstart, obsoff)
return
def test_utils(self):
s3g.TARGET_FRAMEFILE_SIZE = 200000
# We want to test the frame operations with a process grid that has
# multiple ranks in both the sample and detector directions.
detranks = self.comm.size
if self.comm.size % 2 == 0:
detranks = 2
# Create a simple tod with a cache that we can use for testing.
tod_in = TODCache(self.comm, self.dnames, self.obssamp,
detquats=self.dquat, detranks=detranks,
sampsizes=self.frames)
tod_out = TODCache(self.comm, self.dnames, self.obssamp,
detquats=self.dquat, detranks=detranks,
sampsizes=self.frames)
# Write some fake data to this TOD
obs_in = {"tod": tod_in}
self.obs_init(obs_in, 0.0, 0)
obs_out = {"tod": tod_out}
self.obs_zero(obs_out, 0.0, 0)
# For timestamps, we need to copy the internal cached timestamps
# (float64) into spt3g timestamps.
stamps = np.copy(tod_in.cache.reference(tod_in._stamps))
stamps *= 1.0e9
istamps = stamps.astype(np.int64)
tod_in.cache.put("spt3gtime", istamps)
# Make lists of fields that we are going to write to the spt3g frames.
# These use the internal names of the cache objects in a TODCache
# class. We will test that we can dump this data to frames and
# restore it.
common = list()
common.append( ("spt3gtime", c3g.G3VectorTime, "spt3gtime") )
common.append( (tod_in._bore, c3g.G3VectorDouble, tod_in._bore) )
common.append( (tod_in._bore_azel, c3g.G3VectorDouble, tod_in._bore_azel) )
common.append( (tod_in._pos, c3g.G3VectorDouble, tod_in._pos) )
common.append( (tod_in._vel, c3g.G3VectorDouble, tod_in._vel) )
common.append( (tod_in._common, c3g.G3VectorUnsignedChar, tod_in._common) )
detfields = [ ("{}{}".format(tod_in._pref_detdata, x),
"{}{}".format(tod_in._pref_detdata, x))\
for x in tod_in.detectors ]
flagfields = [ ("{}{}".format(tod_in._pref_detflags, x),
"{}{}".format(tod_in._pref_detflags, x)) \
for x in tod_in.detectors ]
off = 0
frames = list()
for findx, frm in enumerate(self.frames):
#print("cache to frame {} at {}".format(findx, off), flush=True)
fdata = s3utils.cache_to_frames(tod_in, findx, 1, [off], [frm],
common=common, detector_fields=detfields,
flag_fields=flagfields, units=c3g.G3TimestreamUnits.Tcmb)
#print(" got ",fdata, flush=True)
frames.extend(fdata)
off += frm
# Restore frames to cache
off = 0
for findx, frm in enumerate(self.frames):
#print("frame to cache {} at {}".format(findx, off), flush=True)
s3utils.frame_to_cache(tod_out, findx, off, frm,
frame_data=frames[findx])
off += frm
# Compare input to output
np.testing.assert_almost_equal(tod_in.cache.reference("spt3gtime"),
tod_out.cache.reference("spt3gtime"))
np.testing.assert_almost_equal(tod_in.read_position(),
tod_out.read_position())
np.testing.assert_almost_equal(tod_in.read_velocity(),
tod_out.read_velocity())
np.testing.assert_almost_equal(tod_in.read_boresight(),
tod_out.read_boresight())
np.testing.assert_almost_equal(tod_in.read_boresight_azel(),
tod_out.read_boresight_azel())
np.testing.assert_equal(tod_in.read_common_flags(),
tod_out.read_common_flags())
for det in tod_in.local_dets:
np.testing.assert_almost_equal(tod_in.read(detector=det),
tod_out.read(detector=det))
np.testing.assert_equal(tod_in.read_flags(detector=det),
tod_out.read_flags(detector=det))
return
def test_io(self):
s3g.TARGET_FRAMEFILE_SIZE = 200000
origdata = self.data_init()
if self.comm.rank == 0:
if os.path.isdir(self.datawrite):
shutil.rmtree(self.datawrite)
self.comm.barrier()
dumper = s3g.Op3GExport(self.datawrite, s3g.TOD3G, use_todchunks=True,
export_opts={"prefix" : "test"})
dumper.exec(origdata)
#print("{}: Done with export 1".format(self.comm.rank), flush=True)
self.comm.barrier()
self.data_verify(self.datawrite, "test")
#print("{}: Done with verify 1".format(self.comm.rank), flush=True)
self.comm.barrier()
loaddata = s3g.load_spt3g(self.data.comm,
self.data.comm.comm_group.size,
self.datawrite, "test",
s3g.obsweight_spt3g,
s3g.TOD3G)
#print("{}: Done with load".format(self.comm.rank), flush=True)
self.comm.barrier()
if self.comm.rank == 0:
if os.path.isdir(self.dataexport):
shutil.rmtree(self.dataexport)
self.comm.barrier()
exporter = s3g.Op3GExport(self.dataexport, s3g.TOD3G,
use_todchunks=True,
export_opts={"prefix" : "test"})
exporter.exec(loaddata)
#print("{}: Done with export 2".format(self.comm.rank), flush=True)
self.comm.barrier()
self.data_verify(self.dataexport, "test")
#print("{}: Done with verify 2".format(self.comm.rank), flush=True)
self.comm.barrier()
return
def test_ground(self):
s3g.TARGET_FRAMEFILE_SIZE = 400000
# Create simulated tods in memory
self.init_ground()
# Export this
if self.comm.rank == 0:
if os.path.isdir(self.groundexport):
shutil.rmtree(self.groundexport)
self.comm.barrier()
dumper = s3g.Op3GExport(self.groundexport, s3g.TOD3G,
use_todchunks=True,
export_opts={"prefix" : "test"})
dumper.exec(self.data)
# Load it back in.
loaddata = s3g.load_spt3g(self.data.comm,
self.data.comm.comm_group.size,
self.groundexport, "test",
s3g.obsweight_spt3g,
s3g.TOD3G)
return
| {
"content_hash": "05b7aa0f3e7063925c5acfad98df6612",
"timestamp": "",
"source": "github",
"line_count": 547,
"max_line_length": 83,
"avg_line_length": 32.80438756855576,
"alnum_prop": 0.5691038787338386,
"repo_name": "tskisner/pytoast",
"id": "ad50da9c8e8cd41addddc6ebed0bee9bd3ce9845",
"size": "18138",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/python/tests/spt3g.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "29103"
},
{
"name": "Makefile",
"bytes": "685"
},
{
"name": "Python",
"bytes": "228040"
}
],
"symlink_target": ""
} |
import argparse
import os
import sys
import yaml
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks
base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(base_dir)
#sys.path.append(os.path.join(base_dir, '/netconf/protos/third_party'))
from common.structlog_setup import setup_logging
from common.utils.dockerhelpers import get_my_containers_name
from common.utils.nethelpers import get_my_primary_local_ipv4
#from netconf.grpc_client.grpc_client import GrpcClient
#from netconf.nc_server import NCServer
from dashd.dashd_impl import DashDaemon
defs = dict(
config=os.environ.get('CONFIG', './dashd.yml'),
consul=os.environ.get('CONSUL', 'localhost:8500'),
external_host_address=os.environ.get('EXTERNAL_HOST_ADDRESS',
get_my_primary_local_ipv4()),
grafana_url=os.environ.get('GRAFANA_URL',
'http://admin:admin@localhost:8882/api'),
kafka=os.environ.get('KAFKA', None),
topic=os.environ.get('KAFKA_TOPIC', 'voltha.kpis'),
docker_host=os.environ.get('DOCKER_HOST', None),
instance_id=os.environ.get('INSTANCE_ID', os.environ.get('HOSTNAME', '1')),
internal_host_address=os.environ.get('INTERNAL_HOST_ADDRESS',
get_my_primary_local_ipv4()),
)
def parse_args():
parser = argparse.ArgumentParser("Manage Grafana dashboards")
_help = ('Path to dashd.yml config file (default: %s). '
'If relative, it is relative to main.py of dashd.'
% defs['config'])
parser.add_argument('-c', '--config',
dest='config',
action='store',
default=defs['config'],
help=_help)
_help = '<hostname>:<port> to consul agent (default: %s)' % defs['consul']
parser.add_argument(
'-C', '--consul', dest='consul', action='store',
default=defs['consul'],
help=_help)
_help = '<hostname>:<port> to the kafka bus (default: %s)' % defs['kafka']
parser.add_argument(
'-k', '--kafka', dest='kafka', action='store',
default=defs['kafka'],
help=_help)
_help = 'The kafka topic to listen to (default: %s)' % defs['topic']
parser.add_argument(
'-t', '--topic', dest='topic', action='store',
default=defs['topic'],
help=_help)
_help = 'The URL of the Grafana server (default: %s)' % \
defs['grafana_url']
parser.add_argument(
'-g', '--grafana_url', dest='grafana_url', action='store',
default=defs['grafana_url'],
help=_help)
_help = 'The docker host ip (default %s)' % \
defs['docker_host']
parser.add_argument(
'-d', '--docker_host', dest='docker_host', action='store',
default=defs['docker_host'],
help=_help)
_help = ('unique string id of this netconf server instance (default: %s)'
% defs['instance_id'])
parser.add_argument('-i', '--instance-id',
dest='instance_id',
action='store',
default=defs['instance_id'],
help=_help)
_help = 'omit startup banner log lines'
parser.add_argument('-n', '--no-banner',
dest='no_banner',
action='store_true',
default=False,
help=_help)
_help = "suppress debug and info logs"
parser.add_argument('-q', '--quiet',
dest='quiet',
action='count',
help=_help)
_help = 'enable verbose logging'
parser.add_argument('-v', '--verbose',
dest='verbose',
action='count',
help=_help)
_help = ('use docker container name as netconf server instance id'
' (overrides -i/--instance-id option)')
parser.add_argument('--instance-id-is-container-name',
dest='instance_id_is_container_name',
action='store_true',
default=False,
help=_help)
args = parser.parse_args()
# post-processing
if args.instance_id_is_container_name:
args.instance_id = get_my_containers_name()
return args
def load_config(args):
path = args.config
if path.startswith('.'):
dir = os.path.dirname(os.path.abspath(__file__))
path = os.path.join(dir, path)
path = os.path.abspath(path)
with open(path) as fd:
config = yaml.load(fd)
return config
banner = r'''
__
| \ _ _ __ _ _
||\ |/ ' |/ /| |__ __| |
||/ | o |\ \| _ \ / _ |
|__/ \_._|/_/|_| |_|\__._|
'''
def print_banner(log):
for line in banner.strip('\n').splitlines():
log.info(line)
log.info('(to stop: press Ctrl-C)')
class Main(object):
def __init__(self):
self.args = args = parse_args()
self.config = load_config(args)
verbosity_adjust = (args.verbose or 0) - (args.quiet or 0)
self.log = setup_logging(self.config.get('logging', {}),
args.instance_id,
verbosity_adjust=verbosity_adjust)
self.dashd_server = None
self.dashd_server_started = False
self.exiting = False
if not args.no_banner:
print_banner(self.log)
self.startup_components()
def start(self):
#pass
self.start_reactor() # will not return except Keyboard interrupt
@inlineCallbacks
def startup_components(self):
try:
args = self.args
self.log.info('starting-dash-daemon', consul=args.consul,
grafana_url=args.grafana_url,
topic=args.topic)
self.dashd_server = yield \
DashDaemon(args.consul, #'10.0.2.15:8500',
args.kafka,
args.grafana_url, #'http://admin:admin@localhost:8882/api',
topic=args.topic ) #"voltha.kpis")
reactor.callWhenRunning(self.dashd_server.start)
self.log.info('started')
except Exception:
e = sys.exc_info()
print("ERROR: ", e)
@inlineCallbacks
def shutdown_components(self):
"""Execute before the reactor is shut down"""
self.log.info('exiting-on-keyboard-interrupt')
self.exiting = True
def start_reactor(self):
reactor.callWhenRunning(
lambda: self.log.info('twisted-reactor-started'))
reactor.addSystemEventTrigger('before', 'shutdown',
self.shutdown_components)
reactor.run()
if __name__ == '__main__':
Main().start()
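# Example invocation (a sketch; see parse_args() above for the full flag set):
#
#   python dashd/main.py -c ./dashd.yml -C localhost:8500 \
#       -t voltha.kpis -g http://admin:admin@localhost:8882/api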
| {
"content_hash": "71e3e1286590ddff2d006a10e3a8d020",
"timestamp": "",
"source": "github",
"line_count": 216,
"max_line_length": 90,
"avg_line_length": 32.43981481481482,
"alnum_prop": 0.5326102468959611,
"repo_name": "opencord/voltha",
"id": "f51a8ff83132d8256aa2b6a3faaee6eaded92fe7",
"size": "7629",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dashd/main.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "30265"
},
{
"name": "Dockerfile",
"bytes": "2881"
},
{
"name": "Go",
"bytes": "181529"
},
{
"name": "Jinja",
"bytes": "25855"
},
{
"name": "Makefile",
"bytes": "76329"
},
{
"name": "Python",
"bytes": "9758796"
},
{
"name": "RobotFramework",
"bytes": "10188"
},
{
"name": "Ruby",
"bytes": "1126"
},
{
"name": "Shell",
"bytes": "758475"
},
{
"name": "XSLT",
"bytes": "175917"
}
],
"symlink_target": ""
} |
from collections import defaultdict
import copy
import random
import numpy as np
import torch
import torch.nn as nn
import torch.utils.data
from torch_deep_neural_classifier import TorchDeepNeuralClassifier
__author__ = "Atticus Geiger"
__version__ = "CS224u, Stanford, Spring 2022"
class IITModel(torch.nn.Module):
def __init__(self, model, layers, id_to_coords,device):
super().__init__()
self.model = model
self.layers = layers
self.id_to_coords = defaultdict(lambda: defaultdict(list))
for k, vals in id_to_coords.items():
for d in vals:
layer = d['layer']
self.id_to_coords[k][layer].append(d)
self.device = device
def no_IIT_forward(self, X):
return self.model(X)
def forward(self, X):
base = X[:,0,:].squeeze(1).type(torch.FloatTensor).to(self.device)
coord_ids = X[:,1,:].squeeze(1).type(torch.FloatTensor).to(self.device)
sources = X[:,2:,:].to(self.device)
sources = [sources[:,j,:].squeeze(1).type(torch.FloatTensor).to(self.device)
for j in range(sources.shape[1])]
gets = self.id_to_coords[int(coord_ids.flatten()[0])]
sets = copy.deepcopy(gets)
self.activation = dict()
for layer in gets:
for i, get in enumerate(gets[layer]):
handlers = self._gets_sets(gets={layer: [get]}, sets=None)
source_logits = self.no_IIT_forward(sources[i])
for handler in handlers:
handler.remove()
sets[layer][i]["intervention"] = self.activation[f'{get["layer"]}-{get["start"]}-{get["end"]}']
base_logits = self.no_IIT_forward(base)
handlers = self._gets_sets(gets=None, sets=sets)
counterfactual_logits = self.no_IIT_forward(base)
for handler in handlers:
handler.remove()
return counterfactual_logits, base_logits
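# Sketch of the forward pass above (shapes are illustrative; batch B, input
# dim D, S source examples): X has shape (B, 2 + S, D), packed by
# build_dataset()/prep_input() below as [base, coord_ids, source_1, ...].
# 1. each source is run through the model with "get" hooks that cache the
#    activations at the intervention coordinates;
# 2. the base input is run once, untouched, for base_logits;
# 3. the base input is run again with "set" hooks that splice the cached
#    source activations in, producing counterfactual_logits.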
def make_hook(self, gets, sets, layer):
def hook(model, input, output):
layer_gets, layer_sets = [], []
if gets is not None and layer in gets:
layer_gets = gets[layer]
if sets is not None and layer in sets:
layer_sets = sets[layer]
for set in layer_sets:
output = torch.cat([output[:,:set["start"]], set["intervention"], output[:,set["end"]:]], dim = 1)
for get in layer_gets:
self.activation[f'{get["layer"]}-{get["start"]}-{get["end"]}'] = output[:,get["start"]: get["end"] ]
return output
return hook
def _gets_sets(self, gets=None, sets=None):
handlers = []
for layer in range(len(self.layers)):
hook = self.make_hook(gets, sets, layer)
both_handler = self.layers[layer].register_forward_hook(hook)
handlers.append(both_handler)
return handlers
def retrieve_activations(self, input, get, sets):
input = input.type(torch.FloatTensor).to(self.device)
self.activation = dict()
get_val = {get["layer"]: [get]} if get is not None else None
set_val = {sets["layer"]: [sets]} if sets is not None else None
handlers = self._gets_sets(get_val, set_val)
logits = self.model(input)
for handler in handlers:
handler.remove()
return self.activation[f'{get["layer"]}-{get["start"]}-{get["end"]}']
class CrossEntropyLossIIT(nn.Module):
def __init__(self):
super().__init__()
self.loss = nn.CrossEntropyLoss(reduction="mean")
def forward(self, preds, labels):
return self.loss(preds[0], labels[: , 0]) + self.loss(preds[1], labels[:,1])
class TorchDeepNeuralClassifierIIT(TorchDeepNeuralClassifier):
def __init__(self, id_to_coords=None, **base_kwargs):
super().__init__(**base_kwargs)
self.loss = CrossEntropyLossIIT()
self.id_to_coords = id_to_coords
self.shuffle_train = False
def build_graph(self):
model = super().build_graph()
IITmodel = IITModel(model, self.layers, self.id_to_coords, self.device)
return IITmodel
def batched_indices(self, max_len):
batch_indices = [x for x in range((max_len // self.batch_size))]
output = []
while len(batch_indices) != 0:
batch_index = random.sample(batch_indices, 1)[0]
batch_indices.remove(batch_index)
output.append([batch_index*self.batch_size + x for x in range(self.batch_size)])
return output
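# e.g. with max_len=10 and batch_size=3 this yields the index blocks
# [0, 1, 2], [3, 4, 5], [6, 7, 8] in a random batch order; the trailing
# partial batch (sample 9 here) is dropped.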
def build_dataset(self, base, sources, base_y, IIT_y, coord_ids):
base = torch.FloatTensor(np.array(base))
sources = [torch.FloatTensor(np.array(source)) for source in sources]
self.input_dim = base.shape[1]
coord_ids = torch.FloatTensor(np.array(coord_ids))
base_y = np.array(base_y)
self.classes_ = sorted(set(base_y))
self.n_classes_ = len(self.classes_)
class2index = dict(zip(self.classes_, range(self.n_classes_)))
base_y = [class2index[label] for label in base_y]
base_y = torch.tensor(base_y)
IIT_y = np.array(IIT_y)
IIT_y = [class2index[int(label)] for label in IIT_y]
IIT_y = torch.tensor(IIT_y)
bigX = torch.stack([base, coord_ids.unsqueeze(1).expand(-1, base.shape[1])] + sources, dim=1)
bigy = torch.stack((IIT_y, base_y), dim=1)
dataset = torch.utils.data.TensorDataset(bigX, bigy)
return dataset
def prep_input(self, base, sources, coord_ids):
bigX = torch.stack([base, coord_ids.unsqueeze(1).expand(-1, base.shape[1])] + sources, dim=1)
return bigX
def iit_predict(self, base, sources, coord_ids):
IIT_test = self.prep_input(base, sources, coord_ids)
IIT_preds, base_preds = self.model(IIT_test)
IIT_preds = np.array(IIT_preds.argmax(axis=1).cpu())
base_preds = np.array(base_preds.argmax(axis=1).cpu())
return IIT_preds, base_preds
if __name__ == '__main__':
import iit
from sklearn.metrics import classification_report
import utils
utils.fix_random_seeds()
V1 = 0
data_size = 10000
embedding_dim = 4
id_to_coords = {
V1: [{"layer": 1, "start": 0, "end": embedding_dim}]
}
iit_equality_dataset = iit.get_IIT_equality_dataset(
"V1", embedding_dim, data_size)
X_base_train, X_sources_train, y_base_train, y_IIT_train, interventions = iit_equality_dataset
model = TorchDeepNeuralClassifierIIT(
hidden_dim=embedding_dim*4,
hidden_activation=torch.nn.ReLU(),
num_layers=3,
id_to_coords=id_to_coords)
model.fit(
X_base_train,
X_sources_train,
y_base_train,
y_IIT_train,
interventions)
X_base_test, X_sources_test, y_base_test, y_IIT_test, interventions = iit.get_IIT_equality_dataset(
"V1", embedding_dim, 100)
IIT_preds, base_preds = model.iit_predict(
X_base_test, X_sources_test, interventions)
print("\nStandard evaluation")
print(classification_report(y_base_test, base_preds))
print("V1 counterfactual evaluation")
print(classification_report(y_IIT_test, IIT_preds))
| {
"content_hash": "e5a7d01c104be6e9798002eca1d5cd86",
"timestamp": "",
"source": "github",
"line_count": 197,
"max_line_length": 116,
"avg_line_length": 36.80203045685279,
"alnum_prop": 0.5984827586206897,
"repo_name": "cgpotts/cs224u",
"id": "53de83712460127f2deb44c8f763d4b40722a5a6",
"size": "7250",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "torch_deep_neural_classifier_iit.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "4297079"
},
{
"name": "Python",
"bytes": "364552"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'SubtaskIssue.description'
db.add_column(u'project_subtaskissue', 'description',
self.gf('django.db.models.fields.CharField')(default='Unknown', max_length=400),
keep_default=False)
def backwards(self, orm):
# Deleting field 'SubtaskIssue.description'
db.delete_column(u'project_subtaskissue', 'description')
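# This migration is applied/reverted with South's migrate command, e.g.:
#   python manage.py migrate project 0002   # apply (forwards)
#   python manage.py migrate project 0001   # revert (backwards)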
models = {
u'accounts.employee': {
'Meta': {'object_name': 'Employee'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
'email': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'empid': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'isManager': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
},
u'accounts.skill': {
'Meta': {'object_name': 'Skill'},
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'skillid': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'project.project': {
'Meta': {'object_name': 'Project'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
'manager': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.Employee']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'projectid': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'project.subtask': {
'Meta': {'unique_together': "(('subtaskid', 'task', 'project'),)", 'object_name': 'Subtask'},
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'assignee': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.Employee']"}),
'deadline': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '400'}),
'enddate': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200'}),
'priority': ('django.db.models.fields.CharField', [], {'default': "'Normal'", 'max_length': '200'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Project']"}),
'rating': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'review': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '400'}),
'startdate': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
'subtaskid': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Task']"})
},
u'project.subtaskissue': {
'Meta': {'object_name': 'SubtaskIssue'},
'createdate': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
'description': ('django.db.models.fields.CharField', [], {'default': "'Unknown'", 'max_length': '400'}),
'priority': ('django.db.models.fields.CharField', [], {'default': "'Low'", 'max_length': '200'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Project']"}),
'resolvedate': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
'subtask': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Subtask']"}),
'subtaskissueid': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Task']"})
},
u'project.subtaskskills': {
'Meta': {'unique_together': "(('skill', 'subtask', 'task', 'project'),)", 'object_name': 'SubtaskSkills'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Project']"}),
'skill': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.Skill']"}),
'subtask': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Subtask']"}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Task']"})
},
u'project.task': {
'Meta': {'unique_together': "(('taskid', 'project'),)", 'object_name': 'Task'},
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'deadline': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '400'}),
'enddate': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'manager': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.Employee']"}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200'}),
'priority': ('django.db.models.fields.CharField', [], {'default': "'Normal'", 'max_length': '200'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Project']"}),
'rating': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'review': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '400'}),
'startdate': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
'taskid': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'project.taskissue': {
'Meta': {'object_name': 'TaskIssue'},
'createdate': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
'description': ('django.db.models.fields.CharField', [], {'default': "'Unknown'", 'max_length': '400'}),
'priority': ('django.db.models.fields.CharField', [], {'default': "'Low'", 'max_length': '200'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Project']"}),
'resolvedate': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Task']"}),
'taskissueid': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'project.taskskills': {
'Meta': {'unique_together': "(('skill', 'task', 'project'),)", 'object_name': 'TaskSkills'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Project']"}),
'skill': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.Skill']"}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Task']"})
},
u'project.teammember': {
'Meta': {'object_name': 'Teammember'},
'employee': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.Employee']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['project.Project']"})
}
}
complete_apps = ['project']
| {
"content_hash": "2874055c74d7070cf67cafc48376ed4f",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 118,
"avg_line_length": 70.94214876033058,
"alnum_prop": 0.5540540540540541,
"repo_name": "sushant-hiray/teamflowy",
"id": "1c418e64dfd6a0d3115091110eb1b072784eafed",
"size": "8608",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "project/migrations/0002_auto__add_field_subtaskissue_description.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "13244"
},
{
"name": "JavaScript",
"bytes": "35892"
},
{
"name": "Python",
"bytes": "176508"
},
{
"name": "TeX",
"bytes": "50445"
}
],
"symlink_target": ""
} |
__title__ = 'latinpigsay'
__license__ = 'MIT'
__author__ = 'Steven Cutting'
__author_email__ = 'steven.c.projects@gmail.com'
__created_on__ = '12/3/2014'
from data import charpool as charp
import generalfunctions as gfunc
import re
import string
from string import ascii_letters
import logging
_LOG = logging.getLogger(__name__)
class Translator(object):
def __init__(self, text):
# Separates text into words and whitespace
self.__listofwords = re.findall(r'(?:\S+)|(?:\s+)', text)
self.__output = []
self.__translated = self.buildstring()
def buildstring(self):
self.__parsewords(self.__listofwords)
return ''.join(self.__output)
def __str__(self):
return self.__translated
@property
def returnstr(self):
return self.__translated
__vowels = 'aeiouAEIOU'
__rmcon = gfunc.regexpreplacer(charp.contractions)
def __parsewords(self, words, vowels=__vowels, rmcon=__rmcon):
for word in words:
word2 = rmcon.replace(word)
if word2 != word:
self.__parsewords(word2.split())
else:
# Whitespace does not require translation
if not word.strip():
self.__output.append(word)
continue
# Punctuation does not require translation
if not set(ascii_letters).intersection(word):
self.__output.append(word)
continue
m = re.match(r'^(?P<pre>[\W]*)(?P<word>.+?)(?P<post>[\W]*)$',
word
)
d = m.groupdict()
i = 0
word = d['word']
length = len(word)
while length > i:
if word[i] in vowels:
break
if i > 0 and word[i] in 'yY':
break
i += 1
d['fore'] = word[i:]
d['aft'] = word[:i]
if word[0] in vowels:
new_word = '%(pre)s%(fore)s%(aft)sway%(post)s' % d
else:
new_word = '%(pre)s%(fore)s%(aft)say%(post)s' % d
new_word = new_word.lower()
# Check if first letter is uppercase
if word[0].isupper():
new_word = new_word.capitalize()
self.__output.append(new_word)
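# Example usage (a sketch, assuming the package's data/generalfunctions
# dependencies are importable):
#
#   print Translator("Hello world") # -> Ellohay orldway
#   print Translator("I am") # -> Iway amway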
| {
"content_hash": "9ec4e88967e9950c577132e97330e4a5",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 77,
"avg_line_length": 30.180722891566266,
"alnum_prop": 0.4782435129740519,
"repo_name": "steven-cutting/latinpigsay",
"id": "09798bd5ece2a6770996e0d8307e7586933af463",
"size": "2529",
"binary": false,
"copies": "1",
"ref": "refs/heads/stable",
"path": "latinpigsay/latinpig.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "53007"
}
],
"symlink_target": ""
} |
from __future__ import annotations
import json
import re
import unittest
from unittest import mock
from unittest.mock import patch
import pytest
from parameterized import parameterized
from trino.transaction import IsolationLevel
from airflow import AirflowException
from airflow.models import Connection
from airflow.providers.trino.hooks.trino import TrinoHook
HOOK_GET_CONNECTION = 'airflow.providers.trino.hooks.trino.TrinoHook.get_connection'
BASIC_AUTHENTICATION = 'airflow.providers.trino.hooks.trino.trino.auth.BasicAuthentication'
KERBEROS_AUTHENTICATION = 'airflow.providers.trino.hooks.trino.trino.auth.KerberosAuthentication'
TRINO_DBAPI_CONNECT = 'airflow.providers.trino.hooks.trino.trino.dbapi.connect'
JWT_AUTHENTICATION = 'airflow.providers.trino.hooks.trino.trino.auth.JWTAuthentication'
CERT_AUTHENTICATION = 'airflow.providers.trino.hooks.trino.trino.auth.CertificateAuthentication'
class TestTrinoHookConn:
@patch(BASIC_AUTHENTICATION)
@patch(TRINO_DBAPI_CONNECT)
@patch(HOOK_GET_CONNECTION)
def test_get_conn_basic_auth(self, mock_get_connection, mock_connect, mock_basic_auth):
self.set_get_connection_return_value(mock_get_connection, password='password')
TrinoHook().get_conn()
self.assert_connection_called_with(mock_connect, auth=mock_basic_auth)
mock_basic_auth.assert_called_once_with('login', 'password')
@patch('airflow.providers.trino.hooks.trino.generate_trino_client_info')
@patch(BASIC_AUTHENTICATION)
@patch(TRINO_DBAPI_CONNECT)
@patch(HOOK_GET_CONNECTION)
def test_http_headers(
self,
mock_get_connection,
mock_connect,
mock_basic_auth,
mocked_generate_airflow_trino_client_info_header,
):
mock_get_connection.return_value = Connection(
login='login', password='password', host='host', schema='hive'
)
client = json.dumps(
{
"dag_id": "dag-id",
"execution_date": "2022-01-01T00:00:00",
"task_id": "task-id",
"try_number": "1",
"dag_run_id": "dag-run-id",
"dag_owner": "dag-owner",
},
sort_keys=True,
)
http_headers = {'X-Trino-Client-Info': client}
mocked_generate_airflow_trino_client_info_header.return_value = http_headers['X-Trino-Client-Info']
conn = TrinoHook().get_conn()
self.assert_connection_called_with(mock_connect, auth=mock_basic_auth, http_headers=http_headers)
mock_basic_auth.assert_called_once_with('login', 'password')
assert mock_connect.return_value == conn
@patch(HOOK_GET_CONNECTION)
def test_get_conn_invalid_auth(self, mock_get_connection):
extras = {'auth': 'kerberos'}
self.set_get_connection_return_value(
mock_get_connection,
password='password',
extra=json.dumps(extras),
)
with pytest.raises(
AirflowException, match=re.escape("The 'kerberos' authorization type doesn't support password.")
):
TrinoHook().get_conn()
@patch(JWT_AUTHENTICATION)
@patch(TRINO_DBAPI_CONNECT)
@patch(HOOK_GET_CONNECTION)
def test_get_conn_jwt_auth(self, mock_get_connection, mock_connect, mock_jwt_auth):
extras = {
'auth': 'jwt',
'jwt__token': 'TEST_JWT_TOKEN',
}
self.set_get_connection_return_value(
mock_get_connection,
extra=json.dumps(extras),
)
TrinoHook().get_conn()
self.assert_connection_called_with(mock_connect, auth=mock_jwt_auth)
@patch(CERT_AUTHENTICATION)
@patch(TRINO_DBAPI_CONNECT)
@patch(HOOK_GET_CONNECTION)
def test_get_conn_cert_auth(self, mock_get_connection, mock_connect, mock_cert_auth):
extras = {
'auth': 'certs',
'certs__client_cert_path': '/path/to/client.pem',
'certs__client_key_path': '/path/to/client.key',
}
self.set_get_connection_return_value(
mock_get_connection,
extra=json.dumps(extras),
)
TrinoHook().get_conn()
self.assert_connection_called_with(mock_connect, auth=mock_cert_auth)
mock_cert_auth.assert_called_once_with('/path/to/client.pem', '/path/to/client.key')
@patch(KERBEROS_AUTHENTICATION)
@patch(TRINO_DBAPI_CONNECT)
@patch(HOOK_GET_CONNECTION)
def test_get_conn_kerberos_auth(self, mock_get_connection, mock_connect, mock_auth):
extras = {
'auth': 'kerberos',
'kerberos__config': 'TEST_KERBEROS_CONFIG',
'kerberos__service_name': 'TEST_SERVICE_NAME',
'kerberos__mutual_authentication': 'TEST_MUTUAL_AUTHENTICATION',
'kerberos__force_preemptive': True,
'kerberos__hostname_override': 'TEST_HOSTNAME_OVERRIDE',
'kerberos__sanitize_mutual_error_response': True,
'kerberos__principal': 'TEST_PRINCIPAL',
'kerberos__delegate': 'TEST_DELEGATE',
'kerberos__ca_bundle': 'TEST_CA_BUNDLE',
'verify': 'true',
}
self.set_get_connection_return_value(
mock_get_connection,
extra=json.dumps(extras),
)
TrinoHook().get_conn()
self.assert_connection_called_with(mock_connect, auth=mock_auth)
@parameterized.expand(
[
('False', False),
('false', False),
('True', True),
('true', True),
('/tmp/cert.crt', '/tmp/cert.crt'),
]
)
@patch(HOOK_GET_CONNECTION)
@patch(TRINO_DBAPI_CONNECT)
def test_get_conn_verify(self, current_verify, expected_verify, mock_connect, mock_get_connection):
extras = {'verify': current_verify}
self.set_get_connection_return_value(mock_get_connection, extra=json.dumps(extras))
TrinoHook().get_conn()
self.assert_connection_called_with(mock_connect, verify=expected_verify)
@staticmethod
def set_get_connection_return_value(mock_get_connection, extra=None, password=None):
mocked_connection = Connection(
login='login', password=password, host='host', schema='hive', extra=extra or '{}'
)
mock_get_connection.return_value = mocked_connection
@staticmethod
def assert_connection_called_with(mock_connect, http_headers=mock.ANY, auth=None, verify=True):
mock_connect.assert_called_once_with(
catalog='hive',
host='host',
port=None,
http_scheme='http',
http_headers=http_headers,
schema='hive',
source='airflow',
user='login',
isolation_level=IsolationLevel.AUTOCOMMIT,
auth=None if not auth else auth.return_value,
verify=verify,
)
class TestTrinoHook(unittest.TestCase):
def setUp(self):
super().setUp()
self.cur = mock.MagicMock(rowcount=0)
self.conn = mock.MagicMock()
self.conn.cursor.return_value = self.cur
conn = self.conn
class UnitTestTrinoHook(TrinoHook):
conn_name_attr = 'test_conn_id'
def get_conn(self):
return conn
def get_isolation_level(self):
return IsolationLevel.READ_COMMITTED
self.db_hook = UnitTestTrinoHook()
@patch('airflow.providers.common.sql.hooks.sql.DbApiHook.insert_rows')
def test_insert_rows(self, mock_insert_rows):
table = "table"
rows = [("hello",), ("world",)]
target_fields = None
commit_every = 10
replace = True
self.db_hook.insert_rows(table, rows, target_fields, commit_every, replace)
mock_insert_rows.assert_called_once_with(table, rows, None, 10, True)
def test_get_first_record(self):
statement = 'SQL'
result_sets = [('row1',), ('row2',)]
self.cur.fetchone.return_value = result_sets[0]
assert result_sets[0] == self.db_hook.get_first(statement)
self.conn.close.assert_called_once_with()
self.cur.close.assert_called_once_with()
self.cur.execute.assert_called_once_with(statement)
def test_get_records(self):
statement = 'SQL'
result_sets = [('row1',), ('row2',)]
self.cur.fetchall.return_value = result_sets
assert result_sets == self.db_hook.get_records(statement)
self.conn.close.assert_called_once_with()
self.cur.close.assert_called_once_with()
self.cur.execute.assert_called_once_with(statement)
def test_get_pandas_df(self):
statement = 'SQL'
column = 'col'
result_sets = [('row1',), ('row2',)]
self.cur.description = [(column,)]
self.cur.fetchall.return_value = result_sets
df = self.db_hook.get_pandas_df(statement)
assert column == df.columns[0]
assert result_sets[0][0] == df.values.tolist()[0][0]
assert result_sets[1][0] == df.values.tolist()[1][0]
self.cur.execute.assert_called_once_with(statement, None)
@patch('airflow.providers.trino.hooks.trino.TrinoHook.run')
def test_run(self, mock_run):
sql = "SELECT 1"
autocommit = False
parameters = ("hello", "world")
handler = list
self.db_hook.run(sql, autocommit, parameters, list)
mock_run.assert_called_once_with(sql, autocommit, parameters, handler)
def test_connection_success(self):
status, msg = self.db_hook.test_connection()
assert status is True
assert msg == 'Connection successfully tested'
@patch('airflow.providers.trino.hooks.trino.TrinoHook.get_conn')
def test_connection_failure(self, mock_conn):
mock_conn.side_effect = Exception('Test')
self.db_hook.get_conn = mock_conn
status, msg = self.db_hook.test_connection()
assert status is False
assert msg == 'Test'
class TestTrinoHookIntegration(unittest.TestCase):
@pytest.mark.integration("trino")
@mock.patch.dict('os.environ', AIRFLOW_CONN_TRINO_DEFAULT="trino://airflow@trino:8080/")
def test_should_record_records(self):
hook = TrinoHook()
sql = "SELECT name FROM tpch.sf1.customer ORDER BY custkey ASC LIMIT 3"
records = hook.get_records(sql)
assert [['Customer#000000001'], ['Customer#000000002'], ['Customer#000000003']] == records
@pytest.mark.integration("trino")
@pytest.mark.integration("kerberos")
def test_should_record_records_with_kerberos_auth(self):
conn_url = (
'trino://airflow@trino.example.com:7778/?'
'auth=kerberos&kerberos__service_name=HTTP&'
'verify=False&'
'protocol=https'
)
with mock.patch.dict('os.environ', AIRFLOW_CONN_TRINO_DEFAULT=conn_url):
hook = TrinoHook()
sql = "SELECT name FROM tpch.sf1.customer ORDER BY custkey ASC LIMIT 3"
records = hook.get_records(sql)
assert [['Customer#000000001'], ['Customer#000000002'], ['Customer#000000003']] == records
| {
"content_hash": "28478d5ddb604a094221a2bf48a7c235",
"timestamp": "",
"source": "github",
"line_count": 290,
"max_line_length": 108,
"avg_line_length": 38.43448275862069,
"alnum_prop": 0.6240803875829894,
"repo_name": "cfei18/incubator-airflow",
"id": "11d95c2bcce31ce3facb472b77960d19dfe34ab9",
"size": "11933",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/providers/trino/hooks/test_trino.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "25980"
},
{
"name": "Dockerfile",
"bytes": "72003"
},
{
"name": "HCL",
"bytes": "3786"
},
{
"name": "HTML",
"bytes": "173434"
},
{
"name": "JavaScript",
"bytes": "143068"
},
{
"name": "Jinja",
"bytes": "38808"
},
{
"name": "Jupyter Notebook",
"bytes": "5482"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "22660683"
},
{
"name": "R",
"bytes": "313"
},
{
"name": "Shell",
"bytes": "312715"
},
{
"name": "TypeScript",
"bytes": "472379"
}
],
"symlink_target": ""
} |
from typing import Optional
from es.elastic.api import Connection as ESConnection, connect
from airflow.hooks.dbapi import DbApiHook
from airflow.models.connection import Connection as AirflowConnection
class ElasticsearchHook(DbApiHook):
"""
Interact with Elasticsearch through the elasticsearch-dbapi.
This hook uses the Elasticsearch conn_id.
:param elasticsearch_conn_id: The Airflow connection used for Elasticsearch credentials.
:type elasticsearch_conn_id: str
"""
conn_name_attr = 'elasticsearch_conn_id'
default_conn_name = 'elasticsearch_default'
conn_type = 'elasticsearch'
hook_name = 'Elasticsearch'
def __init__(self, schema: str = "http", connection: Optional[AirflowConnection] = None, *args, **kwargs):
super().__init__(*args, **kwargs)
self.schema = schema
self.connection = connection
def get_conn(self) -> ESConnection:
"""Returns a elasticsearch connection object"""
conn_id = getattr(self, self.conn_name_attr)
conn = self.connection or self.get_connection(conn_id)
conn_args = dict(
host=conn.host,
port=conn.port,
user=conn.login or None,
password=conn.password or None,
scheme=conn.schema or "http",
)
if conn.extra_dejson.get('http_compress', False):
conn_args["http_compress"] = bool(["http_compress"])
if conn.extra_dejson.get('timeout', False):
conn_args["timeout"] = conn.extra_dejson["timeout"]
conn = connect(**conn_args)
return conn
def get_uri(self) -> str:
conn_id = getattr(self, self.conn_name_attr)
conn = self.connection or self.get_connection(conn_id)
login = ''
if conn.login:
login = '{conn.login}:{conn.password}@'.format(conn=conn)
host = conn.host
if conn.port is not None:
host += f':{conn.port}'
uri = '{conn.conn_type}+{conn.schema}://{login}{host}/'.format(conn=conn, login=login, host=host)
extras_length = len(conn.extra_dejson)
if not extras_length:
return uri
uri += '?'
for arg_key, arg_value in conn.extra_dejson.items():
extras_length -= 1
uri += f"{arg_key}={arg_value}"
if extras_length:
uri += '&'
return uri
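# Example (a sketch with hypothetical connection values): for
# Connection(conn_type='elasticsearch', schema='http', login='admin',
# password='secret', host='es-host', port=9200,
# extra='{"http_compress": true}'), get_uri() returns:
#   elasticsearch+http://admin:secret@es-host:9200/?http_compress=True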
| {
"content_hash": "413ce332e8e90cc28496c16dd10d39fe",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 110,
"avg_line_length": 31.233766233766232,
"alnum_prop": 0.6070686070686071,
"repo_name": "sekikn/incubator-airflow",
"id": "df1561911e38700b15fbe1af04b268d2efdd4511",
"size": "3193",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "airflow/providers/elasticsearch/hooks/elasticsearch.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13715"
},
{
"name": "Dockerfile",
"bytes": "15900"
},
{
"name": "HTML",
"bytes": "151266"
},
{
"name": "JavaScript",
"bytes": "25486"
},
{
"name": "Jupyter Notebook",
"bytes": "2933"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "10792443"
},
{
"name": "Shell",
"bytes": "243458"
},
{
"name": "TSQL",
"bytes": "879"
}
],
"symlink_target": ""
} |
import rosbag
import yaml
import matplotlib
import sys
if "-o" in sys.argv: # write to file mode
matplotlib.use("AGG")
else:
matplotlib.use("WXAgg")
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from matplotlib.widgets import Button
import time
#from progressbar import *
from std_msgs.msg import Header
import argparse
import rospy
import re
try:
import colorama
except ImportError:
print "Please install colorama by pip install colorama"
sys.exit(1)
from colorama import Fore, Style
class MessageWrapper():
def __init__(self, obj, header):
self.__field__ = dict()
self.obj = obj
self.header = header
class MessageFieldAccessor():
_is_slot_array_regexp = re.compile("\[([0-9]+)\]$")
_extract_slot_array_regexp = re.compile("(.*)\[([0-9]+)\]$")
def __init__(self, field):
# field = ["hoge", "fuga", "piyo[3]"]
self.parsed_fields = []
for f in field:
if self._is_slot_array_regexp.search(f):
res = self._extract_slot_array_regexp.match(f)
self.parsed_fields.append(res.group(1))
self.parsed_fields.append(int(res.group(2)))
else:
self.parsed_fields.append(f)
def parse(self, msg):
for f in self.parsed_fields:
if isinstance(f, int):
msg = msg[f]
else:
msg = getattr(msg, f)
return msg
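# e.g. MessageFieldAccessor("linear_acceleration/x".split("/")) resolves
# msg.linear_acceleration.x, and a trailing index such as "ranges[3]"
# is parsed so that msg.ranges[3] is returned.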
def expandArrayFields(fields, topics):
ret_fields = []
ret_topics = []
for f, t in zip(fields, topics):
if re.search("\[([0-9]+):([0-9]+)\]", f): # [X:Y]
res = re.match(".*\[([0-9]+):([0-9]+)\]", f)
X = int(res.group(1))
Y = int(res.group(2))
for i in range(X, Y):
ret_fields.append(re.sub("\[[0-9+:[0-9]+\]", "[" + str(i) + "]", f))
ret_topics.append(t)
else:
ret_fields.append(f)
ret_topics.append(t)
return (ret_fields, ret_topics)
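# e.g. expandArrayFields(["data[0:2]"], ["/imu"]) returns
# (["data[0]", "data[1]"], ["/imu", "/imu"]).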
class PlotData():
def __init__(self, options):
self.label = None
self.legend_font_size = 8
(self.fields_orig, self.topics) = expandArrayFields(options["field"], options["topic"])
self.fields = [f.split("/") for f in self.fields_orig]
self.field_accessors = [MessageFieldAccessor(f) for f in self.fields]
self.time_offset = options["time_offset"]
self.values = []
for i in range(len(self.fields)):
self.values.append([])
self.options = options
def addValue(self, topic, value):
if topic not in self.topics:
return
for target_topic, i in zip(self.topics, range(len(self.topics))):
if target_topic == topic:
self.values[i].append((value.header.stamp,
self.field_accessors[i].parse(value.obj)))
def filter(self, start_time, end_time):
for i in range(len(self.values)):
self.values[i] = [v for v in self.values[i]
if v[0] >= start_time and
v[0] <= end_time]
def plot(self, min_stamp, fig, layout, show_legend, share_ax = None):
if share_ax:
ax = fig.add_subplot(layout, sharex=share_ax)
else:
ax = fig.add_subplot(layout)
for vs, i in zip(self.values, range(len(self.values))):
xs = [v[0].to_sec() - min_stamp.to_sec() + self.time_offset for v in vs]
ys = [v[1] for v in vs]
if self.label:
ax.plot(xs, ys, label=self.label[i])
else:
ax.plot(xs, ys, label=self.topics[i] + "/" + self.fields_orig[i])
ax.set_title(self.options["title"])
if show_legend and self.options["legend"]:
legend = ax.legend(prop={'size': self.legend_font_size}, frameon=False)
ax.minorticks_on()
ax.grid(True)
self.ax = ax
return ax
class BagPlotterException(Exception):
pass
class BagPlotter():
def __init__(self):
pass
# self.bag_file = bag_file
# self.conf_file = conf_file
def parse(self):
parser = argparse.ArgumentParser(description='Plot from bag file')
parser.add_argument('config',
help='yaml file to configure plot')
parser.add_argument('bag', nargs="+",
help='bag file to plot')
parser.add_argument('--duration', '-d', type=int,
help='Duration to plot')
parser.add_argument('--start-time', '-s', type=int, default=0,
help='Start time to plot')
parser.add_argument('-o', help='Write to file')
args = parser.parse_args()
self.output_file = args.o
self.bag_file = args.bag
self.conf_file = args.config
self.duration = args.duration
self.start_time = args.start_time
def processConfFile(self):
"""
conf file format is:
global:
layout: "vertical" or "horizontal"
plots:
- title: "title"
type: "line" or "hist"
topics:
- topic: "topic name"
field: "field name"
- topic: "topic name"
field: "field name"
legend: true
- title: "title"
type: "line" or "hist"
topics:
- topic: "topic name"
field: "field name"
- topic: "topic name"
field: "field name"
legend: true
"""
with open(self.conf_file) as f:
data = yaml.load(f)
self.setGlobalOptions(data)
self.setPlotOptions(data)
def readOption(self, option, name, default_value):
if option.has_key(name):
return option[name]
else:
return default_value
def setGlobalOptions(self, data):
global_options = self.readOption(data, "global", dict())
self.global_options = dict()
self.global_options["layout"] = self.readOption(global_options, "layout", "vertical")
self.global_options["legend_font_size"] = self.readOption(global_options, "legend_font_size", 8)
def setPlotOptions(self, data):
plot_options = self.readOption(data, "plots", [])
if len(plot_options) == 0:
raise BagPlotterException("No plots section in conf file")
self.plot_options = []
self.all_topics = set()
self.topic_data = []
for opt in plot_options:
if not opt.has_key("title"):
raise BagPlotterException("plot config requires title section")
opt["type"] = self.readOption(opt, "type", "line")
opt["legend"] = self.readOption(opt, "legend", True)
opt["layout"] = self.readOption(opt, "layout", None)
opt["time_offset"] = self.readOption(opt, "time_offset", 0)
if self.global_options["layout"] == "manual" and opt["layout"] == None:
raise BagPlotterException("Need to specify layout field for manual layout")
if not opt.has_key("topic"):
raise BagPlotterException("plots config requires topic section")
if not opt.has_key("field"):
raise BagPlotterException("plots config requires fields section")
if isinstance(opt["topic"], str):
opt["topic"] = [opt["topic"]]
if isinstance(opt["field"], str):
opt["field"] = [opt["field"]]
if len(opt["topic"]) != len(opt["field"]):
raise BagPlotterException("lengt of topic and field should be same")
for topic in opt["topic"]:
self.all_topics.add(topic)
self.topic_data.append(PlotData(opt))
if "label" in opt:
print "set", opt["label"]
self.topic_data[-1].label = opt["label"]
self.topic_data[-1].legend_font_size = self.global_options["legend_font_size"]
self.plot_options.append(opt)
def layoutGridSize(self):
if self.global_options["layout"] == "vertical":
return (len(self.topic_data), 1)
elif self.global_options["layout"] == "horizontal":
return (1, len(self.topic_data))
elif self.global_options["layout"] == "manual":
max_x = 0
max_y = 0
for topic_data in self.topic_data:
max_x = max(topic_data.options["layout"][0], max_x)
max_y = max(topic_data.options["layout"][1], max_y)
return (max_y + 1, max_x + 1)
def layoutPosition(self, gs, topic_data, i):
if self.global_options["layout"] == "vertical":
return gs[i]
elif self.global_options["layout"] == "horizontal":
return gs[i]
elif self.global_options["layout"] == "manual":
return gs[topic_data.options["layout"][1], topic_data.options["layout"][0]]
def plot(self):
plt.interactive(True)
min_stamp = None
max_stamp = None
no_valid_data = True
for abag in self.bag_file:
with rosbag.Bag(abag) as bag:
info = yaml.load(bag._get_yaml_info())
message_num = sum([topic["messages"] for topic in info["topics"]
if topic["topic"] in self.all_topics])
#widgets = [Fore.GREEN + "%s: " % (abag) + Fore.RESET, Percentage(), Bar()]
# pbar = ProgressBar(maxval=max(1, message_num), widgets=widgets).start()
counter = 0
read_data = [(topic, msg, timestamp)
for topic, msg, timestamp
in bag.read_messages(topics=self.all_topics)]
for topic, msg, timestamp in read_data:
# pbar.update(counter)
# check topic has header field
if not hasattr(msg, "header"):
msg = MessageWrapper(msg, Header())
msg.header.stamp = timestamp
else:
msg = MessageWrapper(msg, msg.header)
for topic_data in self.topic_data:
topic_data.addValue(topic, msg)
no_valid_data = False
if min_stamp:
if min_stamp > msg.header.stamp:
min_stamp = msg.header.stamp
else:
min_stamp = msg.header.stamp
if max_stamp:
if max_stamp < msg.header.stamp:
max_stamp = msg.header.stamp
else:
max_stamp = msg.header.stamp
counter = counter + 1
# pbar.finish()
if no_valid_data:
print Fore.RED + "Cannot find valid data in bag files, valid topics are:\n%s" % ", ".join(self.all_topics) + Fore.RESET
return
title = ("""Plot %s using %s from [%s] to [%s] (%d secs)""" %
(", ".join(self.bag_file),
self.conf_file,
str(time.ctime(min_stamp.to_sec())),
str(time.ctime(max_stamp.to_sec())),
(max_stamp - min_stamp).to_sec()))
start_time = rospy.Duration(self.start_time) + min_stamp
if self.duration:
end_time = start_time + rospy.Duration(self.duration)
else:
end_time = max_stamp
for topic_data in self.topic_data:
topic_data.filter(start_time, end_time)
fig = plt.figure(facecolor="1.0")
self.fig = fig
fig.suptitle(title)
self.show_legend = True
fig.canvas.mpl_connect('key_press_event', self.keyPress)
# Compute layout
self.start_time = start_time
self.plotAll(fig, start_time, self.show_legend)
if self.output_file:
fig = matplotlib.pyplot.gcf()
fig.set_size_inches(40, 30)
fig.savefig(self.output_file)
else:
self.printUsage()
self.runp = True
while self.runp:
plt.pause(1)
def keyPress(self, event):
if event.key == "l" or event.key == "L":
self.toggleLegend()
elif event.key == "q" or event.key == "Q":
self.runp = False
def printUsage(self):
print "Usage::"
print " l or L: toggle legend"
print " q or Q: quit"
def toggleLegend(self):
self.show_legend = not self.show_legend
plt.clf()
self.plotAll(self.fig, self.start_time, self.show_legend)
def plotAll(self, fig, start_time, show_legend):
grid_size = self.layoutGridSize()
gs = gridspec.GridSpec(*grid_size)
ax = None
for topic_data, i in zip(self.topic_data,
range(len(self.topic_data))):
ax = topic_data.plot(start_time,
fig,
self.layoutPosition(gs, topic_data, i),
show_legend, share_ax=ax)
fig.subplots_adjust(hspace=0.4)
plt.draw()
plt.show()
def run(self):
self.processConfFile()
self.plot()
plotter = BagPlotter()
plotter.parse()
plotter.run()
| {
"content_hash": "bc626fdfe3ddfff5a0aa536bc97082d5",
"timestamp": "",
"source": "github",
"line_count": 335,
"max_line_length": 131,
"avg_line_length": 40.376119402985076,
"alnum_prop": 0.5207748040810292,
"repo_name": "AtsushiSakai/jsk_visualization_packages",
"id": "bcf9967562e489787d3f79f14e671d1b1ac04d9b",
"size": "13612",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jsk_topic_tools/scripts/bag_plotter.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "1110265"
},
{
"name": "CMake",
"bytes": "21999"
},
{
"name": "Python",
"bytes": "195785"
},
{
"name": "Shell",
"bytes": "106"
}
],
"symlink_target": ""
} |
from .io import load_scoring_matrix
import array
import itertools
class Directions:
undefined, diag, up, left = range(4)
def __init__(self):
pass
def levenshtein(first, second, scoring_matrix=None, gap=1):
"""
Calculate the levenshtein distance between two sequences. http://en.wikipedia.org/wiki/Levenshtein_distance
Wraps alignment score, with a negation
Arguments: Sequence first, Sequence second, str scoring_matrix, int or (int, int) gap
Returns: int
"""
return -1 * alignment_score(first, second, scoring_matrix, gap)
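# Worked example (assuming the default scoring matrix scores matches 0 and
# mismatches -1): for Sequence objects wrapping "kitten" and "sitting",
# levenshtein(...) == 3 (substitute k->s, e->i, insert g).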
def alignment_score(first, second, scoring_matrix=None, gap=1):
"""
Calculate the alignment score distance between two sequences.
Can take a custom scoring matrix and gap penalty
Arguments: Sequence first, Sequence second, str scoring_matrix, int or (int, int) gap
Returns: int
"""
scoring = _get_scoring_matrix(scoring_matrix)
s = first.sequence
t = second.sequence
if type(gap) is int:
matrix = _alignment_matrix(s, t, scoring, gap)
else:
matrix, pointer = _affine_alignment_matrix(s, t, scoring, gap)
return matrix[-1][-1]
def global_alignment(first, second, scoring_matrix=None, gap=1):
"""
Find an optimal global alignment of two sequences; return the alignment score and a pair of
strings representing an optimal alignment.
Arguments: Sequence first, Sequence second, str scoring_matrix, int or (int, int) gap
Returns: int, str, str
"""
scoring = _get_scoring_matrix(scoring_matrix)
s = first.sequence
t = second.sequence
if type(gap) == int:
matrix = _alignment_matrix(s, t, scoring, gap)
s1, t1 = _alignment_backtrack(s, t, matrix, scoring, gap)
else:
matrix, pointer = _affine_alignment_matrix(s, t, scoring, gap)
s1, t1, = _alignment_backtrack_pointer(s, t, matrix, pointer)
return matrix[-1][-1], s1, t1
def optimal_alignment_count(first, second, modulus=None, scoring_matrix=None, gap=1):
"""
Finds the number of valid optimal alignments for a pair of sequences. As this can get large, the count
can be returned modulo some number.
Arguments: Sequence first, Sequence second, int modulus, str scoring_matrix, int or (int, int) gap
Returns: int
"""
scoring = _get_scoring_matrix(scoring_matrix)
s = first.sequence
t = second.sequence
matrix = _alignment_matrix(s, t, scoring, gap)
return _count_alignments(s, t, matrix, modulus)
def local_alignment(first, second, scoring_matrix=None, gap=1):
"""
Finds the highest-scoring local alignment of a pair of sequences.
Arguments: Sequence first, Sequence second, str scoring_matrix, int or (int, int) gap
Returns: int, str, str
"""
scoring = _get_scoring_matrix(scoring_matrix)
s = first.sequence
t = second.sequence
if type(gap) is int:
matrix = _alignment_matrix(s, t, scoring, gap, True)
start = _array_max_index(matrix)
s1, t1 = _alignment_backtrack(s, t, matrix, scoring, gap, True, "", start)
else:
matrix, pointer = _affine_alignment_matrix(s, t, scoring, gap, True)
start = _array_max_index(matrix)
s1, t1 = _alignment_backtrack_pointer(s, t, matrix, pointer, True, "", start)
return matrix[start[0]][start[1]], s1, t1
def semi_global_alignment(first, second, scoring_matrix=None, gap=1, trim=False):
"""
    Finds a semi-global alignment of a pair of sequences, in which gaps at either end are free.
    Arguments: Sequence first, Sequence second, str scoring_matrix, int or (int, int) gap, bool trim
    Returns: int, str, str
"""
scoring = _get_scoring_matrix(scoring_matrix)
s = first.sequence
t = second.sequence
m = len(s)
n = len(t)
if type(gap) is int:
matrix = _alignment_matrix(s, t, scoring, gap, False, (True, True))
if n > m:
start = (m, matrix[-1].index(max(matrix[-1])))
else:
last_col = [row[-1] for row in matrix]
start = (last_col.index(max(last_col)), n)
s1, t1 = _alignment_backtrack(s, t, matrix, scoring, gap, False, "-", start)
        if n > m:
            # start = (m, j): pad the unmatched suffix of t with gaps in s
            t1 += t[start[1]:]
            s1 += "-" * (n - start[1])
        else:
            # start = (i, n): pad the unmatched suffix of s with gaps in t
            s1 += s[start[0]:]
            t1 += "-" * (m - start[0])
else:
raise ValueError("Affine gap not implemented")
if trim:
s1, t1 = _trim_gaps(s1, t1)
return matrix[start[0]][start[1]], s1, t1
def all_semi_global_alignments(first, second, scoring_matrix=None, gap=1, k=0):
scoring = _get_scoring_matrix(scoring_matrix)
s = first.sequence
t = second.sequence
m = len(s)
n = len(t)
if n < m:
s, t = t, s
m, n = n, m
pairs = []
if type(gap) is int:
matrix = _alignment_matrix(s, t, scoring, gap, False, (False, True))
for i in xrange(len(matrix[-1])):
sub_score = matrix[-1][i]
if sub_score >= k:
s1, t1 = _alignment_backtrack(s, t, matrix, scoring, gap, False, "-", (m, i))
long_all_gaps = t1.count("-")
short_leading_gaps = _leading_symbols(s1)
n1 = len(t1)
true_len = n1 - short_leading_gaps - long_all_gaps
pairs.append((short_leading_gaps, true_len))
else:
raise ValueError("Affine gap not implemented")
return pairs
def overlap_alignment(first, second, scoring_matrix=None, gap=1):
"""
    Finds the best overlap alignment of a pair of sequences, in which end gaps are free and
    leading/trailing gap columns are trimmed from the result.
    Arguments: Sequence first, Sequence second, str scoring_matrix, int or (int, int) gap
    Returns: int, str, str
"""
scoring = _get_scoring_matrix(scoring_matrix)
s = first.sequence
t = second.sequence
m = len(s)
n = len(t)
matrix = _alignment_matrix(s, t, scoring, gap, False, (True, True))
start_candidate1 = (m, matrix[-1].index(max(matrix[-1])))
last_col = [row[-1] for row in matrix]
start_candidate2 = (last_col.index(max(last_col)), n)
if matrix[start_candidate1[0]][start_candidate1[1]] >= matrix[start_candidate2[0]][start_candidate2[1]]:
score = matrix[start_candidate1[0]][start_candidate1[1]]
s1, t1 = _alignment_backtrack(s, t, matrix, scoring, gap, False, "-", start_candidate1)
else:
score = matrix[start_candidate2[0]][start_candidate2[1]]
s1, t1 = _alignment_backtrack(s, t, matrix, scoring, gap, False, "-", start_candidate2)
s1, t1 = _trim_gaps(s1, t1)
return score, s1, t1
def _alignment_matrix(s, t, scoring, gap, local=False, free_end_gaps=(False, False)):
"""
Calculate a matrix showing the alignment score between all prefixes of two strings, with the last element
being the alignment score between the whole two strings. Only works for a linear gap penalty, but uses less space
than a more general solution.
Arguments: str s, str t, {(str, str): int} scoring, int gap, bool local
Returns: int[][]
"""
m = len(s)
n = len(t)
matrix = [array.array('i', itertools.repeat(0, n + 1)) for _ in xrange(m + 1)]
if not local and not free_end_gaps[0]:
for i in xrange(m+1):
matrix[i][0] = i * -gap
if not local and not free_end_gaps[1]:
for j in xrange(n+1):
matrix[0][j] = j * -gap
for j in xrange(1, n + 1):
for i in xrange(1, m + 1):
matrix[i][j] = max(matrix[i - 1][j] - gap, matrix[i][j - 1] - gap, matrix[i - 1][j - 1] + scoring[(s[i - 1], t[j - 1])])
if local and matrix[i][j] < 0:
matrix[i][j] = 0
return matrix
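# Worked sketch of the recurrence above under the default scoring
# (match 0, substitution -1, gap 1), for s="AT", t="AAT":
#
#         ""   A   A   T
#     ""   0  -1  -2  -3
#     A   -1   0  -1  -2
#     T   -2  -1  -1  -1
#
# matrix[-1][-1] == -1, i.e. one gap ("A-T" vs "AAT") is the optimal alignment.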
def _affine_alignment_matrix(s, t, scoring, gap, local=False):
"""
Calculate a matrix showing the alignment score between all prefixes of two strings, with the last element
being the alignment score between the whole two strings. Works for affine gap penalty (constant gap penalty can
    be expressed as an affine penalty with 0 extension penalty, so this works for constant penalty too). Uses five
    times the space of the linear version, and also returns a pointer matrix.
Arguments: str s, str t, {(str, str): int} scoring, int gap, bool local
Returns: int[][], int[][]
"""
m = len(s)
n = len(t)
open_gap, extend_gap = gap
scores = [array.array('i', itertools.repeat(0, n + 1)) for _ in xrange(m + 1)]
f = [array.array('i', itertools.repeat(0, n + 1)) for _ in xrange(m + 1)]
ii = [array.array('i', itertools.repeat(0, n + 1)) for _ in xrange(m + 1)]
ij = [array.array('i', itertools.repeat(0, n + 1)) for _ in xrange(m + 1)]
pointer = {'f': [array.array('i', itertools.repeat(Directions.undefined, n + 1)) for _ in xrange(m + 1)],
'i': [array.array('i', itertools.repeat(Directions.up, n + 1)) for _ in xrange(m + 1)],
'j': [array.array('i', itertools.repeat(Directions.left, n + 1)) for _ in xrange(m + 1)]}
for i in xrange(1, m+1):
pointer['f'][i][0] = Directions.up
pointer['i'][i][0] = Directions.up
pointer['j'][i][0] = Directions.up
if not local:
scores[i][0] = -(open_gap + (i - 1) * extend_gap)
ii[i][0] = -(open_gap + (i - 1) * extend_gap)
for i in xrange(1, n+1):
pointer['f'][0][i] = Directions.left
pointer['i'][0][i] = Directions.left
pointer['j'][0][i] = Directions.left
if not local:
scores[0][i] = -(open_gap + (i - 1) * extend_gap)
ij[0][i] = -(open_gap + (i - 1) * extend_gap)
for j in xrange(1, n + 1):
for i in xrange(1, m + 1):
f[i][j] = scores[i - 1][j - 1] + scoring[(s[i - 1], t[j - 1])]
if local and f[i][j] < 0:
f[i][j] = 0
if scores[i - 1][j - 1] == f[i - 1][j - 1]:
pointer['f'][i][j] = Directions.diag
elif scores[i - 1][j - 1] == ii[i - 1][j - 1]:
pointer['f'][i][j] = Directions.up
elif scores[i - 1][j - 1] == ij[i - 1][j - 1]:
pointer['f'][i][j] = Directions.left
else:
raise ValueError("Uh Oh")
if i == 1:
ii[i][j] = scores[i - 1][j] - open_gap
if local and ii[i][j] < 0:
ii[i][j] = 0
else:
ii[i][j] = max(ii[i - 1][j] - extend_gap, scores[i - 1][j] - open_gap)
if local and ii[i][j] < 0:
ii[i][j] = 0
if ii[i][j] == scores[i - 1][j] - open_gap and scores[i - 1][j] == f[i - 1][j]:
pointer['i'][i][j] = Directions.diag
if j == 1:
ij[i][j] = scores[i][j] - open_gap
if local and ij[i][j] < 0:
ij[i][j] = 0
else:
ij[i][j] = max(ij[i][j - 1] - extend_gap, scores[i][j - 1] - open_gap)
if local and ij[i][j] < 0:
ij[i][j] = 0
if ij[i][j] == scores[i][j - 1] - open_gap and scores[i][j - 1] == f[i][j - 1]:
pointer['j'][i][j] = Directions.diag
scores[i][j] = max(f[i][j], ii[i][j], ij[i][j])
if scores[-1][-1] == f[-1][-1]:
pointer['start'] = 'f'
elif scores[-1][-1] == ii[-1][-1]:
pointer['start'] = 'i'
elif scores[-1][-1] == ij[-1][-1]:
pointer['start'] = 'j'
return scores, pointer
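# Sketch of the affine cost model used above: with gap = (open_gap, extend_gap),
# a run of k consecutive gaps costs open_gap + (k - 1) * extend_gap. For example
# gap = (11, 1) prices a length-3 gap at 11 + 2 = 13, so one long gap is
# penalised far less than three separate gap openings would be.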
def _alignment_backtrack(s, t, matrix, scoring, gap, local=False, gap_symbol="-", start=None):
"""
Given two strings and the alignment score matrix between the two, backtrack through and create an alignment.
Arguments: str s, str t, int[][] matrix, {(str, str): int} scoring, int gap, bool local, str gap_symbol, (int, int) start
Returns: str, str
"""
s1 = ""
t1 = ""
if start is None:
i = len(s)
j = len(t)
else:
i = start[0]
j = start[1]
while i > 0 or j > 0:
if local and matrix[i][j] == 0:
break
if j > 0 and i > 0 and matrix[i][j] - matrix[i - 1][j - 1] == scoring[(s[i - 1], t[j - 1])]:
i -= 1
j -= 1
s1 = s[i] + s1
t1 = t[j] + t1
elif (j == 0) or (i > 0 and matrix[i][j] - matrix[i - 1][j] == -gap):
i -= 1
s1 = s[i] + s1
t1 = gap_symbol + t1
elif (i == 0) or (j > 0 and matrix[i][j] - matrix[i][j - 1] == -gap):
j -= 1
s1 = gap_symbol + s1
t1 = t[j] + t1
else:
raise ValueError("Wrong scoring matrix?")
return s1, t1
def _alignment_backtrack_pointer(s, t, matrix, pointer, local=False, gap_symbol="-", start=None):
"""
    Given two strings and a set of pointer matrices, backtrack through and create an alignment. The alignment score
    matrix is only really needed for the local version, but making it optional would require a refactor.
Arguments: str s, str t, int[][] matrix, int[][] pointer, bool local, str gap_symbol, (int, int) start
Returns: str, str
"""
s1 = ""
t1 = ""
if start is None:
i = len(s)
j = len(t)
pointer_id = pointer['start']
else:
i = start[0]
j = start[1]
pointer_id = 'f'
current_pointer = pointer[pointer_id]
while i > 0 or j > 0:
next_dir = current_pointer[i][j]
if local and matrix[i][j] == 0:
break
if pointer_id == 'f':
i -= 1
j -= 1
s1 = s[i] + s1
t1 = t[j] + t1
elif pointer_id == 'i':
i -= 1
s1 = s[i] + s1
t1 = gap_symbol + t1
elif pointer_id == 'j':
j -= 1
t1 = t[j] + t1
s1 = gap_symbol + s1
else:
raise ValueError("Invalid pointer matrix")
if next_dir == Directions.diag:
pointer_id = 'f'
elif next_dir == Directions.up:
pointer_id = 'i'
elif next_dir == Directions.left:
pointer_id = 'j'
else:
raise ValueError("Invalid pointer matrix")
current_pointer = pointer[pointer_id]
return s1, t1
def _count_alignments(s, t, matrix, modulus=None):
"""
Using an alignment score matrix and the two original strings, count the number of optimal alignments modulo some
number. This currently does not work for an arbitrary scoring scheme, only the simplest default.
Arguments: str s, str t, int[][] matrix, int or None modulus
Returns: int
"""
current = [1] * (len(t) + 1)
for i in xrange(1, len(s) + 1):
prev = current
current = [1] * (len(t) + 1)
for j in xrange(1, len(t) + 1):
if s[i - 1] == t[j - 1]:
diag = matrix[i - 1][j - 1] + 1
else:
diag = matrix[i - 1][j - 1]
m = max(diag, matrix[i - 1][j], matrix[i][j - 1])
alignments = 0
if diag == m:
alignments += prev[j - 1]
if matrix[i - 1][j] == m:
alignments += prev[j]
if matrix[i][j - 1] == m:
alignments += current[j - 1]
current[j] = alignments
if modulus is not None:
current[j] %= modulus
return current[-1]
def _get_scoring_matrix(scoring_matrix):
if scoring_matrix is None:
return _DefaultMatrix(0, -1)
elif type(scoring_matrix) is str:
return load_scoring_matrix(scoring_matrix)
else:
return _DefaultMatrix(scoring_matrix[0], scoring_matrix[1])
class _DefaultMatrix(dict):
"""
Dict-like object for the default scoring of -1 for a substitution, 0 for a match.
"""
def __init__(self, match, substitution):
self.match = match
self.substitution = substitution
def __missing__(self, key):
if key[0] == key[1]:
return self.match
else:
return self.substitution
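# Sketch of the __missing__ fallback above:
#     scoring = _DefaultMatrix(0, -1)
#     scoring[('A', 'A')]  # -> 0  (match)
#     scoring[('A', 'C')]  # -> -1 (substitution)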
def _array_max_index(matrix):
"""
Calculates the index of the maximum of a list of lists of ints.
Arguments: int[][]
Returns: int, int
"""
    maxes = []
    indices = []
    for i in xrange(len(matrix)):
        maxes.append(max(matrix[i]))
        indices.append(matrix[i].index(maxes[i]))
    i = maxes.index(max(maxes))
    j = indices[i]
return i, j
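# e.g. _array_max_index([[1, 5], [3, 2]]) -> (0, 1), the position of the 5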
def _trim_gaps(s, t, symbol="-"):
front_strip = (_leading_symbols(s), _leading_symbols(t))
back_strip = (_leading_symbols(s[::-1]), _leading_symbols(t[::-1]))
    # -0 == 0, so without the trailing "or None" an empty back-trim would slice away the whole string
s = s[max(front_strip):-max(back_strip) or None]
t = t[max(front_strip):-max(back_strip) or None]
return s, t
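# e.g. _trim_gaps("--AC-", "TTAC-") -> ("AC", "AC"): the longest leading and
# trailing gap run in either string is trimmed from both.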
def _leading_symbols(s, symbol="-"):
count = 0
for char in s:
if char == symbol:
count += 1
else:
break
return count
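# Minimal runnable sketch. Assumption: the library's Sequence type only needs a
# ``sequence`` attribute here, so a namedtuple stands in for it.
if __name__ == '__main__':
    from collections import namedtuple
    _Seq = namedtuple('_Seq', 'sequence')
    a, b = _Seq('kitten'), _Seq('sitting')
    print levenshtein(a, b)          # 3 under the default scoring
    print global_alignment(a, b)     # (-3, <aligned s>, <aligned t>)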
| {
"content_hash": "471a41e3f0e8ad17605415b8e71ce640",
"timestamp": "",
"source": "github",
"line_count": 451,
"max_line_length": 132,
"avg_line_length": 38.4390243902439,
"alnum_prop": 0.5369173973234886,
"repo_name": "billletson/rosalind",
"id": "f614d939a42161e5eb65413ba8c8c3d4af0dfa9e",
"size": "17336",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rosalind/alignment.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "102112"
}
],
"symlink_target": ""
} |
import random
import copy
from matplotlib import pyplot as plt
import csv
class Kmean(object):
"""docstring for Kmean"""
def __init__(self, k):
super(Kmean, self).__init__()
self.k = k
self.data = []
        self.dim = 0  # set by readdata()
self.centers = []
self.clusters = {}
self.index2point = {}
self.label = {}
self.getlabel()
self.readdata_csv()
#self.readdata()
self.setcenter()
def readdata(self):
data = open('simulate.txt')
for point in data:
point = point[0:-1].split()
point = map(float, point)
self.data.append(point)
self.dim = len(self.data[0])
def readdata_csv(self):
f = open('ClusterSamples.csv')
count = 1
for item in f:
item = item[0:-1].split(',')
item = map(float, item)
self.data.append(item)
self.index2point[count] = item
count += 1
    def dist(self, p1, p2):
distsum = 0.0
for i in range(0, len(p1)):
distsum = pow(p2[i]- p1[i], 2) + distsum
return pow(distsum, 0.5)
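    # e.g. dist([0, 0], [3, 4]) == 5.0 -- plain Euclidean distance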
def setcenter(self):
self.centers = random.sample(self.data, self.k)
    def updatecenter(self, cluster):
        if len(cluster) == 0:
            # random.sample returns a list, so unwrap the single point
            return random.sample(self.data, 1)[0]
newcenter = []
for item in range(0, 784):
newcenter.append(0)
for item in cluster:
newcenter = list(map(lambda x: x[0] + x[1], zip(newcenter, item)))
result = []
for item in newcenter:
center = round(item/float(len(cluster)), 2)
result.append(center)
return result
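    # Sketch of the averaging above: updatecenter([[0, 2], [2, 4]]) -> [1.0, 3.0].
    # The 784 zeros allocated above (presumably the 28x28 MNIST dimensionality)
    # are truncated by zip() to the true dimensionality of the points.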
def algorithm(self):
count = 0
centers = self.centers
while True:
count += 1
centers_copy = copy.deepcopy(centers)
clusters = {}
for i in range(0, self.k):
clusters[i] = []
for point in self.data:
distdict = {}
for i in range(0, len(clusters)):
distdict[i] = self.dist(self.centers[i], point)
for index in distdict:
if distdict[index] == min(distdict.values()):
clusters[index].append(point)
break
            for i in range(0, self.k):
                centers[i] = self.updatecenter(clusters[i])
            print '{}th iteration'.format(str(count))
if centers_copy == self.centers:
f = open('out_index.txt', 'w')
for item in clusters:
f.write('{}class\n'.format(item))
for point in clusters[item]:
key = self.search(self.index2point, point)
f.write(str(key)+'\n')
f.write('\n')
return clusters
def getlabel(self):
f = open('SampleLabels.csv')
count = 1
for item in f:
self.label[count] = item[:-1]
count += 1
def plot_simulate(self):
cluster = self.algorithm()
x_axis = []
y_axis = []
fig = plt.figure()
ax1 = fig.add_subplot(111)
ax1.set_title("K-means")
plt.xlabel('x')
plt.ylabel('y')
color = ['r','b']
label = ['label1', 'label2']
marker = ['x', 'o']
for index in cluster:
for point in cluster[index]:
plt.scatter(point[0], point[1], c=color[index], marker=marker[index], label = label[index],alpha=0.6)
plt.savefig('k-means_simulate.jpg')
    def search(self, d, val):
for item in d:
if d[item] == val:
return item
def plot_minist(self):
f = open('out_index.txt')
        clusterSize = []
        clusters = []
for item in f:
if 'class' in item:
L = []
continue
if item == '\n':
clusters.append(L)
del(L)
continue
L.append(item)
        allLabels = []
        for item in clusters:
            L = []
            for elm in item:
                elm = elm.replace('\n', '')
                L.append(self.label[int(elm)])
            allLabels.append(L)
            del(L)
        print len(allLabels)
        f = open('count3.csv', 'w')
        count = 0
        for item in allLabels:
            f.write('class{},'.format(str(count)))
            for i in range(0, len(allLabels)):
                f.write(str(item.count(str(i)))+',')
            f.write('\n')
            count += 1
clusterSize = map(len, clusters)
axis = []
label = []
for i in range(0, len(clusterSize)):
axis.append(i)
label.append('Class'+str(i))
        plt.title(u'The histogram of {} clusters using MNIST data (10000 samples)'.format(str(len(clusterSize))))
plt.bar(axis, clusterSize, color = ['#4682b4'], label = label, align='center', width=0.3, alpha=0.8,linewidth=None)
plt.savefig('Hist3.jpg')
plt.show()
K = Kmean(10)
K.algorithm()
K.plot_minist()
| {
"content_hash": "860f71bd6566334a8e9e2f00d50b3be7",
"timestamp": "",
"source": "github",
"line_count": 186,
"max_line_length": 117,
"avg_line_length": 22.236559139784948,
"alnum_prop": 0.6112185686653772,
"repo_name": "hitlonewind/PR-experiment",
"id": "cfffae6f852a188e43008407f6e7bd4275bab605",
"size": "4150",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Kmeans/kmeans.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "44866"
}
],
"symlink_target": ""
} |
from server import Server
# Create an echo server class
class EchoServer(Server):
def onStart(self):
print "Echo server has started"
def onMessage(self, socket, message):
# This function takes two arguments: 'socket' and 'message'.
# 'socket' can be used to send a message string back over the wire.
# 'message' holds the incoming message string (minus the line-return).
# convert the string to an upper case version
message = message.upper()
# Just echo back what we received
socket.send(message)
# Signify all is well
return True | {
"content_hash": "f306539ab10ef6aaa5f56affb5a67261",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 82,
"avg_line_length": 33.21052631578947,
"alnum_prop": 0.6450079239302694,
"repo_name": "QasAshraf/cloaked-octo-hipster.py",
"id": "d05d47af6970bfe87827f9200f23bdbc38be5e05",
"size": "631",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/echoServer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "18296"
}
],
"symlink_target": ""
} |
"""
Django settings for develop project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
import datetime
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'ermn@t3e6)2lwtgca9nfyxf$h6b9fpo%(!h%mtgt7tyy2ut6m*'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
if not DEBUG:
HOST = "https://itelpark.com"
ALLOWED_HOSTS = ['www.itelpark.com','itelpark.com']
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# CSRF_COOKIE_SECURE = True
else:
ALLOWED_HOSTS = ['*']
# for security reasons
# CSRF_COOKIE_SECURE=True
# SESSION_COOKIE_SECURE=True
# SECURE_CONTENT_TYPE_NOSNIFF=True
# SECURE_BROWSER_XSS_FILTER=True
# SECURE_SSL_REDIRECT=True
# X_FRAME_OPTIONS='Deny'
# Application definition
INSTALLED_APPS = [
'material',
'material.admin',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'ckeditor',
'ckeditor_uploader',
'rest_framework',
'api.apps.ApiConfig',
'football.apps.FootballConfig',
'news.apps.NewsConfig',
]
CKEDITOR_JQUERY_URL = 'https://ajax.googleapis.com/ajax/libs/jquery/2.2.4/jquery.min.js'
CKEDITOR_UPLOAD_PATH = "uploads/"
CKEDITOR_CONFIGS = {
'awesome_ckeditor': {
'skin': 'moono',
# 'skin': 'office2013',
'toolbar_Basic': [
['Source', '-', 'Bold', 'Italic']
],
'toolbar_YouCustomToolbarConfig': [
{'name': 'document', 'items': ['Source', '-', 'Save', 'NewPage', 'Preview', 'Print', '-', 'Templates']},
{'name': 'clipboard', 'items': ['Cut', 'Copy', 'Paste', 'PasteText', 'PasteFromWord', '-', 'Undo', 'Redo']},
{'name': 'editing', 'items': ['Find', 'Replace', '-', 'SelectAll']},
{'name': 'forms',
'items': ['Form', 'Checkbox', 'Radio', 'TextField', 'Textarea', 'Select', 'Button', 'ImageButton',
'HiddenField']},
'/',
{'name': 'basicstyles',
'items': ['Bold', 'Italic', 'Underline', 'Strike', 'Subscript', 'Superscript', '-', 'RemoveFormat']},
{'name': 'paragraph',
'items': ['NumberedList', 'BulletedList', '-', 'Outdent', 'Indent', '-', 'Blockquote', 'CreateDiv', '-',
'JustifyLeft', 'JustifyCenter', 'JustifyRight', 'JustifyBlock', '-', 'BidiLtr', 'BidiRtl',
'Language']},
{'name': 'links', 'items': ['Link', 'Unlink', 'Anchor']},
{'name': 'insert',
'items': ['Image', 'Flash', 'Table', 'HorizontalRule', 'Smiley', 'SpecialChar', 'PageBreak', 'Iframe']},
'/',
{'name': 'styles', 'items': ['Styles', 'Format', 'Font', 'FontSize']},
{'name': 'colors', 'items': ['TextColor', 'BGColor']},
{'name': 'tools', 'items': ['Maximize', 'ShowBlocks']},
{'name': 'about', 'items': ['About']},
'/', # put this to force next toolbar on new line
{'name': 'youcustomtools', 'items': [
# put the name of your editor.ui.addButton here
'Preview',
'Maximize',
]},
],
'toolbar': 'YouCustomToolbarConfig', # put selected toolbar config here
# 'toolbarGroups': [{ 'name': 'document', 'groups': [ 'mode', 'document', 'doctools' ] }],
# 'height': 291,
# 'width': '100%',
# 'filebrowserWindowHeight': 725,
# 'filebrowserWindowWidth': 940,
# 'toolbarCanCollapse': True,
# 'mathJaxLib': '//cdn.mathjax.org/mathjax/2.2-latest/MathJax.js?config=TeX-AMS_HTML',
'tabSpaces': 4,
'extraPlugins': ','.join(
[
                # your extra plugins here
'div',
'autolink',
'autoembed',
'embedsemantic',
'autogrow',
# 'devtools',
'widget',
'lineutils',
'clipboard',
'dialog',
'dialogui',
'elementspath'
]),
}
}
REST_FRAMEWORK = {
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DATETIME_FORMAT': "%Y-%m-%d %H:%M:%S",
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.IsAuthenticated',
],
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.SessionAuthentication',
'rest_framework.authentication.BasicAuthentication',
),
}
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'develop.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR,'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'develop.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
if not DEBUG:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'develop_usta',
'USER': 'develop_user',
'PASSWORD': '7HRdMcOvx6toidiEPFTKCAE8gNdA6C04',
'HOST': 'localhost',
'PORT': '',
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'az'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
if not DEBUG:
STATIC_ROOT = 'static'
else:
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
'/home/munis/Documents/unicopycenter/static',
]
MEDIA_URL = '/media/'
MEDIA_ROOT = 'media'
LOG_ROOT = os.path.join(BASE_DIR,'logs')
LOG_LEVEL = 'DEBUG'
LOGGING = {
'version': 1,
'disable_existing_loggers': not DEBUG, # True,
'formatters': {
'standard': {
'format': '%(asctime)s [%(levelname)s] %(name)s:=> %(message)s',
},
'focused': {
'format': '\n----------------------\n%(asctime)s [%(levelname)s] %(name)s:=> %(message)s \n----------------------',
},
},
'handlers': {
'my_custom_debug': {
'level': LOG_LEVEL,
'class': 'logging.FileHandler',
'filename': '%s/mylog.log' % LOG_ROOT,
'formatter': 'focused',
},
'request_handler': {
'level': LOG_LEVEL,
'class': 'logging.FileHandler',
'filename': '%s/django_requests.log' % LOG_ROOT,
'formatter': 'standard',
},
},
'loggers': {
'': {
'handlers': ['my_custom_debug'],
'level': LOG_LEVEL,
'propagate': True,
},
'django.request': {
'handlers': ['request_handler'],
'level': LOG_LEVEL,
'propagate': True,
},
},
}
| {
"content_hash": "649180f54bc7120c6e937019ddfa7d4f",
"timestamp": "",
"source": "github",
"line_count": 297,
"max_line_length": 127,
"avg_line_length": 30.316498316498315,
"alnum_prop": 0.5638605064415815,
"repo_name": "munisisazade/developer_portal",
"id": "0e6869e24cc434ef85162bf1f870aefaa0f82cc8",
"size": "9004",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "develop/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "328323"
},
{
"name": "HTML",
"bytes": "277419"
},
{
"name": "JavaScript",
"bytes": "1304121"
},
{
"name": "Python",
"bytes": "4178222"
}
],
"symlink_target": ""
} |
"""
Common functions used by two or more scripts
"""
def board_full(board, glength):
"""
Returns True if the board is full or False if not
"""
for i in range(glength):
for j in range(glength):
if board[i][j] == -1:
return False
return True
def win(board, player, glength):
"""
    Returns True if the player has a winning row, column, or diagonal, else False
"""
for i in range(glength):
flag = False
for j in range(glength):
if board[i][j] != player:
flag = True
break
if not flag:
return True
for i in range(glength):
flag = False
for j in range(glength):
if board[j][i] != player:
flag = True
break
if not flag:
return True
flag = False
for i in range(glength):
if board[i][i] != player:
flag = True
break
if not flag:
return True
flag = False
for i in range(glength):
if board[glength - i - 1][i] != player:
flag = True
break
if not flag:
return True
    return False
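# Minimal sketch of the helpers above (players are non-negative ints and empty
# cells are -1, as the checks above imply):
if __name__ == '__main__':
    board = [[0, 0, 0],
             [1, 1, -1],
             [-1, -1, -1]]
    assert win(board, 0, 3)          # player 0 owns the first row
    assert not board_full(board, 3)  # empty cells remain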
| {
"content_hash": "235d90009d771f8ba259420ae5c6c4fd",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 54,
"avg_line_length": 21.545454545454547,
"alnum_prop": 0.48860759493670886,
"repo_name": "sk364/N_by_N_Tic_Tac_Toe",
"id": "791add960c5a348fd9c4ee57415ef0daf86ff8d5",
"size": "1185",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "common.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "21643"
}
],
"symlink_target": ""
} |
from .testutils import FullStackTests
import os
import time
import json
import yaml
from datetime import datetime
from webrecorder.models.usermanager import CLIUserManager
from webrecorder.models.datshare import DatShare
from webrecorder.utils import get_new_id, today_str
import responses
from itertools import count
# ============================================================================
class TestDatShare(FullStackTests):
COLL_ID = '100'
@classmethod
def setup_class(cls):
os.environ['ALLOW_DAT'] = '1'
super(TestDatShare, cls).setup_class(storage_worker=True)
cls.manager = CLIUserManager()
cls.set_uuids('Collection', count(int(cls.COLL_ID) - 1))
cls.dat_info = {'datKey': get_new_id(size=20),
'discoveryKey': get_new_id(size=20)
}
@classmethod
def teardown_class(cls):
os.environ.pop('ALLOW_DAT', '')
DatShare.dat_share.close()
super(TestDatShare, cls).teardown_class()
def test_init_coll_and_user(self):
res = self.testapp.post_json('/api/v1/collections?user={user}'.format(user=self.anon_user), params={'title': 'temp'})
assert res.json['collection']
self.manager.create_user('test@example.com', 'test', 'TestTest123', 'archivist', 'Test')
today = today_str()
TestDatShare.coll_store_dir = today + '/' + self.COLL_ID
def test_not_allowed_anon(self):
params = {'collDir': self.warcs_dir}
res = self.testapp.post_json('/api/v1/collection/temp/dat/share?user={user}'.format(user=self.anon_user), params=params, status=400)
assert res.json == {'error': 'not_allowed'}
def test_login(self):
params = {'username': 'test',
'password': 'TestTest123'}
res = self.testapp.post_json('/api/v1/auth/login', params=params)
assert res.json['user']['username'] == 'test'
res = self.testapp.get('/test/default-collection')
res.charset = 'utf-8'
assert '"test"' in res.text
def test_record_1(self):
res = self.testapp.get('/_new/default-collection/rec/record/mp_/http://httpbin.org/get?food=bar')
assert res.status_code == 302
res = res.follow()
res.charset = 'utf-8'
assert '"food": "bar"' in res.text, res.text
def test_commit_1(self):
self.params = {}
def assert_committed():
res = self.testapp.post_json('/api/v1/collection/default-collection/commit?user=test', params=self.params)
self.params = res.json
assert self.params['success'] == True
self.sleep_try(0.2, 10.0, assert_committed)
def test_not_allowed_default_user(self):
params = {'collDir': self.warcs_dir}
res = self.testapp.post_json('/api/v1/collection/default-collection/dat/share?user=test', params=params, status=400)
assert res.json == {'error': 'not_allowed'}
def test_set_role(self):
user = self.manager.all_users['test']
user['role'] = 'beta-archivist'
res = self.testapp.get('/api/v1/user/test')
assert res.json['user']['role'] == 'beta-archivist'
@responses.activate
def test_dat_share(self):
responses.add(responses.POST, 'http://dat:3000/init', status=200,
json=self.dat_info)
responses.add(responses.POST, 'http://dat:3000/share', status=200,
json=self.dat_info)
params = {'collDir': self.coll_store_dir}
res = self.testapp.post_json('/api/v1/collection/default-collection/dat/share?user=test', params=params)
assert res.json['dat_key'] == self.dat_info['datKey']
assert res.json['dat_updated_at'] <= datetime.utcnow().isoformat()
assert res.json['dat_share'] == True
assert len(responses.calls) == 2
assert responses.calls[0].request.url == 'http://dat:3000/init'
assert responses.calls[1].request.url == 'http://dat:3000/share'
today = today_str()
# test dat.json
with open(os.path.join(self.storage_dir, today, self.COLL_ID, 'dat.json'), 'rt') as fh:
datjson = json.loads(fh.read())
assert datjson['url'] == 'dat://' + self.dat_info['datKey']
assert datjson['author'] == 'Test'
assert datjson['title'] == 'Default Collection'
assert datjson['desc'].startswith('*This is your first collection')
# test metadata.yaml
with open(os.path.join(self.storage_dir, today, self.COLL_ID, 'metadata', 'metadata.yaml'), 'rt') as fh:
            metadata = yaml.safe_load(fh.read())
assert metadata['collection']
# pages in recordings
assert 'pages' not in metadata['collection']
assert 'recordings' in metadata['collection']
for recording in metadata['collection']['recordings']:
assert 'pages' in recording
assert 'lists' in metadata['collection']
def test_coll_info_with_dat(self):
res = self.testapp.get('/api/v1/collection/default-collection?user=test')
assert res.json['collection']['dat_key'] == self.dat_info['datKey']
assert res.json['collection']['dat_updated_at'] >= res.json['collection']['updated_at']
assert res.json['collection']['dat_share'] == True
@responses.activate
def test_dat_already_shared(self):
params = {'collDir': self.coll_store_dir}
res = self.testapp.post_json('/api/v1/collection/default-collection/dat/share?user=test', params=params, status=400)
assert res.json == {'error': 'already_updated'}
assert len(responses.calls) == 0
@responses.activate
def test_dat_already_shared_always_update(self):
responses.add(responses.POST, 'http://dat:3000/share', status=200,
json=self.dat_info)
params = {'collDir': self.coll_store_dir, 'always_update': True}
res = self.testapp.post_json('/api/v1/collection/default-collection/dat/share?user=test', params=params, status=200)
assert res.json['dat_key'] == self.dat_info['datKey']
assert res.json['dat_updated_at'] <= datetime.utcnow().isoformat()
assert res.json['dat_share'] == True
assert len(responses.calls) == 1
@responses.activate
def test_dat_list_create_updated_at(self):
responses.add(responses.POST, 'http://dat:3000/share', status=200,
json=self.dat_info)
# No update needed
params = {'collDir': self.coll_store_dir}
res = self.testapp.post_json('/api/v1/collection/default-collection/dat/share?user=test', params=params, status=400)
assert res.json == {'error': 'already_updated'}
assert len(responses.calls) == 0
params = {'title': 'List Name',
'description': 'List Desc'
}
time.sleep(1.0)
# create list
res = self.testapp.post_json('/api/v1/lists?user=test&coll=default-collection', params=params)
assert res.json['list']
# no commit needed
res = self.testapp.post_json('/api/v1/collection/default-collection/commit?user=test')
assert res.json == {'success': True}
# metadata updated, dat also updated
res = self.testapp.post_json('/api/v1/collection/default-collection/dat/share?user=test', params=params, status=200)
assert res.json['dat_key'] == self.dat_info['datKey']
assert len(responses.calls) == 1
@responses.activate
def test_dat_unshare(self):
responses.add(responses.POST, 'http://dat:3000/unshare', status=200,
json={'success': True})
params = {'collDir': self.coll_store_dir}
res = self.testapp.post_json('/api/v1/collection/default-collection/dat/unshare?user=test', params=params)
assert res.json['dat_key'] == self.dat_info['datKey']
assert res.json['dat_updated_at'] <= datetime.utcnow().isoformat()
assert res.json['dat_share'] == False
assert len(responses.calls) == 1
assert responses.calls[0].request.url == 'http://dat:3000/unshare'
@responses.activate
def test_dat_unshare_not_sharing(self):
params = {'collDir': self.coll_store_dir}
res = self.testapp.post_json('/api/v1/collection/default-collection/dat/unshare?user=test', params=params)
assert res.json['dat_key'] == self.dat_info['datKey']
assert res.json['dat_updated_at'] <= datetime.utcnow().isoformat()
assert res.json['dat_share'] == False
assert len(responses.calls) == 0
def test_coll_info_without_dat(self):
res = self.testapp.get('/api/v1/collection/default-collection?user=test')
assert res.json['collection']['dat_key'] == self.dat_info['datKey']
assert res.json['collection']['dat_share'] == False
@responses.activate
def test_dat_reshare_upstream_api_error(self):
responses.add(responses.POST, 'http://dat:3000/share', status=400,
json={'error': 'unknown'})
params = {'collDir': self.coll_store_dir}
res = self.testapp.post_json('/api/v1/collection/default-collection/dat/share?user=test', params=params,
status=400)
assert res.json == {'error': 'api_error'}
assert len(responses.calls) == 1
assert responses.calls[0].request.url == 'http://dat:3000/share'
@responses.activate
def test_dat_reshare(self):
responses.add(responses.POST, 'http://dat:3000/share', status=200,
json=self.dat_info)
params = {'collDir': self.coll_store_dir}
res = self.testapp.post_json('/api/v1/collection/default-collection/dat/share?user=test', params=params)
assert res.json['dat_key'] == self.dat_info['datKey']
assert res.json['dat_updated_at'] <= datetime.utcnow().isoformat()
assert res.json['dat_share'] == True
assert len(responses.calls) == 1
assert responses.calls[0].request.url == 'http://dat:3000/share'
@responses.activate
def test_dat_sync_check_in_sync(self):
responses.add(responses.GET, 'http://dat:3000/numDats', status=200,
json={'num': 1})
DatShare.dat_share.dat_sync()
assert len(responses.calls) == 1
assert responses.calls[0].request.url == 'http://dat:3000/numDats'
@responses.activate
def test_dat_sync_check_sync_needed(self):
responses.add(responses.GET, 'http://dat:3000/numDats', status=200,
json={'num': 0})
responses.add(responses.POST, 'http://dat:3000/sync', status=200,
json={'results': [self.dat_info]})
DatShare.dat_share.dat_sync()
assert len(responses.calls) == 2
assert responses.calls[0].request.url == 'http://dat:3000/numDats'
assert responses.calls[1].request.url == 'http://dat:3000/sync'
body = json.loads(responses.calls[1].request.body.decode('utf-8'))
assert body == {'dirs': [self.coll_store_dir]}
@responses.activate
def test_dat_unshare_on_coll_delete(self):
responses.add(responses.POST, 'http://dat:3000/unshare', status=200,
json={'success': True})
res = self.testapp.delete('/api/v1/collection/default-collection?user=test')
assert res.json == {'deleted_id': 'default-collection'}
assert len(responses.calls) == 1
assert responses.calls[0].request.url == 'http://dat:3000/unshare'
body = json.loads(responses.calls[0].request.body.decode('utf-8'))
assert body == {'collDir': self.coll_store_dir}
assert self.redis.hlen(DatShare.DAT_COLLS) == 0
def test_ensure_coll_delete(self):
def wait_for_del():
assert not os.path.isdir(self.coll_store_dir)
self.sleep_try(0.1, 2.0, wait_for_del)
| {
"content_hash": "ea9df80a6568f7b1d37b062c10b91a82",
"timestamp": "",
"source": "github",
"line_count": 313,
"max_line_length": 140,
"avg_line_length": 38.11182108626198,
"alnum_prop": 0.6106966216782631,
"repo_name": "webrecorder/webrecorder",
"id": "63927bf2f3f98c80a1b3a1e8d7078ea1582b75b0",
"size": "11929",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webrecorder/test/test_dat_api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "186476"
},
{
"name": "Dockerfile",
"bytes": "1370"
},
{
"name": "HTML",
"bytes": "258583"
},
{
"name": "JavaScript",
"bytes": "869251"
},
{
"name": "Python",
"bytes": "892243"
},
{
"name": "Shell",
"bytes": "2598"
}
],
"symlink_target": ""
} |
from bs4 import BeautifulSoup
import urllib
import urllib2
import cookielib
import re
import json
import os
import sys
from meiju import Meiju
from meiju import Season
from meiju import Episode
import logger
logger = logger.get_logger(__name__)
def dumper(obj):
try:
return obj.to_json()
except:
return obj.__dict__
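# Sketch: json.dump falls back to dumper() for objects it cannot serialise
# natively, e.g. json.dumps(Meiju(), default=dumper) emits the instance's
# to_json() result when one is defined, otherwise its __dict__.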
class Collector:
def __init__(self):
self.init_url = "http://www.lm-us.com/"
self.all_meiju_file_name = "All_Meiju.js"
self.meiju_inst_list = []
self.meiju_ename_inst_dict = {}
def save_meiju_update_info(self, mix_name, meiju_url):
logger.info("save_meiju_update_info() function entry. mix_name: %s, meiju_url: %s" % (mix_name, meiju_url))
# Get the updated meiju instance
        new_meiju_inst = self.save_meiju_info(mix_name, meiju_url)
# Old meiju instance
old_meiju_inst = self.meiju_ename_inst_dict[new_meiju_inst.english_name]
for (season_id, season_inst) in new_meiju_inst.season_id_inst_dict.items():
if not season_id in old_meiju_inst.season_id_inst_dict:
sys.stdout.write("Found new Season %d in Meiju %s\n" % (season_id, new_meiju_inst.english_name))
else:
for (episode_id, episode_inst) in season_inst.episode_id_inst_dict.items():
if not episode_id in old_meiju_inst.season_id_inst_dict[season_id].episode_id_inst_dict:
sys.stdout.write("Found new Episode %d in Season %d in Meiju %s\n" %
(episode_id, season_id, new_meiju_inst.english_name))
# Save the new meiju instance
self.meiju_ename_inst_dict[new_meiju_inst.english_name] = new_meiju_inst
def save_all_meiju_update_info(self):
logger.info("save_all_meiju_update_info() function entry")
if not self.is_meiju_info_file_exist():
sys.stdout.write("We detect that you haven't downloaded any Meiju info before, they will be downloaded now.\n")
self.save_all_meiju_info()
self.write_all_meiju_info_to_file()
sys.stdout.write("All Meiju info has been downloaded successfully.\n")
else:
self.read_all_meiju_info_from_file()
request = urllib2.Request(self.init_url)
request.add_header("User-Agent",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.116 Safari/537.36")
response = urllib2.urlopen(request)
resp_soup = BeautifulSoup(response.read(), 'html.parser')
a_tag_list = resp_soup.find_all(href=re.compile("http://www.lm-us.com/\?p="))
for a_tag in a_tag_list:
mix_name = unicode(a_tag.string)
meiju_url = a_tag['href']
english_name = unicode(mix_name[:mix_name.rfind(" ")]).lstrip().rstrip()
# To see if there is new Meiju
if not english_name in self.meiju_ename_inst_dict:
sys.stdout.write("Found new Meiju: %s\n" % english_name)
meiju_inst = self.save_meiju_info(mix_name, meiju_url)
self.meiju_inst_list.append(meiju_inst)
self.meiju_ename_inst_dict[meiju_inst.english_name] = meiju_inst
# If Meiju already exists, check the update seasons and episodes
else:
self.save_meiju_update_info(mix_name, meiju_url)
def save_all_meiju_info(self):
logger.info("save_all_meiju_info() function entry")
request = urllib2.Request(self.init_url)
request.add_header("User-Agent",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.116 Safari/537.36")
response = urllib2.urlopen(request)
resp_soup = BeautifulSoup(response.read(), 'html.parser')
a_tag_list = resp_soup.find_all(href=re.compile("http://www.lm-us.com/\?p="))
for a_tag in a_tag_list:
meiju_inst = self.save_meiju_info(unicode(a_tag.string), a_tag['href'])
self.meiju_inst_list.append(meiju_inst)
self.meiju_ename_inst_dict[meiju_inst.english_name] = meiju_inst
def save_meiju_info(self, mix_name, meiju_url):
logger.info("save_meiju_info() function entry. mix_name: %s, meiju_url: %s" % (mix_name, meiju_url))
meiju_inst = Meiju()
meiju_inst.mix_name = unicode(mix_name).lstrip().rstrip()
meiju_inst.url = meiju_url
# Get English Name and chinese name
meiju_inst.english_name = unicode(meiju_inst.mix_name[:meiju_inst.mix_name.rfind(" ")]).lstrip().rstrip()
meiju_inst.chinese_name = unicode(meiju_inst.mix_name[meiju_inst.mix_name.rfind(" "):]).lstrip().rstrip()
logger.debug("Meiju english name: %s, chinese name: %s" % (meiju_inst.english_name, meiju_inst.chinese_name))
# Get season and episode info
request = urllib2.Request(meiju_inst.url)
response = urllib2.urlopen(request)
resp_soup = BeautifulSoup(response.read(), "html.parser")
a_tag_list = resp_soup.find_all("a", title=re.compile("s[0-9]+ep[0-9]+", re.IGNORECASE))
for a_tag in a_tag_list:
regex = re.compile("s([0-9]+)ep([0-9]+)", re.IGNORECASE)
match = regex.search(a_tag["title"])
s_ep_pair = match.groups()
            if not str(int(s_ep_pair[0])) in meiju_inst.season_id_inst_dict:
                season_inst = Season()
                season_inst.season_id = int(s_ep_pair[0])
                meiju_inst.season_id_inst_dict[str(int(s_ep_pair[0]))] = season_inst
                meiju_inst.season_count += 1
                logger.debug("New season instance: %s" % str(season_inst))
            # Always look the season up again, otherwise season_inst may be a
            # stale instance left over from a previous loop iteration.
            season_inst = meiju_inst.season_id_inst_dict[str(int(s_ep_pair[0]))]
            if not str(int(s_ep_pair[1])) in season_inst.episode_id_inst_dict:
episode_inst = Episode()
episode_inst.season_id = season_inst.season_id
episode_inst.episode_id = int(s_ep_pair[1])
episode_inst.url = a_tag["href"]
season_inst.episode_id_inst_dict[str(int(s_ep_pair[1]))] = episode_inst
season_inst.episode_count += 1
logger.debug("New episode instance: %s" % str(episode_inst))
return meiju_inst
def write_all_meiju_info_to_file(self):
file_hdlr = open(self.all_meiju_file_name, 'w')
json.dump(self.meiju_inst_list, file_hdlr, default=dumper, indent=4)
file_hdlr.close()
def read_all_meiju_info_from_file(self):
file_hdlr = open(self.all_meiju_file_name, 'r')
meiju_dict_list = json.load(file_hdlr)
self.meiju_inst_list = []
self.meiju_ename_inst_dict = {}
for meiju_dict_inst in meiju_dict_list:
meiju_inst = Meiju()
meiju_inst.mix_name = meiju_dict_inst["mix_name"]
meiju_inst.english_name = meiju_dict_inst["english_name"]
meiju_inst.chinese_name = meiju_dict_inst["chinese_name"]
meiju_inst.season_count = meiju_dict_inst["season_count"]
meiju_inst.url = meiju_dict_inst["url"]
meiju_inst.season_id_inst_dict = {}
for (season_id, season_dict_inst) in meiju_dict_inst["season_id_inst_dict"].items():
season_inst = Season()
season_inst.season_id = season_dict_inst["season_id"]
season_inst.episode_count = season_dict_inst["episode_count"]
season_inst.episode_id_inst_dict = {}
for (episode_id, episode_dict_inst) in season_dict_inst["episode_id_inst_dict"].items():
episode_inst = Episode()
episode_inst.season_id = episode_dict_inst["season_id"]
episode_inst.episode_id = episode_dict_inst["episode_id"]
episode_inst.url = episode_dict_inst["url"]
                    # use string keys, matching the convention in save_meiju_info()
                    season_inst.episode_id_inst_dict[str(episode_inst.episode_id)] = episode_inst
                meiju_inst.season_id_inst_dict[str(season_inst.season_id)] = season_inst
self.meiju_inst_list.append(meiju_inst)
self.meiju_ename_inst_dict[meiju_inst.english_name] = meiju_inst
file_hdlr.close()
def is_meiju_info_file_exist(self):
if os.path.exists(self.all_meiju_file_name):
return True
return False
if __name__ == "__main__":
collector = Collector()
collector.save_all_meiju_info()
collector.write_all_meiju_info_to_file() | {
"content_hash": "c6e9eb1994143a2acd957309c4696526",
"timestamp": "",
"source": "github",
"line_count": 181,
"max_line_length": 143,
"avg_line_length": 47.651933701657455,
"alnum_prop": 0.5935072463768116,
"repo_name": "leodengyx/LeoMeijuDownloader",
"id": "d748724eff779d94c19579ae6bb7cafaeb85aa91",
"size": "8625",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "LeoMeijuDownloader/collector.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "3820517"
},
{
"name": "Python",
"bytes": "30078"
}
],
"symlink_target": ""
} |
from direct.actor.Actor import Actor
from direct.task.Task import Task
from panda3d.core import *
from panda3d.direct import *
from otp.otpbase.OTPBase import OTPBase
from toontown.toonbase import ToontownGlobals
from toontown.toonbase import TTLocalizer
from toontown.parties.DistributedPartyActivity import DistributedPartyActivity
from toontown.parties.PartyGlobals import ActivityIds, ActivityTypes, JUKEBOX_TIMEOUT
from toontown.parties.PartyGlobals import getMusicRepeatTimes, MUSIC_PATH, sanitizePhase
from toontown.parties.JukeboxGui import JukeboxGui
class DistributedPartyJukeboxActivityBase(DistributedPartyActivity):
notify = directNotify.newCategory('DistributedPartyJukeboxActivityBase')
def __init__(self, cr, actId, phaseToMusicData):
DistributedPartyActivity.__init__(self, cr, actId, ActivityTypes.Continuous)
self.phaseToMusicData = phaseToMusicData
self.jukebox = None
self.gui = None
self.tunes = []
self.music = None
self.currentSongData = None
self.localQueuedSongInfo = None
self.localQueuedSongListItem = None
return
def generateInit(self):
self.gui = JukeboxGui(self.phaseToMusicData)
def load(self):
DistributedPartyActivity.load(self)
self.jukebox = Actor('phase_13/models/parties/jukebox_model', {'dance': 'phase_13/models/parties/jukebox_dance'})
self.jukebox.reparentTo(self.root)
self.jukebox.loop('dance', fromFrame=0, toFrame=48)
self.jukebox.setBlend(frameBlend = True)
self.collNode = CollisionNode(self.getCollisionName())
self.collNode.setCollideMask(ToontownGlobals.CameraBitmask | ToontownGlobals.WallBitmask)
collTube = CollisionTube(0, 0, 0, 0.0, 0.0, 4.25, 2.25)
collTube.setTangible(1)
self.collNode.addSolid(collTube)
self.collNodePath = self.jukebox.attachNewNode(self.collNode)
self.sign.setPos(-5.0, 0, 0)
self.activate()
def unload(self):
DistributedPartyActivity.unload(self)
self.gui.unload()
if self.music is not None:
self.music.stop()
self.jukebox.stop()
self.jukebox.delete()
self.jukebox = None
self.ignoreAll()
return
def getCollisionName(self):
return self.uniqueName('jukeboxCollision')
def activate(self):
self.accept('enter' + self.getCollisionName(), self.__handleEnterCollision)
def __handleEnterCollision(self, collisionEntry):
if base.cr.playGame.getPlace().fsm.getCurrentState().getName() == 'walk':
base.cr.playGame.getPlace().fsm.request('activity')
self.d_toonJoinRequest()
def joinRequestDenied(self, reason):
DistributedPartyActivity.joinRequestDenied(self, reason)
self.showMessage(TTLocalizer.PartyJukeboxOccupied)
def handleToonJoined(self, toonId):
toon = base.cr.doId2do.get(toonId)
if toon:
self.jukebox.lookAt(base.cr.doId2do[toonId])
self.jukebox.setHpr(self.jukebox.getH() + 180.0, 0, 0)
if toonId == base.localAvatar.doId:
self.__localUseJukebox()
def handleToonExited(self, toonId):
if toonId == base.localAvatar.doId and self.gui.isLoaded():
self.__deactivateGui()
def handleToonDisabled(self, toonId):
self.notify.warning('handleToonDisabled no implementation yet')
def __localUseJukebox(self):
base.localAvatar.disableAvatarControls()
base.localAvatar.stopPosHprBroadcast()
self.__activateGui()
self.accept(JukeboxGui.CLOSE_EVENT, self.__handleGuiClose)
taskMgr.doMethodLater(0.5, self.__localToonWillExitTask, self.uniqueName('toonWillExitJukeboxOnTimeout'), extraArgs=None)
self.accept(JukeboxGui.ADD_SONG_CLICK_EVENT, self.__handleQueueSong)
if self.isUserHost():
self.accept(JukeboxGui.MOVE_TO_TOP_CLICK_EVENT, self.__handleMoveSongToTop)
return
def __localToonWillExitTask(self, task):
self.localToonExiting()
return Task.done
def __activateGui(self):
self.gui.enable(timer=JUKEBOX_TIMEOUT)
self.gui.disableAddSongButton()
if self.currentSongData is not None:
self.gui.setSongCurrentlyPlaying(self.currentSongData[0], self.currentSongData[1])
self.d_queuedSongsRequest()
return
def __deactivateGui(self):
self.ignore(JukeboxGui.CLOSE_EVENT)
self.ignore(JukeboxGui.SONG_SELECT_EVENT)
self.ignore(JukeboxGui.MOVE_TO_TOP_CLICK_EVENT)
base.cr.playGame.getPlace().setState('walk')
base.localAvatar.startPosHprBroadcast()
base.localAvatar.enableAvatarControls()
self.gui.unload()
self.__localClearQueuedSong()
def isUserHost(self):
return self.party.partyInfo.hostId == base.localAvatar.doId
def d_queuedSongsRequest(self):
self.sendUpdate('queuedSongsRequest')
def queuedSongsResponse(self, songInfoList, index):
if self.gui.isLoaded():
for i in range(len(songInfoList)):
songInfo = songInfoList[i]
self.__addSongToQueue(songInfo, isLocalQueue=index >= 0 and i == index)
self.gui.enableAddSongButton()
def __handleGuiClose(self):
self.__deactivateGui()
self.d_toonExitDemand()
def __handleQueueSong(self, name, values):
self.d_setNextSong(values[0], values[1])
def d_setNextSong(self, phase, filename):
self.sendUpdate('setNextSong', [(phase, filename)])
def setSongInQueue(self, songInfo):
if self.gui.isLoaded():
phase = sanitizePhase(songInfo[0])
filename = songInfo[1]
data = self.getMusicData(phase, filename)
if data:
if self.localQueuedSongListItem is not None:
self.localQueuedSongListItem['text'] = data[0]
else:
self.__addSongToQueue(songInfo, isLocalQueue=True)
return
def __addSongToQueue(self, songInfo, isLocalQueue = False):
isHost = isLocalQueue and self.isUserHost()
data = self.getMusicData(sanitizePhase(songInfo[0]), songInfo[1])
if data:
listItem = self.gui.addSongToQueue(data[0], highlight=isLocalQueue, moveToTopButton=isHost)
if isLocalQueue:
self.localQueuedSongInfo = songInfo
self.localQueuedSongListItem = listItem
def __localClearQueuedSong(self):
self.localQueuedSongInfo = None
self.localQueuedSongListItem = None
return
def __play(self, phase, filename, length):
self.music = base.loadMusic((MUSIC_PATH + '%s') % (phase, filename))
if self.music:
if self.__checkPartyValidity() and hasattr(base.cr.playGame.getPlace().loader, 'music') and base.cr.playGame.getPlace().loader.music:
base.cr.playGame.getPlace().loader.music.stop()
self.music.setTime(0.0)
self.music.setLoopCount(getMusicRepeatTimes(length))
self.music.play()
self.currentSongData = (phase, filename)
def __stop(self):
self.currentSongData = None
if self.music:
self.music.stop()
if self.gui.isLoaded():
self.gui.clearSongCurrentlyPlaying()
return
def setSongPlaying(self, songInfo, toonId):
phase = sanitizePhase(songInfo[0])
filename = songInfo[1]
if not filename:
self.__stop()
return
data = self.getMusicData(phase, filename)
if data:
self.__play(phase, filename, data[1])
self.setSignNote(data[0])
if self.gui.isLoaded():
item = self.gui.popSongFromQueue()
self.gui.setSongCurrentlyPlaying(phase, filename)
if item == self.localQueuedSongListItem:
self.__localClearQueuedSong()
if toonId == localAvatar.doId:
localAvatar.setSystemMessage(0, TTLocalizer.PartyJukeboxNowPlaying)
def __handleMoveSongToTop(self):
if self.isUserHost() and self.localQueuedSongListItem is not None:
self.d_moveHostSongToTopRequest()
return
def d_moveHostSongToTopRequest(self):
self.notify.debug('d_moveHostSongToTopRequest')
self.sendUpdate('moveHostSongToTopRequest')
def moveHostSongToTop(self):
self.notify.debug('moveHostSongToTop')
if self.gui.isLoaded():
self.gui.pushQueuedItemToTop(self.localQueuedSongListItem)
def getMusicData(self, phase, filename):
data = []
phase = sanitizePhase(phase)
phase = self.phaseToMusicData.get(phase)
if phase:
data = phase.get(filename, [])
return data
def __checkPartyValidity(self):
if hasattr(base.cr.playGame, 'getPlace') and base.cr.playGame.getPlace() and hasattr(base.cr.playGame.getPlace(), 'loader') and base.cr.playGame.getPlace().loader:
return True
else:
return False
| {
"content_hash": "82d22f4325488445b0acfcc5601912e6",
"timestamp": "",
"source": "github",
"line_count": 232,
"max_line_length": 171,
"avg_line_length": 39.366379310344826,
"alnum_prop": 0.6548779152523815,
"repo_name": "silly-wacky-3-town-toon/SOURCE-COD",
"id": "e143162207a9e2e7895b6b859d95f81596539803",
"size": "9133",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "toontown/parties/DistributedPartyJukeboxActivityBase.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "10249"
},
{
"name": "C",
"bytes": "1752256"
},
{
"name": "C#",
"bytes": "8440"
},
{
"name": "C++",
"bytes": "5485400"
},
{
"name": "Emacs Lisp",
"bytes": "210083"
},
{
"name": "F#",
"bytes": "2310"
},
{
"name": "Forth",
"bytes": "506"
},
{
"name": "GLSL",
"bytes": "1040"
},
{
"name": "JavaScript",
"bytes": "7003"
},
{
"name": "Makefile",
"bytes": "895"
},
{
"name": "Mask",
"bytes": "969"
},
{
"name": "NSIS",
"bytes": "1009050"
},
{
"name": "Objective-C",
"bytes": "21821"
},
{
"name": "PLSQL",
"bytes": "10200"
},
{
"name": "Pascal",
"bytes": "4986"
},
{
"name": "Perl6",
"bytes": "30612"
},
{
"name": "Puppet",
"bytes": "259"
},
{
"name": "Python",
"bytes": "33566014"
},
{
"name": "Shell",
"bytes": "14642"
},
{
"name": "Tcl",
"bytes": "2084458"
}
],
"symlink_target": ""
} |
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Youtube'
db.create_table(u'filer_embeddable_youtube', (
(u'embeddable_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['filer_embeddable.Embeddable'], unique=True, primary_key=True)),
))
db.send_create_signal(u'filer_embeddable', ['Youtube'])
def backwards(self, orm):
# Deleting model 'Youtube'
db.delete_table(u'filer_embeddable_youtube')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'filer.file': {
'Meta': {'object_name': 'File'},
'_file_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'all_files'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'has_all_mandatory_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_files'", 'null': 'True', 'to': u"orm['auth.User']"}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_filer.file_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'sha1': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40', 'blank': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.folder': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('parent', 'name'),)", 'object_name': 'Folder'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filer_owned_folders'", 'null': 'True', 'to': u"orm['auth.User']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
u'filer_embeddable.embeddable': {
'Meta': {'object_name': 'Embeddable', '_ormbases': ['filer.File']},
u'file_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['filer.File']", 'unique': 'True', 'primary_key': 'True'})
},
u'filer_embeddable.youtube': {
'Meta': {'object_name': 'Youtube', '_ormbases': [u'filer_embeddable.Embeddable']},
u'embeddable_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['filer_embeddable.Embeddable']", 'unique': 'True', 'primary_key': 'True'})
}
}
complete_apps = ['filer_embeddable'] | {
"content_hash": "0e554b255cb71a78943dc625cee5e0c8",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 195,
"avg_line_length": 77.17,
"alnum_prop": 0.5621355449008683,
"repo_name": "sthzg/djf-embeddable",
"id": "c2dd47314e7df83c60b1a7255e61f863c6243405",
"size": "7741",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "filer_embeddable/migrations/0002_auto__add_youtube.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "30793"
}
],
"symlink_target": ""
} |
import os
import re
import subprocess
from django.utils.text import slugify
from django.conf import settings
from django.core.cache import cache
# These options are passed to Fabric as: fab task --abort-on-prompts=True --user=root ...
fabric_special_options = ['no_agent', 'forward-agent', 'config', 'disable-known-hosts', 'keepalive',
'password', 'parallel', 'no-pty', 'reject-unknown-hosts', 'skip-bad-hosts', 'timeout',
'command-timeout', 'user', 'warn-only', 'pool-size', 'key_filename']
def check_output(command, shell=False):
executable = None
if shell:
executable = getattr(settings, 'SHELL', '/bin/sh')
return subprocess.check_output(command, shell=shell, executable=executable)
def check_output_with_ssh_key(command):
if getattr(settings, 'GIT_SSH_KEY_LOCATION', None):
return check_output('ssh-agent bash -c "ssh-add {};{}"'.format(settings.GIT_SSH_KEY_LOCATION, command),
shell=True)
else:
        return check_output(command, shell=True)
def update_project_git(project, cache_dir, repo_dir):
if not os.path.exists(repo_dir):
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
check_output_with_ssh_key('git clone {} {}'.format(project.repo_url, repo_dir))
else:
check_output_with_ssh_key(
'cd {0};git stash;git pull'.format(repo_dir)
)
def setup_virtual_env_if_needed(repo_dir):
env_dir = os.path.join(repo_dir, 'env')
if not os.path.exists(env_dir):
os.makedirs(env_dir)
check_output("virtualenv {}".format(env_dir), shell=True)
def update_project_requirements(project, repo_dir, activate_loc):
pip_installs = ' '.join(project.fabfile_requirements.splitlines())
check_output_with_ssh_key('source {} && cd {};pip install {}'.format(activate_loc, repo_dir, pip_installs))
def get_fabfile_path(project):
if project.use_repo_fabfile:
cache_key = 'project_{}_fabfile_path'.format(project.pk)
cached_result = cache.get(cache_key)
if cached_result:
return cached_result
cache_dir = os.path.join(settings.PUBLIC_DIR, '.repo_caches')
repo_dir = os.path.join(cache_dir, slugify(project.name))
update_project_git(project, cache_dir, repo_dir)
setup_virtual_env_if_needed(repo_dir)
activate_loc = os.path.join(repo_dir, 'env', 'bin', 'activate')
update_project_requirements(project, repo_dir, activate_loc)
result = os.path.join(repo_dir, 'fabfile.py'), activate_loc
cache.set(cache_key, result, settings.FABRIC_TASK_CACHE_TIMEOUT)
return result
else:
return settings.FABFILE_PATH, None
def parse_task_details(name, task_output):
lines = task_output.splitlines()
docstring = '\n'.join([line.strip() for line in lines[2:-2]]).strip()
arguments_line = lines[-2].strip()
if docstring == 'No docstring provided':
docstring = None
arguments_line = arguments_line[11:].strip()
arguments = []
if arguments_line:
for arg in arguments_line.split(', '):
m = re.match(r"^([^=]+)(=(\'?)([^']*)\3)?$", arg)
if m.group(2): # found argument with default value
if m.group(3) == "'": # default value is a string
arguments.append((m.group(1), m.group(4)))
else: # found an argument with some other default value.
                    # all fab arguments are translated to strings, so a non-string default doesn't make sense; ignore it.
arguments.append(m.group(1))
else:
arguments.append(m.group(1))
return name, docstring, arguments
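# Illustration (hypothetical task): ``fab --display=deploy`` prints roughly
#
#     Displaying detailed information for task 'deploy':
#
#         Deploy the app to the given host.
#
#         Arguments: branch='master', restart
#     <trailing blank line>
#
# Assuming that trailing blank line (lines[-2] must be the Arguments line),
# parse_task_details('deploy', output) returns
# ('deploy', "Deploy the app to the given host.", [('branch', 'master'), 'restart']):
# string defaults are kept as (name, default) pairs, other defaults are dropped.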
def get_fabric_tasks(project):
"""
Generate a list of fabric tasks that are available
"""
cache_key = 'project_{}_fabfile_tasks'.format(project.pk)
cached_result = cache.get(cache_key)
if cached_result:
return cached_result
try:
fabfile_path, activate_loc = get_fabfile_path(project)
if activate_loc:
output = check_output(
'source {};fab --list --list-format=short --fabfile={}'.format(activate_loc, fabfile_path),
shell=True
)
else:
output = check_output(
'fab --list --list-format=short --fabfile={}'.format(fabfile_path),
shell=True
)
lines = output.splitlines()
tasks = []
for line in lines:
name = line.strip()
if activate_loc:
o = check_output(
'source {};fab --display={} --fabfile={}'.format(activate_loc, name, fabfile_path),
shell=True
)
else:
o = check_output(
['fab', '--display={}'.format(name), '--fabfile={}'.format(fabfile_path)]
)
tasks.append(parse_task_details(name, o))
cache.set(cache_key, tasks, settings.FABRIC_TASK_CACHE_TIMEOUT)
    except Exception:
tasks = []
return tasks
def get_task_details(project, task_name):
if task_name:
for details in get_fabric_tasks(project):
if details[0] == task_name:
return details
return None
def clean_key_string(key):
key = key.replace('"', '\\"') # escape double quotes
key = key.replace(',', '\,') # escape commas, that would be adding a new value
key = key.replace('=', '\=') # escape = because that would be setting a new key
return key
def clean_value_string(value):
value = value.replace('"', '\\"') # escape double quotes
value = value.replace(',', '\,') # escape commas, that would be adding a new value
value = value.replace('=', '\=') # escape = because that would be setting a new key
return value
def clean_arg_key_string(key):
# this has to be a valid python function argument, so we can get pretty strict here
key = re.sub(r'[^0-9a-zA-Z_]', '', key) # remove anything that isn't a number, letter, or underscore
return key
def get_key_value_string(key, config):
key = clean_key_string(key)
if config.data_type == config.BOOLEAN_TYPE:
return key + ('' if config.get_value() else '=')
elif config.data_type == config.NUMBER_TYPE:
return key + '=' + str(config.get_value())
else:
return '{}={}'.format(key, clean_value_string(config.get_value()))
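# Illustration (hypothetical config objects): a boolean config named
# ``parallel`` renders as ``parallel`` when True and ``parallel=`` when
# False; a number config ``pool_size`` with value 5 renders as
# ``pool_size=5``; a string config ``banner`` with value ``hello,world``
# renders as ``banner=hello\,world``, so fab's --set parser does not split
# the value at the comma.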
def update_config_values_from_session(configs, session):
configs = configs.copy()
for key, config in configs.iteritems():
if session.get('configuration_values', {}).get(key, None) is not None:
config.set_value(session['configuration_values'][key])
del session['configuration_values'][key]
arg_values = session.get('configuration_values', {})
return configs, arg_values
def build_command(deployment, session, abort_on_prompts=True):
# Get the dictionary of configurations for this stage
configs = deployment.stage.get_configurations()
configs, arg_values = update_config_values_from_session(configs, session)
task_args = [key for key, config in configs.iteritems() if config.task_argument and config.task_name == deployment.task.name]
task_configs = [key for key, config in configs.iteritems() if not config.task_argument]
command_to_config = {x.replace('-', '_'): x for x in fabric_special_options}
# Take the special env variables out
normal_task_configs = list(set(task_configs) - set(command_to_config.keys()))
# Special ones get set a different way
special_task_configs = list(set(task_configs) & set(command_to_config.keys()))
command = 'fab ' + deployment.task.name
task_details = get_task_details(deployment.stage.project, deployment.task.name)
task_args = list(set(task_args + [x[0] if isinstance(x, tuple) else x for x in task_details[2]]))
if task_args:
key_value_strings = []
for key in task_args:
if key in configs:
value = unicode(configs[key].get_value())
elif key in arg_values:
value = unicode(arg_values[key])
else:
continue
cleaned_key = clean_arg_key_string(key)
value = clean_value_string(value)
key_value_strings.append('{}="{}"'.format(cleaned_key, value))
if key_value_strings:
command += ':'
command += ','.join(key_value_strings)
if normal_task_configs:
command += ' --set '
command += '"' + ','.join(get_key_value_string(key, configs[key]) for key in normal_task_configs) + '"'
if special_task_configs:
for key in special_task_configs:
if key == 'key_filename':
command += ' -i ' + configs[key].get_value()
else:
command += ' --' + get_key_value_string(command_to_config[key], configs[key])
if abort_on_prompts:
command += ' --abort-on-prompts'
hosts = deployment.stage.hosts.values_list('name', flat=True)
if hosts:
command += ' --hosts=' + ','.join(hosts)
fabfile_path, active_loc = get_fabfile_path(deployment.stage.project)
command += ' --fabfile={}'.format(fabfile_path)
if active_loc:
return 'source {};'.format(active_loc) + ' ' + command
else:
return command
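# A sketch of the kind of command string this function assembles (all names
# and values below are hypothetical):
#
#     fab deploy:branch="master" --set "colorize_errors" --abort-on-prompts --hosts=web1,web2 --fabfile=/path/to/fabfile.py
#
# prefixed with ``source <env>/bin/activate;`` when the project carries its
# own fabfile in a repo-backed virtualenv.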
| {
"content_hash": "17809ce5800394ac2c57139cc6bc09e6",
"timestamp": "",
"source": "github",
"line_count": 277,
"max_line_length": 129,
"avg_line_length": 34.35379061371841,
"alnum_prop": 0.5994115174443043,
"repo_name": "paperreduction/fabric-bolt",
"id": "c4f60625ee6806b6750f6c5ed9b88cd229e3b79b",
"size": "9516",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fabric_bolt/projects/util.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3076"
},
{
"name": "HTML",
"bytes": "64187"
},
{
"name": "JavaScript",
"bytes": "102452"
},
{
"name": "Python",
"bytes": "205321"
}
],
"symlink_target": ""
} |
"""
Opens a window that displays an image.
Usage:
from viewer import GlobalViewer
GlobalViewer.update(image)
"""
import numpy as np
import os
import contextlib
class TkViewer:
def __init__(self, title="HyperGAN", viewer_size=1, enabled=True):
self.screen = None
self.title = title
self.viewer_size = viewer_size
self.enabled = enabled
self.enable_menu = True
def update(self, gan, image):
if not self.enabled: return
original_image = image
if len(np.shape(image)) == 2:
s = np.shape(image)
image = np.reshape(image, [s[0], s[1], 1])
image = np.tile(image, [1,1,3])
image = np.transpose(image, [1, 0,2])
self.size = [int(image.shape[0] * self.viewer_size), int(image.shape[1] * self.viewer_size)]
def _refresh_sample(*args):
gan.cli.sample(False)
if not self.screen:
with contextlib.redirect_stdout(None):
import pygame
import tkinter as tk
import tkinter.ttk
class ResizableFrame(tk.Frame):
def __init__(self,parent,tkviewer=None,**kwargs):
tk.Frame.__init__(self,parent,**kwargs)
self.parent = parent
self.bind("<Configure>", self.on_resize)
self.height = kwargs['height']
self.width = kwargs['width']
self.tkviewer = tkviewer
self.aspect_ratio = float(self.width)/float(self.height)
def on_resize(self,event):
self._update(event.width, event.height)
self.enforce_aspect_ratio(event)
def _update(self, width, height):
self.width = width
self.height = height
self.config(width=self.width, height=self.height)
self.tkviewer.size = [self.width, self.height]
self.tkviewer.screen = self.tkviewer.pg.display.set_mode(self.tkviewer.size,self.tkviewer.pg.RESIZABLE)
surface = self.tkviewer.pg.Surface([image.shape[0],image.shape[1]])
self.tkviewer.pg.surfarray.blit_array(surface, image[:,:,:3])
self.tkviewer.screen.blit(self.tkviewer.pg.transform.scale(surface,self.tkviewer.size),(0,0))
self.tkviewer.pg.display.flip()
def resize(self, size):
self.parent.geometry(str(size[0])+"x"+str(size[1]))
self._update(size[0], size[1])
def enforce_aspect_ratio(self, event):
desired_width = event.width
desired_height = int(event.width / self.aspect_ratio)
if desired_height > event.height:
desired_height = event.height
desired_width = int(event.height * self.aspect_ratio)
self.config(width=desired_width, height=desired_height)
self.tkviewer.size = [desired_width, desired_height]
self.tkviewer.screen = self.tkviewer.pg.display.set_mode(self.tkviewer.size,self.tkviewer.pg.RESIZABLE)
self.pg = pygame
self.tk = tk
root = tk.Tk(className=self.title)
embed = ResizableFrame(root, width=self.size[0], height=self.size[1], tkviewer=self)
self.resizable_frame = embed
root.rowconfigure(0,weight=1)
root.rowconfigure(1,weight=1)
root.columnconfigure(0,weight=1)
root.columnconfigure(1,weight=1)
embed.pack(expand=tk.YES, fill=tk.BOTH)
def _save_model(*args):
gan.save(gan.save_file)
def _exit(*args):
gan.exit()
def _select_sampler(gan, name, value, submenu):
def _select_sampler_proc():
gan.cli.sampler = gan.sampler_for(name)(gan)
gan.cli.sample(False)
_refresh_sampler_submenu(submenu)
return _select_sampler_proc
def _refresh_sampler_submenu(submenu):
if submenu.count > 0:
submenu.delete(0, submenu.count)
for (k, v) in gan.get_registered_samplers().items():
showall = tk.BooleanVar()
showall.set(gan.selected_sampler == k)
if v.compatible_with(gan):
state = tk.NORMAL
else:
state = tk.DISABLED
print("Selected", gan.selected_sampler, k, gan.selected_sampler == k)
submenu.add_checkbutton(label=k, onvalue=True, offvalue=False, variable=showall, command=_select_sampler(gan, k, showall, submenu), state=state)
num_samplers = len(gan.get_registered_samplers())
submenu.count = num_samplers
def _create_status_bar(root):
statusbar = tk.Frame(root, height=24)
statusbar.pack(side=tk.BOTTOM, fill=tk.X)
label_training = tk.Label(statusbar, text="Training", font=12)
label_training.grid(row=0,column=0)
sep = tkinter.ttk.Separator(statusbar, orient=tk.VERTICAL).grid(column=1, row=0, sticky='ns')
label = tk.Label(statusbar, text="Starting", font=12)
label.grid(row=0, column=2)
def __update_step():
if hasattr(gan, 'step_count'):
label['text']=("Step " + str(gan.step_count))
root.after(1000, __update_step)
__update_step()
return statusbar
menubar = tk.Menu(root)
filemenu = tk.Menu(menubar, tearoff=0)
filemenu.add_command(label="Save", command=_save_model, underline=0, accelerator="Ctrl+s")
filemenu.add_separator()
samplemenu = tk.Menu(menubar, tearoff=0)
samplemenu.add_command(label="Refresh", command=_refresh_sample, underline=0, accelerator="Ctrl+r")
filemenu.add_command(label="Save and Exit", command=_exit, underline=10, accelerator="Ctrl+q")
menubar.add_cascade(label="File", menu=filemenu, underline=0)
menubar.add_cascade(label="Sample", menu=samplemenu, underline=0)
samplermenu = tk.Menu(samplemenu)
samplemenu.add_cascade(label="Sampler", menu=samplermenu, underline=0)
samplermenu.count = 0
_refresh_sampler_submenu(samplermenu)
root.bind_all("<Control-q>", _exit)
root.bind_all("<Control-r>", _refresh_sample)
root.bind_all("<Control-s>", _save_model)
if self.enable_menu:
root.config(menu=menubar)
self.status_bar = _create_status_bar(root)
else:
self.status_bar = None
# Tell pygame's SDL window which window ID to use
os.environ['SDL_WINDOWID'] = str(self.resizable_frame.winfo_id())
# Show the window so it's assigned an ID.
root.update()
self.root = root
# Usual pygame initialization
if self.viewer_size <= 0:
self.viewer_size = 0.1
self.aspect_w = image.shape[1] / image.shape[0]
self.aspect_h = image.shape[0] / image.shape[1]
self.temp_size = self.size
self.screen = self.pg.display.set_mode(self.size,self.pg.RESIZABLE)
self.pg.display.set_caption(self.title)
root.title(self.title)
root.wm_title(self.title)
self.resizable_frame.winfo_toplevel().title(self.title)
window_size = [self.resizable_frame.parent.winfo_width(), self.resizable_frame.parent.winfo_height()]
if self.status_bar is not None:
window_size[1] -= self.status_bar.winfo_height()
if self.size[0] != window_size[0] or self.size[1] != window_size[1]:
print("SIZE:", self.size, "WINDOW_SIZE", window_size)
self.resizable_frame.resize(self.size)
#self.resizable_frame.after(1, _refresh_sample)
padw = 0
padh = 0
if original_image.shape[0] > original_image.shape[1]:
padh = (original_image.shape[0] - original_image.shape[1])//2
if original_image.shape[1] > original_image.shape[0]:
padw = (original_image.shape[1] - original_image.shape[0])//2
pad_image = np.pad(original_image, [(padw, padw), (padh,padh), (0,0)], 'constant')
w = pad_image.shape[0]
h = pad_image.shape[1]
#xdata = b'P6 ' + str(w).encode() + b' ' + str(h).encode() + b' 255 ' + pad_image.tobytes()
#tk_image = self.tk.PhotoImage(data=xdata, format="PPM", width=w, height=h)
#self.root.tk.call('wm', 'iconphoto', self.root._w, tk_image.subsample(max(1, w//256), max(1, h//256)))
surface = self.pg.Surface([image.shape[0],image.shape[1]])
self.pg.surfarray.blit_array(surface, image[:,:,:3])
self.screen.blit(self.pg.transform.scale(surface,self.size),(0,0))
self.pg.display.flip()
def tick(self):
"""
Called repeatedly regardless of gan state.
"""
if hasattr(self, 'root'):
self.root.update()
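# A minimal, self-contained sketch of driving the viewer (assumes pygame and
# tkinter are installed; ``FakeGAN`` below is a stand-in for a real HyperGAN
# object, which normally supplies ``cli``, ``save`` and the sampler registry
# used by the menus).
if __name__ == "__main__":
    class FakeCLI:
        def sample(self, again):
            pass

    class FakeGAN:
        cli = FakeCLI()
        selected_sampler = None

        def get_registered_samplers(self):
            return {}

    viewer = TkViewer(title="demo", viewer_size=1, enabled=True)
    frame = (np.random.rand(128, 128, 3) * 255).astype(np.uint8)
    viewer.update(FakeGAN(), frame)
    for _ in range(300):
        viewer.tick()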
| {
"content_hash": "0a2cc907e06c6374a233e691bd27386a",
"timestamp": "",
"source": "github",
"line_count": 227,
"max_line_length": 164,
"avg_line_length": 41.72687224669603,
"alnum_prop": 0.5482474662162162,
"repo_name": "255BITS/HyperGAN",
"id": "5c9c9b69a77fc40fd2e41b431969a91758803185",
"size": "9472",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hypergan/tk_viewer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "204346"
},
{
"name": "Shell",
"bytes": "117"
}
],
"symlink_target": ""
} |
from arcgis.gis import GIS
import os
#Credentials
gis = GIS("https://www.arcgis.com",'Username', "Password")
#ZipGDB file path
#create list of paths
dir = r"C:\Users\SROSS-C\Documents\ArcGIS\Projects\UpdateSericeTest\Shps"
dirlist = []
for file in os.listdir(dir):
if file.endswith(".zip"):
dirlist.append(os.path.join(dir,file))
# Add each zipped shapefile to AGOL and publish it as a hosted feature layer
for i, zip_path in enumerate(dirlist, start=1):
    item = gis.content.add({}, zip_path)
    layer = item.publish()
    print(i)
print("Finished")
| {
"content_hash": "dcfdd501cfe175d523bd3e3aa10607cf",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 73,
"avg_line_length": 21.90909090909091,
"alnum_prop": 0.7040110650069157,
"repo_name": "TxDOT/python",
"id": "ade4b3f7e07eed5e40dcc8bc76a3ec355534cc07",
"size": "723",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ArcGIS Python API/Bulk Upload Shapefiles.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "87763"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from . import _base
class Filter(_base.Filter):
def __init__(self, source, encoding):
_base.Filter.__init__(self, source)
self.encoding = encoding
__init__.func_annotations = {}
def __iter__(self):
state = u"pre_head"
meta_found = (self.encoding is None)
pending = []
for token in _base.Filter.__iter__(self):
type = token[u"type"]
if type == u"StartTag":
if token[u"name"].lower() == u"head":
state = u"in_head"
elif type == u"EmptyTag":
if token[u"name"].lower() == u"meta":
# replace charset with actual encoding
has_http_equiv_content_type = False
for (namespace,name),value in token[u"data"].items():
                        if namespace is not None:
continue
elif name.lower() == u'charset':
token[u"data"][(namespace,name)] = self.encoding
meta_found = True
break
elif name == u'http-equiv' and value.lower() == u'content-type':
has_http_equiv_content_type = True
else:
if has_http_equiv_content_type and (None, u"content") in token[u"data"]:
token[u"data"][(None, u"content")] = u'text/html; charset=%s' % self.encoding
meta_found = True
elif token[u"name"].lower() == u"head" and not meta_found:
# insert meta into empty head
yield {u"type": u"StartTag", u"name": u"head",
u"data": token[u"data"]}
yield {u"type": u"EmptyTag", u"name": u"meta",
u"data": {(None, u"charset"): self.encoding}}
yield {u"type": u"EndTag", u"name": u"head"}
meta_found = True
continue
elif type == u"EndTag":
if token[u"name"].lower() == u"head" and pending:
# insert meta into head (if necessary) and flush pending queue
yield pending.pop(0)
if not meta_found:
yield {u"type": u"EmptyTag", u"name": u"meta",
u"data": {(None, u"charset"): self.encoding}}
while pending:
yield pending.pop(0)
meta_found = True
state = u"post_head"
if state == u"in_head":
pending.append(token)
else:
yield token
__iter__.func_annotations = {}
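# A small usage sketch (token dicts hand-written for brevity; in real use the
# source is a html5lib treewalker token stream). Injecting a charset into an
# empty <head>:
#
#     tokens = [{u"type": u"StartTag", u"name": u"head", u"data": {}},
#               {u"type": u"EndTag", u"name": u"head", u"data": {}}]
#     list(Filter(tokens, u"utf-8"))
#     # -> StartTag head, EmptyTag meta (charset=utf-8), EndTag head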
| {
"content_hash": "30eacf7d0ba5e2c9b73f87847560cfad",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 104,
"avg_line_length": 43.215384615384615,
"alnum_prop": 0.4421502313990744,
"repo_name": "rcarmo/soup-strainer",
"id": "106ca33a34cd0aa4b767ca061351bb99a5dd54c8",
"size": "2809",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "html5lib/filters/inject_meta_charset.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1289960"
},
{
"name": "Shell",
"bytes": "36"
}
],
"symlink_target": ""
} |
'''ordering services keys'''
from callchain.services.queue import KService
class KRandom(KService):
'''random key'''
def choice():
'''random choice of/from incoming things'''
def sample(n):
'''
random sampling drawn from `n` incoming things
@param n: number of incoming things
'''
def shuffle():
'''randomly order incoming things'''
class KOrder(KService):
'''ordering key'''
def group():
'''group incoming things using call for key function'''
def grouper(n, fill=None):
'''
split incoming things into sequences of length `n`, using `fill` thing
to pad incomplete sequences
@param n: number of things
@param fill: fill thing (default: None)
'''
def reverse():
'''reverse order of incoming things'''
def sort():
'''order incoming things using call for key function'''
| {
"content_hash": "1165bf1458b1c17300cc994e3f2f720e",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 78,
"avg_line_length": 21.454545454545453,
"alnum_prop": 0.590042372881356,
"repo_name": "lcrees/callchain",
"id": "2e40794f3a2cfc5a258e476b7ab440eea0b56cb2",
"size": "1028",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "callchain/services/order.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "158421"
}
],
"symlink_target": ""
} |
"""
==================
`match_wrapper.io`
==================
I/O for MATCH-related files.
Classes
-------
======================= ==================================================
`CMDParam` Class for storing CMD information.
`CalcsfhParam` Class for storing calcsfh parameter file
information.
`CalcsfhParamFormatter` Formatter to assist in writing calcsfh parameter
files.
======================= ==================================================
Functions
---------
====================== ====================================================
`open_cmdfile` Load data from a calcsfh ".cmd" file.
`open_zcbfile` Load a zcombine/zcmerge output file into an
`astropy.table.Table`.
`write_zcbfile` Write a zcombine/zcmerge output file from an
`astropy.table.Table`.
`write_zcombine_param` Write a zcombine parameter file for a given set of
age bin edges.
====================== ====================================================
"""
from astropy.table import Table
import numpy as np
class CMDParam(object):
"""Class for storing CMD information.
Most parameter and attribute names are taken from the MATCH 2.5 README.
The name from the MATCH 2.5 README is given in quotes if the parameter
or attribute name is different.
Each parameter initializes a corresponding attribute and defaults to
None unless stated otherwise.
Parameters
----------
Vname : str, optional
Iname : str, optional
Vstep : float, optional
VImin : float, optional
VImax : float, optional
VIstep : float, optional
fake_sm : int, optional
exclude_gates : list, optional
Default is an empty list.
combine_gates : list, optional
Default is an empty list.
Attributes
----------
Vname : str
"V". `CalcsfhParam.filters` must have a filter dictionary with the
same name.
Iname : str
"I". `CalcsfhParam.filters` must have a filter dictionary with the
same name.
Vstep : float
VImin : float
"V-Imin"
VImax : float
"V-Imax"
VIstep : float
"V-Istep"
    fake_sm : int
Nexclude_gates
exclude_gates : list
List of exclude gates. Each gate is a set of coordinates of the
form ``[(x1, y1), (x2, y2), (x3, y3), (x4, y4)]``.
Ncombine_gates
combine_gates : list
List of combine gates (see `exclude_gates`).
"""
def __init__(self, **kwargs):
self.Vname = kwargs.get('Vname')
self.Iname = kwargs.get('Iname')
self.Vstep = kwargs.get('Vstep')
self.VImin = kwargs.get('VImin')
self.VImax = kwargs.get('VImax')
self.VIstep = kwargs.get('VIstep')
self.fake_sm = kwargs.get('fake_sm')
gates = kwargs.get('exclude_gates')
try:
gates[0][0][0]
except TypeError:
try:
gates[0][0]
except TypeError:
gates = []
else:
gates = [gates]
self.exclude_gates = gates
gates = kwargs.get('combine_gates')
try:
gates[0][0][0]
except TypeError:
try:
gates[0][0]
except TypeError:
gates = []
else:
gates = [gates]
self.combine_gates = gates
@property
def Nexclude_gates(self):
"""Length of the `exclude_gates` list."""
return len(self.exclude_gates)
@property
def Ncombine_gates(self):
"""Length of the `combine_gates` list."""
return len(self.combine_gates)
class CalcsfhParam(object):
"""Class for storing calcsfh parameter file information.
Most parameter and attribute names are taken from the MATCH 2.5 README.
The name from the MATCH 2.5 README is given in quotes if the parameter
or attribute name is different.
Each parameter initializes a corresponding attribute and defaults to
None unless stated otherwise.
Parameters
----------
IMF : float or str, optional
Valid string values are 'Kroupa', 'Salpeter', which are equivalent
to -1.0 and 1.35, respectively.
dmodmin : float, optional
dmodmax : float, optional
Avmin : float, optional
Avmax : float, optional
step : float, optional
logZmin : float, optional
See `mode`.
logZmax : float, optional
See `mode`.
logZstep : float, optional
See `mode`.
logZimin : float, optional
See `mode`.
logZimax : float, optional
See `mode`.
logZfmin : float, optional
See `mode`.
logZfmax : float, optional
See `mode`.
logZspread : float, optional
See `mode`.
BF : float, optional
Bad0 : float, optional
Bad1 : float, optional
CMDs : CMDParam or list, optional
A `CMDParam` instance, or a list of one or more `CMDParam`
instances. Default is an empty list.
filters : list, optional
A list of dictionaries for the filters referenced by the `CMDParam`
instances in `CMDs`. Default is an empty list.
agebins : list, optional
Default is an empty list.
linage : bool, optional
Default is False.
logZcentral : list, optional
Default is a list of length `Ntbins` where each element is None.
SFR : list, optional
Default is a list of length `Ntbins` where each element is None.
bgCMDs : list, optional
A list of dictionaries, one for each background/foreground CMD.
Default is an empty list.
mode : {None, 'zinc', 'setz'}, optional
Attributes
----------
IMF : float
dmodmin : float
"m-Mmin".
dmodmax : float
"m-Mmax".
Avmin : float
Avmax : float
step : float
Both "d(m-M)" and "dAv".
logZmin : float
See `mode`.
logZmax : float
See `mode`.
logZstep : float
"dlogZ"; see `mode`.
logZimin : float
Minimum initial (oldest age) metallicity for 'zinc' mode; see `mode`.
logZimax : float
Maximum initial (oldest age) metallicity for 'zinc' mode; see `mode`.
logZfmin : float
Minimum final (youngest age) metallicity for 'zinc' mode; see `mode`.
logZfmax : float
Maximum final (youngest age) metallicity for 'zinc' mode; see `mode`.
logZspread : float
Metallicity spread for 'setz' mode; see `mode`.
BF : float
Bad0 : float
Bad1 : float
CMDs : list
List of `CMDParam` instances, one per CMD.
Ncmds
filters : list
List of filters referenced by the `CMDParam` instances in `CMDs`.
Each filter is a dictionary containing the keys,
- 'name': "V" or "I"
- 'min': Bright magnitude limit; "Vmin" or "Imin"
- 'max': Faint magnitude limit; "Vmax" or "Imax"
A filter corresponds to V or I depending on whether it is the bluer
or redder filter in a given CMD.
agebins : list
List of edges of the age bins (either in yr or as log10(t/yr),
depending on `linage`). The ith and i+1th elements correspond to
"To" (youngest edge) and "Tf" (oldest edge) of the ith age bin.
Ntbins
To
Tf
linage : bool
True if the `agebins` values are in linear years, False if the
values are ``log10(age/yr)``.
logZcentral : list
Central metallicity values of the age bins for 'setz' mode; see
`mode`. The ith value corresponds to the ith age bin, and a value
must be specified for each age bin. Length is `Ntbins`.
SFR : list
Force the SFR in particular age bins to the values in this list.
The ith element corresponds to the ith age bin, and, if not None,
appears in the parameter file as the last number in the line for
the age bin. SFR is only fixed where the value in the list is not
None. Length is `Ntbins`.
.. note:: This feature is not documented in the MATCH 2.5 README!
bgCMDs : list
List of background/foreground CMD dictionaries. Each dictionary
contains the keys,
- 'nbins': Size of the smoothing kernel.
- 'scale': Set the scaling for the background CMD. If negative, a
variable number of stars is used.
- 'filename': Optional; path to the file containing the background
CMD data. If None (default), a smoothed version of the observed
CMD is used.
- 'cmdfile': Optional; True if 'filename' is formatted like a
".cmd" file output by calcsfh. Default is False, i.e.,
'filename' has two-columns like an input photometry file for
calcsfh.
mode : str
The following attributes are either required or ignored depending
on the mode:
None (default):
- required: `logZmin`, `logZmax`, `logZstep`
- ignored: `logZimin`, `logZimax`, `logZfmin`, `logZfmax`,
`logZspread`, `logZcentral`
'zinc':
- required: `logZmin`, `logZmax`, `logZstep`, `logZimin`,
`logZimax`, `logZfmin`, `logZfmax`
- ignored: `logZspread`, `logZcentral`
'setz':
- required: `logZspread`, `logZcentral`
- ignored: `logZmin`, `logZmax`, `logZstep`, `logZimin`,
`logZimax`, `logZfmin`, `logZfmax`
Methods
-------
read
write
"""
def __init__(self, **kwargs):
IMF = kwargs.get('IMF')
if IMF == 'Kroupa':
IMF = -1.0
elif IMF == 'Salpeter':
IMF = 1.35
self.IMF = IMF
self.dmodmin = kwargs.get('dmodmin')
self.dmodmax = kwargs.get('dmodmax')
self.Avmin = kwargs.get('Avmin')
self.Avmax = kwargs.get('Avmax')
self.step = kwargs.get('step')
self.logZmin = kwargs.get('logZmin') # default or zinc; not setz
self.logZmax = kwargs.get('logZmax') # default or zinc; not setz
self.logZstep = kwargs.get('logZstep') # default or zinc; not setz
self.logZimin = kwargs.get('logZimin') # zinc only
self.logZimax = kwargs.get('logZimax') # zinc only
self.logZfmin = kwargs.get('logZfmin') # zinc only
self.logZfmax = kwargs.get('logZfmax') # zinc only
self.logZspread = kwargs.get('logZspread') # setz only
self.BF = kwargs.get('BF')
self.Bad0 = kwargs.get('Bad0')
self.Bad1 = kwargs.get('Bad1')
CMDs = kwargs.get('CMDs', [])
self.CMDs = [CMDs] if isinstance(CMDs, CMDParam) else CMDs
self.filters = kwargs.get('filters', [])
self.agebins = kwargs.get('agebins', [])
self.linage = kwargs.get('linage', False)
self.logZcentral = kwargs.get('logZcentral', [None]*self.Ntbins)
self.SFR = kwargs.get('SFR', [None]*self.Ntbins)
bgCMDs = kwargs.get('bgCMDs', [])
try:
bgCMDs['nbins'] # just a test for a dict or a list of dict
except TypeError:
pass
else:
bgCMDs = [bgCMDs]
self.bgCMDs = bgCMDs
self.mode = kwargs.get('mode')
@property
def Ncmds(self):
"""Length of the `CMDs` list."""
return len(self.CMDs)
@property
def To(self):
"""Young/most recent edges of the age bins."""
return self.agebins[:-1]
@property
def Tf(self):
"""Oldest edges of the age bins."""
return self.agebins[1:]
@property
def Ntbins(self):
"""One less than the length of the `agebins` list (length of `To`
and `Tf`).
"""
l = len(self.agebins)-1
return l if l>0 else 0
def read(self, filename):
"""Create a `CalcsfhParam` instance from a calcsfh parameter file.
Parameters
----------
filename : str
Absolute path to the input parameter file.
Returns
-------
CalcsfhParam
"""
with open(filename, 'r') as f:
lines = f.readlines()
items = []
n = 0
# IMF, dmod, Av parameters
keys = ['IMF', 'dmodmin', 'dmodmax', 'step', 'Avmin', 'Avmax']
vals = [float(val) for val in lines[n].split()[:-1]]
items += zip(keys, vals)
n += 1
# Metallicity parameters
vals = [float(val) for val in lines[n].split()]
if len(vals) == 1:
mode = 'setz'
keys = ['logZspread']
elif len(vals) == 7:
mode = 'zinc'
keys = ['logZmin', 'logZmax', 'logZstep', 'logZimin',
'logZimax', 'logZfmin', 'logZfmax']
else:
mode = None
keys = ['logZmin', 'logZmax', 'logZstep']
items += zip(keys, vals)
n += 1
# Binary fraction and upper/lower bad fractions
keys = ['BF', 'Bad0', 'Bad1']
vals = [float(val) for val in lines[n].split()]
items += zip(keys, vals)
n += 1
# CMDs (part 1 of 2)
Ncmds = int(lines[n])
n += 1
CMDs, filternames = [], []
for i in range(Ncmds):
vals = lines[n].split()
Vname, Iname = vals[5].split(',')
filternames += [Vname, Iname]
CMDitems = [
('Vstep', float(vals[0])),
('VIstep', float(vals[1])),
('fake_sm', int(vals[2])),
('VImin', float(vals[3])),
('VImax', float(vals[4])),
('Vname', Vname),
('Iname', Iname)
]
CMDs.append(CMDitems)
n += 1
# Filter list
filters = []
while lines[n].split()[-1] in filternames:
vals = lines[n].split()
fdict = {'min': float(vals[0]), 'max': float(vals[1]), 'name': vals[2]}
filters.append(fdict)
n += 1
items.append(('filters', filters))
# Gate list
for i in range(Ncmds):
vals = lines[n].split()
Nexc = int(vals[0])
if Nexc > 0:
points = vals[1:Nexc*8+1]
gates = []
for j in range(Nexc):
xy = [float(pnt) for pnt in points[j*8:(j+1)*8]]
gates.append(zip(xy[0::2], xy[1::2]))
CMDs[i].append(('exclude_gates', gates))
            vals = vals[Nexc*8+1:]
Ncom = int(vals[0])
if Ncom > 0:
points = vals[1:Ncom*8+1]
gates = []
                for j in range(Ncom):
xy = [float(pnt) for pnt in points[j*8:(j+1)*8]]
gates.append(zip(xy[0::2], xy[1::2]))
CMDs[i].append(('combine_gates', gates))
n += 1
# CMDs (part 2 of 2)
CMDs = [CMDParam(**dict(CMDitems)) for CMDitems in CMDs]
items.append(('CMDs', CMDs))
# Age bins
Ntbins = int(lines[n])
n += 1
bins = [tuple(line.split()) for line in lines[n:n+Ntbins]]
age1, age2, logZcentral, SFR = [], [], [], []
sfrcol = 3 if mode == 'setz' else 2
for vals in bins:
vals = [float(val) for val in vals]
age1.append(vals[0])
age2.append(vals[1])
if mode == 'setz':
logZcentral.append(vals[2])
else:
logZcentral.append(None)
if len(vals) == sfrcol+1:
SFR.append(vals[sfrcol])
else:
SFR.append(None)
        agebins = age1 + [age2[-1]]
        # Linear ages are written as negative values (see `write`); detect
        # them and restore positive values so a read/write round-trip matches.
        linage = agebins[0] < 0
        if linage:
            agebins = [abs(edge) for edge in agebins]
keys = ['agebins', 'linage', 'logZcentral', 'SFR']
vals = [agebins, linage, logZcentral, SFR]
items += zip(keys, vals)
n += Ntbins
# Background/foreground CMDs
Nbgcmds = len(lines) - n
if Nbgcmds > 0:
bgCMDs = []
for i in range(Nbgcmds):
vals = lines[n].split()
bgdict = {}
nbins = int(vals[1])
if nbins < 0:
bgdict['cmdfile'] = True
nbins *= -1
bgdict['nbins'] = nbins
bgdict['scale'] = float(vals[2]) if '.' in vals[2] else int(vals[2])
if len(vals) == 4:
bgdict['filename'] = vals[3]
bgCMDs.append(bgdict)
items.append(('bgCMDs', bgCMDs))
items.append(('mode', mode))
return CalcsfhParam(**dict(items))
def write(self, filename, formatter=None):
"""Write to a calcsfh parameter file.
Parameters
----------
filename : str
Absolute path to the output parameter file.
formatter : CalcsfhParamFormatter or function, optional
Any function that takes a parameter name (`key`) and a value
(`val`) as the first and second arguments, and returns a string
representation of the value. `CalcsfhParamFormatter` is used by
default.
Returns
-------
None
"""
if formatter is None:
formatter = CalcsfhParamFormatter()
# IMF, dmod, Av parameters
pars = ['IMF', 'dmodmin', 'dmodmax', 'step', 'Avmin', 'Avmax', 'step']
line = [formatter(key, self.__getattribute__(key)) for key in pars]
line = ' '.join(line)
lines = [line]
# Metallicity parameters
if self.mode == 'zinc':
pars = ['logZmin', 'logZmax', 'logZstep',
'logZimin', 'logZimax', 'logZfmin', 'logZfmax']
elif self.mode == 'setz':
pars = ['logZspread']
else:
pars = ['logZmin', 'logZmax', 'logZstep']
line = [formatter(key, self.__getattribute__(key)) for key in pars]
line = ' '.join(line)
lines.append(line)
# Binary fraction and upper/lower bad fractions
pars = ['BF', 'Bad0', 'Bad1']
line = [formatter(key, self.__getattribute__(key)) for key in pars]
line = ' '.join(line)
lines.append(line)
# Number of CMDs
line = formatter('Ncmds', self.Ncmds)
lines.append(line)
# CMD list
pars = ['Vstep', 'VIstep', 'fake_sm', 'VImin', 'VImax', 'Vname', 'Iname']
for CMD in self.CMDs:
line = [formatter(key, CMD.__getattribute__(key)) for key in pars]
line = '{0:s} {1:s},{2:s}'.format(' '.join(line[:-2]), line[-2], line[-1])
lines.append(line)
# Filter list
pars = ['min', 'max', 'name']
for filt in self.filters:
line = [formatter(key, filt[key]) for key in pars]
line = ' '.join(line)
lines.append(line)
# Gate list
for CMD in self.CMDs:
Nexc = formatter('Nexclude_gates', CMD.Nexclude_gates)
exc = ' '.join([formatter('exclude_gates', x)
for gate in CMD.exclude_gates
for point in gate for x in point])
line1 = '{0:s} {1:s}'.format(Nexc, exc) if exc else Nexc
Ncom = formatter('Ncombine_gates', CMD.Ncombine_gates)
com = ' '.join([formatter('combine_gates', x)
for gate in CMD.combine_gates
for point in gate for x in point])
line2 = '{0:s} {1:s}'.format(Ncom, com) if com else Ncom
line = '{0:s} {1:s}'.format(line1, line2)
lines.append(line)
# Number of age bins
line = formatter('Ntbins', self.Ntbins)
lines.append(line)
# Age bins
linage = -1 if self.linage else 1
for i in range(self.Ntbins):
edge1 = formatter('To', linage*self.To[i])
edge2 = formatter('Tf', linage*self.Tf[i])
logZc = formatter('logZcentral', self.logZcentral[i]) if self.mode == 'setz' else ''
            SFR = formatter('SFR', self.SFR[i]) if self.SFR[i] is not None else ''
row = [edge1, edge2, logZc, SFR]
line = ' '.join(val for val in row if val) # non-empty strs only
lines.append(line)
# Background/foreground CMDs
for CMD in self.bgCMDs:
cmdfile = -1 if CMD.get('cmdfile') else 1
nbins = formatter('nbins', cmdfile*CMD['nbins'])
scale = formatter('scale', CMD['scale'])
fname = CMD.get('filename', '')
row = ['-1', nbins, scale, fname]
line = ' '.join(val for val in row if val) # non-empty strs only
# Space, or no space, between scale and filename?
lines.append(line)
with open(filename, 'w') as f:
f.writelines('{:s}\n'.format(line) for line in lines)
return None
class CalcsfhParamFormatter(object):
"""Formatter to assist in writing calcsfh parameter files.
The `CalcsfhParam.write` method formats each value in the output
parameter file using a formatter function that takes a key and a value
and returns a string. `CalcsfhParamFormatter` has a call method that
looks up a format string based on the key, and uses it to format the
value. Each key has a corresponding attribute in `CalcsfhParam`. The
format strings may be adjusted from their default values, shown in the
Parameters section below.
Parameters
----------
    IMF : '{:.2f}', optional
    dmodmin : '{:.2f}', optional
    dmodmax : '{:.2f}', optional
    Avmin : '{:.2f}', optional
    Avmax : '{:.2f}', optional
    step : '{:.2f}', optional
    logZmin : '{:.1f}', optional
    logZmax : '{:.1f}', optional
    logZstep : '{:.1f}', optional
    logZimin : '{:.1f}', optional
    logZimax : '{:.1f}', optional
    logZfmin : '{:.1f}', optional
    logZfmax : '{:.1f}', optional
    logZspread : '{:.1f}', optional
    BF : '{:.2f}', optional
    Bad0 : '{:.6f}', optional
    Bad1 : '{:.6f}', optional
    Ncmds : '{:d}', optional
    Vstep : '{:.2f}', optional
    VIstep : '{:.2f}', optional
    fake_sm : '{:d}', optional
    VImin : '{:.2f}', optional
    VImax : '{:.2f}', optional
    Vname : '{:s}', optional
    Iname : '{:s}', optional
    min : '{:.2f}', optional
    max : '{:.2f}', optional
    name : '{:s}', optional
    Nexclude_gates : '{:d}', optional
    exclude_gates : '{:.2f}', optional
    Ncombine_gates : '{:d}', optional
    combine_gates : '{:.2f}', optional
    Ntbins : '{:d}', optional
    To : '{:.2f}', optional
    Tf : '{:.2f}', optional
    logZcentral : '{:.1f}', optional
    SFR : '{:.3e}', optional
    nbins : '{:d}', optional
    scale : '{:d}', optional
    filename : '{:s}', optional
Attributes
----------
fmt_dict : dict
Dictionary of format strings for all values in a calcsfh parameter
file.
Methods
-------
__call__
"""
def __init__(self, **kwargs):
fmt_dict = {
'IMF': '{:.2f}',
'dmodmin': '{:.2f}',
'dmodmax': '{:.2f}',
'Avmin': '{:.2f}',
'Avmax': '{:.2f}',
'step': '{:.2f}',
'logZmin': '{:.1f}',
'logZmax': '{:.1f}',
'logZstep': '{:.1f}',
'logZimin': '{:.1f}',
'logZimax': '{:.1f}',
'logZfmin': '{:.1f}',
'logZfmax': '{:.1f}',
'logZspread': '{:.1f}',
'BF': '{:.2f}',
'Bad0': '{:.6f}',
'Bad1': '{:.6f}',
'Ncmds': '{:d}',
'Vstep': '{:.2f}',
'VIstep': '{:.2f}',
'fake_sm': '{:d}',
'VImin': '{:.2f}',
'VImax': '{:.2f}',
'Vname': '{:s}',
'Iname': '{:s}',
'min': '{:.2f}',
'max': '{:.2f}',
'name': '{:s}',
'Nexclude_gates': '{:d}',
'exclude_gates': '{:.2f}',
'Ncombine_gates': '{:d}',
'combine_gates': '{:.2f}',
'Ntbins': '{:d}',
'To': '{:.2f}',
'Tf': '{:.2f}',
'logZcentral': '{:.1f}',
'SFR': '{:.3e}',
'nbins': '{:d}',
'scale': '{:d}',
'filename': '{:s}',
}
        for key, val in kwargs.items():
            if key in fmt_dict:
                fmt_dict[key] = val
        self.fmt_dict = fmt_dict
def __call__(self, key, val):
"""Return a string for the given key and value.
Parameters
----------
key : str
The key corresponding to a format string in `fmt_dict`.
val :
The value to be formatted.
Returns
-------
str
The formatted value.
"""
return self.fmt_dict[key].format(val)
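# A sketch of assembling a parameter object and writing it out (filter names,
# magnitudes and age bins below are illustrative, not a recommended setup):
if __name__ == '__main__':
    cmd = CMDParam(Vname='WFC475W', Iname='WFC814W', Vstep=0.10,
                   VImin=-0.5, VImax=3.0, VIstep=0.05, fake_sm=5)
    param = CalcsfhParam(
        IMF='Kroupa', dmodmin=24.30, dmodmax=24.50,
        Avmin=0.0, Avmax=0.5, step=0.05,
        logZmin=-2.3, logZmax=0.1, logZstep=0.1,
        BF=0.35, Bad0=1e-6, Bad1=1e-6,
        CMDs=cmd,
        filters=[{'name': 'WFC475W', 'min': 16.0, 'max': 27.0},
                 {'name': 'WFC814W', 'min': 16.0, 'max': 26.5}],
        agebins=[8.0, 8.5, 9.0, 9.5, 10.1])
    param.write('example.param')  # uses CalcsfhParamFormatter by default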
def open_cmdfile(filename):
"""Load data from a calcsfh ".cmd" file.
Parameters
----------
filename : str
Path to a calcsfh ".cmd" file.
Returns
-------
tuple
The returned tuple contains the following:
- Edges of the CMD magnitude bins (1d array)
- Edges of the CMD color bins (1d array)
- Hess diagram of the observed CMD (2d array)
- Modeled Hess diagram (2d array)
- Residual Hess diagram; obs - mod (2d array)
- Residual significance Hess diagram (2d array)
Notes
-----
The residual significance is based on::
(Nobs - Nmodel) / sigma
except that it uses the correct Poisson-based formulation::
sqrt(2*(Nmodel - Nobs + Nobs*ln(Nobs/Nmodel)))
and is multiplied by -1 if Nmodel > Nobs to match the sense of the
first equation. In the case where Nobs=0, it is::
- sqrt(2 * Nmodel)
"""
with open(filename, 'r') as f:
f.readline() # Skip
# Number of bins
line = f.readline().split()
nmag, ncol = int(line[0]), int(line[1])
f.readline() # Skip
f.readline() # Skip
# CMD data
row_list = [row.split() for row in f]
col_list = zip(*row_list)
magbins = np.array(col_list[0], 'float')[::nmag] # Bin centers
dmag = magbins[1] - magbins[0]
mag1, mag2 = magbins[0] - dmag/2.0, magbins[-1] + dmag/2.0
        magbins = np.linspace(mag1, mag2, int(round((mag2-mag1)/dmag))+1)  # Bin edges
colbins = np.array(col_list[1], 'float')[:ncol] # Bin centers
dcol = colbins[1]-colbins[0]
col1, col2 = colbins[0] - dcol/2.0, colbins[-1] + dcol/2.0
        colbins = np.linspace(col1, col2, int(round((col2-col1)/dcol))+1)  # Bin edges
obs_arr = np.array(col_list[2], 'float').reshape((nmag, ncol))
mod_arr = np.array(col_list[3], 'float').reshape((nmag, ncol))
res_arr = np.array(col_list[4], 'float').reshape((nmag, ncol))
sig_arr = np.array(col_list[5], 'float').reshape((nmag, ncol))
return magbins, colbins, obs_arr, mod_arr, res_arr, sig_arr
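# The residual-significance formula from the Notes above, written out with
# numpy as a reference sketch (calcsfh computes this internally; assumes the
# model counts are strictly positive):
def _poisson_significance(nobs, nmod):
    """Signed Poisson significance of observed vs. modeled bin counts."""
    nobs = np.asarray(nobs, 'float')
    nmod = np.asarray(nmod, 'float')
    safe_obs = np.where(nobs > 0, nobs, 1.0)  # placeholder to avoid log(0)
    dev = 2.0 * (nmod - nobs + nobs * np.log(safe_obs / nmod))
    sig = np.where(nobs > 0,
                   np.sqrt(np.maximum(dev, 0.0)),  # clip rounding noise
                   np.sqrt(2.0 * nmod))
    return np.where(nmod > nobs, -sig, sig)  # negative where model exceeds data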
def open_zcbfile(filename):
"""Load a zcombine/zcmerge output file into an `astropy.table.Table`.
Parameters
----------
filename : str
Path to a zcombine or zcmerge file.
Returns
-------
astropy.table.Table
See Notes for the columns.
Notes
-----
The columns in the output table are,
========== ======= ======================================================
columns units description
========== ======= ======================================================
log(age_i) Log age/yr of the young (most recent) edge of each
bin
log(age_f) Log age/yr of the old edge of each bin.
dmod Distance modulus
SFR Msun/yr Star formation rate
SFR_eu Msun/yr Upper error for SFR
SFR_el Msun/yr Lower error for SFR
[M/H] Metallicity, where the solar value is [M/H] = 0 [1]_
[M/H]_eu Upper error for [M/H]
[M/H]_el Lower error for [M/H]
d[M/H] Metallicity spread
d[M/H]_eu Upper error for d[M/H]
d[M/H]_el Lower error for d[M/H]
CSF Cumulative mass formed as a fraction of total mass
CSF_eu Upper error for CSF
CSF_el Lower error for CSF
========== ======= ======================================================
.. [1] The MATCH README uses "logZ" for metallicity, but Z is typically
reserved for metal abundance, for which the solar value is 0.02.
"""
names = ['log(age_i)', 'log(age_f)', 'dmod',
'SFR', 'SFR_eu', 'SFR_el',
'[M/H]', '[M/H]_eu', '[M/H]_el',
'd[M/H]', 'd[M/H]_eu', 'd[M/H]_el',
'CSF', 'CSF_eu', 'CSF_el']
dtypes = ['float'] * 15
data = []
with open(filename, 'r') as f:
for row in f:
row = row.split()
if row: # Skip blank lines
try:
float(row[0])
except ValueError: # Header line
continue
data.append(row)
table = Table(zip(*data), names=names, dtype=dtypes)
return table
def write_zcbfile(table, filename):
"""Write a zcombine/zcmerge output file from an `astropy.table.Table`.
`Table` instances have a `write` method which could be used directly,
but this function uses the 'ascii.no_header' format and the appropriate
format strings so that the resulting file looks like it was produced by
zcmerge.
Parameters
----------
table : astropy.table.Table
        `Table` instance containing SFH data.
filename : str
Path to the output file.
Returns
-------
None
"""
formats = ['{:.2f}', '{:.2f}', '{:.2f}',
'{:.4e}', '{:.4e}', '{:.4e}',
'{:.3f}', '{:.3f}', '{:.3f}',
'{:.3f}', '{:.3f}', '{:.3f}',
'{:.4f}', '{:.4f}', '{:.4f}'
]
old_formats = [col.format for col in table.columns.values()]
for col, fmt in zip(table.columns.values(), formats):
col.format = fmt
table.write(filename, format='ascii.no_header')
# Restore original formats
for col, fmt in zip(table.columns.values(), old_formats):
col.format = fmt
return None
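# Round-trip sketch (file names are placeholders): read a zcombine file with
# open_zcbfile(), zero the SFR in bins older than 10 Gyr, and write it back
# in zcmerge-like formatting:
#
#     sfh = open_zcbfile('galaxy.zcb')
#     old = sfh['log(age_i)'] >= 10.0
#     sfh['SFR'][old] = 0.0
#     write_zcbfile(sfh, 'galaxy_trimmed.zcb')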
def write_zcombine_param(input_edges, output_edges, filename):
"""Write a zcombine parameter file for a given set of age bin edges.
Parameters
----------
input_edges : array
Values of the edges of the input age bins.
output_edges : array
Values of the edges of the desired output age bins. The ends of
this array are automatically trimmed so that the first and last
values match the first and last values of `input_edges`.
filename : str
Path to the output zcombine parameter file.
Returns
-------
None
"""
# Last value where output_edges is less than the first input edge,
# first value where output_edges is greater than the last input edge
test = np.where(output_edges < input_edges[0])[0]
if test.size:
i = test[-1]
else:
i = None
test = np.where(output_edges > input_edges[-1])[0]
if test.size:
j = test[0] + 1
else:
j = None
    if i is not None or j is not None:
# Match the first and last edges
output_edges = output_edges[i:j]
output_edges[0], output_edges[-1] = input_edges[0], input_edges[-1]
nbins = len(output_edges) - 1
    idx = np.digitize(input_edges, output_edges) - 1
with open(filename, 'w') as f:
f.write('{:d}\n'.format(nbins))
for i in idx:
f.write('{:d}\n'.format(i))
return None
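# Example (illustrative edges): with input_edges spanning 6.6 to 10.1 in
# 0.05 dex steps and output_edges = [6.6, 7.0, 8.0, 9.0, 10.1], the file
# written above contains the number of output bins (4) followed by one line
# per input edge holding the index of the output bin that edge falls in;
# that index list is the merge map zcombine reads.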
| {
"content_hash": "a0d2c258c828ea035d5604b6182cb5a1",
"timestamp": "",
"source": "github",
"line_count": 985,
"max_line_length": 96,
"avg_line_length": 32.169543147208124,
"alnum_prop": 0.5210338624672579,
"repo_name": "jesaerys/match-wrapper",
"id": "4187b0056140fe717cb76b2b775913d7cf77daca",
"size": "31687",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "match_wrapper/io.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "104657"
}
],
"symlink_target": ""
} |
import os
import shutil
from PyQt4 import QtCore, QtGui
from serieswatcher.const import SERIES_IMG, SERIES_BANNERS
from serieswatcher.models import Serie, Episode
from serieswatcher.widgets.listseries import ListSeries
from serieswatcher.widgets.selectfolder import SelectFolder
class EditSeries(QtGui.QDialog):
"""Class to create and manipulate the window 'Edit serie'."""
edited = QtCore.pyqtSignal()
def __init__(self, parent=None):
"""Create the window layout."""
super(EditSeries, self).__init__(parent)
self.setWindowTitle(u'Editer les séries')
        # Series selection panel
self.listSeries = ListSeries()
self.listSeries.itemSelectionChanged.connect(self.itemSelectionChanged)
        # Series edit panel
self.title = QtGui.QLineEdit()
self.title.textChanged.connect(self.listSeries.setTitle)
self.lang = QtGui.QLineEdit()
self.lang.textChanged.connect(self.listSeries.setLang)
self.path = SelectFolder()
self.path.label.textChanged.connect(self.listSeries.setPath)
groupSerie = QtGui.QGroupBox(u'Information de la série')
form = QtGui.QFormLayout()
form.addRow('Titre', self.title)
form.addRow('Langue', self.lang)
groupSerie.setLayout(form)
groupDownload = QtGui.QGroupBox(u'Répertoire')
layoutDl = QtGui.QVBoxLayout()
layoutDl.addWidget(self.path)
groupDownload.setLayout(layoutDl)
buttonBox = QtGui.QDialogButtonBox()
buttonBox.addButton('Sauvegarder', QtGui.QDialogButtonBox.AcceptRole)
buttonBox.accepted.connect(self.save)
buttonBox.addButton('Annuler', QtGui.QDialogButtonBox.RejectRole)
buttonBox.rejected.connect(self.close)
editSeriePannel = QtGui.QVBoxLayout()
editSeriePannel.addWidget(groupSerie)
editSeriePannel.addWidget(groupDownload)
# Make a layout and go...
layout = QtGui.QHBoxLayout()
layout.addWidget(self.listSeries)
layout.addLayout(editSeriePannel)
bigLayout = QtGui.QVBoxLayout()
bigLayout.addLayout(layout)
bigLayout.addWidget(buttonBox)
self.setLayout(bigLayout)
# Select the first serie
self.listSeries.listWidget.setCurrentRow(0)
def itemSelectionChanged(self, title, path, lang):
"""Trigged when the selection change. Update informations
in the form.
"""
self.title.setText(title)
self.lang.setText(lang)
self.path.setPath(path)
def save(self):
"""Save the modifications in the database."""
for pos, serie in enumerate(self.listSeries.getItems()):
uuid, title, path, tvdbID, lang = serie
sdb = list(Serie.select(Serie.q.uuid==uuid))[0]
sdb.title = title
sdb.path = path
sdb.tvdbID = tvdbID
sdb.lang = lang
sdb.pos = pos
for item in self.listSeries.getItemsDeleted():
sdb = list(Serie.select(Serie.q.uuid==item.uuid))[0]
Episode.deleteBy(serie=sdb)
Serie.delete(sdb.id)
img = '%s/%s' % (SERIES_IMG, item.uuid)
if os.path.isdir(img):
shutil.rmtree('%s/%s' % (SERIES_IMG, item.uuid))
banners = '%s/%s.jpg' % (SERIES_BANNERS, item.uuid)
if os.path.isfile(banners):
os.remove(banners)
self.edited.emit()
self.close()
| {
"content_hash": "c9d69fb301f873b226b845f8836d5f7a",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 79,
"avg_line_length": 36.61052631578947,
"alnum_prop": 0.6408855664174813,
"repo_name": "lightcode/SeriesWatcher",
"id": "3da1a6cb88242937f6c72c1906d2a3ed71e364ee",
"size": "3506",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "serieswatcher/serieswatcher/windows/editseries.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2594"
},
{
"name": "JavaScript",
"bytes": "1771"
},
{
"name": "Python",
"bytes": "1561181"
},
{
"name": "R",
"bytes": "2748"
}
],
"symlink_target": ""
} |
import pickle
import sys, os
from GPSReader import *
from VideoReader import *
from WGS84toENU import *
from GPSReprojection import *
from transformations import euler_matrix
from numpy import array, dot, zeros, around, divide, nonzero, float32, maximum
import numpy as np
from cv2 import imshow, waitKey, resize, warpPerspective, getPerspectiveTransform, transpose, Canny, namedWindow
import cv
import cv2
import time
from scipy.io import savemat
left_frames = []
frame_data = []
frameWaitTime = 100
if __name__ == '__main__':
video_filename = sys.argv[1]
existing_lanes = []
path, vfname = os.path.split(video_filename)
vidname = vfname.split('.')[0]
cam_num = int(vidname[-1])
gps_filename = path + '/' + vidname[0:-1] + '_gps.out'
out_name = sys.argv[2]
display = True
if '--quiet' in sys.argv:
display = False
cv2.namedWindow('video')
num_imgs_fwd = 200;
video_reader = VideoReader(video_filename, num_splits=1)
gps_reader = GPSReader(gps_filename)
gps_dat = gps_reader.getNumericData()
cam = pickle.load(open('cam_params.pickle', 'rb'))[cam_num - 1]
framenum = 0
lastTime = time.time()
video_reader.setFrame(framenum)
skip_frame = 5
seconds_back = 4
default_offset = 60
left_present = -1
right_present = -1
while True:
framenum = framenum + 1;
(success, I) = video_reader.getNextFrame()
if success == False:
print framenum, 'finished'
break
if framenum % skip_frame != 0:
continue
if framenum % 150 == 0 and False:
r = np.arange(9,len(frame_data)*skip_frame,skip_frame)
export_data = -1*np.ones((len(frame_data)*skip_frame+1,2))
export_data[r,:] = frame_data
left_data = -1*np.ones((len(left_frames)*skip_frame+1,2))
left_data[r, :] = left_frames
savemat(out_name, dict(left=left_data,right=export_data))
I = resize(I, (320, 240))
cv2.putText(I, 'L: ' + str(left_present) + ', R: ' + str(right_present), (100, 10), cv2.FONT_HERSHEY_PLAIN, 0.8, (255, 255, 255))
key = ''
if display:
imshow('video',I)
key = (waitKey(frameWaitTime) & 255)
frameWaitTime = max(int(frameWaitTime / 1.5), 5)
if key == ord('q'):
break;
switched_left = True
if key == ord('s') and left_present != 2:
left_present = 2
elif key == ord('d') and left_present != 1:
left_present = 1
elif key == ord('f') and left_present != 0:
left_present = 0
else:
switched_left = False
switched_right = True
if key == ord('l') and right_present != 2:
right_present = 2
elif key == ord('k') and right_present != 1:
right_present = 1
elif key == ord('j') and right_present != 0:
right_present = 0
else:
switched_right = False
if switched_left and switched_right:
for i in xrange(len(existing_lanes) - 1, max(-1, len(existing_lanes) - 1 - 10 * skip_frame * seconds_back), -1):
existing_lanes[i] = [-1, -1]
for i in xrange(10 * skip_frame - 1):
existing_lanes.append([-1, -1])
existing_lanes.append([left_present, right_present])
elif switched_left:
print 'left', framenum
for i in xrange(len(existing_lanes) - 1, max(-1, len(existing_lanes) - 1 - 10 * skip_frame * seconds_back), -1):
prev_lane = existing_lanes[i]
existing_lanes[i] = [-1, prev_lane[1]]
for i in xrange(10 * skip_frame - 1):
existing_lanes.append([-1, right_present])
existing_lanes.append([left_present, right_present])
elif switched_right:
print 'right', framenum
for i in xrange(len(existing_lanes) - 1, max(-1, len(existing_lanes) - 1 - 10 * skip_frame * seconds_back), -1):
prev_lane = existing_lanes[i]
existing_lanes[i] = [prev_lane[0], -1]
for i in xrange(10 * skip_frame - 1):
existing_lanes.append([left_present, -1])
existing_lanes.append([left_present, right_present])
else:
for i in xrange(10 * skip_frame):
existing_lanes.append([left_present, right_present])
currentTime = time.time()
if currentTime - lastTime > 10:
lastTime = currentTime
print framenum
existing_lanes = np.array(existing_lanes)
savemat(out_name, dict(left = existing_lanes[:,0], right=existing_lanes[:, 1]))
"""
r = np.arange(9,len(frame_data)*skip_frame,skip_frame)
export_data = -1*np.ones((len(frame_data)*skip_frame+1,2))
export_data[r,:] = frame_data
left_data = -1*np.ones((len(left_frames)*skip_frame+1,2))
left_data[r, :] = left_frames
savemat(out_name, dict(left=left_data,right=export_data))
"""
| {
"content_hash": "0b1a672d03082dc2c4357ee42a5cb166",
"timestamp": "",
"source": "github",
"line_count": 146,
"max_line_length": 133,
"avg_line_length": 32.178082191780824,
"alnum_prop": 0.6121753937845892,
"repo_name": "sameeptandon/sail-car-log",
"id": "e3e876048173d90bbbaaa1a79d8428e1a0607252",
"size": "4698",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "process/honda-label/LateralLabel.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "283486"
},
{
"name": "C++",
"bytes": "428270"
},
{
"name": "CMake",
"bytes": "75122"
},
{
"name": "CSS",
"bytes": "1110"
},
{
"name": "Cuda",
"bytes": "31989"
},
{
"name": "HTML",
"bytes": "2414"
},
{
"name": "JavaScript",
"bytes": "12886"
},
{
"name": "Matlab",
"bytes": "14794"
},
{
"name": "Protocol Buffer",
"bytes": "4913"
},
{
"name": "Python",
"bytes": "870911"
},
{
"name": "Shell",
"bytes": "2144"
}
],
"symlink_target": ""
} |
"""
Interface for monitoring clients
"""
import pghoard
class Metrics:
def __init__(self, **configs):
self.clients = self._init_clients(configs)
def _init_clients(self, configs):
clients = []
if not isinstance(configs, dict):
return clients
map_client = pghoard.mapping.clients
for k, config in configs.items():
if isinstance(config, dict) and k in map_client:
path, classname = map_client[k]
mod = __import__(path, fromlist=[classname])
klass = getattr(mod, classname)
clients.append(klass(config))
return clients
def gauge(self, metric, value, tags=None):
for client in self.clients:
client.gauge(metric, value, tags)
def increase(self, metric, inc_value=1, tags=None):
for client in self.clients:
client.increase(metric, inc_value, tags)
def timing(self, metric, value, tags=None):
for client in self.clients:
client.timing(metric, value, tags)
def unexpected_exception(self, ex, where, tags=None):
for client in self.clients:
client.unexpected_exception(ex, where, tags)
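# --- Usage sketch (illustrative, not part of pghoard) ------------------------
# The config-key -> client-class mapping lives in pghoard.mapping.clients, so
# the "statsd" key and the config shape below are assumptions for illustration;
# unknown keys are silently skipped by _init_clients.
if __name__ == "__main__":
    metrics = Metrics(statsd={"host": "127.0.0.1", "port": 8125})
    metrics.gauge("pghoard.basebackup_size", 1024, tags={"site": "default"})
    metrics.increase("pghoard.basebackups_started")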
| {
"content_hash": "e73ce432d6e3dfa933fefa6a43a10950",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 60,
"avg_line_length": 29.878048780487806,
"alnum_prop": 0.5975510204081632,
"repo_name": "saaros/pghoard",
"id": "735eeb8f6b4e3fa19fce402a1d832af37acad1af",
"size": "1225",
"binary": false,
"copies": "1",
"ref": "refs/heads/basebackup_schedule",
"path": "pghoard/metrics.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "1692"
},
{
"name": "Python",
"bytes": "344087"
}
],
"symlink_target": ""
} |
"""
Set or get the caller's attributes.
"""
from muddery.statements.statement_function import StatementFunction
class FuncSetAttr(StatementFunction):
"""
Set the caller's attribute.
Args:
args[0]: (string) attribute's key
args[1]: attribute value. Optional, default: None
Returns:
(boolean) whether the attribute was set
"""
key = "set_attr"
const = False
def func(self):
"""
Implement the function.
"""
if not self.args:
return False
attr_key = self.args[0]
value = None
if len(self.args) > 1:
value = self.args[1]
self.caller.statement_attr.set(attr_key, value)
return True
class FuncGetAttr(StatementFunction):
"""
Get the caller's attribute.
Args:
args[0]: (string) attribute's key
args[1]: default value if the caller does not have this attribute. Optional, default: None
Returns:
Attribute's value
"""
key = "get_attr"
const = True
def func(self):
"""
Implement the function.
"""
if not self.args:
return None
key = self.args[0]
default = None
if len(self.args) > 1:
default = self.args[1]
return self.caller.statement_attr.get(key, default)
class FuncRemoveAttr(StatementFunction):
"""
Remove the caller's attribute.
Args:
args[0]: (string) attribute's key
Returns:
(boolean) whether the removal succeeded
"""
key = "remove_attr"
const = False
def func(self):
"""
Implement the function.
"""
if not self.args:
return False
key = self.args[0]
return self.caller.statement_attr.remove(key)
class FuncHasAttr(StatementFunction):
"""
Does this attribute exist?
Args:
args[0]: (string) attribute key
Returns:
boolean result
"""
key = "has_attr"
const = True
def func(self):
"""
Implement the function.
"""
if not self.args:
return False
attr_key = self.args[0]
return self.caller.statement_attr.has(attr_key)
class FuncCheckAttr(StatementFunction):
"""
Does this attribute match the given value?
Args:
args[0]: (string) attribute's key
args[1]: attribute's value
If only one argument is given, this works the same as FuncHasAttr.
Returns:
boolean result
"""
key = "check_attr"
const = True
def func(self):
"""
Implement the function.
"""
if not self.args:
return False
attr_key = self.args[0]
if len(self.args) < 2:
return self.caller.statement_attr.has(attr_key)
else:
value = self.args[1]
return self.caller.statement_attr.check_value(attr_key, value)
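# --- Illustration (standalone sketch, not part of muddery) -------------------
# The statement functions above only assume that `caller.statement_attr`
# exposes set/get/remove/has/check_value. A hypothetical stand-in showing
# that contract:
class AttrHandlerSketch(object):
    """Minimal attribute handler mirroring the interface used above."""
    def __init__(self):
        self._data = {}
    def set(self, key, value):
        self._data[key] = value
    def get(self, key, default=None):
        return self._data.get(key, default)
    def remove(self, key):
        if key in self._data:
            del self._data[key]
            return True
        return False
    def has(self, key):
        return key in self._data
    def check_value(self, key, value):
        return self._data.get(key) == value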
| {
"content_hash": "b240c62e7b57391293cf509b2d65a99a",
"timestamp": "",
"source": "github",
"line_count": 150,
"max_line_length": 98,
"avg_line_length": 19.533333333333335,
"alnum_prop": 0.5535836177474402,
"repo_name": "MarsZone/DreamLand",
"id": "81adb5c106f84afc3a348e97e103f23d551e442c",
"size": "2930",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "muddery/statements/attribute.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "106"
},
{
"name": "CSS",
"bytes": "90990"
},
{
"name": "Emacs Lisp",
"bytes": "2734"
},
{
"name": "HTML",
"bytes": "204486"
},
{
"name": "JavaScript",
"bytes": "157093"
},
{
"name": "Python",
"bytes": "3191395"
},
{
"name": "Shell",
"bytes": "4237"
}
],
"symlink_target": ""
} |
"""
Wavefront REST API Documentation
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: chitimba@wavefront.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from wavefront_api_client.configuration import Configuration
class ResponseContainerMetricsPolicyReadModel(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'response': 'MetricsPolicyReadModel',
'status': 'ResponseStatus'
}
attribute_map = {
'response': 'response',
'status': 'status'
}
def __init__(self, response=None, status=None, _configuration=None): # noqa: E501
"""ResponseContainerMetricsPolicyReadModel - a model defined in Swagger""" # noqa: E501
if _configuration is None:
_configuration = Configuration()
self._configuration = _configuration
self._response = None
self._status = None
self.discriminator = None
if response is not None:
self.response = response
self.status = status
@property
def response(self):
"""Gets the response of this ResponseContainerMetricsPolicyReadModel. # noqa: E501
:return: The response of this ResponseContainerMetricsPolicyReadModel. # noqa: E501
:rtype: MetricsPolicyReadModel
"""
return self._response
@response.setter
def response(self, response):
"""Sets the response of this ResponseContainerMetricsPolicyReadModel.
:param response: The response of this ResponseContainerMetricsPolicyReadModel. # noqa: E501
:type: MetricsPolicyReadModel
"""
self._response = response
@property
def status(self):
"""Gets the status of this ResponseContainerMetricsPolicyReadModel. # noqa: E501
:return: The status of this ResponseContainerMetricsPolicyReadModel. # noqa: E501
:rtype: ResponseStatus
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this ResponseContainerMetricsPolicyReadModel.
:param status: The status of this ResponseContainerMetricsPolicyReadModel. # noqa: E501
:type: ResponseStatus
"""
if self._configuration.client_side_validation and status is None:
raise ValueError("Invalid value for `status`, must not be `None`") # noqa: E501
self._status = status
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ResponseContainerMetricsPolicyReadModel, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ResponseContainerMetricsPolicyReadModel):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, ResponseContainerMetricsPolicyReadModel):
return True
return self.to_dict() != other.to_dict()
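# --- Usage sketch (assumes the generated wavefront_api_client package) -------
# The setter only checks that `status` is not None (presence, not type), so a
# placeholder value is enough to exercise to_dict() and equality:
if __name__ == "__main__":
    model = ResponseContainerMetricsPolicyReadModel(status="ACCEPTED")
    print(model.to_dict())  # {'response': None, 'status': 'ACCEPTED'}
    print(model == ResponseContainerMetricsPolicyReadModel(status="ACCEPTED"))  # True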
| {
"content_hash": "787ab2645f1126634f37eda751642889",
"timestamp": "",
"source": "github",
"line_count": 148,
"max_line_length": 409,
"avg_line_length": 33.189189189189186,
"alnum_prop": 0.6140065146579805,
"repo_name": "wavefrontHQ/python-client",
"id": "c90630f49085919cb005abae2513e734824f7dba",
"size": "4929",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wavefront_api_client/models/response_container_metrics_policy_read_model.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4642252"
},
{
"name": "Shell",
"bytes": "3458"
}
],
"symlink_target": ""
} |
from api.decorators import api_view, request_data
from api.permissions import IsAdmin, IsSuperAdminOrReadOnly
from api.dc.storage.api_views import DcStorageView
__all__ = ('dc_storage_list', 'dc_storage')
@api_view(('GET',))
@request_data(permissions=(IsAdmin, IsSuperAdminOrReadOnly))
def dc_storage_list(request, data=None):
"""
List (:http:get:`GET </dc/(dc)/storage>`) available node storages in current datacenter.
.. http:get:: /dc/(dc)/storage
:DC-bound?:
* |dc-yes|
:Permissions:
* |Admin|
:Asynchronous?:
* |async-no|
:arg request.dc: **required** - Datacenter name
:type request.dc: string
:arg data.full: Return list of objects with all storage details (default: false)
:type data.full: boolean
:arg data.extended: Return list of objects with extended storage details (default: false)
:type data.extended: boolean
:arg data.order_by: :ref:`Available fields for sorting <order_by>`: ``hostname``, ``zpool`` \
(default: ``hostname,zpool``)
:type data.order_by: string
:status 200: SUCCESS
:status 403: Forbidden
:status 404: Datacenter not found
"""
return DcStorageView(request, None, data).get(many=True)
# noinspection PyUnusedLocal
@api_view(('GET', 'POST', 'DELETE'))
@request_data(permissions=(IsAdmin, IsSuperAdminOrReadOnly))
def dc_storage(request, zpool_node, data=None):
"""
Show (:http:get:`GET </dc/(dc)/storage/(zpool@node)>`),
create (:http:post:`POST </dc/(dc)/storage/(zpool@node)>`) or
delete (:http:delete:`DELETE </dc/(dc)/storage/(zpool@node)>`)
a node storage (zpool@node) association with a datacenter (dc).
.. http:get:: /dc/(dc)/storage/(zpool@node)
:DC-bound?:
* |dc-yes|
:Permissions:
* |Admin|
:Asynchronous?:
* |async-no|
:arg request.dc: **required** - Datacenter name
:type request.dc: string
:arg zpool@node: **required** - Storage pool name @ Compute node hostname
:type zpool@node: string
:arg data.extended: Display extended storage details (default: false)
:type data.extended: boolean
:status 200: SUCCESS
:status 403: Forbidden
:status 404: Datacenter not found / Storage not found
.. http:post:: /dc/(dc)/storage/(zpool@node)
:DC-bound?:
* |dc-yes|
:Permissions:
* |SuperAdmin|
:Asynchronous?:
* |async-no|
:arg request.dc: **required** - Datacenter name
:type request.dc: string
:arg zpool@node: **required** - Storage pool name @ Compute node hostname
:type zpool@node: string
:status 201: SUCCESS
:status 403: Forbidden
:status 404: Datacenter not found / Storage not found
:status 406: Storage already exists
:status 428: Compute node is not available
.. http:delete:: /dc/(dc)/storage/(zpool@node)
:DC-bound?:
* |dc-yes|
:Permissions:
* |SuperAdmin|
:Asynchronous?:
* |async-no|
:arg request.dc: **required** - Datacenter name
:type request.dc: string
:arg zpool@node: **required** - Storage pool name @ Compute node hostname
:type zpool@node: string
:status 200: SUCCESS
:status 403: Forbidden
:status 404: Datacenter not found / Storage not found
:status 428: Storage is used by some VMs / Storage is used by some VM backups
"""
return DcStorageView(request, zpool_node, data).response()
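# --- HTTP usage sketch (illustrative; paths come from the docstrings above) --
# The base URL and the authentication header below are deployment-specific
# assumptions, not part of this module.
if __name__ == '__main__':
    import requests
    base = 'https://danube.example.com/api'      # hypothetical deployment
    headers = {'ES-API-KEY': '<api-key>'}        # hypothetical auth header
    # List node storages in datacenter "main", with full details:
    print(requests.get(base + '/dc/main/storage',
                       params={'full': 'true'}, headers=headers).status_code)
    # Attach zones@node01.example.com to "main" (SuperAdmin only):
    print(requests.post(base + '/dc/main/storage/zones@node01.example.com',
                        headers=headers).status_code)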
| {
"content_hash": "45d1e9a1f5cea675299d18cd058df338",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 101,
"avg_line_length": 35.47572815533981,
"alnum_prop": 0.6048166392993979,
"repo_name": "erigones/esdc-ce",
"id": "be832b244dbbf8dae39692562ceff95badd64923",
"size": "3654",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "api/dc/storage/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Awk",
"bytes": "2728"
},
{
"name": "C",
"bytes": "8581"
},
{
"name": "CSS",
"bytes": "146461"
},
{
"name": "DTrace",
"bytes": "2250"
},
{
"name": "Erlang",
"bytes": "18842"
},
{
"name": "HTML",
"bytes": "473343"
},
{
"name": "JavaScript",
"bytes": "679240"
},
{
"name": "Jinja",
"bytes": "29584"
},
{
"name": "PLpgSQL",
"bytes": "17954"
},
{
"name": "Perl",
"bytes": "93955"
},
{
"name": "Python",
"bytes": "3124524"
},
{
"name": "Ruby",
"bytes": "56"
},
{
"name": "SCSS",
"bytes": "82814"
},
{
"name": "Shell",
"bytes": "281885"
}
],
"symlink_target": ""
} |
from django.apps import AppConfig
class CodeSnippetsConfig(AppConfig):
name = 'code_snippets'
| {
"content_hash": "d326f457ed3fe6b18a2ba2fdc1b7e14d",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 36,
"avg_line_length": 20,
"alnum_prop": 0.77,
"repo_name": "thjwhite/personal_website",
"id": "e7530d72602a55455a40871a44f801840e4c0e3c",
"size": "100",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "personal_website/code_snippets/apps.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "4559"
},
{
"name": "Python",
"bytes": "18302"
}
],
"symlink_target": ""
} |
import sqlite3
conn = sqlite3.connect('database name')
print("Opened database successfully")
conn.execute('''CREATE TABLE IF NOT EXISTS QUERY_TABLE
(Q_ID INT NOT NULL ,
Q_STRING VARCHAR(40) PRIMARY KEY );''')
print("Table created successfully")
conn.execute('''CREATE TABLE IF NOT EXISTS RESULT_TABLE
(Q_ID INT NOT NULL,
TITLE VARCHAR(80) ,
URL VARCHAR(1024) ,
SNIPPET VARCHAR(256));''')
print("Table created successfully")
conn.execute('''CREATE TABLE IF NOT EXISTS ENTITY_TABLE
(Q_ID INT NOT NULL,
ENTITY VARCHAR(40),
ENTITY_TYPE VARCHAR(60) );''')
print("Table created successfully")
#(SELECT max(a)FROM t1)+1)
conn.close()
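# --- Usage sketch (illustrative): populate and join the tables above ---------
# Column names come from the CREATE statements; the row values are made up.
conn = sqlite3.connect('database name')
conn.execute("INSERT OR IGNORE INTO QUERY_TABLE (Q_ID, Q_STRING) VALUES (?, ?)",
             (1, "python sqlite tutorial"))
conn.execute("INSERT INTO RESULT_TABLE (Q_ID, TITLE, URL, SNIPPET) VALUES (?, ?, ?, ?)",
             (1, "SQLite Docs", "https://sqlite.org", "Small. Fast. Reliable."))
conn.commit()
for row in conn.execute("SELECT q.Q_STRING, r.TITLE FROM QUERY_TABLE q "
                        "JOIN RESULT_TABLE r ON q.Q_ID = r.Q_ID"):
    print(row)
conn.close()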
| {
"content_hash": "ef65ce3116df3f76f292012f7fa57731",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 57,
"avg_line_length": 32.95652173913044,
"alnum_prop": 0.6094986807387863,
"repo_name": "mzmmoazam/Entity-Extraction-from-Google-searches-use-database-",
"id": "df707d9afab391c5b90d3abd27de8054cadc97a1",
"size": "758",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Create Tables.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4774"
}
],
"symlink_target": ""
} |
import math
import re
import textwrap
import operator
import numpy as np
import unittest
from numba.core.compiler import compile_isolated
from numba import jit
from numba.core import types
from numba.core.errors import TypingError
from numba.core.types.functions import _header_lead
from numba.tests.support import TestCase
def what():
pass
def foo():
return what()
def bar(x):
return x.a
def issue_868(a):
return a.shape * 2
def impossible_return_type(x):
if x > 0:
return ()
else:
return 1j
def bad_hypot_usage():
return math.hypot(1)
def imprecise_list():
l = []
return len(l)
def using_imprecise_list():
a = np.array([])
return a.astype(np.int32)
def unknown_module():
return numpyz.int32(0)
def nop(x, y, z):
pass
def array_setitem_invalid_cast():
arr = np.empty(1, dtype=np.float64)
arr[0] = 1j # invalid cast from complex to float
return arr
class Foo(object):
def __repr__(self):
return "<Foo instance>"
class TestTypingError(unittest.TestCase):
def test_unknown_function(self):
try:
compile_isolated(foo, ())
except TypingError as e:
self.assertIn("Untyped global name 'what'", str(e))
else:
self.fail("Should raise error")
def test_unknown_attrs(self):
try:
compile_isolated(bar, (types.int32,))
except TypingError as e:
self.assertIn("Unknown attribute 'a' of type int32", str(e))
else:
self.fail("Should raise error")
def test_unknown_module(self):
# This used to print "'object' object has no attribute 'int32'"
with self.assertRaises(TypingError) as raises:
compile_isolated(unknown_module, ())
self.assertIn("name 'numpyz' is not defined", str(raises.exception))
def test_issue_868(self):
'''
Summary: multiplying a scalar by a non-scalar would cause a crash in
type inference because TimeDeltaMixOp always assumed at least one of
its operands was an NPTimeDelta in its generic() method.
'''
with self.assertRaises(TypingError) as raises:
compile_isolated(issue_868, (types.Array(types.int32, 1, 'C'),))
expected = ((_header_lead + " Function(<built-in function mul>) found "
"for signature:\n \n >>> mul(UniTuple({} x 1), {})")
.format(str(types.intp), types.IntegerLiteral(2)))
self.assertIn(expected, str(raises.exception))
self.assertIn("During: typing of", str(raises.exception))
def test_return_type_unification(self):
with self.assertRaises(TypingError) as raises:
compile_isolated(impossible_return_type, (types.int32,))
msg = ("Can't unify return type from the following types: Tuple(), "
"complex128")
self.assertIn(msg, str(raises.exception))
def test_bad_hypot_usage(self):
with self.assertRaises(TypingError) as raises:
compile_isolated(bad_hypot_usage, ())
errmsg = str(raises.exception)
# Make sure it listed the known signatures.
# This is sensitive to the formatting of the error message.
self.assertIn(" * (float64, float64) -> float64", errmsg)
# find the context lines
ctx_lines = [x for x in errmsg.splitlines() if "During:" in x ]
# Check contextual msg
self.assertTrue(re.search(r'.*During: resolving callee type: Function.*hypot', ctx_lines[0]))
self.assertTrue(re.search(r'.*During: typing of call .*test_typingerror.py', ctx_lines[1]))
def test_imprecise_list(self):
"""
Type inference should catch that a list's type remains imprecise,
instead of letting lowering fail.
"""
with self.assertRaises(TypingError) as raises:
compile_isolated(imprecise_list, ())
errmsg = str(raises.exception)
msg = ("Cannot infer the type of variable 'l', have imprecise type: "
"list(undefined)")
self.assertIn(msg, errmsg)
# check help message has gone in
self.assertIn("For Numba to be able to compile a list", errmsg)
def test_using_imprecise_list(self):
"""
Type inference should report informative error about untyped list.
TODO: #2931
"""
with self.assertRaises(TypingError) as raises:
compile_isolated(using_imprecise_list, ())
errmsg = str(raises.exception)
self.assertIn("Undecided type", errmsg)
def test_array_setitem_invalid_cast(self):
with self.assertRaises(TypingError) as raises:
compile_isolated(array_setitem_invalid_cast, ())
errmsg = str(raises.exception)
self.assertIn(
_header_lead + " Function({})".format(operator.setitem),
errmsg,
)
self.assertIn(
"(array(float64, 1d, C), Literal[int](0), complex128)",
errmsg,
)
def test_template_rejection_error_message_cascade(self):
from numba import njit
@njit
def foo():
z = 1
for a, b in enumerate(z):
pass
return z
with self.assertRaises(TypingError) as raises:
foo()
errmsg = str(raises.exception)
expected = "No match."
self.assertIn(expected, errmsg)
ctx_lines = [x for x in errmsg.splitlines() if "During:" in x ]
search = [r'.*During: resolving callee type: Function.*enumerate',
r'.*During: typing of call .*test_typingerror.py']
for i, x in enumerate(search):
self.assertTrue(re.search(x, ctx_lines[i]))
class TestArgumentTypingError(unittest.TestCase):
"""
Test diagnostics of typing errors caused by argument inference failure.
"""
def test_unsupported_array_dtype(self):
# See issue #1943
cfunc = jit(nopython=True)(nop)
a = np.ones(3)
a = a.astype(a.dtype.newbyteorder())
with self.assertRaises(TypingError) as raises:
cfunc(1, a, a)
expected = textwrap.dedent("""\
This error may have been caused by the following argument(s):
- argument 1: Unsupported array dtype: {0}
- argument 2: Unsupported array dtype: {0}"""
).format(a.dtype)
self.assertIn(expected, str(raises.exception))
def test_unsupported_type(self):
cfunc = jit(nopython=True)(nop)
foo = Foo()
with self.assertRaises(TypingError) as raises:
cfunc(1, foo, 1)
expected=re.compile(("This error may have been caused by the following "
"argument\(s\):\\n- argument 1:.*cannot determine "
"Numba type of "
"<class \'numba.tests.test_typingerror.Foo\'>"))
self.assertTrue(expected.search(str(raises.exception)) is not None)
class TestCallError(unittest.TestCase):
def test_readonly_array(self):
@jit("(f8[:],)", nopython=True)
def inner(x):
return x
@jit(nopython=True)
def outer():
return inner(gvalues)
gvalues = np.ones(10, dtype=np.float64)
with self.assertRaises(TypingError) as raises:
outer()
got = str(raises.exception)
pat = r"Invalid use of.*readonly array\(float64, 1d, C\)"
self.assertIsNotNone(re.search(pat, got))
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "4d809fb3d0df64e1101a7260b743b93f",
"timestamp": "",
"source": "github",
"line_count": 241,
"max_line_length": 101,
"avg_line_length": 31.439834024896264,
"alnum_prop": 0.6018213013065857,
"repo_name": "sklam/numba",
"id": "ce3bcc6dddc9e3212226da8b35372f7edd89cdf3",
"size": "7577",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "numba/tests/test_typingerror.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "6783"
},
{
"name": "C",
"bytes": "638283"
},
{
"name": "C++",
"bytes": "52741"
},
{
"name": "Cuda",
"bytes": "214"
},
{
"name": "GDB",
"bytes": "101"
},
{
"name": "HTML",
"bytes": "3464"
},
{
"name": "Python",
"bytes": "7918676"
},
{
"name": "Shell",
"bytes": "7823"
}
],
"symlink_target": ""
} |
"""
Grafeas API
An API to insert and retrieve annotations on cloud artifacts. # noqa: E501
OpenAPI spec version: v1alpha1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import grafeas
from grafeas.models.build_signature_key_type import BuildSignatureKeyType # noqa: E501
from grafeas.rest import ApiException
class TestBuildSignatureKeyType(unittest.TestCase):
"""BuildSignatureKeyType unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testBuildSignatureKeyType(self):
"""Test BuildSignatureKeyType"""
# FIXME: construct object with mandatory attributes with example values
# model = grafeas.models.build_signature_key_type.BuildSignatureKeyType() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "fb5b807db1309b6581f7cc95d023d04a",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 95,
"avg_line_length": 23.973684210526315,
"alnum_prop": 0.6981339187705817,
"repo_name": "grafeas/client-python",
"id": "690f6d7908ead0be73fb9289b44bd196ee245cf0",
"size": "928",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_build_signature_key_type.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "558375"
},
{
"name": "Shell",
"bytes": "2063"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
# Copyright (c) 2010-2015 openpyxl
import pytest
from openpyxl.xml.functions import fromstring, tostring
from openpyxl.tests.helper import compare_xml
@pytest.fixture
def ChartsheetProperties():
from ..properties import ChartsheetProperties
return ChartsheetProperties
class TestChartsheetPr:
def test_read(self, ChartsheetProperties):
src = """
<sheetPr codeName="Chart1">
<tabColor rgb="FFDCD8F4" />
</sheetPr>
"""
xml = fromstring(src)
chartsheetPr = ChartsheetProperties.from_tree(xml)
assert chartsheetPr.codeName == "Chart1"
assert chartsheetPr.tabColor.rgb == "FFDCD8F4"
def test_write(self, ChartsheetProperties):
from openpyxl.styles import Color
chartsheetPr = ChartsheetProperties()
chartsheetPr.codeName = "Chart Openpyxl"
tabColor = Color(rgb="FFFFFFF4")
chartsheetPr.tabColor = tabColor
expected = """
<sheetPr codeName="Chart Openpyxl">
<tabColor rgb="FFFFFFF4" />
</sheetPr>
"""
xml = tostring(chartsheetPr.to_tree())
diff = compare_xml(xml, expected)
assert diff is None, diff
| {
"content_hash": "431b2609afe5ad4f4a6b54854ce1e6e6",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 58,
"avg_line_length": 28.790697674418606,
"alnum_prop": 0.6534733441033925,
"repo_name": "cgimenop/Excel2Testlink",
"id": "3ba3874ec87594f2cdd3d209c12db793dcf99f9f",
"size": "1238",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ExcelParser/lib/openpyxl/chartsheet/tests/test_properties.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "154"
},
{
"name": "Python",
"bytes": "1172250"
}
],
"symlink_target": ""
} |
import datetime
from typing import List, Dict, TYPE_CHECKING
from collections import deque
from gravity.models import TiltConfiguration, GravityLogPoint, GravitySensor
# from asgiref.sync import sync_to_async
class TiltHydrometer(object):
# These are all the UUIDs currently available as Tilt colors
tilt_colors = {
'Red': "a495bb10-c5b1-4b44-b512-1370f02d74de",
'Green': "a495bb20-c5b1-4b44-b512-1370f02d74de",
'Black': "a495bb30-c5b1-4b44-b512-1370f02d74de",
'Purple': "a495bb40-c5b1-4b44-b512-1370f02d74de",
'Orange': "a495bb50-c5b1-4b44-b512-1370f02d74de",
'Blue': "a495bb60-c5b1-4b44-b512-1370f02d74de",
'Yellow': "a495bb70-c5b1-4b44-b512-1370f02d74de",
'Pink': "a495bb80-c5b1-4b44-b512-1370f02d74de",
} # type: Dict[str, str]
# color_lookup_table is built lazily, on the first call to color_lookup
color_lookup_table = {} # type: Dict[str, str]
color_lookup_table_no_dash = {} # type: Dict[str, str]
def __init__(self, color: str):
self.color = color # type: str
# The smoothing_window is set in the TiltConfiguration object - just defaulting it here for now
self.smoothing_window = 60 # type: int
self.gravity_list = deque(maxlen=self.smoothing_window) # type: deque[float]
self.temp_list = deque(maxlen=self.smoothing_window) # type: deque[int]
self.last_value_received = datetime.datetime.now() - self._cache_expiry_seconds() # type: datetime.datetime
self.last_saved_value = datetime.datetime.now() # type: datetime.datetime
self.gravity = 0.0 # type: float
self.raw_gravity = 0.0 # type: float
# Note - temp is always in fahrenheit
self.temp = 0 # type: int
self.raw_temp = 0 # type: int
self.rssi = 0 # type: int
# v3 and newer Tilts use the tx_pwr field to send the battery life
self.sends_battery = False # type: bool
self.weeks_on_battery = 0 # type: int
self.firmware_version = 0
# Tilt Pros are determined when we receive a gravity reading > 5000
self.tilt_pro = False # type: bool
self.obj = None # type: TiltConfiguration
# Let's load the object from Fermentrack as part of the initialization
self.load_obj_from_fermentrack()
if self.obj is not None:
self.temp_format = self.obj.sensor.temp_format
else:
self.temp_format = GravitySensor.TEMP_FAHRENHEIT # Defaulting to Fahrenheit as that's what the Tilt sends
def __str__(self):
return self.color
def _cache_expiry_seconds(self) -> datetime.timedelta:
# Assume we get 1 out of every 4 readings
return datetime.timedelta(seconds=(self.smoothing_window * 1.2 * 4))
def _cache_expired(self) -> bool:
if self.obj is not None:
# We also explicitly clear the cache if the temp format has changed between
# what was just loaded from the sensor object and what we previously cached
if self.temp_format != self.obj.sensor.temp_format:
# Signal cache expiry so the cached temp/gravity values get cleared
self.temp_format = self.obj.sensor.temp_format # Cache the new temp format
return True
return self.last_value_received <= datetime.datetime.now() - self._cache_expiry_seconds()
def _add_to_list(self, gravity, temp):
# This adds a gravity/temp value to the list for smoothing/averaging
if self._cache_expired():
# The cache expired (we lost contact with the Tilt for too long). Clear the lists.
self.gravity_list.clear()
self.temp_list.clear()
# Thankfully, deque enforces queue length, so all we need to do is add the value
self.last_value_received = datetime.datetime.now()
self.gravity_list.append(gravity)
self.temp_list.append(temp)
def should_save(self) -> bool:
if self.obj is None:
return False
return self.last_saved_value <= datetime.datetime.now() - datetime.timedelta(seconds=(self.obj.polling_frequency))
# def process_ibeacon_info(self, ibeacon_info: IBeaconAdvertisement, rssi):
# self.raw_gravity = ibeacon_info.minor / 1000
# if self.obj is None:
# # If there is no TiltConfiguration object set, just use the raw gravity the Tilt provided
# self.gravity = self.raw_gravity
# else:
# # Otherwise, apply the calibration
# self.gravity = self.obj.apply_gravity_calibration(self.raw_gravity)
#
# # Temps are always provided in degrees fahrenheit - Convert to Celsius if required
# # Note - convert_temp_to_sensor returns as a tuple (with units) - we only want the degrees not the units
# self.raw_temp, _ = self.obj.sensor.convert_temp_to_sensor_format(ibeacon_info.major,
# GravitySensor.TEMP_FAHRENHEIT)
# self.temp = self.raw_temp
# self.rssi = rssi
# self._add_to_list(self.gravity, self.temp)
def process_decoded_values(self, sensor_gravity: int, sensor_temp: int, rssi: int, tx_pwr: int):
if sensor_temp == 999:
# For the latest Tilts, this is now actually a special code indicating that the gravity is the version info.
# Regardless of whether or not we end up doing anything with that information, we definitely do not want to
# add it to the list
self.firmware_version = sensor_gravity
return
if sensor_gravity >= 5000:
# Tilt Pro support
self.tilt_pro = True
self.raw_gravity = sensor_gravity / 10000
usable_temp = sensor_temp / 10
else:
# Tilt "Classic" support
self.tilt_pro = False
self.raw_gravity = sensor_gravity / 1000
usable_temp = sensor_temp
# v3 Tilts send battery age in weeks using the tx_pwr field, but they have a hack in place to maintain
# compatibility with iPhones where they alternate sending "197" (unsigned) or "-59" (signed) with the actual
# number of weeks since the battery was changed. If we see the 197 (-59) then we'll set "sends_battery" to true
# and then update the weeks_on_battery the next time we see a beacon
if tx_pwr == 197:
self.sends_battery = True
elif self.sends_battery:
self.weeks_on_battery = tx_pwr
if self.obj is None:
# If there is no TiltConfiguration object set, just use the raw gravity the Tilt provided
self.gravity = self.raw_gravity
self.raw_temp = usable_temp
else:
# Otherwise, apply the calibration
self.gravity = self.obj.apply_gravity_calibration(self.raw_gravity)
# Temps are always provided in degrees fahrenheit - Convert to Celsius if required
# Note - convert_temp_to_sensor returns as a tuple (with units) - we only want the degrees not the units
self.raw_temp, _ = self.obj.sensor.convert_temp_to_sensor_format(usable_temp,
GravitySensor.TEMP_FAHRENHEIT)
self.temp = self.raw_temp
self.rssi = rssi
self._add_to_list(self.gravity, self.temp)
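# Worked example of the decoding above (values are illustrative):
#   sensor_gravity=10155, sensor_temp=682 -> Tilt Pro:     raw_gravity=1.0155, temp 68.2 F
#   sensor_gravity=1015,  sensor_temp=68  -> Tilt Classic: raw_gravity=1.015,  temp 68 F
#   sensor_temp=999 marks a firmware-version beacon (the gravity field carries
#   the version); tx_pwr=197 flags that later beacons carry battery weeks.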
def smoothed_gravity(self):
# Return the average gravity in gravity_list
if len(self.gravity_list) <= 0:
return None
grav_total = 0
for grav in self.gravity_list:
grav_total += grav
return round(grav_total / len(self.gravity_list), 3) # Average it out & round
def smoothed_temp(self):
# Return the average temp in temp_list
if len(self.temp_list) <= 0:
return None
temp_total = 0
for temp in self.temp_list:
temp_total += temp
return round(temp_total / len(self.temp_list), 3) # Average it out & round
@classmethod
def color_lookup(cls, color):
if len(cls.color_lookup_table) <= 0:
cls.color_lookup_table = {cls.tilt_colors[x]: x for x in cls.tilt_colors}
if len(cls.color_lookup_table_no_dash) <= 0:
cls.color_lookup_table_no_dash = {cls.tilt_colors[x].replace("-",""): x for x in cls.tilt_colors}
if color in cls.color_lookup_table:
return cls.color_lookup_table[color]
elif color in cls.color_lookup_table_no_dash:
return cls.color_lookup_table_no_dash[color]
else:
return None
def print_data(self):
print("{} Tilt: {} ({}) / {} F".format(self.color, self.smoothed_gravity(), self.gravity, self.temp))
# @sync_to_async
def load_obj_from_fermentrack(self, obj: TiltConfiguration = None):
if obj is None:
# If we weren't handed the object itself, try to load it
try:
obj = TiltConfiguration.objects.get(color=self.color,
connection_type=TiltConfiguration.CONNECTION_BLUETOOTH)
except:
# TODO - Rewrite this slightly
self.obj = None
return False
# If the smoothing window changed, just recreate the deque objects
if obj.smoothing_window_vals != self.smoothing_window:
self.smoothing_window = obj.smoothing_window_vals
self.gravity_list = deque(maxlen=self.smoothing_window)
self.temp_list = deque(maxlen=self.smoothing_window)
self.obj = obj
# @sync_to_async
def save_value_to_fermentrack(self, verbose=False):
if self.obj is None:
# If we don't have a TiltConfiguration object loaded, we can't save the data point
if verbose:
print("{} Tilt: No object loaded for this color".format(self.color))
return False
if self._cache_expired():
if verbose:
print("{} Tilt: Cache is expired/No data available to save".format(self.color))
return False
if self.smoothed_gravity() is None or self.smoothed_temp() is None:
if verbose:
print("{} Tilt: No data available to save".format(self.color))
return False
# TODO - Test that temp_format actually works as intended here
new_point = GravityLogPoint(
gravity=self.smoothed_gravity(),
gravity_latest=self.gravity,
temp=self.smoothed_temp(),
temp_latest=self.temp,
temp_format=self.obj.sensor.temp_format,
temp_is_estimate=False,
associated_device=self.obj.sensor,
)
if self.obj.sensor.active_log is not None:
new_point.associated_log = self.obj.sensor.active_log
new_point.save()
# Also, set/save the RSSI/Raw Temp/Raw Gravity so we can load it for debugging
self.obj.rssi = self.rssi
self.obj.raw_gravity = self.raw_gravity
self.obj.raw_temp = self.raw_temp
self.obj.tilt_pro = self.tilt_pro
self.obj.sends_battery = self.sends_battery
self.obj.weeks_on_battery = self.weeks_on_battery
self.obj.firmware_version = self.firmware_version
self.obj.save_extras_to_redis()
self.last_saved_value = datetime.datetime.now()
if verbose:
print("{} Tilt: Logging {}".format(self.color, self.smoothed_gravity()))
else:
if verbose:
print("No data received.") | {
"content_hash": "72f524d771757da3a9e29c9131b65691",
"timestamp": "",
"source": "github",
"line_count": 269,
"max_line_length": 122,
"avg_line_length": 43.54275092936803,
"alnum_prop": 0.6108597285067874,
"repo_name": "thorrak/fermentrack",
"id": "7b2a2699920139ba4b25fece9cf6af8bf05189a1",
"size": "11713",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gravity/tilt/TiltHydrometer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "6186"
},
{
"name": "CSS",
"bytes": "19929"
},
{
"name": "Dockerfile",
"bytes": "3095"
},
{
"name": "HTML",
"bytes": "341762"
},
{
"name": "JavaScript",
"bytes": "1728"
},
{
"name": "Python",
"bytes": "888851"
},
{
"name": "Shell",
"bytes": "20031"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from ..base import StdOutCommandLine
def test_StdOutCommandLine_inputs():
input_map = dict(args=dict(argstr='%s',
),
environ=dict(nohash=True,
usedefault=True,
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
out_file=dict(argstr='> %s',
genfile=True,
position=-1,
),
terminal_output=dict(deprecated='1.0.0',
nohash=True,
),
)
inputs = StdOutCommandLine.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(inputs.traits()[key], metakey) == value
| {
"content_hash": "9c4f0e7691aab966baaf68661611a71a",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 66,
"avg_line_length": 24.40740740740741,
"alnum_prop": 0.629742033383915,
"repo_name": "mick-d/nipype",
"id": "ad49a04abb16c7977efeabb592add9a0dba4c534",
"size": "713",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nipype/interfaces/tests/test_auto_StdOutCommandLine.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "9823"
},
{
"name": "KiCad",
"bytes": "3797"
},
{
"name": "Makefile",
"bytes": "1854"
},
{
"name": "Matlab",
"bytes": "1999"
},
{
"name": "Python",
"bytes": "4607773"
},
{
"name": "Shell",
"bytes": "380"
},
{
"name": "Tcl",
"bytes": "43408"
}
],
"symlink_target": ""
} |
import copy
import uuid
import ldap
import mock
from testtools import matchers
from keystone import assignment
from keystone.common import cache
from keystone.common import ldap as common_ldap
from keystone.common.ldap import core as common_ldap_core
from keystone.common import sql
from keystone import config
from keystone import exception
from keystone import identity
from keystone.identity.mapping_backends import mapping as map
from keystone import tests
from keystone.tests import default_fixtures
from keystone.tests import fakeldap
from keystone.tests import identity_mapping as mapping_sql
from keystone.tests.ksfixtures import database
from keystone.tests import test_backend
CONF = config.CONF
class BaseLDAPIdentity(test_backend.IdentityTests):
def setUp(self):
super(BaseLDAPIdentity, self).setUp()
self.clear_database()
common_ldap.register_handler('fake://', fakeldap.FakeLdap)
self.load_backends()
self.load_fixtures(default_fixtures)
self.addCleanup(common_ldap_core._HANDLERS.clear)
def _get_domain_fixture(self):
"""Domains in LDAP are read-only, so just return the static one."""
return self.assignment_api.get_domain(CONF.identity.default_domain_id)
def clear_database(self):
for shelf in fakeldap.FakeShelves:
fakeldap.FakeShelves[shelf].clear()
def reload_backends(self, domain_id):
# Only one backend unless we are using separate domain backends
self.load_backends()
def get_config(self, domain_id):
# Only one conf structure unless we are using separate domain backends
return CONF
def config_overrides(self):
super(BaseLDAPIdentity, self).config_overrides()
self.config_fixture.config(
group='identity',
driver='keystone.identity.backends.ldap.Identity')
def config_files(self):
config_files = super(BaseLDAPIdentity, self).config_files()
config_files.append(tests.dirs.tests_conf('backend_ldap.conf'))
return config_files
def get_user_enabled_vals(self, user):
user_dn = (
self.identity_api.driver.user._id_to_dn_string(user['id']))
enabled_attr_name = CONF.ldap.user_enabled_attribute
ldap_ = self.identity_api.driver.user.get_connection()
res = ldap_.search_s(user_dn,
ldap.SCOPE_BASE,
u'(sn=%s)' % user['name'])
if enabled_attr_name in res[0][1]:
return res[0][1][enabled_attr_name]
else:
return None
def test_build_tree(self):
"""Regression test for building the tree names
"""
user_api = identity.backends.ldap.UserApi(CONF)
self.assertTrue(user_api)
self.assertEqual("ou=Users,%s" % CONF.ldap.suffix, user_api.tree_dn)
def test_configurable_allowed_user_actions(self):
user = {'name': u'fäké1',
'password': u'fäképass1',
'domain_id': CONF.identity.default_domain_id,
'tenants': ['bar']}
user = self.identity_api.create_user(user)
self.identity_api.get_user(user['id'])
user['password'] = u'fäképass2'
self.identity_api.update_user(user['id'], user)
self.identity_api.delete_user(user['id'])
self.assertRaises(exception.UserNotFound,
self.identity_api.get_user,
user['id'])
def test_configurable_forbidden_user_actions(self):
conf = self.get_config(CONF.identity.default_domain_id)
conf.ldap.user_allow_create = False
conf.ldap.user_allow_update = False
conf.ldap.user_allow_delete = False
self.reload_backends(CONF.identity.default_domain_id)
user = {'name': u'fäké1',
'password': u'fäképass1',
'domain_id': CONF.identity.default_domain_id,
'tenants': ['bar']}
self.assertRaises(exception.ForbiddenAction,
self.identity_api.create_user,
user)
self.user_foo['password'] = u'fäképass2'
self.assertRaises(exception.ForbiddenAction,
self.identity_api.update_user,
self.user_foo['id'],
self.user_foo)
self.assertRaises(exception.ForbiddenAction,
self.identity_api.delete_user,
self.user_foo['id'])
def test_configurable_forbidden_create_existing_user(self):
conf = self.get_config(CONF.identity.default_domain_id)
conf.ldap.user_allow_create = False
self.reload_backends(CONF.identity.default_domain_id)
self.assertRaises(exception.ForbiddenAction,
self.identity_api.create_user,
self.user_foo)
def test_user_filter(self):
user_ref = self.identity_api.get_user(self.user_foo['id'])
self.user_foo.pop('password')
self.assertDictEqual(user_ref, self.user_foo)
conf = self.get_config(user_ref['domain_id'])
conf.ldap.user_filter = '(CN=DOES_NOT_MATCH)'
self.reload_backends(user_ref['domain_id'])
self.assertRaises(exception.UserNotFound,
self.identity_api.get_user,
self.user_foo['id'])
def test_remove_role_grant_from_user_and_project(self):
self.assignment_api.create_grant(user_id=self.user_foo['id'],
project_id=self.tenant_baz['id'],
role_id='member')
roles_ref = self.assignment_api.list_grants(
user_id=self.user_foo['id'],
project_id=self.tenant_baz['id'])
self.assertDictEqual(roles_ref[0], self.role_member)
self.assignment_api.delete_grant(user_id=self.user_foo['id'],
project_id=self.tenant_baz['id'],
role_id='member')
roles_ref = self.assignment_api.list_grants(
user_id=self.user_foo['id'],
project_id=self.tenant_baz['id'])
self.assertEqual(0, len(roles_ref))
self.assertRaises(exception.NotFound,
self.assignment_api.delete_grant,
user_id=self.user_foo['id'],
project_id=self.tenant_baz['id'],
role_id='member')
def test_get_and_remove_role_grant_by_group_and_project(self):
new_domain = self._get_domain_fixture()
new_group = {'domain_id': new_domain['id'],
'name': uuid.uuid4().hex}
new_group = self.identity_api.create_group(new_group)
new_user = {'name': 'new_user', 'enabled': True,
'domain_id': new_domain['id']}
new_user = self.identity_api.create_user(new_user)
self.identity_api.add_user_to_group(new_user['id'],
new_group['id'])
roles_ref = self.assignment_api.list_grants(
group_id=new_group['id'],
project_id=self.tenant_bar['id'])
self.assertEqual([], roles_ref)
self.assertEqual(0, len(roles_ref))
self.assignment_api.create_grant(group_id=new_group['id'],
project_id=self.tenant_bar['id'],
role_id='member')
roles_ref = self.assignment_api.list_grants(
group_id=new_group['id'],
project_id=self.tenant_bar['id'])
self.assertNotEmpty(roles_ref)
self.assertDictEqual(roles_ref[0], self.role_member)
self.assignment_api.delete_grant(group_id=new_group['id'],
project_id=self.tenant_bar['id'],
role_id='member')
roles_ref = self.assignment_api.list_grants(
group_id=new_group['id'],
project_id=self.tenant_bar['id'])
self.assertEqual(0, len(roles_ref))
self.assertRaises(exception.NotFound,
self.assignment_api.delete_grant,
group_id=new_group['id'],
project_id=self.tenant_bar['id'],
role_id='member')
def test_get_and_remove_role_grant_by_group_and_domain(self):
self.skipTest('N/A: LDAP does not support multiple domains')
def test_get_and_remove_role_grant_by_user_and_domain(self):
self.skipTest('N/A: LDAP does not support multiple domains')
def test_get_and_remove_correct_role_grant_from_a_mix(self):
self.skipTest('Blocked by bug 1101287')
def test_get_and_remove_role_grant_by_group_and_cross_domain(self):
self.skipTest('N/A: LDAP does not support multiple domains')
def test_get_and_remove_role_grant_by_user_and_cross_domain(self):
self.skipTest('N/A: LDAP does not support multiple domains')
def test_role_grant_by_group_and_cross_domain_project(self):
self.skipTest('N/A: LDAP does not support multiple domains')
def test_role_grant_by_user_and_cross_domain_project(self):
self.skipTest('N/A: LDAP does not support multiple domains')
def test_multi_role_grant_by_user_group_on_project_domain(self):
self.skipTest('N/A: LDAP does not support multiple domains')
def test_delete_role_with_user_and_group_grants(self):
self.skipTest('Blocked by bug 1101287')
def test_delete_user_with_group_project_domain_links(self):
self.skipTest('N/A: LDAP does not support multiple domains')
def test_delete_group_with_user_project_domain_links(self):
self.skipTest('N/A: LDAP does not support multiple domains')
def test_list_projects_for_user(self):
domain = self._get_domain_fixture()
user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
'domain_id': domain['id'], 'enabled': True}
user1 = self.identity_api.create_user(user1)
user_projects = self.assignment_api.list_projects_for_user(user1['id'])
self.assertThat(user_projects, matchers.HasLength(0))
# new grant(user1, role_member, tenant_bar)
self.assignment_api.create_grant(user_id=user1['id'],
project_id=self.tenant_bar['id'],
role_id=self.role_member['id'])
# new grant(user1, role_member, tenant_baz)
self.assignment_api.create_grant(user_id=user1['id'],
project_id=self.tenant_baz['id'],
role_id=self.role_member['id'])
user_projects = self.assignment_api.list_projects_for_user(user1['id'])
self.assertThat(user_projects, matchers.HasLength(2))
# Now, check number of projects through groups
user2 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
'domain_id': domain['id'], 'enabled': True}
user2 = self.identity_api.create_user(user2)
group1 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
group1 = self.identity_api.create_group(group1)
self.identity_api.add_user_to_group(user2['id'], group1['id'])
# new grant(group1(user2), role_member, tenant_bar)
self.assignment_api.create_grant(group_id=group1['id'],
project_id=self.tenant_bar['id'],
role_id=self.role_member['id'])
# new grant(group1(user2), role_member, tenant_baz)
self.assignment_api.create_grant(group_id=group1['id'],
project_id=self.tenant_baz['id'],
role_id=self.role_member['id'])
user_projects = self.assignment_api.list_projects_for_user(user2['id'])
self.assertThat(user_projects, matchers.HasLength(2))
# new grant(group1(user2), role_other, tenant_bar)
self.assignment_api.create_grant(group_id=group1['id'],
project_id=self.tenant_bar['id'],
role_id=self.role_other['id'])
user_projects = self.assignment_api.list_projects_for_user(user2['id'])
self.assertThat(user_projects, matchers.HasLength(2))
def test_list_projects_for_user_and_groups(self):
domain = self._get_domain_fixture()
# Create user1
user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
'domain_id': domain['id'], 'enabled': True}
user1 = self.identity_api.create_user(user1)
# Create new group for user1
group1 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
group1 = self.identity_api.create_group(group1)
# Add user1 to group1
self.identity_api.add_user_to_group(user1['id'], group1['id'])
# Now, add grant to user1 and group1 in tenant_bar
self.assignment_api.create_grant(user_id=user1['id'],
project_id=self.tenant_bar['id'],
role_id=self.role_member['id'])
self.assignment_api.create_grant(group_id=group1['id'],
project_id=self.tenant_bar['id'],
role_id=self.role_member['id'])
# The result is user1 has only one project granted
user_projects = self.assignment_api.list_projects_for_user(user1['id'])
self.assertThat(user_projects, matchers.HasLength(1))
# Now, delete user1 grant into tenant_bar and check
self.assignment_api.delete_grant(user_id=user1['id'],
project_id=self.tenant_bar['id'],
role_id=self.role_member['id'])
# The result is user1 has only one project granted.
# Granted through group1.
user_projects = self.assignment_api.list_projects_for_user(user1['id'])
self.assertThat(user_projects, matchers.HasLength(1))
def test_list_projects_for_user_with_grants(self):
domain = self._get_domain_fixture()
new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
'enabled': True, 'domain_id': domain['id']}
new_user = self.identity_api.create_user(new_user)
group1 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
group1 = self.identity_api.create_group(group1)
group2 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
group2 = self.identity_api.create_group(group2)
project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
'domain_id': domain['id']}
self.assignment_api.create_project(project1['id'], project1)
project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
'domain_id': domain['id']}
self.assignment_api.create_project(project2['id'], project2)
self.identity_api.add_user_to_group(new_user['id'],
group1['id'])
self.identity_api.add_user_to_group(new_user['id'],
group2['id'])
self.assignment_api.create_grant(user_id=new_user['id'],
project_id=self.tenant_bar['id'],
role_id=self.role_member['id'])
self.assignment_api.create_grant(user_id=new_user['id'],
project_id=project1['id'],
role_id=self.role_admin['id'])
self.assignment_api.create_grant(group_id=group2['id'],
project_id=project2['id'],
role_id=self.role_admin['id'])
user_projects = self.assignment_api.list_projects_for_user(
new_user['id'])
self.assertEqual(3, len(user_projects))
def test_create_duplicate_user_name_in_different_domains(self):
self.skipTest('Domains are read-only against LDAP')
def test_create_duplicate_project_name_in_different_domains(self):
self.skipTest('Domains are read-only against LDAP')
def test_create_duplicate_group_name_in_different_domains(self):
self.skipTest(
'N/A: LDAP does not support multiple domains')
def test_move_user_between_domains(self):
self.skipTest('Domains are read-only against LDAP')
def test_move_user_between_domains_with_clashing_names_fails(self):
self.skipTest('Domains are read-only against LDAP')
def test_move_group_between_domains(self):
self.skipTest(
'N/A: LDAP does not support multiple domains')
def test_move_group_between_domains_with_clashing_names_fails(self):
self.skipTest('Domains are read-only against LDAP')
def test_move_project_between_domains(self):
self.skipTest('Domains are read-only against LDAP')
def test_move_project_between_domains_with_clashing_names_fails(self):
self.skipTest('Domains are read-only against LDAP')
def test_get_roles_for_user_and_domain(self):
self.skipTest('N/A: LDAP does not support multiple domains')
def test_get_roles_for_groups_on_domain(self):
self.skipTest('Blocked by bug: 1390125')
def test_get_roles_for_groups_on_project(self):
self.skipTest('Blocked by bug: 1390125')
def test_list_domains_for_groups(self):
self.skipTest('N/A: LDAP does not support multiple domains')
def test_list_projects_for_groups(self):
self.skipTest('Blocked by bug: 1390125')
def test_list_role_assignments_unfiltered(self):
new_domain = self._get_domain_fixture()
new_user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
'enabled': True, 'domain_id': new_domain['id']}
new_user = self.identity_api.create_user(new_user)
new_group = {'domain_id': new_domain['id'], 'name': uuid.uuid4().hex}
new_group = self.identity_api.create_group(new_group)
new_project = {'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
'domain_id': new_domain['id']}
self.assignment_api.create_project(new_project['id'], new_project)
# First check how many role grant already exist
existing_assignments = len(self.assignment_api.list_role_assignments())
self.assignment_api.create_grant(user_id=new_user['id'],
project_id=new_project['id'],
role_id='other')
self.assignment_api.create_grant(group_id=new_group['id'],
project_id=new_project['id'],
role_id='admin')
# Read back the list of assignments - check it is gone up by 2
after_assignments = len(self.assignment_api.list_role_assignments())
self.assertEqual(existing_assignments + 2, after_assignments)
def test_list_role_assignments_dumb_member(self):
self.config_fixture.config(group='ldap', use_dumb_member=True)
self.clear_database()
self.load_backends()
self.load_fixtures(default_fixtures)
new_domain = self._get_domain_fixture()
new_user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
'enabled': True, 'domain_id': new_domain['id']}
new_user = self.identity_api.create_user(new_user)
new_project = {'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
'domain_id': new_domain['id']}
self.assignment_api.create_project(new_project['id'], new_project)
self.assignment_api.create_grant(user_id=new_user['id'],
project_id=new_project['id'],
role_id='other')
# Read back the list of assignments and ensure
# that the LDAP dumb member isn't listed.
assignment_ids = [a['user_id'] for a in
self.assignment_api.list_role_assignments()]
dumb_id = common_ldap.BaseLdap._dn_to_id(CONF.ldap.dumb_member)
self.assertNotIn(dumb_id, assignment_ids)
def test_list_user_ids_for_project_dumb_member(self):
self.config_fixture.config(group='ldap', use_dumb_member=True)
self.clear_database()
self.load_backends()
self.load_fixtures(default_fixtures)
user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex,
'enabled': True, 'domain_id': test_backend.DEFAULT_DOMAIN_ID}
user = self.identity_api.create_user(user)
self.assignment_api.add_user_to_project(self.tenant_baz['id'],
user['id'])
user_ids = self.assignment_api.list_user_ids_for_project(
self.tenant_baz['id'])
self.assertIn(user['id'], user_ids)
dumb_id = common_ldap.BaseLdap._dn_to_id(CONF.ldap.dumb_member)
self.assertNotIn(dumb_id, user_ids)
def test_multi_group_grants_on_project_domain(self):
self.skipTest('Blocked by bug 1101287')
def test_list_group_members_missing_entry(self):
"""List group members with deleted user.
If a group has a deleted entry for a member, the non-deleted members
are returned.
"""
# Create a group
group = dict(name=uuid.uuid4().hex,
domain_id=CONF.identity.default_domain_id)
group_id = self.identity_api.create_group(group)['id']
# Create a couple of users and add them to the group.
user = dict(name=uuid.uuid4().hex,
domain_id=CONF.identity.default_domain_id)
user_1_id = self.identity_api.create_user(user)['id']
self.identity_api.add_user_to_group(user_1_id, group_id)
user = dict(name=uuid.uuid4().hex,
domain_id=CONF.identity.default_domain_id)
user_2_id = self.identity_api.create_user(user)['id']
self.identity_api.add_user_to_group(user_2_id, group_id)
# Delete user 2
# NOTE(blk-u): need to go directly to user interface to keep from
# updating the group.
unused, driver, entity_id = (
self.identity_api._get_domain_driver_and_entity_id(user_2_id))
driver.user.delete(entity_id)
# List group users and verify only user 1.
res = self.identity_api.list_users_in_group(group_id)
self.assertEqual(1, len(res), "Expected 1 entry (user_1)")
self.assertEqual(user_1_id, res[0]['id'], "Expected user 1 id")
def test_list_group_members_when_no_members(self):
# List group members when there is no member in the group.
# No exception should be raised.
group = {
'domain_id': CONF.identity.default_domain_id,
'name': uuid.uuid4().hex,
'description': uuid.uuid4().hex}
group = self.identity_api.create_group(group)
# If this doesn't raise, then the test is successful.
self.identity_api.list_users_in_group(group['id'])
def test_list_group_members_dumb_member(self):
self.config_fixture.config(group='ldap', use_dumb_member=True)
self.clear_database()
self.load_backends()
self.load_fixtures(default_fixtures)
# Create a group
group = dict(name=uuid.uuid4().hex,
domain_id=CONF.identity.default_domain_id)
group_id = self.identity_api.create_group(group)['id']
# Create a user
user = dict(name=uuid.uuid4().hex,
domain_id=CONF.identity.default_domain_id)
user_id = self.identity_api.create_user(user)['id']
# Add user to the group
self.identity_api.add_user_to_group(user_id, group_id)
user_ids = self.identity_api.list_users_in_group(group_id)
dumb_id = common_ldap.BaseLdap._dn_to_id(CONF.ldap.dumb_member)
self.assertNotIn(dumb_id, user_ids)
def test_list_domains(self):
domains = self.assignment_api.list_domains()
self.assertEqual(
[assignment.calc_default_domain()],
domains)
def test_list_domains_non_default_domain_id(self):
# If the default_domain_id is changed, the ID of the default domain
# returned by list_domains changes to the new default_domain_id.
new_domain_id = uuid.uuid4().hex
self.config_fixture.config(group='identity',
default_domain_id=new_domain_id)
domains = self.assignment_api.list_domains()
self.assertEqual(new_domain_id, domains[0]['id'])
def test_authenticate_requires_simple_bind(self):
user = {
'name': 'NO_META',
'domain_id': test_backend.DEFAULT_DOMAIN_ID,
'password': 'no_meta2',
'enabled': True,
}
user = self.identity_api.create_user(user)
self.assignment_api.add_user_to_project(self.tenant_baz['id'],
user['id'])
driver = self.identity_api._select_identity_driver(
user['domain_id'])
driver.user.LDAP_USER = None
driver.user.LDAP_PASSWORD = None
self.assertRaises(AssertionError,
self.identity_api.authenticate,
context={},
user_id=user['id'],
password=None)
# (spzala) The group and domain crud tests below override the standard ones
# in test_backend.py so that we can exclude the update name test, since we
# do not yet support the update of either group or domain names with LDAP.
# In the tests below, the update is demonstrated by updating description.
# Refer to bug 1136403 for more detail.
def test_group_crud(self):
group = {
'domain_id': CONF.identity.default_domain_id,
'name': uuid.uuid4().hex,
'description': uuid.uuid4().hex}
group = self.identity_api.create_group(group)
group_ref = self.identity_api.get_group(group['id'])
self.assertDictEqual(group_ref, group)
group['description'] = uuid.uuid4().hex
self.identity_api.update_group(group['id'], group)
group_ref = self.identity_api.get_group(group['id'])
self.assertDictEqual(group_ref, group)
self.identity_api.delete_group(group['id'])
self.assertRaises(exception.GroupNotFound,
self.identity_api.get_group,
group['id'])
def test_create_user_none_mapping(self):
# When creating a user where an attribute maps to None, the entry is
# created without that attribute and it doesn't fail with a TypeError.
conf = self.get_config(CONF.identity.default_domain_id)
conf.ldap.user_attribute_ignore = ['enabled', 'email',
'tenants', 'tenantId']
self.reload_backends(CONF.identity.default_domain_id)
user = {'name': u'fäké1',
'password': u'fäképass1',
'domain_id': CONF.identity.default_domain_id,
'default_project_id': 'maps_to_none',
}
# If this doesn't raise, then the test is successful.
user = self.identity_api.create_user(user)
def test_unignored_user_none_mapping(self):
# Ensure that an attribute that maps to None that is not explicitly
# ignored in configuration is implicitly ignored without triggering
# an error.
conf = self.get_config(CONF.identity.default_domain_id)
conf.ldap.user_attribute_ignore = ['enabled', 'email',
'tenants', 'tenantId']
self.reload_backends(CONF.identity.default_domain_id)
user = {'name': u'fäké1',
'password': u'fäképass1',
'domain_id': CONF.identity.default_domain_id,
}
user_ref = self.identity_api.create_user(user)
# If this doesn't raise, then the test is successful.
self.identity_api.get_user(user_ref['id'])
def test_update_user_name(self):
"""A user's name cannot be changed through the LDAP driver."""
self.assertRaises(exception.Conflict,
super(BaseLDAPIdentity, self).test_update_user_name)
def test_arbitrary_attributes_are_returned_from_get_user(self):
self.skipTest("Using arbitrary attributes doesn't work under LDAP")
def test_new_arbitrary_attributes_are_returned_from_update_user(self):
self.skipTest("Using arbitrary attributes doesn't work under LDAP")
def test_updated_arbitrary_attributes_are_returned_from_update_user(self):
self.skipTest("Using arbitrary attributes doesn't work under LDAP")
def test_cache_layer_domain_crud(self):
# TODO(morganfainberg): This also needs to be removed when full LDAP
# implementation is submitted. No need to duplicate the above test,
# just skip this time.
self.skipTest('Domains are read-only against LDAP')
def test_user_id_comma(self):
"""Even if the user has a , in their ID, groups can be listed."""
# Create a user with a , in their ID
# NOTE(blk-u): the DN for this user is hard-coded in fakeldap!
# Since we want to fake up this special ID, we'll squirt this
# directly into the driver and bypass the manager layer.
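# (A hedged gloss: the manager layer wraps driver-local IDs in opaque
# public IDs, so an ID containing ',' only ever appears below the
# manager, where it forms part of the entry's DN.)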
user_id = u'Doe, John'
user = {
'id': user_id,
'name': self.getUniqueString(),
'password': self.getUniqueString(),
'domain_id': CONF.identity.default_domain_id,
}
user = self.identity_api.driver.create_user(user_id, user)
# Now we'll use the manager to discover it, which will create a
# Public ID for it.
ref_list = self.identity_api.list_users()
public_user_id = None
for ref in ref_list:
if ref['name'] == user['name']:
public_user_id = ref['id']
break
# Create a group
group_id = uuid.uuid4().hex
group = {
'id': group_id,
'name': self.getUniqueString(prefix='tuidc'),
'description': self.getUniqueString(),
'domain_id': CONF.identity.default_domain_id,
}
group = self.identity_api.driver.create_group(group_id, group)
# Now we'll use the manager to discover it, which will create a
# Public ID for it.
ref_list = self.identity_api.list_groups()
public_group_id = None
for ref in ref_list:
if ref['name'] == group['name']:
public_group_id = ref['id']
break
# Put the user in the group
self.identity_api.add_user_to_group(public_user_id, public_group_id)
# List groups for user.
ref_list = self.identity_api.list_groups_for_user(public_user_id)
group['id'] = public_group_id
self.assertThat(ref_list, matchers.Equals([group]))
def test_user_id_comma_grants(self):
"""Even if the user has a , in their ID, can get user and group grants.
"""
# Create a user with a , in their ID
# NOTE(blk-u): the DN for this user is hard-coded in fakeldap!
# Since we want to fake up this special ID, we'll squirt this
# directly into the driver and bypass the manager layer.
user_id = u'Doe, John'
user = {
'id': user_id,
'name': self.getUniqueString(),
'password': self.getUniqueString(),
'domain_id': CONF.identity.default_domain_id,
}
self.identity_api.driver.create_user(user_id, user)
# Now we'll use the manager to discover it, which will create a
# Public ID for it.
ref_list = self.identity_api.list_users()
public_user_id = None
for ref in ref_list:
if ref['name'] == user['name']:
public_user_id = ref['id']
break
# Grant the user a role on a project.
role_id = 'member'
project_id = self.tenant_baz['id']
self.assignment_api.create_grant(role_id, user_id=public_user_id,
project_id=project_id)
role_ref = self.assignment_api.get_grant(role_id,
user_id=public_user_id,
project_id=project_id)
self.assertEqual(role_id, role_ref['id'])
def test_user_enabled_ignored_disable_error(self):
# When the server is configured so that the enabled attribute is
# ignored for users, users cannot be disabled.
self.config_fixture.config(group='ldap',
user_attribute_ignore=['enabled'])
# Need to re-load backends for the config change to take effect.
self.load_backends()
# Attempt to disable the user.
self.assertRaises(exception.ForbiddenAction,
self.identity_api.update_user, self.user_foo['id'],
{'enabled': False})
user_info = self.identity_api.get_user(self.user_foo['id'])
# If 'enabled' is ignored then 'enabled' isn't returned as part of the
# ref.
self.assertNotIn('enabled', user_info)
def test_group_enabled_ignored_disable_error(self):
# When the server is configured so that the enabled attribute is
# ignored for groups, groups cannot be disabled.
self.config_fixture.config(group='ldap',
group_attribute_ignore=['enabled'])
# Need to re-load backends for the config change to take effect.
self.load_backends()
# There's no group fixture so create a group.
new_domain = self._get_domain_fixture()
new_group = {'domain_id': new_domain['id'],
'name': uuid.uuid4().hex}
new_group = self.identity_api.create_group(new_group)
# Attempt to disable the group.
self.assertRaises(exception.ForbiddenAction,
self.identity_api.update_group, new_group['id'],
{'enabled': False})
group_info = self.identity_api.get_group(new_group['id'])
# If 'enabled' is ignored then 'enabled' isn't returned as part of the
# ref.
self.assertNotIn('enabled', group_info)
def test_project_enabled_ignored_disable_error(self):
# When the server is configured so that the enabled attribute is
# ignored for projects, projects cannot be disabled.
self.config_fixture.config(group='ldap',
project_attribute_ignore=['enabled'])
# Need to re-load backends for the config change to take effect.
self.load_backends()
# Attempt to disable the project.
self.assertRaises(exception.ForbiddenAction,
self.assignment_api.update_project,
self.tenant_baz['id'], {'enabled': False})
project_info = self.assignment_api.get_project(self.tenant_baz['id'])
# Unlike other entities, if 'enabled' is ignored then 'enabled' is
# returned as part of the ref.
self.assertIs(True, project_info['enabled'])
class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
def setUp(self):
# NOTE(dstanek): The database must be setup prior to calling the
# parent's setUp. The parent's setUp uses services (like
# credentials) that require a database.
self.useFixture(database.Database())
super(LDAPIdentity, self).setUp()
def test_configurable_allowed_project_actions(self):
tenant = {'id': u'fäké1', 'name': u'fäké1', 'enabled': True}
self.assignment_api.create_project(u'fäké1', tenant)
tenant_ref = self.assignment_api.get_project(u'fäké1')
self.assertEqual(u'fäké1', tenant_ref['id'])
tenant['enabled'] = False
self.assignment_api.update_project(u'fäké1', tenant)
self.assignment_api.delete_project(u'fäké1')
self.assertRaises(exception.ProjectNotFound,
self.assignment_api.get_project,
u'fäké1')
def test_configurable_subtree_delete(self):
self.config_fixture.config(group='ldap', allow_subtree_delete=True)
self.load_backends()
project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
'domain_id': CONF.identity.default_domain_id}
self.assignment_api.create_project(project1['id'], project1)
role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
self.assignment_api.create_role(role1['id'], role1)
user1 = {'name': uuid.uuid4().hex,
'domain_id': CONF.identity.default_domain_id,
'password': uuid.uuid4().hex,
'enabled': True}
user1 = self.identity_api.create_user(user1)
self.assignment_api.add_role_to_user_and_project(
user_id=user1['id'],
tenant_id=project1['id'],
role_id=role1['id'])
self.assignment_api.delete_project(project1['id'])
self.assertRaises(exception.ProjectNotFound,
self.assignment_api.get_project,
project1['id'])
self.assignment_api.create_project(project1['id'], project1)
roles = self.assignment_api.get_roles_for_user_and_project(
user1['id'],
project1['id'])
self.assertEqual(0, len(roles))
def test_configurable_forbidden_project_actions(self):
self.config_fixture.config(
group='ldap', project_allow_create=False,
project_allow_update=False, project_allow_delete=False)
self.load_backends()
tenant = {'id': u'fäké1', 'name': u'fäké1'}
self.assertRaises(exception.ForbiddenAction,
self.assignment_api.create_project,
u'fäké1',
tenant)
self.tenant_bar['enabled'] = False
self.assertRaises(exception.ForbiddenAction,
self.assignment_api.update_project,
self.tenant_bar['id'],
self.tenant_bar)
self.assertRaises(exception.ForbiddenAction,
self.assignment_api.delete_project,
self.tenant_bar['id'])
def test_configurable_allowed_role_actions(self):
role = {'id': u'fäké1', 'name': u'fäké1'}
self.assignment_api.create_role(u'fäké1', role)
role_ref = self.assignment_api.get_role(u'fäké1')
self.assertEqual(u'fäké1', role_ref['id'])
role['name'] = u'fäké2'
self.assignment_api.update_role(u'fäké1', role)
self.assignment_api.delete_role(u'fäké1')
self.assertRaises(exception.RoleNotFound,
self.assignment_api.get_role,
u'fäké1')
def test_configurable_forbidden_role_actions(self):
self.config_fixture.config(
group='ldap', role_allow_create=False, role_allow_update=False,
role_allow_delete=False)
self.load_backends()
role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
self.assertRaises(exception.ForbiddenAction,
self.assignment_api.create_role,
role['id'],
role)
self.role_member['name'] = uuid.uuid4().hex
self.assertRaises(exception.ForbiddenAction,
self.assignment_api.update_role,
self.role_member['id'],
self.role_member)
self.assertRaises(exception.ForbiddenAction,
self.assignment_api.delete_role,
self.role_member['id'])
def test_project_filter(self):
tenant_ref = self.assignment_api.get_project(self.tenant_bar['id'])
self.assertDictEqual(tenant_ref, self.tenant_bar)
self.config_fixture.config(group='ldap',
project_filter='(CN=DOES_NOT_MATCH)')
self.load_backends()
# NOTE(morganfainberg): CONF.ldap.project_filter will not be
# dynamically changed at runtime. This invalidate is a work-around for
# the expectation that it is safe to change config values in tests that
# could affect what the drivers would return up to the manager. This
# keeps that assumption valid when working with aggressive (on-create)
# cache population.
self.assignment_api.get_role.invalidate(self.assignment_api,
self.role_member['id'])
self.assignment_api.get_role(self.role_member['id'])
self.assignment_api.get_project.invalidate(self.assignment_api,
self.tenant_bar['id'])
self.assertRaises(exception.ProjectNotFound,
self.assignment_api.get_project,
self.tenant_bar['id'])
def test_role_filter(self):
role_ref = self.assignment_api.get_role(self.role_member['id'])
self.assertDictEqual(role_ref, self.role_member)
self.config_fixture.config(group='ldap',
role_filter='(CN=DOES_NOT_MATCH)')
self.load_backends()
# NOTE(morganfainberg): CONF.ldap.role_filter will not be
# dynamically changed at runtime. This invalidate is a work-around for
# the expectation that it is safe to change config values in tests that
# could affect what the drivers would return up to the manager. This
# keeps that assumption valid when working with aggressive (on-create)
# cache population.
self.assignment_api.get_role.invalidate(self.assignment_api,
self.role_member['id'])
self.assertRaises(exception.RoleNotFound,
self.assignment_api.get_role,
self.role_member['id'])
def test_dumb_member(self):
self.config_fixture.config(group='ldap', use_dumb_member=True)
self.clear_database()
self.load_backends()
self.load_fixtures(default_fixtures)
dumb_id = common_ldap.BaseLdap._dn_to_id(CONF.ldap.dumb_member)
self.assertRaises(exception.UserNotFound,
self.identity_api.get_user,
dumb_id)
def test_project_attribute_mapping(self):
self.config_fixture.config(
group='ldap', project_name_attribute='ou',
project_desc_attribute='description',
project_enabled_attribute='enabled')
self.clear_database()
self.load_backends()
self.load_fixtures(default_fixtures)
# NOTE(morganfainberg): CONF.ldap.project_name_attribute,
# CONF.ldap.project_desc_attribute, and
# CONF.ldap.project_enabled_attribute will not be
# dynamically changed at runtime. This invalidate is a work-around for
# the expectation that it is safe to change config values in tests that
# could affect what the drivers would return up to the manager. This
# keeps that assumption valid when working with aggressive (on-create)
# cache population.
self.assignment_api.get_project.invalidate(self.assignment_api,
self.tenant_baz['id'])
tenant_ref = self.assignment_api.get_project(self.tenant_baz['id'])
self.assertEqual(self.tenant_baz['id'], tenant_ref['id'])
self.assertEqual(self.tenant_baz['name'], tenant_ref['name'])
self.assertEqual(
self.tenant_baz['description'],
tenant_ref['description'])
self.assertEqual(self.tenant_baz['enabled'], tenant_ref['enabled'])
self.config_fixture.config(group='ldap',
project_name_attribute='description',
project_desc_attribute='ou')
self.load_backends()
# NOTE(morganfainberg): CONF.ldap.project_name_attribute,
# CONF.ldap.project_desc_attribute, and
# CONF.ldap.project_enabled_attribute will not be
# dynamically changed at runtime. This invalidate is a work-around for
# the expectation that it is safe to change config values in tests that
# could affect what the drivers would return up to the manager. This
# keeps that assumption valid when working with aggressive (on-create)
# cache population.
self.assignment_api.get_project.invalidate(self.assignment_api,
self.tenant_baz['id'])
tenant_ref = self.assignment_api.get_project(self.tenant_baz['id'])
self.assertEqual(self.tenant_baz['id'], tenant_ref['id'])
self.assertEqual(self.tenant_baz['description'], tenant_ref['name'])
self.assertEqual(self.tenant_baz['name'], tenant_ref['description'])
self.assertEqual(self.tenant_baz['enabled'], tenant_ref['enabled'])
def test_project_attribute_ignore(self):
self.config_fixture.config(
group='ldap',
project_attribute_ignore=['name', 'description', 'enabled'])
self.clear_database()
self.load_backends()
self.load_fixtures(default_fixtures)
# NOTE(morganfainberg): CONF.ldap.project_attribute_ignore will not be
# dynamically changed at runtime. This invalidate is a work-around for
# the expectation that it is safe to change config values in tests
# that could affect what the drivers would return up to the manager.
# This keeps that assumption valid when working with aggressive (on-create)
# cache population.
self.assignment_api.get_project.invalidate(self.assignment_api,
self.tenant_baz['id'])
tenant_ref = self.assignment_api.get_project(self.tenant_baz['id'])
self.assertEqual(self.tenant_baz['id'], tenant_ref['id'])
self.assertNotIn('name', tenant_ref)
self.assertNotIn('description', tenant_ref)
self.assertNotIn('enabled', tenant_ref)
def test_role_attribute_mapping(self):
self.config_fixture.config(group='ldap', role_name_attribute='ou')
self.clear_database()
self.load_backends()
self.load_fixtures(default_fixtures)
# NOTE(morganfainberg): CONF.ldap.role_name_attribute will not be
# dynamically changed at runtime. This invalidate is a work-around for
# the expectation that it is safe to change config values in tests that
# could affect what the drivers would return up to the manager. This
# keeps that assumption valid when working with aggressive (on-create)
# cache population.
self.assignment_api.get_role.invalidate(self.assignment_api,
self.role_member['id'])
role_ref = self.assignment_api.get_role(self.role_member['id'])
self.assertEqual(self.role_member['id'], role_ref['id'])
self.assertEqual(self.role_member['name'], role_ref['name'])
self.config_fixture.config(group='ldap', role_name_attribute='sn')
self.load_backends()
# NOTE(morganfainberg): CONF.ldap.role_name_attribute will not be
# dynamically changed at runtime. This invalidate is a work-around for
# the expectation that it is safe to change config values in tests that
# could affect what the drivers would return up to the manager. This
# keeps that assumption valid when working with aggressive (on-create)
# cache population.
self.assignment_api.get_role.invalidate(self.assignment_api,
self.role_member['id'])
role_ref = self.assignment_api.get_role(self.role_member['id'])
self.assertEqual(self.role_member['id'], role_ref['id'])
self.assertNotIn('name', role_ref)
def test_role_attribute_ignore(self):
self.config_fixture.config(group='ldap',
role_attribute_ignore=['name'])
self.clear_database()
self.load_backends()
self.load_fixtures(default_fixtures)
# NOTE(morganfainberg): CONF.ldap.role_attribute_ignore will not be
# dynamically changed at runtime. This invalidate is a work-around for
# the expectation that it is safe to change config values in tests that
# could affect what the drivers would return up to the manager. This
# keeps that assumption valid when working with aggressive (on-create)
# cache population.
self.assignment_api.get_role.invalidate(self.assignment_api,
self.role_member['id'])
role_ref = self.assignment_api.get_role(self.role_member['id'])
self.assertEqual(self.role_member['id'], role_ref['id'])
self.assertNotIn('name', role_ref)
def test_user_enable_attribute_mask(self):
self.config_fixture.config(group='ldap', user_enabled_mask=2,
user_enabled_default='512')
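# A worked sketch of the mask arithmetic exercised here: with
# user_enabled_default=512 and user_enabled_mask=2 (AD-style
# userAccountControl semantics), an enabled user stores 512 (mask bit
# clear) and a disabled user stores 512 | 2 = 514 (mask bit set); the
# enabled_vals assertions below check exactly these values.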
self.clear_database()
self.load_backends()
self.load_fixtures(default_fixtures)
user = {'name': u'fäké1', 'enabled': True,
'domain_id': CONF.identity.default_domain_id}
user_ref = self.identity_api.create_user(user)
# Use assertIs rather than assertTrue because assertIs will assert the
# value is a Boolean as expected.
self.assertIs(user_ref['enabled'], True)
self.assertNotIn('enabled_nomask', user_ref)
enabled_vals = self.get_user_enabled_vals(user_ref)
self.assertEqual([512], enabled_vals)
user_ref = self.identity_api.get_user(user_ref['id'])
self.assertIs(user_ref['enabled'], True)
self.assertNotIn('enabled_nomask', user_ref)
user['enabled'] = False
user_ref = self.identity_api.update_user(user_ref['id'], user)
self.assertIs(user_ref['enabled'], False)
self.assertNotIn('enabled_nomask', user_ref)
enabled_vals = self.get_user_enabled_vals(user_ref)
self.assertEqual([514], enabled_vals)
user_ref = self.identity_api.get_user(user_ref['id'])
self.assertIs(user_ref['enabled'], False)
self.assertNotIn('enabled_nomask', user_ref)
user['enabled'] = True
user_ref = self.identity_api.update_user(user_ref['id'], user)
self.assertIs(user_ref['enabled'], True)
self.assertNotIn('enabled_nomask', user_ref)
enabled_vals = self.get_user_enabled_vals(user_ref)
self.assertEqual([512], enabled_vals)
user_ref = self.identity_api.get_user(user_ref['id'])
self.assertIs(user_ref['enabled'], True)
self.assertNotIn('enabled_nomask', user_ref)
def test_user_enabled_invert(self):
self.config_fixture.config(group='ldap', user_enabled_invert=True,
user_enabled_default=False)
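# With user_enabled_invert=True the value stored in LDAP is the logical
# negation of the API-level flag: enabled=True is stored as False and
# enabled=False as True, and user_enabled_default applies to the stored
# (already inverted) value, as the enabled_vals assertions below show.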
self.clear_database()
self.load_backends()
self.load_fixtures(default_fixtures)
user1 = {'name': u'fäké1', 'enabled': True,
'domain_id': CONF.identity.default_domain_id}
user2 = {'name': u'fäké2', 'enabled': False,
'domain_id': CONF.identity.default_domain_id}
user3 = {'name': u'fäké3',
'domain_id': CONF.identity.default_domain_id}
# Ensure that the LDAP attribute is False for a newly created
# enabled user.
user_ref = self.identity_api.create_user(user1)
self.assertIs(True, user_ref['enabled'])
enabled_vals = self.get_user_enabled_vals(user_ref)
self.assertEqual([False], enabled_vals)
user_ref = self.identity_api.get_user(user_ref['id'])
self.assertIs(True, user_ref['enabled'])
# Ensure that the LDAP attribute is True for a disabled user.
user1['enabled'] = False
user_ref = self.identity_api.update_user(user_ref['id'], user1)
self.assertIs(False, user_ref['enabled'])
enabled_vals = self.get_user_enabled_vals(user_ref)
self.assertEqual([True], enabled_vals)
# Enable the user and ensure that the LDAP attribute is True again.
user1['enabled'] = True
user_ref = self.identity_api.update_user(user_ref['id'], user1)
self.assertIs(True, user_ref['enabled'])
enabled_vals = self.get_user_enabled_vals(user_ref)
self.assertEqual([False], enabled_vals)
# Ensure that the LDAP attribute is True for a newly created
# disabled user.
user_ref = self.identity_api.create_user(user2)
self.assertIs(False, user_ref['enabled'])
enabled_vals = self.get_user_enabled_vals(user_ref)
self.assertEqual([True], enabled_vals)
user_ref = self.identity_api.get_user(user_ref['id'])
self.assertIs(False, user_ref['enabled'])
# Ensure that the LDAP attribute is inverted for a newly created
# user when the user_enabled_default setting is used.
user_ref = self.identity_api.create_user(user3)
self.assertIs(True, user_ref['enabled'])
enabled_vals = self.get_user_enabled_vals(user_ref)
self.assertEqual([False], enabled_vals)
user_ref = self.identity_api.get_user(user_ref['id'])
self.assertIs(True, user_ref['enabled'])
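# _ldap_get() returns a (dn, attribute_dict) tuple for a single entry;
# the mocked return values in the tests below emulate that shape.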
@mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
def test_user_enabled_invert_no_enabled_value(self, mock_ldap_get):
self.config_fixture.config(group='ldap', user_enabled_invert=True,
user_enabled_default=False)
# Mock the search results to return an entry with
# no enabled value.
mock_ldap_get.return_value = (
'cn=junk,dc=example,dc=com',
{
'sn': [uuid.uuid4().hex],
'email': [uuid.uuid4().hex],
'cn': ['junk']
}
)
user_api = identity.backends.ldap.UserApi(CONF)
user_ref = user_api.get('junk')
# Ensure that the model enabled attribute is inverted
# from the resource default.
self.assertIs(not CONF.ldap.user_enabled_default, user_ref['enabled'])
@mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
def test_user_enabled_invert_default_str_value(self, mock_ldap_get):
self.config_fixture.config(group='ldap', user_enabled_invert=True,
user_enabled_default='False')
# Mock the search results to return an entry with
# no enabled value.
mock_ldap_get.return_value = (
'cn=junk,dc=example,dc=com',
{
'sn': [uuid.uuid4().hex],
'email': [uuid.uuid4().hex],
'cn': ['junk']
}
)
user_api = identity.backends.ldap.UserApi(CONF)
user_ref = user_api.get('junk')
# Ensure that the model enabled attribute is inverted
# from the resource default.
self.assertIs(True, user_ref['enabled'])
@mock.patch.object(common_ldap_core.KeystoneLDAPHandler, 'simple_bind_s')
def test_user_api_get_connection_no_user_password(self, mocked_method):
"""Don't bind in case the user and password are blank."""
# Ensure the username/password are in-fact blank
self.config_fixture.config(group='ldap', user=None, password=None)
user_api = identity.backends.ldap.UserApi(CONF)
user_api.get_connection(user=None, password=None)
self.assertFalse(mocked_method.called,
msg='`simple_bind_s` method was unexpectedly called')
@mock.patch.object(common_ldap_core.KeystoneLDAPHandler, 'connect')
def test_chase_referrals_off(self, mocked_fakeldap):
self.config_fixture.config(
group='ldap',
url='fake://memory',
chase_referrals=False)
user_api = identity.backends.ldap.UserApi(CONF)
user_api.get_connection(user=None, password=None)
# The last call_arg should be a dictionary and should contain
# chase_referrals. Check to make sure the value of chase_referrals
# is as expected.
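# (mock.call_args is an (args, kwargs) pair, so call_args[-1] is the
# kwargs dict that was passed to connect().)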
self.assertFalse(mocked_fakeldap.call_args[-1]['chase_referrals'])
@mock.patch.object(common_ldap_core.KeystoneLDAPHandler, 'connect')
def test_chase_referrals_on(self, mocked_fakeldap):
self.config_fixture.config(
group='ldap',
url='fake://memory',
chase_referrals=True)
user_api = identity.backends.ldap.UserApi(CONF)
user_api.get_connection(user=None, password=None)
# The last call_arg should be a dictionary and should contain
# chase_referrals. Check to make sure the value of chase_referrals
# is as expected.
self.assertTrue(mocked_fakeldap.call_args[-1]['chase_referrals'])
@mock.patch.object(common_ldap_core.KeystoneLDAPHandler, 'connect')
def test_debug_level_set(self, mocked_fakeldap):
level = 12345
self.config_fixture.config(
group='ldap',
url='fake://memory',
debug_level=level)
user_api = identity.backends.ldap.UserApi(CONF)
user_api.get_connection(user=None, password=None)
# The last call_arg should be a dictionary and should contain
# debug_level. Check to make sure the value of debug_level
# is as expected.
self.assertEqual(level, mocked_fakeldap.call_args[-1]['debug_level'])
def test_wrong_ldap_scope(self):
self.config_fixture.config(group='ldap', query_scope=uuid.uuid4().hex)
self.assertRaisesRegexp(
ValueError,
'Invalid LDAP scope: %s. *' % CONF.ldap.query_scope,
identity.backends.ldap.Identity)
def test_wrong_alias_dereferencing(self):
self.config_fixture.config(group='ldap',
alias_dereferencing=uuid.uuid4().hex)
self.assertRaisesRegexp(
ValueError,
'Invalid LDAP deref option: %s\.' % CONF.ldap.alias_dereferencing,
identity.backends.ldap.Identity)
def test_is_dumb_member(self):
self.config_fixture.config(group='ldap',
use_dumb_member=True)
self.load_backends()
dn = 'cn=dumb,dc=nonexistent'
self.assertTrue(self.identity_api.driver.user._is_dumb_member(dn))
def test_is_dumb_member_upper_case_keys(self):
self.config_fixture.config(group='ldap',
use_dumb_member=True)
self.load_backends()
dn = 'CN=dumb,DC=nonexistent'
self.assertTrue(self.identity_api.driver.user._is_dumb_member(dn))
def test_is_dumb_member_with_false_use_dumb_member(self):
self.config_fixture.config(group='ldap',
use_dumb_member=False)
self.load_backends()
dn = 'cn=dumb,dc=nonexistent'
self.assertFalse(self.identity_api.driver.user._is_dumb_member(dn))
def test_is_dumb_member_not_dumb(self):
self.config_fixture.config(group='ldap',
use_dumb_member=True)
self.load_backends()
dn = 'ou=some,dc=example.com'
self.assertFalse(self.identity_api.driver.user._is_dumb_member(dn))
def test_user_extra_attribute_mapping(self):
self.config_fixture.config(
group='ldap',
user_additional_attribute_mapping=['description:name'])
self.load_backends()
user = {
'name': 'EXTRA_ATTRIBUTES',
'password': 'extra',
'domain_id': CONF.identity.default_domain_id
}
user = self.identity_api.create_user(user)
dn, attrs = self.identity_api.driver.user._ldap_get(user['id'])
self.assertThat([user['name']], matchers.Equals(attrs['description']))
def test_user_extra_attribute_mapping_description_is_returned(self):
# Given a mapping like description:description, the description is
# returned.
self.config_fixture.config(
group='ldap',
user_additional_attribute_mapping=['description:description'])
self.load_backends()
description = uuid.uuid4().hex
user = {
'name': uuid.uuid4().hex,
'description': description,
'password': uuid.uuid4().hex,
'domain_id': CONF.identity.default_domain_id
}
user = self.identity_api.create_user(user)
res = self.identity_api.driver.user.get_all()
new_user = [u for u in res if u['id'] == user['id']][0]
self.assertThat(new_user['description'], matchers.Equals(description))
@mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
def test_user_mixed_case_attribute(self, mock_ldap_get):
# Mock the search results to return attribute names
# with unexpected case.
mock_ldap_get.return_value = (
'cn=junk,dc=example,dc=com',
{
'sN': [uuid.uuid4().hex],
'MaIl': [uuid.uuid4().hex],
'cn': ['junk']
}
)
user = self.identity_api.get_user('junk')
self.assertEqual(mock_ldap_get.return_value[1]['sN'][0],
user['name'])
self.assertEqual(mock_ldap_get.return_value[1]['MaIl'][0],
user['email'])
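# A hedged reading of the parsing rule exercised below: each option is
# an '<ldap_attr>:<keystone_attr>' pair; entries without a colon
# ('invalid1') or with more than one colon ('description:name:something')
# are dropped, while a trailing colon ('invalid2:') maps to ''.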
def test_parse_extra_attribute_mapping(self):
option_list = ['description:name', 'gecos:password',
'fake:invalid', 'invalid1', 'invalid2:',
'description:name:something']
mapping = self.identity_api.driver.user._parse_extra_attrs(option_list)
expected_dict = {'description': 'name', 'gecos': 'password',
'fake': 'invalid', 'invalid2': ''}
self.assertDictEqual(expected_dict, mapping)
# TODO(henry-nash): These need to be removed when the full LDAP implementation
# is submitted - see Bugs 1092187, 1101287, 1101276, 1101289
def test_domain_crud(self):
domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
'enabled': True, 'description': uuid.uuid4().hex}
self.assertRaises(exception.Forbidden,
self.assignment_api.create_domain,
domain['id'],
domain)
self.assertRaises(exception.Conflict,
self.assignment_api.create_domain,
CONF.identity.default_domain_id,
domain)
self.assertRaises(exception.DomainNotFound,
self.assignment_api.get_domain,
domain['id'])
domain['description'] = uuid.uuid4().hex
self.assertRaises(exception.DomainNotFound,
self.assignment_api.update_domain,
domain['id'],
domain)
self.assertRaises(exception.Forbidden,
self.assignment_api.update_domain,
CONF.identity.default_domain_id,
domain)
self.assertRaises(exception.DomainNotFound,
self.assignment_api.get_domain,
domain['id'])
self.assertRaises(exception.DomainNotFound,
self.assignment_api.delete_domain,
domain['id'])
self.assertRaises(exception.Forbidden,
self.assignment_api.delete_domain,
CONF.identity.default_domain_id)
self.assertRaises(exception.DomainNotFound,
self.assignment_api.get_domain,
domain['id'])
@tests.skip_if_no_multiple_domains_support
def test_create_domain_case_sensitivity(self):
# domains are read-only, so case sensitivity isn't an issue
ref = {
'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex}
self.assertRaises(exception.Forbidden,
self.assignment_api.create_domain,
ref['id'],
ref)
def test_cache_layer_domain_crud(self):
# TODO(morganfainberg): This also needs to be removed when full LDAP
# implementation is submitted. No need to duplicate the above test,
# just skip this time.
self.skipTest('Domains are read-only against LDAP')
def test_domain_rename_invalidates_get_domain_by_name_cache(self):
parent = super(LDAPIdentity, self)
self.assertRaises(
exception.Forbidden,
parent.test_domain_rename_invalidates_get_domain_by_name_cache)
def test_project_rename_invalidates_get_project_by_name_cache(self):
parent = super(LDAPIdentity, self)
self.assertRaises(
exception.Forbidden,
parent.test_project_rename_invalidates_get_project_by_name_cache)
def test_project_crud(self):
# NOTE(topol): LDAP implementation does not currently support the
# updating of a project name so this method override
# provides a different update test
project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
'domain_id': CONF.identity.default_domain_id,
'description': uuid.uuid4().hex, 'enabled': True
}
self.assignment_api.create_project(project['id'], project)
project_ref = self.assignment_api.get_project(project['id'])
self.assertDictEqual(project_ref, project)
project['description'] = uuid.uuid4().hex
self.assignment_api.update_project(project['id'], project)
project_ref = self.assignment_api.get_project(project['id'])
self.assertDictEqual(project_ref, project)
self.assignment_api.delete_project(project['id'])
self.assertRaises(exception.ProjectNotFound,
self.assignment_api.get_project,
project['id'])
@tests.skip_if_cache_disabled('assignment')
def test_cache_layer_project_crud(self):
# NOTE(morganfainberg): LDAP implementation does not currently support
# updating project names. This method override provides a different
# update test.
project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
'domain_id': CONF.identity.default_domain_id,
'description': uuid.uuid4().hex}
project_id = project['id']
# Create a project
self.assignment_api.create_project(project_id, project)
self.assignment_api.get_project(project_id)
updated_project = copy.deepcopy(project)
updated_project['description'] = uuid.uuid4().hex
# Update project, bypassing assignment_api manager
self.assignment_api.driver.update_project(project_id,
updated_project)
# Verify get_project still returns the original project_ref
self.assertDictContainsSubset(
project, self.assignment_api.get_project(project_id))
# Invalidate cache
self.assignment_api.get_project.invalidate(self.assignment_api,
project_id)
# Verify get_project now returns the new project
self.assertDictContainsSubset(
updated_project,
self.assignment_api.get_project(project_id))
# Update project using the assignment_api manager back to original
self.assignment_api.update_project(project['id'], project)
# Verify get_project returns the original project_ref
self.assertDictContainsSubset(
project, self.assignment_api.get_project(project_id))
# Delete project bypassing assignment_api
self.assignment_api.driver.delete_project(project_id)
# Verify get_project still returns the project_ref
self.assertDictContainsSubset(
project, self.assignment_api.get_project(project_id))
# Invalidate cache
self.assignment_api.get_project.invalidate(self.assignment_api,
project_id)
# Verify ProjectNotFound now raised
self.assertRaises(exception.ProjectNotFound,
self.assignment_api.get_project,
project_id)
# recreate project
self.assignment_api.create_project(project_id, project)
self.assignment_api.get_project(project_id)
# delete project
self.assignment_api.delete_project(project_id)
# Verify ProjectNotFound is raised
self.assertRaises(exception.ProjectNotFound,
self.assignment_api.get_project,
project_id)
def test_multi_role_grant_by_user_group_on_project_domain(self):
# This is a partial implementation of the standard test that
# is defined in test_backend.py. It omits both domain and
# group grants, since neither of these is yet supported by
# the LDAP backend.
role_list = []
for _ in range(2):
role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
self.assignment_api.create_role(role['id'], role)
role_list.append(role)
user1 = {'name': uuid.uuid4().hex,
'domain_id': CONF.identity.default_domain_id,
'password': uuid.uuid4().hex,
'enabled': True}
user1 = self.identity_api.create_user(user1)
project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
'domain_id': CONF.identity.default_domain_id}
self.assignment_api.create_project(project1['id'], project1)
self.assignment_api.add_role_to_user_and_project(
user_id=user1['id'],
tenant_id=project1['id'],
role_id=role_list[0]['id'])
self.assignment_api.add_role_to_user_and_project(
user_id=user1['id'],
tenant_id=project1['id'],
role_id=role_list[1]['id'])
# Although list_grants is not yet supported, we can test the
# alternate way of getting back lists of grants, where user
# and group roles are combined. Only directly assigned user
# roles are available, since group grants are not yet supported.
combined_list = self.assignment_api.get_roles_for_user_and_project(
user1['id'],
project1['id'])
self.assertEqual(2, len(combined_list))
self.assertIn(role_list[0]['id'], combined_list)
self.assertIn(role_list[1]['id'], combined_list)
# Finally, although domain roles are not implemented, check we can
# issue the combined get roles call with benign results, since this is
# used in token generation.
combined_role_list = self.assignment_api.get_roles_for_user_and_domain(
user1['id'], CONF.identity.default_domain_id)
self.assertEqual(0, len(combined_role_list))
def test_list_projects_for_alternate_domain(self):
self.skipTest(
'N/A: LDAP does not support multiple domains')
def test_get_default_domain_by_name(self):
domain = self._get_domain_fixture()
domain_ref = self.assignment_api.get_domain_by_name(domain['name'])
self.assertEqual(domain_ref, domain)
def test_base_ldap_connection_deref_option(self):
def get_conn(deref_name):
self.config_fixture.config(group='ldap',
alias_dereferencing=deref_name)
base_ldap = common_ldap.BaseLdap(CONF)
return base_ldap.get_connection()
conn = get_conn('default')
self.assertEqual(ldap.get_option(ldap.OPT_DEREF),
conn.get_option(ldap.OPT_DEREF))
conn = get_conn('always')
self.assertEqual(ldap.DEREF_ALWAYS,
conn.get_option(ldap.OPT_DEREF))
conn = get_conn('finding')
self.assertEqual(ldap.DEREF_FINDING,
conn.get_option(ldap.OPT_DEREF))
conn = get_conn('never')
self.assertEqual(ldap.DEREF_NEVER,
conn.get_option(ldap.OPT_DEREF))
conn = get_conn('searching')
self.assertEqual(ldap.DEREF_SEARCHING,
conn.get_option(ldap.OPT_DEREF))
def test_list_users_no_dn(self):
users = self.identity_api.list_users()
self.assertEqual(len(default_fixtures.USERS), len(users))
user_ids = set(user['id'] for user in users)
expected_user_ids = set(getattr(self, 'user_%s' % user['id'])['id']
for user in default_fixtures.USERS)
for user_ref in users:
self.assertNotIn('dn', user_ref)
self.assertEqual(expected_user_ids, user_ids)
def test_list_groups_no_dn(self):
# Create some test groups.
domain = self._get_domain_fixture()
expected_group_ids = []
numgroups = 3
for _ in range(numgroups):
group = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
group = self.identity_api.create_group(group)
expected_group_ids.append(group['id'])
# Fetch the test groups and ensure that they don't contain a dn.
groups = self.identity_api.list_groups()
self.assertEqual(numgroups, len(groups))
group_ids = set(group['id'] for group in groups)
for group_ref in groups:
self.assertNotIn('dn', group_ref)
self.assertEqual(set(expected_group_ids), group_ids)
def test_list_groups_for_user_no_dn(self):
# Create a test user.
user = {'name': uuid.uuid4().hex,
'domain_id': CONF.identity.default_domain_id,
'password': uuid.uuid4().hex, 'enabled': True}
user = self.identity_api.create_user(user)
# Create some test groups and add the test user as a member.
domain = self._get_domain_fixture()
expected_group_ids = []
numgroups = 3
for _ in range(numgroups):
group = {'name': uuid.uuid4().hex, 'domain_id': domain['id']}
group = self.identity_api.create_group(group)
expected_group_ids.append(group['id'])
self.identity_api.add_user_to_group(user['id'], group['id'])
# Fetch the groups for the test user
# and ensure they don't contain a dn.
groups = self.identity_api.list_groups_for_user(user['id'])
self.assertEqual(numgroups, len(groups))
group_ids = set(group['id'] for group in groups)
for group_ref in groups:
self.assertNotIn('dn', group_ref)
self.assertEqual(set(expected_group_ids), group_ids)
def test_user_id_attribute_in_create(self):
conf = self.get_config(CONF.identity.default_domain_id)
conf.ldap.user_id_attribute = 'mail'
self.reload_backends(CONF.identity.default_domain_id)
user = {'name': u'fäké1',
'password': u'fäképass1',
'domain_id': CONF.identity.default_domain_id}
user = self.identity_api.create_user(user)
user_ref = self.identity_api.get_user(user['id'])
# The 'email' attribute should've been created because it is also
# being used as the user_id.
self.assertEqual(user_ref['id'], user_ref['email'])
def test_user_id_attribute_map(self):
conf = self.get_config(CONF.identity.default_domain_id)
conf.ldap.user_id_attribute = 'mail'
self.reload_backends(CONF.identity.default_domain_id)
user_ref = self.identity_api.get_user(self.user_foo['email'])
# the user_id_attribute map should be honored, which means
# user_ref['id'] should contain the email attribute
self.assertEqual(self.user_foo['email'], user_ref['id'])
@mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
def test_get_id_from_dn_for_multivalued_attribute_id(self, mock_ldap_get):
conf = self.get_config(CONF.identity.default_domain_id)
conf.ldap.user_id_attribute = 'mail'
self.reload_backends(CONF.identity.default_domain_id)
# make 'email' multivalued so we can test the error condition
email1 = uuid.uuid4().hex
email2 = uuid.uuid4().hex
mock_ldap_get.return_value = (
'cn=nobodycares,dc=example,dc=com',
{
'sn': [uuid.uuid4().hex],
'mail': [email1, email2],
'cn': 'nobodycares'
}
)
user_ref = self.identity_api.get_user(email1)
# make sure we get the ID from DN (old behavior) if the ID attribute
# has multiple values
self.assertEqual('nobodycares', user_ref['id'])
@mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
def test_id_attribute_not_found(self, mock_ldap_get):
mock_ldap_get.return_value = (
'cn=nobodycares,dc=example,dc=com',
{
'sn': [uuid.uuid4().hex],
}
)
user_api = identity.backends.ldap.UserApi(CONF)
self.assertRaises(exception.NotFound,
user_api.get,
'nobodycares')
@mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
def test_user_id_not_in_dn(self, mock_ldap_get):
conf = self.get_config(CONF.identity.default_domain_id)
conf.ldap.user_id_attribute = 'uid'
conf.ldap.user_name_attribute = 'cn'
self.reload_backends(CONF.identity.default_domain_id)
mock_ldap_get.return_value = (
'foo=bar,dc=example,dc=com',
{
'sn': [uuid.uuid4().hex],
'foo': ['bar'],
'cn': ['junk'],
'uid': ['crap']
}
)
user_ref = self.identity_api.get_user('crap')
self.assertEqual('crap', user_ref['id'])
self.assertEqual('junk', user_ref['name'])
@mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get')
def test_user_name_in_dn(self, mock_ldap_get):
conf = self.get_config(CONF.identity.default_domain_id)
conf.ldap.user_id_attribute = 'sAMAccountName'
conf.ldap.user_name_attribute = 'cn'
self.reload_backends(CONF.identity.default_domain_id)
mock_ldap_get.return_value = (
'cn=Foo Bar,dc=example,dc=com',
{
'sn': [uuid.uuid4().hex],
'cn': ['Foo Bar'],
'SAMAccountName': ['crap']
}
)
user_ref = self.identity_api.get_user('crap')
self.assertEqual('crap', user_ref['id'])
self.assertEqual('Foo Bar', user_ref['name'])
class LDAPIdentityEnabledEmulation(LDAPIdentity):
def setUp(self):
super(LDAPIdentityEnabledEmulation, self).setUp()
self.clear_database()
self.load_backends()
self.load_fixtures(default_fixtures)
for obj in [self.tenant_bar, self.tenant_baz, self.user_foo,
self.user_two, self.user_badguy]:
obj.setdefault('enabled', True)
def config_files(self):
config_files = super(LDAPIdentityEnabledEmulation, self).config_files()
config_files.append(tests.dirs.tests_conf('backend_ldap.conf'))
return config_files
def config_overrides(self):
super(LDAPIdentityEnabledEmulation, self).config_overrides()
self.config_fixture.config(group='ldap',
user_enabled_emulation=True,
project_enabled_emulation=True)
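# A hedged note on enabled emulation: in this mode the backend stores
# no real enabled attribute; membership in a dedicated "enabled" group
# (rooted at the DN set via user_enabled_emulation_dn) stands in for
# enabled=True.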
def test_project_crud(self):
# NOTE(topol): LDAPIdentityEnabledEmulation will create an
# enabled key in the project dictionary so this
# method override handles this side-effect
project = {
'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
'domain_id': CONF.identity.default_domain_id,
'description': uuid.uuid4().hex}
self.assignment_api.create_project(project['id'], project)
project_ref = self.assignment_api.get_project(project['id'])
# self.assignment_api.create_project adds an enabled
# key with a value of True when LDAPIdentityEnabledEmulation
# is used so we now add this expected key to the project dictionary
project['enabled'] = True
self.assertDictEqual(project_ref, project)
project['description'] = uuid.uuid4().hex
self.assignment_api.update_project(project['id'], project)
project_ref = self.assignment_api.get_project(project['id'])
self.assertDictEqual(project_ref, project)
self.assignment_api.delete_project(project['id'])
self.assertRaises(exception.ProjectNotFound,
self.assignment_api.get_project,
project['id'])
def test_user_crud(self):
user_dict = {
'domain_id': CONF.identity.default_domain_id,
'name': uuid.uuid4().hex,
'password': uuid.uuid4().hex}
user = self.identity_api.create_user(user_dict)
user_dict['enabled'] = True
user_ref = self.identity_api.get_user(user['id'])
del user_dict['password']
user_ref_dict = dict((x, user_ref[x]) for x in user_ref)
self.assertDictContainsSubset(user_dict, user_ref_dict)
user_dict['password'] = uuid.uuid4().hex
self.identity_api.update_user(user['id'], user)
user_ref = self.identity_api.get_user(user['id'])
del user_dict['password']
user_ref_dict = dict((x, user_ref[x]) for x in user_ref)
self.assertDictContainsSubset(user_dict, user_ref_dict)
self.identity_api.delete_user(user['id'])
self.assertRaises(exception.UserNotFound,
self.identity_api.get_user,
user['id'])
def test_user_auth_emulated(self):
self.config_fixture.config(group='ldap',
user_enabled_emulation_dn='cn=test,dc=test')
self.reload_backends(CONF.identity.default_domain_id)
self.identity_api.authenticate(
context={},
user_id=self.user_foo['id'],
password=self.user_foo['password'])
def test_user_enable_attribute_mask(self):
self.skipTest(
"Enabled emulation conflicts with enabled mask")
def test_user_enabled_invert(self):
self.config_fixture.config(group='ldap', user_enabled_invert=True,
user_enabled_default=False)
self.clear_database()
self.load_backends()
self.load_fixtures(default_fixtures)
user1 = {'name': u'fäké1', 'enabled': True,
'domain_id': CONF.identity.default_domain_id}
user2 = {'name': u'fäké2', 'enabled': False,
'domain_id': CONF.identity.default_domain_id}
user3 = {'name': u'fäké3',
'domain_id': CONF.identity.default_domain_id}
# Ensure that the enabled LDAP attribute is not set for a
# newly created enabled user.
user_ref = self.identity_api.create_user(user1)
self.assertIs(True, user_ref['enabled'])
self.assertIsNone(self.get_user_enabled_vals(user_ref))
user_ref = self.identity_api.get_user(user_ref['id'])
self.assertIs(True, user_ref['enabled'])
# Ensure that an enabled LDAP attribute is not set for a disabled user.
user1['enabled'] = False
user_ref = self.identity_api.update_user(user_ref['id'], user1)
self.assertIs(False, user_ref['enabled'])
self.assertIsNone(self.get_user_enabled_vals(user_ref))
# Enable the user and ensure that the LDAP enabled
# attribute is not set.
user1['enabled'] = True
user_ref = self.identity_api.update_user(user_ref['id'], user1)
self.assertIs(True, user_ref['enabled'])
self.assertIsNone(self.get_user_enabled_vals(user_ref))
# Ensure that the LDAP enabled attribute is not set for a
# newly created disabled user.
user_ref = self.identity_api.create_user(user2)
self.assertIs(False, user_ref['enabled'])
self.assertIsNone(self.get_user_enabled_vals(user_ref))
user_ref = self.identity_api.get_user(user_ref['id'])
self.assertIs(False, user_ref['enabled'])
# Ensure that the LDAP enabled attribute is not set for a newly created
# user when the user_enabled_default setting is used.
user_ref = self.identity_api.create_user(user3)
self.assertIs(True, user_ref['enabled'])
self.assertIsNone(self.get_user_enabled_vals(user_ref))
user_ref = self.identity_api.get_user(user_ref['id'])
self.assertIs(True, user_ref['enabled'])
def test_user_enabled_invert_no_enabled_value(self):
self.skipTest(
"N/A: Covered by test_user_enabled_invert")
def test_user_enabled_invert_default_str_value(self):
self.skipTest(
"N/A: Covered by test_user_enabled_invert")
class LdapIdentitySqlAssignment(BaseLDAPIdentity, tests.SQLDriverOverrides,
tests.TestCase):
def config_files(self):
config_files = super(LdapIdentitySqlAssignment, self).config_files()
config_files.append(tests.dirs.tests_conf('backend_ldap_sql.conf'))
return config_files
def setUp(self):
self.useFixture(database.Database())
super(LdapIdentitySqlAssignment, self).setUp()
self.clear_database()
self.load_backends()
cache.configure_cache_region(cache.REGION)
self.engine = sql.get_engine()
self.addCleanup(sql.cleanup)
sql.ModelBase.metadata.create_all(bind=self.engine)
self.addCleanup(sql.ModelBase.metadata.drop_all, bind=self.engine)
self.load_fixtures(default_fixtures)
# defaulted by the data load
self.user_foo['enabled'] = True
def config_overrides(self):
super(LdapIdentitySqlAssignment, self).config_overrides()
self.config_fixture.config(
group='identity',
driver='keystone.identity.backends.ldap.Identity')
self.config_fixture.config(
group='assignment',
driver='keystone.assignment.backends.sql.Assignment')
def test_domain_crud(self):
pass
def test_list_domains(self):
domains = self.assignment_api.list_domains()
self.assertEqual([assignment.calc_default_domain()], domains)
def test_list_domains_non_default_domain_id(self):
# If the default_domain_id is changed, the ID of the default domain
# returned by list_domains doesn't change, because the SQL assignment
# backend reads it from the database, which doesn't get updated by a
# config change.
orig_default_domain_id = CONF.identity.default_domain_id
new_domain_id = uuid.uuid4().hex
self.config_fixture.config(group='identity',
default_domain_id=new_domain_id)
domains = self.assignment_api.list_domains()
self.assertEqual(orig_default_domain_id, domains[0]['id'])
def test_create_domain(self):
domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
'enabled': True}
self.assertRaises(exception.Forbidden,
self.assignment_api.create_domain,
domain['id'],
domain)
def test_get_and_remove_role_grant_by_group_and_domain(self):
# TODO(henry-nash): We should really rewrite the tests in test_backend
# to be more flexible as to where the domains are sourced from, so
# that we would not need to override such tests here. This is raised
# as bug 1373865.
new_domain = self._get_domain_fixture()
new_group = {'domain_id': new_domain['id'], 'name': uuid.uuid4().hex}
new_group = self.identity_api.create_group(new_group)
new_user = {'name': 'new_user', 'password': uuid.uuid4().hex,
'enabled': True, 'domain_id': new_domain['id']}
new_user = self.identity_api.create_user(new_user)
self.identity_api.add_user_to_group(new_user['id'],
new_group['id'])
roles_ref = self.assignment_api.list_grants(
group_id=new_group['id'],
domain_id=new_domain['id'])
self.assertEqual(0, len(roles_ref))
self.assignment_api.create_grant(group_id=new_group['id'],
domain_id=new_domain['id'],
role_id='member')
roles_ref = self.assignment_api.list_grants(
group_id=new_group['id'],
domain_id=new_domain['id'])
self.assertDictEqual(roles_ref[0], self.role_member)
self.assignment_api.delete_grant(group_id=new_group['id'],
domain_id=new_domain['id'],
role_id='member')
roles_ref = self.assignment_api.list_grants(
group_id=new_group['id'],
domain_id=new_domain['id'])
self.assertEqual(0, len(roles_ref))
self.assertRaises(exception.NotFound,
self.assignment_api.delete_grant,
group_id=new_group['id'],
domain_id=new_domain['id'],
role_id='member')
def test_project_enabled_ignored_disable_error(self):
# Override
self.skipTest("Doesn't apply since LDAP configuration is ignored for "
"SQL assignment backend.")
class LdapIdentitySqlAssignmentWithMapping(LdapIdentitySqlAssignment):
"""Class to test mapping of default LDAP backend.
The default configuration is not to enable mapping when using a single
backend LDAP driver. However, a cloud provider might want to enable
the mapping, hence hiding the LDAP IDs from any clients of keystone.
Setting backward_compatible_ids to False will enable this mapping.
"""
def config_overrides(self):
super(LdapIdentitySqlAssignmentWithMapping, self).config_overrides()
self.config_fixture.config(group='identity_mapping',
backward_compatible_ids=False)
def test_dynamic_mapping_build(self):
"""Test to ensure entities not create via controller are mapped.
Many LDAP backends will, essentially, by Read Only. In these cases
the mapping is not built by creating objects, rather from enumerating
the entries. We test this here my manually deleting the mapping and
then trying to re-read the entries.
"""
initial_mappings = len(mapping_sql.list_id_mappings())
user1 = {'name': uuid.uuid4().hex,
'domain_id': CONF.identity.default_domain_id,
'password': uuid.uuid4().hex, 'enabled': True}
user1 = self.identity_api.create_user(user1)
user2 = {'name': uuid.uuid4().hex,
'domain_id': CONF.identity.default_domain_id,
'password': uuid.uuid4().hex, 'enabled': True}
user2 = self.identity_api.create_user(user2)
mappings = mapping_sql.list_id_mappings()
self.assertEqual(initial_mappings + 2, len(mappings))
# Now delete the mappings for the two users above
self.id_mapping_api.purge_mappings({'public_id': user1['id']})
self.id_mapping_api.purge_mappings({'public_id': user2['id']})
# We should no longer be able to get these users via their old IDs
self.assertRaises(exception.UserNotFound,
self.identity_api.get_user,
user1['id'])
self.assertRaises(exception.UserNotFound,
self.identity_api.get_user,
user2['id'])
# Now enumerate all users...this should re-build the mapping, and
# we should be able to find the users via their original public IDs.
self.identity_api.list_users()
self.identity_api.get_user(user1['id'])
self.identity_api.get_user(user2['id'])
def test_get_roles_for_user_and_project_user_group_same_id(self):
self.skipTest('N/A: We never generate the same ID for a user and '
'group in our mapping table')
class BaseMultiLDAPandSQLIdentity(object):
"""Mixin class with support methods for domain-specific config testing."""
def create_user(self, domain_id):
user = {'name': uuid.uuid4().hex,
'domain_id': domain_id,
'password': uuid.uuid4().hex,
'enabled': True}
user_ref = self.identity_api.create_user(user)
# Put the password back in, since this is used later by tests to
# authenticate.
user_ref['password'] = user['password']
return user_ref
def create_users_across_domains(self):
"""Create a set of users, each with a role on their own domain."""
# We will also check that the right number of ID mappings get created.
initial_mappings = len(mapping_sql.list_id_mappings())
self.users['user0'] = self.create_user(
self.domains['domain_default']['id'])
self.assignment_api.create_grant(
user_id=self.users['user0']['id'],
domain_id=self.domains['domain_default']['id'],
role_id=self.role_member['id'])
for x in range(1, self.domain_count):
self.users['user%s' % x] = self.create_user(
self.domains['domain%s' % x]['id'])
self.assignment_api.create_grant(
user_id=self.users['user%s' % x]['id'],
domain_id=self.domains['domain%s' % x]['id'],
role_id=self.role_member['id'])
# So how many new id mappings should have been created? One for each
# user created in a domain that is using the non-default driver.
self.assertEqual(initial_mappings + self.domain_specific_count,
len(mapping_sql.list_id_mappings()))
def check_user(self, user, domain_id, expected_status):
"""Check user is in correct backend.
As part of the tests, we want to force ourselves to manually
select the driver for a given domain, to make sure the entity
ended up in the correct backend.
"""
driver = self.identity_api._select_identity_driver(domain_id)
unused, unused, entity_id = (
self.identity_api._get_domain_driver_and_entity_id(
user['id']))
if expected_status == 200:
ref = driver.get_user(entity_id)
ref = self.identity_api._set_domain_id_and_mapping(
ref, domain_id, driver, mapping.EntityType.USER)
user = user.copy()
del user['password']
self.assertDictEqual(ref, user)
else:
# TODO(henry-nash): Use assertRaises here, although
# there appears to be an issue with using driver.get_user
# inside that construct
try:
driver.get_user(entity_id)
except expected_status:
pass
def setup_initial_domains(self):
def create_domain(domain):
try:
ref = self.assignment_api.create_domain(
domain['id'], domain)
except exception.Conflict:
ref = (
self.assignment_api.get_domain_by_name(domain['name']))
return ref
self.domains = {}
for x in range(1, self.domain_count):
domain = 'domain%s' % x
self.domains[domain] = create_domain(
{'id': uuid.uuid4().hex, 'name': domain})
self.domains['domain_default'] = create_domain(
assignment.calc_default_domain())
def test_authenticate_to_each_domain(self):
"""Test that a user in each domain can authenticate."""
for user_num in range(self.domain_count):
user = 'user%s' % user_num
self.identity_api.authenticate(
context={},
user_id=self.users[user]['id'],
password=self.users[user]['password'])
class MultiLDAPandSQLIdentity(BaseLDAPIdentity, tests.SQLDriverOverrides,
tests.TestCase, BaseMultiLDAPandSQLIdentity):
"""Class to test common SQL plus individual LDAP backends.
We define a set of domains and domain-specific backends:
- A separate LDAP backend for the default domain
- A separate LDAP backend for domain1
- domain2 shares the same LDAP as domain1, but uses a different
tree attach point
- An SQL backend for all other domains (which will include domain3
and domain4)
Normally one would expect that the default domain would be handled as
part of the "other domains" - however the above provides better
test coverage since most of the existing backend tests use the default
domain.
"""
def setUp(self):
self.useFixture(database.Database())
super(MultiLDAPandSQLIdentity, self).setUp()
self.load_backends()
self.engine = sql.get_engine()
self.addCleanup(sql.cleanup)
sql.ModelBase.metadata.create_all(bind=self.engine)
self.addCleanup(sql.ModelBase.metadata.drop_all, bind=self.engine)
self.domain_count = 5
self.domain_specific_count = 3
self.setup_initial_domains()
self._setup_initial_users()
# All initial test data setup complete, time to switch on support
# for separate backends per domain.
self.config_fixture.config(
group='identity', domain_specific_drivers_enabled=True,
domain_config_dir=tests.TESTCONF + '/domain_configs_multi_ldap')
self.config_fixture.config(group='identity_mapping',
backward_compatible_ids=False)
self.clear_database()
self.load_fixtures(default_fixtures)
self.create_users_across_domains()
def config_overrides(self):
super(MultiLDAPandSQLIdentity, self).config_overrides()
# Make sure identity and assignment are actually SQL drivers,
# BaseLDAPIdentity sets these options to use LDAP.
self.config_fixture.config(
group='identity',
driver='keystone.identity.backends.sql.Identity')
self.config_fixture.config(
group='assignment',
driver='keystone.assignment.backends.sql.Assignment')
def _setup_initial_users(self):
# Create some identity entities BEFORE we switch to multi-backend, so
# we can test that these are still accessible
self.users = {}
self.users['userA'] = self.create_user(
self.domains['domain_default']['id'])
self.users['userB'] = self.create_user(
self.domains['domain1']['id'])
self.users['userC'] = self.create_user(
self.domains['domain3']['id'])
def reload_backends(self, domain_id):
        # Just reload the driver for this domain - which will pick up
        # any updated cfg
self.identity_api.domain_configs.reload_domain_driver(
self.identity_api.assignment_api, domain_id)
def get_config(self, domain_id):
        # Get the config for this domain; this will return CONF
        # if no specific config is defined for this domain
return self.identity_api.domain_configs.get_domain_conf(domain_id)
def test_list_domains(self):
self.skipTest(
'N/A: Not relevant for multi ldap testing')
def test_list_domains_non_default_domain_id(self):
self.skipTest(
'N/A: Not relevant for multi ldap testing')
def test_list_users(self):
# Override the standard list users, since we have added an extra user
# to the default domain, so the number of expected users is one more
# than in the standard test.
users = self.identity_api.list_users(
domain_scope=self._set_domain_scope(
CONF.identity.default_domain_id))
self.assertEqual(len(default_fixtures.USERS) + 1, len(users))
user_ids = set(user['id'] for user in users)
expected_user_ids = set(getattr(self, 'user_%s' % user['id'])['id']
for user in default_fixtures.USERS)
expected_user_ids.add(self.users['user0']['id'])
for user_ref in users:
self.assertNotIn('password', user_ref)
self.assertEqual(expected_user_ids, user_ids)
def test_domain_segregation(self):
"""Test that separate configs have segregated the domain.
Test Plan:
- Users were created in each domain as part of setup, now make sure
you can only find a given user in its relevant domain/backend
- Make sure that for a backend that supports multiple domains
you can get the users via any of its domains
"""
# Check that I can read a user with the appropriate domain-selected
# driver, but won't find it via any other domain driver
check_user = self.check_user
check_user(self.users['user0'],
self.domains['domain_default']['id'], 200)
for domain in [self.domains['domain1']['id'],
self.domains['domain2']['id'],
self.domains['domain3']['id'],
self.domains['domain4']['id']]:
check_user(self.users['user0'], domain, exception.UserNotFound)
check_user(self.users['user1'], self.domains['domain1']['id'], 200)
for domain in [self.domains['domain_default']['id'],
self.domains['domain2']['id'],
self.domains['domain3']['id'],
self.domains['domain4']['id']]:
check_user(self.users['user1'], domain, exception.UserNotFound)
check_user(self.users['user2'], self.domains['domain2']['id'], 200)
for domain in [self.domains['domain_default']['id'],
self.domains['domain1']['id'],
self.domains['domain3']['id'],
self.domains['domain4']['id']]:
check_user(self.users['user2'], domain, exception.UserNotFound)
# domain3 and domain4 share the same backend, so you should be
# able to see user3 and user4 from either.
check_user(self.users['user3'], self.domains['domain3']['id'], 200)
check_user(self.users['user3'], self.domains['domain4']['id'], 200)
check_user(self.users['user4'], self.domains['domain3']['id'], 200)
check_user(self.users['user4'], self.domains['domain4']['id'], 200)
for domain in [self.domains['domain_default']['id'],
self.domains['domain1']['id'],
self.domains['domain2']['id']]:
check_user(self.users['user3'], domain, exception.UserNotFound)
check_user(self.users['user4'], domain, exception.UserNotFound)
# Finally, going through the regular manager layer, make sure we
# only see the right number of users in each of the non-default
# domains. One might have expected two users in domain1 (since we
# created one before we switched to multi-backend), however since
# that domain changed backends in the switch we don't find it anymore.
# This is as designed - we don't support moving domains between
# backends.
#
# The listing of the default domain is already handled in the
        # test_list_users() method.
for domain in [self.domains['domain1']['id'],
self.domains['domain2']['id'],
self.domains['domain4']['id']]:
self.assertThat(
self.identity_api.list_users(domain_scope=domain),
matchers.HasLength(1))
# domain3 had a user created before we switched on
# multiple backends, plus one created afterwards - and its
# backend has not changed - so we should find two.
self.assertThat(
self.identity_api.list_users(
domain_scope=self.domains['domain3']['id']),
matchers.HasLength(2))
def test_existing_uuids_work(self):
"""Test that 'uni-domain' created IDs still work.
Throwing the switch to domain-specific backends should not cause
existing identities to be inaccessible via ID.
"""
self.identity_api.get_user(self.users['userA']['id'])
self.identity_api.get_user(self.users['userB']['id'])
self.identity_api.get_user(self.users['userC']['id'])
def test_scanning_of_config_dir(self):
"""Test the Manager class scans the config directory.
        The setup for the main tests above loads the domain configs directly
so that the test overrides can be included. This test just makes sure
that the standard config directory scanning does pick up the relevant
domain config files.
"""
# Confirm that config has drivers_enabled as True, which we will
# check has been set to False later in this test
self.assertTrue(config.CONF.identity.domain_specific_drivers_enabled)
self.load_backends()
# Execute any command to trigger the lazy loading of domain configs
self.identity_api.list_users(
domain_scope=self.domains['domain1']['id'])
# ...and now check the domain configs have been set up
self.assertIn('default', self.identity_api.domain_configs)
self.assertIn(self.domains['domain1']['id'],
self.identity_api.domain_configs)
self.assertIn(self.domains['domain2']['id'],
self.identity_api.domain_configs)
self.assertNotIn(self.domains['domain3']['id'],
self.identity_api.domain_configs)
self.assertNotIn(self.domains['domain4']['id'],
self.identity_api.domain_configs)
# Finally check that a domain specific config contains items from both
# the primary config and the domain specific config
conf = self.identity_api.domain_configs.get_domain_conf(
self.domains['domain1']['id'])
# This should now be false, as is the default, since this is not
# set in the standard primary config file
self.assertFalse(conf.identity.domain_specific_drivers_enabled)
        # ...and make sure a domain-specific option is also set
self.assertEqual('fake://memory1', conf.ldap.url)
def test_delete_domain_with_user_added(self):
domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
'enabled': True}
project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
'domain_id': domain['id'],
'description': uuid.uuid4().hex, 'enabled': True
}
self.assignment_api.create_domain(domain['id'], domain)
self.assignment_api.create_project(project['id'], project)
project_ref = self.assignment_api.get_project(project['id'])
self.assertDictEqual(project_ref, project)
self.assignment_api.create_grant(user_id=self.user_foo['id'],
project_id=project['id'],
role_id=self.role_member['id'])
self.assignment_api.delete_grant(user_id=self.user_foo['id'],
project_id=project['id'],
role_id=self.role_member['id'])
domain['enabled'] = False
self.assignment_api.update_domain(domain['id'], domain)
self.assignment_api.delete_domain(domain['id'])
self.assertRaises(exception.DomainNotFound,
self.assignment_api.get_domain,
domain['id'])
def test_user_enabled_ignored_disable_error(self):
# Override.
        self.skipTest("Doesn't apply since LDAP config has no effect on the "
                      "SQL identity backend.")
def test_group_enabled_ignored_disable_error(self):
# Override.
        self.skipTest("Doesn't apply since LDAP config has no effect on the "
                      "SQL identity backend.")
def test_project_enabled_ignored_disable_error(self):
# Override
self.skipTest("Doesn't apply since LDAP configuration is ignored for "
"SQL assignment backend.")
class DomainSpecificLDAPandSQLIdentity(
BaseLDAPIdentity, tests.SQLDriverOverrides, tests.TestCase,
BaseMultiLDAPandSQLIdentity):
"""Class to test when all domains use specific configs, including SQL.
We define a set of domains and domain-specific backends:
- A separate LDAP backend for the default domain
- A separate SQL backend for domain1
Although the default driver still exists, we don't use it.
"""
def setUp(self):
self.useFixture(database.Database())
super(DomainSpecificLDAPandSQLIdentity, self).setUp()
self.initial_setup()
def initial_setup(self):
# We aren't setting up any initial data ahead of switching to
# domain-specific operation, so make the switch straight away.
self.config_fixture.config(
group='identity', domain_specific_drivers_enabled=True,
domain_config_dir=(
tests.TESTCONF + '/domain_configs_one_sql_one_ldap'))
self.config_fixture.config(group='identity_mapping',
backward_compatible_ids=False)
self.load_backends()
self.engine = sql.get_engine()
self.addCleanup(sql.cleanup)
sql.ModelBase.metadata.create_all(bind=self.engine)
self.addCleanup(sql.ModelBase.metadata.drop_all, bind=self.engine)
self.domain_count = 2
self.domain_specific_count = 2
self.setup_initial_domains()
self.users = {}
self.clear_database()
self.load_fixtures(default_fixtures)
self.create_users_across_domains()
def config_overrides(self):
super(DomainSpecificLDAPandSQLIdentity, self).config_overrides()
# Make sure assignment is actually an SQL driver,
# BaseLDAPIdentity causes this option to use LDAP.
self.config_fixture.config(
group='assignment',
driver='keystone.assignment.backends.sql.Assignment')
def reload_backends(self, domain_id):
        # Just reload the driver for this domain - which will pick up
        # any updated cfg
self.identity_api.domain_configs.reload_domain_driver(
self.identity_api.assignment_api, domain_id)
def get_config(self, domain_id):
        # Get the config for this domain; this will return CONF
        # if no specific config is defined for this domain
return self.identity_api.domain_configs.get_domain_conf(domain_id)
def test_list_domains(self):
self.skipTest(
'N/A: Not relevant for multi ldap testing')
def test_list_domains_non_default_domain_id(self):
self.skipTest(
'N/A: Not relevant for multi ldap testing')
def test_domain_crud(self):
self.skipTest(
'N/A: Not relevant for multi ldap testing')
def test_list_users(self):
# Override the standard list users, since we have added an extra user
# to the default domain, so the number of expected users is one more
# than in the standard test.
users = self.identity_api.list_users(
domain_scope=self._set_domain_scope(
CONF.identity.default_domain_id))
self.assertEqual(len(default_fixtures.USERS) + 1, len(users))
user_ids = set(user['id'] for user in users)
expected_user_ids = set(getattr(self, 'user_%s' % user['id'])['id']
for user in default_fixtures.USERS)
expected_user_ids.add(self.users['user0']['id'])
for user_ref in users:
self.assertNotIn('password', user_ref)
self.assertEqual(expected_user_ids, user_ids)
def test_domain_segregation(self):
"""Test that separate configs have segregated the domain.
Test Plan:
- Users were created in each domain as part of setup, now make sure
you can only find a given user in its relevant domain/backend
- Make sure that for a backend that supports multiple domains
you can get the users via any of its domains
"""
# Check that I can read a user with the appropriate domain-selected
# driver, but won't find it via any other domain driver
self.check_user(self.users['user0'],
self.domains['domain_default']['id'], 200)
self.check_user(self.users['user0'],
self.domains['domain1']['id'], exception.UserNotFound)
self.check_user(self.users['user1'],
self.domains['domain1']['id'], 200)
self.check_user(self.users['user1'],
self.domains['domain_default']['id'],
exception.UserNotFound)
# Finally, going through the regular manager layer, make sure we
# only see the right number of users in the non-default domain.
self.assertThat(
self.identity_api.list_users(
domain_scope=self.domains['domain1']['id']),
matchers.HasLength(1))
def test_add_role_grant_to_user_and_project_404(self):
self.skipTest('Blocked by bug 1101287')
def test_get_role_grants_for_user_and_project_404(self):
self.skipTest('Blocked by bug 1101287')
def test_list_projects_for_user_with_grants(self):
self.skipTest('Blocked by bug 1221805')
def test_get_roles_for_user_and_project_user_group_same_id(self):
self.skipTest('N/A: We never generate the same ID for a user and '
'group in our mapping table')
def test_user_id_comma(self):
self.skipTest('Only valid if it is guaranteed to be talking to '
'the fakeldap backend')
def test_user_id_comma_grants(self):
self.skipTest('Only valid if it is guaranteed to be talking to '
'the fakeldap backend')
def test_user_enabled_ignored_disable_error(self):
# Override.
        self.skipTest("Doesn't apply since LDAP config has no effect on the "
                      "SQL identity backend.")
def test_group_enabled_ignored_disable_error(self):
# Override.
        self.skipTest("Doesn't apply since LDAP config has no effect on the "
                      "SQL identity backend.")
def test_project_enabled_ignored_disable_error(self):
# Override
self.skipTest("Doesn't apply since LDAP configuration is ignored for "
"SQL assignment backend.")
class DomainSpecificSQLIdentity(DomainSpecificLDAPandSQLIdentity):
"""Class to test simplest use of domain-specific SQL driver.
The simplest use of an SQL domain-specific backend is when it is used to
augment the standard case when LDAP is the default driver defined in the
main config file. This would allow, for example, service users to be
stored in SQL while LDAP handles the rest. Hence we define:
- The default driver uses the LDAP backend for the default domain
- A separate SQL backend for domain1
"""
def initial_setup(self):
# We aren't setting up any initial data ahead of switching to
# domain-specific operation, so make the switch straight away.
self.config_fixture.config(
group='identity', domain_specific_drivers_enabled=True,
domain_config_dir=(
tests.TESTCONF + '/domain_configs_default_ldap_one_sql'))
# Part of the testing counts how many new mappings get created as
# we create users, so ensure we are NOT using mapping for the default
# LDAP domain so this doesn't confuse the calculation.
self.config_fixture.config(group='identity_mapping',
backward_compatible_ids=True)
self.load_backends()
self.engine = sql.get_engine()
self.addCleanup(sql.cleanup)
sql.ModelBase.metadata.create_all(bind=self.engine)
self.addCleanup(sql.ModelBase.metadata.drop_all, bind=self.engine)
self.domain_count = 2
self.domain_specific_count = 1
self.setup_initial_domains()
self.users = {}
self.load_fixtures(default_fixtures)
self.create_users_across_domains()
def config_overrides(self):
super(DomainSpecificSQLIdentity, self).config_overrides()
self.config_fixture.config(
group='identity',
driver='keystone.identity.backends.ldap.Identity')
self.config_fixture.config(
group='assignment',
driver='keystone.assignment.backends.sql.Assignment')
def get_config(self, domain_id):
if domain_id == CONF.identity.default_domain_id:
return CONF
else:
return self.identity_api.domain_configs.get_domain_conf(domain_id)
def reload_backends(self, domain_id):
if domain_id == CONF.identity.default_domain_id:
self.load_backends()
else:
# Just reload the driver for this domain - which will pickup
# any updated cfg
self.identity_api.domain_configs.reload_domain_driver(
self.identity_api.assignment_api, domain_id)
def test_default_sql_plus_sql_specific_driver_fails(self):
# First confirm that if ldap is default driver, domain1 can be
# loaded as sql
self.config_fixture.config(
group='identity',
driver='keystone.identity.backends.ldap.Identity')
self.config_fixture.config(
group='assignment',
driver='keystone.assignment.backends.sql.Assignment')
self.load_backends()
# Make any identity call to initiate the lazy loading of configs
self.identity_api.list_users(
domain_scope=CONF.identity.default_domain_id)
self.assertIsNotNone(self.get_config(self.domains['domain1']['id']))
# Now re-initialize, but with sql as the default identity driver
self.config_fixture.config(
group='identity',
driver='keystone.identity.backends.sql.Identity')
self.config_fixture.config(
group='assignment',
driver='keystone.assignment.backends.sql.Assignment')
self.load_backends()
# Make any identity call to initiate the lazy loading of configs, which
# should fail since we would now have two sql drivers.
self.assertRaises(exception.MultipleSQLDriversInConfig,
self.identity_api.list_users,
domain_scope=CONF.identity.default_domain_id)
def test_multiple_sql_specific_drivers_fails(self):
self.config_fixture.config(
group='identity',
driver='keystone.identity.backends.ldap.Identity')
self.config_fixture.config(
group='assignment',
driver='keystone.assignment.backends.sql.Assignment')
self.load_backends()
# Ensure default, domain1 and domain2 exist
self.domain_count = 3
self.setup_initial_domains()
# Make any identity call to initiate the lazy loading of configs
self.identity_api.list_users(
domain_scope=CONF.identity.default_domain_id)
# This will only load domain1, since the domain2 config file is
# not stored in the same location
self.assertIsNotNone(self.get_config(self.domains['domain1']['id']))
# Now try and manually load a 2nd sql specific driver, for domain2,
# which should fail.
self.assertRaises(exception.MultipleSQLDriversInConfig,
self.identity_api.domain_configs._load_config,
self.identity_api.assignment_api,
[tests.TESTCONF + '/domain_configs_one_extra_sql/' +
'keystone.domain2.conf'],
'domain2')
| {
"content_hash": "2b9a240426df659fa6a51fc24faa0d8c",
"timestamp": "",
"source": "github",
"line_count": 2780,
"max_line_length": 79,
"avg_line_length": 43.84496402877698,
"alnum_prop": 0.5955172328922216,
"repo_name": "ging/keystone",
"id": "d46f675bb53d0121e396599eb5f7978d7b94b752",
"size": "122604",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "keystone/tests/test_backend_ldap.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3894459"
},
{
"name": "Shell",
"bytes": "4619"
}
],
"symlink_target": ""
} |
from builtins import input
import matplotlib
matplotlib.use("TkAgg")
import pylab
pylab.ion()  # turn on interactivity
pylab.plot([1, 2, 3])
pylab.ylabel('Y')
pylab.draw()  # draw the current plot
ans = input('press [s] to save figure, any other key to quit: ')
if ans == 's':
    pylab.savefig('myfig.eps')
| {
"content_hash": "42e3397502368bea13f6b1dd75788572",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 62,
"avg_line_length": 27.818181818181817,
"alnum_prop": 0.7026143790849673,
"repo_name": "Caoimhinmg/PmagPy",
"id": "1e5a29e30b1bdb11e6e86e23143c92f6b1b249ef",
"size": "328",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data_files/LearningPython/matplotlib1a.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "D",
"bytes": "5748"
},
{
"name": "HTML",
"bytes": "63859"
},
{
"name": "Inno Setup",
"bytes": "3675"
},
{
"name": "Jupyter Notebook",
"bytes": "14175459"
},
{
"name": "Python",
"bytes": "14896053"
},
{
"name": "Shell",
"bytes": "6986"
},
{
"name": "TeX",
"bytes": "3146"
}
],
"symlink_target": ""
} |
from pymongo import MongoClient
from config import MONGO_URI, MONGO_DBNAME, MONGO_ACCOUNTSCOLLECTION, MONGO_SPFYIDSCOLLECTION
# Connection to MongoDB
client = MongoClient(MONGO_URI, connect=False)
# Access Spfy's DB in MongoDB
db = client[MONGO_DBNAME]
# Access the collection of accounts information from Spfy's DB
collection_accounts = db[MONGO_ACCOUNTSCOLLECTION]
collection_spfyids = db[MONGO_SPFYIDSCOLLECTION]
# Note: though 'store' refers to Redux Store, we use the same
# key for spfyids.
def mongo_update(uid, json=None, key='store', collection=MONGO_ACCOUNTSCOLLECTION):
    '''By default, updates the 'store' document in the accounts collection.
    '''
    if json is None:
        json = []
    if collection == MONGO_ACCOUNTSCOLLECTION:
        collection_accounts.update_one({'_id': uid}, {'$set': {key: json}},
                                       upsert=True)
    elif collection == MONGO_SPFYIDSCOLLECTION:
        collection_spfyids.update_one({'_id': uid}, {'$set': {key: json}},
                                      upsert=True)
# Note: though 'store' refers to Redux Store, we use the same
# key for spfyids.
def mongo_find(uid, key='store', collection=MONGO_ACCOUNTSCOLLECTION):
    if collection == MONGO_ACCOUNTSCOLLECTION:
        doc = collection_accounts.find_one({'_id': uid})
    elif collection == MONGO_SPFYIDSCOLLECTION:
        doc = collection_spfyids.find_one({'_id': uid})
    return doc[key]
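# A minimal usage sketch (illustrative only: it assumes a reachable MongoDB
# at MONGO_URI, and both the uid and the payload shape are made up):
if __name__ == '__main__':
    mongo_update('some-uid', json={'theme': 'dark'})  # upsert into accounts
    print(mongo_find('some-uid'))                     # -> {'theme': 'dark'}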
| {
"content_hash": "68078a3154b995cbd6e0a473689fb666",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 93,
"avg_line_length": 45.03448275862069,
"alnum_prop": 0.7281776416539051,
"repo_name": "superphy/backend",
"id": "4a7c0029820cd4f9268b9f064920c75d1bc49e65",
"size": "1306",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/middleware/mongo.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "392139"
}
],
"symlink_target": ""
} |
"""Top-level presubmit script for Chromium.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
import re
import subprocess
import sys
_EXCLUDED_PATHS = (
r"^breakpad[\\\/].*",
r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_rules.py",
r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_simple.py",
r"^native_client_sdk[\\\/]src[\\\/]tools[\\\/].*.mk",
r"^net[\\\/]tools[\\\/]spdyshark[\\\/].*",
r"^skia[\\\/].*",
r"^v8[\\\/].*",
r".*MakeFile$",
r".+_autogen\.h$",
r"^cc[\\\/].*",
r".+[\\\/]pnacl_shim\.c$",
)
# Fragment of a regular expression that matches file name suffixes
# used to indicate different platforms.
_PLATFORM_SPECIFIERS = r'(_(android|chromeos|gtk|mac|posix|win))?'
# Fragment of a regular expression that matches C++ and Objective-C++
# implementation files.
_IMPLEMENTATION_EXTENSIONS = r'\.(cc|cpp|cxx|mm)$'
# Regular expression that matches code only used for test binaries
# (best effort).
_TEST_CODE_EXCLUDED_PATHS = (
r'.*[/\\](fake_|test_|mock_).+%s' % _IMPLEMENTATION_EXTENSIONS,
r'.+_test_(base|support|util)%s' % _IMPLEMENTATION_EXTENSIONS,
r'.+_(api|browser|perf|unit|ui)?test%s%s' % (_PLATFORM_SPECIFIERS,
_IMPLEMENTATION_EXTENSIONS),
r'.+profile_sync_service_harness%s' % _IMPLEMENTATION_EXTENSIONS,
r'.*[/\\](test|tool(s)?)[/\\].*',
# At request of folks maintaining this folder.
r'chrome[/\\]browser[/\\]automation[/\\].*',
)
_TEST_ONLY_WARNING = (
'You might be calling functions intended only for testing from\n'
'production code. It is OK to ignore this warning if you know what\n'
'you are doing, as the heuristics used to detect the situation are\n'
'not perfect. The commit queue will not block on this warning.\n'
'Email joi@chromium.org if you have questions.')
_INCLUDE_ORDER_WARNING = (
'Your #include order seems to be broken. Send mail to\n'
'marja@chromium.org if this is not the case.')
_BANNED_OBJC_FUNCTIONS = (
(
'addTrackingRect:',
(
'The use of -[NSView addTrackingRect:owner:userData:assumeInside:] is'
'prohibited. Please use CrTrackingArea instead.',
'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
),
False,
),
(
'NSTrackingArea',
(
'The use of NSTrackingAreas is prohibited. Please use CrTrackingArea',
'instead.',
'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
),
False,
),
(
'convertPointFromBase:',
(
'The use of -[NSView convertPointFromBase:] is almost certainly wrong.',
'Please use |convertPoint:(point) fromView:nil| instead.',
'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
),
True,
),
(
'convertPointToBase:',
(
'The use of -[NSView convertPointToBase:] is almost certainly wrong.',
'Please use |convertPoint:(point) toView:nil| instead.',
'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
),
True,
),
(
'convertRectFromBase:',
(
'The use of -[NSView convertRectFromBase:] is almost certainly wrong.',
'Please use |convertRect:(point) fromView:nil| instead.',
'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
),
True,
),
(
'convertRectToBase:',
(
'The use of -[NSView convertRectToBase:] is almost certainly wrong.',
'Please use |convertRect:(point) toView:nil| instead.',
'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
),
True,
),
(
'convertSizeFromBase:',
(
'The use of -[NSView convertSizeFromBase:] is almost certainly wrong.',
'Please use |convertSize:(point) fromView:nil| instead.',
'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
),
True,
),
(
'convertSizeToBase:',
(
'The use of -[NSView convertSizeToBase:] is almost certainly wrong.',
'Please use |convertSize:(point) toView:nil| instead.',
'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
),
True,
),
)
_BANNED_CPP_FUNCTIONS = (
# Make sure that gtest's FRIEND_TEST() macro is not used; the
# FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
# used instead since that allows for FLAKY_ and DISABLED_ prefixes.
(
'FRIEND_TEST(',
(
'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
'base/gtest_prod_util.h and use FRIEND_TEST_ALL_PREFIXES() instead.',
),
False,
(),
),
(
'ScopedAllowIO',
(
'New code should not use ScopedAllowIO. Post a task to the blocking',
'pool or the FILE thread instead.',
),
True,
(
r"^content[\\\/]shell[\\\/]shell_browser_main\.cc$",
),
),
)
def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
"""Attempts to prevent use of functions intended only for testing in
non-testing code. For now this is just a best-effort implementation
that ignores header files and may have some false positives. A
better implementation would probably need a proper C++ parser.
"""
# We only scan .cc files and the like, as the declaration of
# for-testing functions in header files are hard to distinguish from
# calls to such functions without a proper C++ parser.
file_inclusion_pattern = r'.+%s' % _IMPLEMENTATION_EXTENSIONS
base_function_pattern = r'ForTest(ing)?|for_test(ing)?'
inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' % base_function_pattern)
exclusion_pattern = input_api.re.compile(
r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' % (
base_function_pattern, base_function_pattern))
def FilterFile(affected_file):
black_list = (_EXCLUDED_PATHS +
_TEST_CODE_EXCLUDED_PATHS +
input_api.DEFAULT_BLACK_LIST)
return input_api.FilterSourceFile(
affected_file,
white_list=(file_inclusion_pattern, ),
black_list=black_list)
problems = []
for f in input_api.AffectedSourceFiles(FilterFile):
local_path = f.LocalPath()
lines = input_api.ReadFile(f).splitlines()
    for line_number, line in enumerate(lines, start=1):
      if (inclusion_pattern.search(line) and
          not exclusion_pattern.search(line)):
        problems.append(
            '%s:%d\n %s' % (local_path, line_number, line.strip()))
if problems:
if not input_api.is_committing:
return [output_api.PresubmitPromptWarning(_TEST_ONLY_WARNING, problems)]
else:
# We don't warn on commit, to avoid stopping commits going through CQ.
return [output_api.PresubmitNotifyResult(_TEST_ONLY_WARNING, problems)]
else:
return []
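# A minimal illustration of the heuristic above (the C++ lines are made up,
# not from the tree): a bare call site trips the inclusion pattern, while a
# function definition is let through by the exclusion pattern.
_demo_inclusion = re.compile(r'(ForTest(ing)?|for_test(ing)?)\s*\(')
_demo_exclusion = re.compile(
    r'::[A-Za-z0-9_]+(ForTest(ing)?)|(ForTest(ing)?)[^;]+\{')
assert _demo_inclusion.search('PruneCacheForTesting();')
assert _demo_exclusion.search('void CacheTracker::PruneCacheForTesting() {')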
def _CheckNoIOStreamInHeaders(input_api, output_api):
"""Checks to make sure no .h files include <iostream>."""
files = []
pattern = input_api.re.compile(r'^#include\s*<iostream>',
input_api.re.MULTILINE)
for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
if not f.LocalPath().endswith('.h'):
continue
contents = input_api.ReadFile(f)
if pattern.search(contents):
files.append(f)
  if files:
    return [output_api.PresubmitError(
'Do not #include <iostream> in header files, since it inserts static '
'initialization into every file including the header. Instead, '
'#include <ostream>. See http://crbug.com/94794',
files) ]
return []
def _CheckNoUNIT_TESTInSourceFiles(input_api, output_api):
"""Checks to make sure no source files use UNIT_TEST"""
problems = []
for f in input_api.AffectedFiles():
if (not f.LocalPath().endswith(('.cc', '.mm'))):
continue
for line_num, line in f.ChangedContents():
if 'UNIT_TEST' in line:
problems.append(' %s:%d' % (f.LocalPath(), line_num))
if not problems:
return []
return [output_api.PresubmitPromptWarning('UNIT_TEST is only for headers.\n' +
'\n'.join(problems))]
def _CheckNoNewWStrings(input_api, output_api):
"""Checks to make sure we don't introduce use of wstrings."""
problems = []
for f in input_api.AffectedFiles():
if (not f.LocalPath().endswith(('.cc', '.h')) or
f.LocalPath().endswith('test.cc')):
continue
allowWString = False
for line_num, line in f.ChangedContents():
if 'presubmit: allow wstring' in line:
allowWString = True
elif not allowWString and 'wstring' in line:
problems.append(' %s:%d' % (f.LocalPath(), line_num))
allowWString = False
else:
allowWString = False
if not problems:
return []
return [output_api.PresubmitPromptWarning('New code should not use wstrings.'
' If you are calling a cross-platform API that accepts a wstring, '
'fix the API.\n' +
'\n'.join(problems))]
def _CheckNoDEPSGIT(input_api, output_api):
"""Make sure .DEPS.git is never modified manually."""
if any(f.LocalPath().endswith('.DEPS.git') for f in
input_api.AffectedFiles()):
return [output_api.PresubmitError(
'Never commit changes to .DEPS.git. This file is maintained by an\n'
'automated system based on what\'s in DEPS and your changes will be\n'
'overwritten.\n'
'See http://code.google.com/p/chromium/wiki/UsingNewGit#Rolling_DEPS\n'
'for more information')]
return []
def _CheckNoBannedFunctions(input_api, output_api):
"""Make sure that banned functions are not used."""
warnings = []
errors = []
file_filter = lambda f: f.LocalPath().endswith(('.mm', '.m', '.h'))
for f in input_api.AffectedFiles(file_filter=file_filter):
for line_num, line in f.ChangedContents():
for func_name, message, error in _BANNED_OBJC_FUNCTIONS:
if func_name in line:
          problems = warnings
          if error:
            problems = errors
problems.append(' %s:%d:' % (f.LocalPath(), line_num))
for message_line in message:
problems.append(' %s' % message_line)
file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h'))
for f in input_api.AffectedFiles(file_filter=file_filter):
for line_num, line in f.ChangedContents():
for func_name, message, error, excluded_paths in _BANNED_CPP_FUNCTIONS:
def IsBlacklisted(affected_file, blacklist):
local_path = affected_file.LocalPath()
for item in blacklist:
if input_api.re.match(item, local_path):
return True
return False
if IsBlacklisted(f, excluded_paths):
continue
if func_name in line:
          problems = warnings
          if error:
            problems = errors
problems.append(' %s:%d:' % (f.LocalPath(), line_num))
for message_line in message:
problems.append(' %s' % message_line)
result = []
  if warnings:
result.append(output_api.PresubmitPromptWarning(
'Banned functions were used.\n' + '\n'.join(warnings)))
  if errors:
result.append(output_api.PresubmitError(
'Banned functions were used.\n' + '\n'.join(errors)))
return result
def _CheckNoPragmaOnce(input_api, output_api):
  """Checks that no header files use #pragma once."""
files = []
pattern = input_api.re.compile(r'^#pragma\s+once',
input_api.re.MULTILINE)
for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
if not f.LocalPath().endswith('.h'):
continue
contents = input_api.ReadFile(f)
if pattern.search(contents):
files.append(f)
if files:
return [output_api.PresubmitError(
'Do not use #pragma once in header files.\n'
'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
files)]
return []
def _CheckNoTrinaryTrueFalse(input_api, output_api):
"""Checks to make sure we don't introduce use of foo ? true : false."""
problems = []
pattern = input_api.re.compile(r'\?\s*(true|false)\s*:\s*(true|false)')
for f in input_api.AffectedFiles():
if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
continue
for line_num, line in f.ChangedContents():
if pattern.match(line):
problems.append(' %s:%d' % (f.LocalPath(), line_num))
if not problems:
return []
return [output_api.PresubmitPromptWarning(
'Please consider avoiding the "? true : false" pattern if possible.\n' +
'\n'.join(problems))]
def _CheckUnwantedDependencies(input_api, output_api):
"""Runs checkdeps on #include statements added in this
change. Breaking - rules is an error, breaking ! rules is a
warning.
"""
# We need to wait until we have an input_api object and use this
# roundabout construct to import checkdeps because this file is
# eval-ed and thus doesn't have __file__.
original_sys_path = sys.path
try:
sys.path = sys.path + [input_api.os_path.join(
input_api.PresubmitLocalPath(), 'tools', 'checkdeps')]
import checkdeps
from cpp_checker import CppChecker
from rules import Rule
finally:
# Restore sys.path to what it was before.
sys.path = original_sys_path
added_includes = []
for f in input_api.AffectedFiles():
if not CppChecker.IsCppFile(f.LocalPath()):
continue
changed_lines = [line for line_num, line in f.ChangedContents()]
added_includes.append([f.LocalPath(), changed_lines])
deps_checker = checkdeps.DepsChecker()
error_descriptions = []
warning_descriptions = []
for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
added_includes):
description_with_path = '%s\n %s' % (path, rule_description)
if rule_type == Rule.DISALLOW:
error_descriptions.append(description_with_path)
else:
warning_descriptions.append(description_with_path)
results = []
if error_descriptions:
results.append(output_api.PresubmitError(
'You added one or more #includes that violate checkdeps rules.',
error_descriptions))
if warning_descriptions:
if not input_api.is_committing:
warning_factory = output_api.PresubmitPromptWarning
else:
# We don't want to block use of the CQ when there is a warning
# of this kind, so we only show a message when committing.
warning_factory = output_api.PresubmitNotifyResult
results.append(warning_factory(
'You added one or more #includes of files that are temporarily\n'
'allowed but being removed. Can you avoid introducing the\n'
'#include? See relevant DEPS file(s) for details and contacts.',
warning_descriptions))
return results
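# For context, the checkdeps rule syntax the check above reacts to
# (hypothetical DEPS fragment, not a real rule set): '-chrome/browser'
# disallows matching includes outright (error above), while
# '!chrome/browser/foo.h' marks them temporarily allowed (warning above).
_DEMO_DEPS_RULES = ['-chrome/browser', '!chrome/browser/foo.h']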
def _CheckFilePermissions(input_api, output_api):
"""Check that all files have their permissions properly set."""
args = [sys.executable, 'tools/checkperms/checkperms.py', '--root',
input_api.change.RepositoryRoot()]
for f in input_api.AffectedFiles():
args += ['--file', f.LocalPath()]
  (errors, _) = subprocess.Popen(args, stdout=subprocess.PIPE).communicate()
results = []
if errors:
results.append(output_api.PresubmitError('checkperms.py failed.',
errors))
return results
def _CheckNoAuraWindowPropertyHInHeaders(input_api, output_api):
"""Makes sure we don't include ui/aura/window_property.h
in header files.
"""
pattern = input_api.re.compile(r'^#include\s*"ui/aura/window_property.h"')
errors = []
for f in input_api.AffectedFiles():
if not f.LocalPath().endswith('.h'):
continue
for line_num, line in f.ChangedContents():
if pattern.match(line):
errors.append(' %s:%d' % (f.LocalPath(), line_num))
results = []
if errors:
results.append(output_api.PresubmitError(
'Header files should not include ui/aura/window_property.h', errors))
return results
def _CheckIncludeOrderForScope(scope, input_api, file_path, changed_linenums):
"""Checks that the lines in scope occur in the right order.
1. C system files in alphabetical order
2. C++ system files in alphabetical order
3. Project's .h files
"""
c_system_include_pattern = input_api.re.compile(r'\s*#include <.*\.h>')
cpp_system_include_pattern = input_api.re.compile(r'\s*#include <.*>')
custom_include_pattern = input_api.re.compile(r'\s*#include ".*')
C_SYSTEM_INCLUDES, CPP_SYSTEM_INCLUDES, CUSTOM_INCLUDES = range(3)
state = C_SYSTEM_INCLUDES
previous_line = ''
previous_line_num = 0
problem_linenums = []
for line_num, line in scope:
if c_system_include_pattern.match(line):
if state != C_SYSTEM_INCLUDES:
problem_linenums.append((line_num, previous_line_num))
elif previous_line and previous_line > line:
problem_linenums.append((line_num, previous_line_num))
elif cpp_system_include_pattern.match(line):
if state == C_SYSTEM_INCLUDES:
state = CPP_SYSTEM_INCLUDES
elif state == CUSTOM_INCLUDES:
problem_linenums.append((line_num, previous_line_num))
elif previous_line and previous_line > line:
problem_linenums.append((line_num, previous_line_num))
elif custom_include_pattern.match(line):
if state != CUSTOM_INCLUDES:
state = CUSTOM_INCLUDES
elif previous_line and previous_line > line:
problem_linenums.append((line_num, previous_line_num))
else:
problem_linenums.append(line_num)
previous_line = line
previous_line_num = line_num
warnings = []
for (line_num, previous_line_num) in problem_linenums:
if line_num in changed_linenums or previous_line_num in changed_linenums:
warnings.append(' %s:%d' % (file_path, line_num))
return warnings
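# A small illustration of the scope classifier above (assumed include lines):
# angle-bracket headers ending in '.h' count as C system includes, other
# angle-bracket headers as C++ system includes, and quoted paths as the
# project's own headers.
_demo_c_include = re.compile(r'\s*#include <.*\.h>')
_demo_cpp_include = re.compile(r'\s*#include <.*>')
assert _demo_c_include.match('#include <stdint.h>')
assert not _demo_c_include.match('#include <vector>')
assert _demo_cpp_include.match('#include <vector>')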
def _CheckIncludeOrderInFile(input_api, f, changed_linenums):
"""Checks the #include order for the given file f."""
system_include_pattern = input_api.re.compile(r'\s*#include \<.*')
# Exclude #include <.../...> includes from the check; e.g., <sys/...> includes
# often need to appear in a specific order.
excluded_include_pattern = input_api.re.compile(r'\s*#include \<.*/.*')
custom_include_pattern = input_api.re.compile(r'\s*#include "(?P<FILE>.*)"')
if_pattern = input_api.re.compile(
r'\s*#\s*(if|elif|else|endif|define|undef).*')
# Some files need specialized order of includes; exclude such files from this
# check.
  uncheckable_includes_pattern = input_api.re.compile(
      r'\s*#include '
      r'("ipc/.*macros\.h"|<windows\.h>|".*gl.*autogen.h")\s*')
contents = f.NewContents()
warnings = []
line_num = 0
# Handle the special first include. If the first include file is
# some/path/file.h, the corresponding including file can be some/path/file.cc,
# some/other/path/file.cc, some/path/file_platform.cc, some/path/file-suffix.h
# etc. It's also possible that no special first include exists.
for line in contents:
line_num += 1
if system_include_pattern.match(line):
# No special first include -> process the line again along with normal
# includes.
line_num -= 1
break
match = custom_include_pattern.match(line)
if match:
match_dict = match.groupdict()
header_basename = input_api.os_path.basename(
match_dict['FILE']).replace('.h', '')
if header_basename not in input_api.os_path.basename(f.LocalPath()):
# No special first include -> process the line again along with normal
# includes.
line_num -= 1
break
# Split into scopes: Each region between #if and #endif is its own scope.
scopes = []
current_scope = []
for line in contents[line_num:]:
line_num += 1
if uncheckable_includes_pattern.match(line):
return []
if if_pattern.match(line):
scopes.append(current_scope)
current_scope = []
elif ((system_include_pattern.match(line) or
custom_include_pattern.match(line)) and
not excluded_include_pattern.match(line)):
current_scope.append((line_num, line))
scopes.append(current_scope)
for scope in scopes:
warnings.extend(_CheckIncludeOrderForScope(scope, input_api, f.LocalPath(),
changed_linenums))
return warnings
def _CheckIncludeOrder(input_api, output_api):
"""Checks that the #include order is correct.
1. The corresponding header for source files.
2. C system files in alphabetical order
3. C++ system files in alphabetical order
4. Project's .h files in alphabetical order
Each region separated by #if, #elif, #else, #endif, #define and #undef follows
these rules separately.
"""
warnings = []
for f in input_api.AffectedFiles():
if f.LocalPath().endswith(('.cc', '.h')):
changed_linenums = set(line_num for line_num, _ in f.ChangedContents())
warnings.extend(_CheckIncludeOrderInFile(input_api, f, changed_linenums))
results = []
if warnings:
if not input_api.is_committing:
results.append(output_api.PresubmitPromptWarning(_INCLUDE_ORDER_WARNING,
warnings))
else:
# We don't warn on commit, to avoid stopping commits going through CQ.
results.append(output_api.PresubmitNotifyResult(_INCLUDE_ORDER_WARNING,
warnings))
return results
def _CheckForVersionControlConflictsInFile(input_api, f):
pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
errors = []
for line_num, line in f.ChangedContents():
if pattern.match(line):
errors.append(' %s:%d %s' % (f.LocalPath(), line_num, line))
return errors
def _CheckForVersionControlConflicts(input_api, output_api):
"""Usually this is not intentional and will cause a compile failure."""
errors = []
for f in input_api.AffectedFiles():
errors.extend(_CheckForVersionControlConflictsInFile(input_api, f))
results = []
if errors:
results.append(output_api.PresubmitError(
'Version control conflict markers found, please resolve.', errors))
return results
def _CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api):
def FilterFile(affected_file):
"""Filter function for use with input_api.AffectedSourceFiles,
below. This filters out everything except non-test files from
top-level directories that generally speaking should not hard-code
service URLs (e.g. src/android_webview/, src/content/ and others).
"""
return input_api.FilterSourceFile(
affected_file,
white_list=(r'^(android_webview|base|content|net)[\\\/].*', ),
black_list=(_EXCLUDED_PATHS +
_TEST_CODE_EXCLUDED_PATHS +
input_api.DEFAULT_BLACK_LIST))
  pattern = input_api.re.compile(r'"[^"]*google\.com[^"]*"')
problems = [] # items are (filename, line_number, line)
for f in input_api.AffectedSourceFiles(FilterFile):
for line_num, line in f.ChangedContents():
if pattern.search(line):
problems.append((f.LocalPath(), line_num, line))
if problems:
if not input_api.is_committing:
warning_factory = output_api.PresubmitPromptWarning
else:
# We don't want to block use of the CQ when there is a warning
# of this kind, so we only show a message when committing.
warning_factory = output_api.PresubmitNotifyResult
return [warning_factory(
'Most layers below src/chrome/ should not hardcode service URLs.\n'
'Are you sure this is correct? (Contact: joi@chromium.org)',
[' %s:%d: %s' % (
problem[0], problem[1], problem[2]) for problem in problems])]
else:
return []
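# e.g. (made-up source line) a literal like "https://www.google.com/api" in
# src/net/foo.cc would be caught by the pattern above:
assert re.search(r'"[^"]*google\.com[^"]*"',
                 'GURL url("https://www.google.com/api");')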
def _CheckNoAbbreviationInPngFileName(input_api, output_api):
"""Makes sure there are no abbreviations in the name of PNG files.
"""
pattern = input_api.re.compile(r'.*_[a-z]_.*\.png$|.*_[a-z]\.png$')
errors = []
for f in input_api.AffectedFiles(include_deletes=False):
if pattern.match(f.LocalPath()):
errors.append(' %s' % f.LocalPath())
results = []
if errors:
results.append(output_api.PresubmitError(
'The name of PNG files should not have abbreviations. \n'
'Use _hover.png, _center.png, instead of _h.png, _c.png.\n'
'Contact oshima@chromium.org if you have questions.', errors))
return results
def _CommonChecks(input_api, output_api):
"""Checks common to both upload and commit."""
results = []
results.extend(input_api.canned_checks.PanProjectChecks(
input_api, output_api, excluded_paths=_EXCLUDED_PATHS))
results.extend(_CheckAuthorizedAuthor(input_api, output_api))
results.extend(
_CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api))
results.extend(_CheckNoIOStreamInHeaders(input_api, output_api))
results.extend(_CheckNoUNIT_TESTInSourceFiles(input_api, output_api))
results.extend(_CheckNoNewWStrings(input_api, output_api))
results.extend(_CheckNoDEPSGIT(input_api, output_api))
results.extend(_CheckNoBannedFunctions(input_api, output_api))
results.extend(_CheckNoPragmaOnce(input_api, output_api))
results.extend(_CheckNoTrinaryTrueFalse(input_api, output_api))
results.extend(_CheckUnwantedDependencies(input_api, output_api))
results.extend(_CheckFilePermissions(input_api, output_api))
results.extend(_CheckNoAuraWindowPropertyHInHeaders(input_api, output_api))
results.extend(_CheckIncludeOrder(input_api, output_api))
results.extend(_CheckForVersionControlConflicts(input_api, output_api))
results.extend(_CheckPatchFiles(input_api, output_api))
results.extend(_CheckHardcodedGoogleHostsInLowerLayers(input_api, output_api))
results.extend(_CheckNoAbbreviationInPngFileName(input_api, output_api))
if any('PRESUBMIT.py' == f.LocalPath() for f in input_api.AffectedFiles()):
results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
input_api, output_api,
input_api.PresubmitLocalPath(),
whitelist=[r'^PRESUBMIT_test\.py$']))
return results
def _CheckSubversionConfig(input_api, output_api):
"""Verifies the subversion config file is correctly setup.
Checks that autoprops are enabled, returns an error otherwise.
"""
join = input_api.os_path.join
if input_api.platform == 'win32':
appdata = input_api.environ.get('APPDATA', '')
if not appdata:
return [output_api.PresubmitError('%APPDATA% is not configured.')]
path = join(appdata, 'Subversion', 'config')
else:
home = input_api.environ.get('HOME', '')
if not home:
return [output_api.PresubmitError('$HOME is not configured.')]
path = join(home, '.subversion', 'config')
error_msg = (
'Please look at http://dev.chromium.org/developers/coding-style to\n'
'configure your subversion configuration file. This enables automatic\n'
'properties to simplify the project maintenance.\n'
'Pro-tip: just download and install\n'
'http://src.chromium.org/viewvc/chrome/trunk/tools/build/slave/config\n')
try:
lines = open(path, 'r').read().splitlines()
# Make sure auto-props is enabled and check for 2 Chromium standard
# auto-prop.
if (not '*.cc = svn:eol-style=LF' in lines or
not '*.pdf = svn:mime-type=application/pdf' in lines or
not 'enable-auto-props = yes' in lines):
return [
output_api.PresubmitNotifyResult(
'It looks like you have not configured your subversion config '
'file or it is not up-to-date.\n' + error_msg)
]
except (OSError, IOError):
return [
output_api.PresubmitNotifyResult(
'Can\'t find your subversion config file.\n' + error_msg)
]
return []
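# For reference, the exact lines the check above greps for in the subversion
# config (a plain substring match; section placement is not verified):
_DEMO_REQUIRED_SVN_CONFIG_LINES = [
    'enable-auto-props = yes',
    '*.cc = svn:eol-style=LF',
    '*.pdf = svn:mime-type=application/pdf',
]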
def _CheckAuthorizedAuthor(input_api, output_api):
  """For non-Googler/non-chromite committers, verify the author's email
  address is in AUTHORS.
"""
# TODO(maruel): Add it to input_api?
import fnmatch
author = input_api.change.author_email
if not author:
input_api.logging.info('No author, skipping AUTHOR check')
return []
authors_path = input_api.os_path.join(
input_api.PresubmitLocalPath(), 'AUTHORS')
valid_authors = (
input_api.re.match(r'[^#]+\s+\<(.+?)\>\s*$', line)
for line in open(authors_path))
valid_authors = [item.group(1).lower() for item in valid_authors if item]
if not any(fnmatch.fnmatch(author.lower(), valid) for valid in valid_authors):
input_api.logging.info('Valid authors are %s', ', '.join(valid_authors))
return [output_api.PresubmitPromptWarning(
('%s is not in AUTHORS file. If you are a new contributor, please visit'
'\n'
'http://www.chromium.org/developers/contributing-code and read the '
'"Legal" section\n'
'If you are a chromite, verify the contributor signed the CLA.') %
author)]
return []
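# Illustration of the AUTHORS line format the regex above parses (the entry
# itself is hypothetical):
assert re.match(r'[^#]+\s+\<(.+?)\>\s*$', 'Jane Doe <jane@example.com>')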
def _CheckPatchFiles(input_api, output_api):
problems = [f.LocalPath() for f in input_api.AffectedFiles()
if f.LocalPath().endswith(('.orig', '.rej'))]
if problems:
return [output_api.PresubmitError(
"Don't commit .rej and .orig files.", problems)]
else:
return []
def CheckChangeOnUpload(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
return results
def CheckChangeOnCommit(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
# TODO(thestig) temporarily disabled, doesn't work in third_party/
#results.extend(input_api.canned_checks.CheckSvnModifiedDirectories(
# input_api, output_api, sources))
# Make sure the tree is 'open'.
results.extend(input_api.canned_checks.CheckTreeIsOpen(
input_api,
output_api,
json_url='http://chromium-status.appspot.com/current?format=json'))
results.extend(input_api.canned_checks.CheckRietveldTryJobExecution(input_api,
output_api, 'http://codereview.chromium.org',
      ('win_rel', 'linux_rel', 'mac_rel', 'win:compile'),
'tryserver@chromium.org'))
results.extend(input_api.canned_checks.CheckChangeHasBugField(
input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeHasDescription(
input_api, output_api))
results.extend(_CheckSubversionConfig(input_api, output_api))
return results
def GetPreferredTrySlaves(project, change):
files = change.LocalPaths()
if not files or all(re.search(r'[\\/]OWNERS$', f) for f in files):
return []
  if all(re.search(r'\.(m|mm)$|(^|[/_])mac[/_.]', f) for f in files):
return ['mac_rel', 'mac_asan', 'mac:compile']
if all(re.search('(^|[/_])win[/_.]', f) for f in files):
return ['win_rel', 'win7_aura', 'win:compile']
if all(re.search('(^|[/_])android[/_.]', f) for f in files):
return ['android_dbg', 'android_clang_dbg']
if all(re.search('^native_client_sdk', f) for f in files):
return ['linux_nacl_sdk', 'win_nacl_sdk', 'mac_nacl_sdk']
if all(re.search('[/_]ios[/_.]', f) for f in files):
return ['ios_rel_device', 'ios_dbg_simulator']
trybots = [
'android_clang_dbg',
'android_dbg',
'ios_dbg_simulator',
'ios_rel_device',
'linux_asan',
'linux_aura',
'linux_chromeos',
'linux_clang:compile',
'linux_rel',
'mac_asan',
'mac_rel',
'mac:compile',
'win7_aura',
'win_rel',
'win:compile',
]
# Match things like path/aura/file.cc and path/file_aura.cc.
# Same for chromeos.
if any(re.search('[/_](aura|chromeos)', f) for f in files):
trybots += ['linux_chromeos_clang:compile', 'linux_chromeos_asan']
return trybots
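# Example routing under the rules above, using a hypothetical stand-in for a
# change object (illustration only): a patch touching only a Mac-specific
# file gets the Mac-only trybot set.
class _DemoChange(object):
  def LocalPaths(self):
    return ['chrome/browser/ui/cocoa/foo.mm']
assert GetPreferredTrySlaves(None, _DemoChange()) == [
    'mac_rel', 'mac_asan', 'mac:compile']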
| {
"content_hash": "56355c41a2956ac3e00f623e14a331f3",
"timestamp": "",
"source": "github",
"line_count": 875,
"max_line_length": 80,
"avg_line_length": 36.369142857142855,
"alnum_prop": 0.650724318888854,
"repo_name": "timopulkkinen/BubbleFish",
"id": "a5ea9e45b442405f8db7a086ab7218c239effde2",
"size": "31990",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PRESUBMIT.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "853"
},
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "1174304"
},
{
"name": "Awk",
"bytes": "9519"
},
{
"name": "C",
"bytes": "75801820"
},
{
"name": "C#",
"bytes": "1132"
},
{
"name": "C++",
"bytes": "161884021"
},
{
"name": "DOT",
"bytes": "1559"
},
{
"name": "F#",
"bytes": "381"
},
{
"name": "Java",
"bytes": "3531849"
},
{
"name": "JavaScript",
"bytes": "18556005"
},
{
"name": "Logos",
"bytes": "4517"
},
{
"name": "Matlab",
"bytes": "5234"
},
{
"name": "Objective-C",
"bytes": "7254742"
},
{
"name": "PHP",
"bytes": "97817"
},
{
"name": "Perl",
"bytes": "933011"
},
{
"name": "Python",
"bytes": "8808682"
},
{
"name": "R",
"bytes": "262"
},
{
"name": "Ragel in Ruby Host",
"bytes": "3621"
},
{
"name": "Shell",
"bytes": "1537764"
},
{
"name": "Tcl",
"bytes": "277077"
},
{
"name": "XML",
"bytes": "13493"
}
],
"symlink_target": ""
} |
"""Robotiq 2-finger 85 adaptive gripper class."""
from typing import List, Tuple, Optional
from dm_control import mjcf
from dm_robotics.moma.models import types
from dm_robotics.moma.models import utils as models_utils
from dm_robotics.moma.models.end_effectors.robot_hands import robot_hand
from dm_robotics.moma.models.end_effectors.robot_hands import robotiq_2f85_constants as consts
import numpy as np
_GRIPPER_SITE_NAME = 'pinch_site'
_ACTUATOR_NAME = 'fingers_actuator'
_SENSOR_NAME = 'fingers_pos'
_JOINT_NAME = 'left_driver_joint'
COLLISION_CLASS = 'reinforced_fingertip'
_PAD_GEOM_NAMES = [
'reinforced_right_fingertip_geom',
'reinforced_left_fingertip_geom',
]
_PAD_COLOR = (1., 1., 1., 1.)
_POS_SCALE = 255.
_VELOCITY_CTRL_TOL = 2
_DEFAULT_GRIPPER_FRICTION = (0.5, 0.1, 0.01)
_LEGACY_GRIPPER_FRICTION = (1.5, 0.1, 0.001)
# The torque tau that is applied to the actuator is:
# tau = gainprm[0] * act + biasprm[1] * q + biasprm[2] * q_dot
# where `act` is the current target reference of the actuator,
# `q` is the current position of the joint and `q_dot` is the joint velocity.
# `q_dot` is used for damping but we add it to the joint instead of the
# actuator. This has to do with the underlying simulation as joint damping is
# more stable.
# The control range of the actuator is [0, 255] but the actual joint range is
# [0, 0.8], so a mapping is needed from one range to the other. We want the
# torque to vanish when the actuator is fully closed (act = 255) and the
# joint sits at its upper limit (q = 0.8). Therefore:
# gainprm[0] * 255 + biasprm[1] * 0.8 = 0
# gainprm[0] = -biasprm[1] * 0.8 / 255
_GAINPRM = (100. * 0.8 / 255, 0.0, 0.0)
_BIASPRM = (0.0, -100, 0.0)
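# A quick sanity check of the derivation above (illustrative only): at full
# closure act = 255 and q = 0.8, so the gain and bias terms cancel.
assert abs(_GAINPRM[0] * 255. + _BIASPRM[1] * 0.8) < 1e-9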
_BASE_COLLISION_KWARGS = [{
'name': 'base_CollisionGeom_1',
'type': 'cylinder',
'pos': '0 0 0.01',
'size': '0.04 0.024',
}, {
'name': 'base_CollisionGeom_2',
'type': 'sphere',
'pos': '0 0 0.05',
'size': '0.045',
}]
_RIGHT_DRIVER_COLLISION_KWARGS = [{
'name': 'right_driver_CollisionGeom',
'type': 'capsule',
'fromto': '0 0 0 0 0.027 0.0018',
'size': '0.015',
}]
_RIGHT_COUPLER_COLLISION_KWARGS = [{
'name': 'right_coupler_CollisionGeom',
'type': 'capsule',
'fromto': '0 0 0 0 0.005 0.045',
'size': '0.015',
}]
_RIGHT_SPRING_LINK_COLLISION_KWARGS = [{
'name': 'right_spring_link_CollisionGeom',
'type': 'capsule',
'fromto': '0 0 0 0 0.031 0.036',
'size': '0.02',
}]
_RIGHT_FOLLOWER_COLLISION_KWARGS = [{
'name': 'right_follower_CollisionGeom',
'type': 'sphere',
'pos': '0 -0.01 0.005',
'size': '0.015',
}]
_LEFT_DRIVER_COLLISION_KWARGS = [{
'name': 'left_driver_CollisionGeom',
'type': 'capsule',
'fromto': '0 0 0 0 0.027 0.0018',
'size': '0.015',
}]
_LEFT_COUPLER_COLLISION_KWARGS = [{
'name': 'left_coupler_CollisionGeom',
'type': 'capsule',
'fromto': '0 0 0 0 0.005 0.045',
'size': '0.015',
}]
_LEFT_SPRING_LINK_COLLISION_KWARGS = [{
'name': 'left_spring_link_CollisionGeom',
'type': 'capsule',
'fromto': '0 0 0 0 0.031 0.036',
'size': '0.02',
}]
_LEFT_FOLLOWER_COLLISION_KWARGS = [{
'name': 'left_follower_CollisionGeom',
'type': 'sphere',
'pos': '0 -0.01 0.005',
'size': '0.015',
}]
_RIGHT_PAD_COLLISION_KWARGS = [{
'name': 'right_pad_CollisionGeom',
'type': 'box',
'pos': '0 0.004 0.019',
'size': '0.012 0.01 0.019',
}]
_LEFT_PAD_COLLISION_KWARGS = [{
'name': 'left_pad_CollisionGeom',
'type': 'box',
'pos': '0 0.004 0.019',
'size': '0.012 0.01 0.019',
}]
# Dictionary mapping body names to a list of their collision geoms
_COLLISION_GEOMS_DICT = {
'base': _BASE_COLLISION_KWARGS,
'right_driver': _RIGHT_DRIVER_COLLISION_KWARGS,
'right_coupler': _RIGHT_COUPLER_COLLISION_KWARGS,
'right_spring_link': _RIGHT_SPRING_LINK_COLLISION_KWARGS,
'right_follower': _RIGHT_FOLLOWER_COLLISION_KWARGS,
'left_driver': _LEFT_DRIVER_COLLISION_KWARGS,
'left_coupler': _LEFT_COUPLER_COLLISION_KWARGS,
'left_spring_link': _LEFT_SPRING_LINK_COLLISION_KWARGS,
'left_follower': _LEFT_FOLLOWER_COLLISION_KWARGS,
'right_pad': _RIGHT_PAD_COLLISION_KWARGS,
'left_pad': _LEFT_PAD_COLLISION_KWARGS,
}
class Robotiq2F85(robot_hand.RobotHand):
"""Robotiq 2-finger 85 adaptive gripper."""
_mjcf_root: mjcf.RootElement
def _build(self,
name: str = 'robotiq_2f85',
gainprm: Tuple[float, float, float] = _GAINPRM,
biasprm: Tuple[float, float, float] = _BIASPRM,
tcp_orientation: Optional[np.ndarray] = None,
use_realistic_friction: bool = True):
"""Initializes the Robotiq 2-finger 85 gripper.
Args:
name: The name of this robot. Used as a prefix in the MJCF name
gainprm: The gainprm of the finger actuator.
biasprm: The biasprm of the finger actuator.
tcp_orientation: Quaternion [w, x, y, z] representing the orientation of
the tcp frame of the gripper. This is needed for compatibility between
sim and real. This depends on which robot is being used so we need it to
be parametrizable. If None, use the original tcp site.
      use_realistic_friction: If True, uses friction parameters that result
        in more realistic behaviour. Should only be set to False for
        backwards compatibility.
"""
self._mjcf_root = mjcf.from_path(consts.XML_PATH)
self._mjcf_root.model = name
# If the user provided a quaternion, rotate the tcp site. Otherwise use the
# default one.
if tcp_orientation is not None:
gripper_base = self.mjcf_model.find('body', 'base')
gripper_base.add(
'site',
type='sphere',
name='aligned_gripper_tcp',
pos=consts.TCP_SITE_POS,
quat=tcp_orientation)
self._tool_center_point = self.mjcf_model.find(
'site', 'aligned_gripper_tcp')
else:
self._tool_center_point = self._mjcf_root.find('site', _GRIPPER_SITE_NAME)
self._finger_actuator = self._mjcf_root.find('actuator', _ACTUATOR_NAME)
self._joint_sensor = self._mjcf_root.find('sensor', _SENSOR_NAME)
self._joints = [self._mjcf_root.find('joint', _JOINT_NAME)]
# Use integrated velocity control.
self._define_integrated_velocity_actuator(gainprm, biasprm)
# Cache the limits for the finger joint.
joint_min, joint_max = self._finger_actuator.tendon.joint[
0].joint.dclass.joint.range
self._joint_offset = joint_min
self._joint_scale = joint_max - joint_min
self._color_pads()
self._add_collision_geoms()
self._add_collisions_boxes()
self._set_physics_properties(use_realistic_friction)
def _set_physics_properties(self, use_realistic_friction: bool):
"""Set physics related properties."""
    # Set collision and friction parameters to the same values as in the Jaco
    # hand, as they are known to be very stable.
padbox_class = self._mjcf_root.find('default', COLLISION_CLASS)
if use_realistic_friction:
padbox_class.geom.friction = _DEFAULT_GRIPPER_FRICTION
else:
padbox_class.geom.friction = _LEGACY_GRIPPER_FRICTION
# These values of solimp and solref have been tested at different physics
# timesteps [5e-4, 5e-3]. They make the gripper less stiff and allow a
# proper object-gripper interaction at larger physics timestep.
padbox_class.geom.solimp = (0.9, 0.95, 0.001, 0.01, 2)
padbox_class.geom.solref = (-30000, -200)
    # Adapt spring link stiffness to allow proper initialisation and more
    # realistic behaviour. The original value sometimes causes one of the
    # links to get stuck in a bent position after initialisation.
spring_link_class = self._mjcf_root.find('default', 'spring_link')
spring_link_class.joint.stiffness = 0.01
# Adapt the driver joint to make movement of gripper slower, similar to the
# real hardware.
driver_class = self._mjcf_root.find('default', 'driver')
driver_class.joint.armature = 0.1
# Add in the damping on the joint level instead of on the actuator level
# this results in a more stable damping.
driver_class.joint.damping = 1
def _add_collisions_boxes(self):
"""Adds two boxes to each of the fingertips to improve physics stability."""
for side in ('left', 'right'):
pad = self._mjcf_root.find('body', f'{side}_pad')
pad.add(
'geom',
name=f'{side}_collision_box1',
dclass='reinforced_fingertip',
size=[0.007, 0.0021575, 0.005],
type='box',
rgba=[0.0, 0.0, 0.0, 0.0],
pos=[0.0, 0.0117, 0.03])
pad.add(
'geom',
name=f'{side}_collision_box2',
dclass='reinforced_fingertip',
size=[0.007, 0.0021575, 0.005],
type='box',
rgba=[0.0, 0.0, 0.0, 0.0],
pos=[0.0, 0.0117, 0.015])
def _add_collision_geoms(self):
"""Add collision geoms use by the QP velocity controller for avoidance."""
self._collision_geoms = models_utils.attach_collision_geoms(
self.mjcf_model, _COLLISION_GEOMS_DICT)
def _color_pads(self) -> None:
"""Define the color for the gripper pads."""
for geom_name in _PAD_GEOM_NAMES:
geom = self._mjcf_root.find('geom', geom_name)
geom.rgba = _PAD_COLOR
def _define_integrated_velocity_actuator(self,
gainprm: Tuple[float, float, float],
biasprm: Tuple[float, float, float]):
"""Define integrated velocity actuator."""
self._finger_actuator.ctrlrange = (-255.0, 255.0)
self._finger_actuator.dyntype = 'integrator'
self._finger_actuator.gainprm = gainprm
self._finger_actuator.biasprm = biasprm
def initialize_episode(self, physics: mjcf.Physics,
random_state: np.random.RandomState):
"""Function called at the beginning of every episode."""
del random_state # Unused.
# Apply gravity compensation
body_elements = self.mjcf_model.find_all('body')
gravity = np.hstack([physics.model.opt.gravity, [0, 0, 0]])
physics_bodies = physics.bind(body_elements)
if physics_bodies is None:
raise ValueError('Calling physics.bind with bodies returns None.')
physics_bodies.xfrc_applied[:] = -gravity * physics_bodies.mass[..., None]
@property
def joints(self) -> List[types.MjcfElement]:
"""List of joint elements belonging to the hand."""
if not self._joints:
raise AttributeError('Robot joints is None.')
return self._joints
@property
def actuators(self) -> List[types.MjcfElement]:
"""List of actuator elements belonging to the hand."""
if not self._finger_actuator:
raise AttributeError('Robot actuators is None.')
return [self._finger_actuator]
@property
def mjcf_model(self) -> mjcf.RootElement:
"""Returns the `mjcf.RootElement` object corresponding to the robot hand."""
if not self._mjcf_root:
raise AttributeError('Robot mjcf_root is None.')
return self._mjcf_root
@property
def name(self) -> str:
"""Name of the robot hand."""
return self.mjcf_model.model
@property
def tool_center_point(self) -> types.MjcfElement:
"""Tool center point site of the hand."""
return self._tool_center_point
@property
def joint_sensor(self) -> types.MjcfElement:
"""Joint sensor of the hand."""
return self._joint_sensor
def after_substep(self, physics: mjcf.Physics,
random_state: np.random.RandomState) -> None:
"""A callback which is executed after a simulation step.
This function is necessary when using the integrated velocity mujoco
actuator. Mujoco will limit the incoming velocity but the hidden state of
the integrated velocity actuators must be clipped to the actuation range.
Args:
physics: An instance of `mjcf.Physics`.
random_state: An instance of `np.random.RandomState`.
"""
del random_state # Unused.
# Clip the actuator.act with the actuator limits.
physics_actuators = models_utils.binding(physics, self.actuators)
physics_actuators.act[:] = np.clip(
physics_actuators.act[:],
a_min=0.,
a_max=255.)
def convert_position(self, position, **unused_kwargs):
"""Converts raw joint position to sensor output."""
normed_pos = (position - self._joint_offset) / self._joint_scale # [0, 1]
rescaled_pos = np.clip(normed_pos * _POS_SCALE, 0, _POS_SCALE)
return np.round(rescaled_pos)
def grasp_sensor_callable(self, physics) -> int:
"""Simulate the robot's gOBJ object detection flag."""
# No grasp when no collision.
collision_geoms_colliding = _are_all_collision_geoms_colliding(
physics, self.mjcf_model)
if not collision_geoms_colliding:
return consts.NO_GRASP
# No grasp when no velocity ctrl command.
desired_vel = physics.bind(self.actuators[0]).ctrl
if np.abs(desired_vel) < _VELOCITY_CTRL_TOL:
return consts.NO_GRASP
# If ctrl is positive, the gripper is closing. Hence, inward grasp.
if desired_vel > 0:
return consts.INWARD_GRASP
else:
return consts.OUTWARD_GRASP
@property
def collision_geom_group(self):
collision_geom_group = [
geom.full_identifier for geom in self._collision_geoms
]
return collision_geom_group
def _is_geom_in_collision(physics: mjcf.Physics,
geom_name: str,
geom_exceptions: Optional[List[str]] = None) -> bool:
"""Returns true if a geom is in collision in the physics object."""
for contact in physics.data.contact:
geom1_name = physics.model.id2name(contact.geom1, 'geom')
geom2_name = physics.model.id2name(contact.geom2, 'geom')
if contact.dist > 1e-8:
continue
if (geom1_name == geom_name and geom2_name not in geom_exceptions) or (
geom2_name == geom_name and geom1_name not in geom_exceptions):
return True
return False
def _are_all_collision_geoms_colliding(physics: mjcf.Physics,
mjcf_root: mjcf.RootElement) -> bool:
"""Returns true if the collision geoms in the model are colliding."""
collision_geoms = [
mjcf_root.find('geom', name).full_identifier
for name in _PAD_GEOM_NAMES
]
return all([
_is_geom_in_collision(physics, geom, collision_geoms)
for geom in collision_geoms
])
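# Minimal usage sketch (illustrative; assumes a dm_control/dm_robotics setup
# that is not shown here):
#   gripper = Robotiq2F85()
#   physics = mjcf.Physics.from_mjcf_model(gripper.mjcf_model)
#   gripper.convert_position(0.4)  # -> 128.0 for a [0, 0.8] joint range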
| {
"content_hash": "1e23ab0cdd1c278768cc74509dba71e0",
"timestamp": "",
"source": "github",
"line_count": 393,
"max_line_length": 94,
"avg_line_length": 36.73791348600509,
"alnum_prop": 0.6481507133952071,
"repo_name": "deepmind/dm_robotics",
"id": "263f73c5f57c9cb4d91751602dcd655fa151d739",
"size": "15034",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "py/moma/models/end_effectors/robot_hands/robotiq_2f85.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "479450"
},
{
"name": "CMake",
"bytes": "34173"
},
{
"name": "Jupyter Notebook",
"bytes": "106284"
},
{
"name": "Python",
"bytes": "1413203"
},
{
"name": "Shell",
"bytes": "3244"
}
],
"symlink_target": ""
} |
from rest_framework import serializers
from rest_framework.authtoken.models import Token
from chronos.settings import MEDIA_URL
from urlparse import urlparse
import app
##############################
# --------- Users! --------- #
##############################
class ChronosPublicUserSerializer(serializers.ModelSerializer):
class Meta:
model = app.models.ChronosUser
fields = ('id', 'username',)
class SimpleEventSerializer(serializers.ModelSerializer):
class Meta:
model = app.models.Events
fields = ('id', 'name',)
class EventIdSerializer(serializers.ModelSerializer):
class Meta:
model = app.models.Events
fields = ('id',)
class SimpleVoteSerializer(serializers.ModelSerializer):
event = EventIdSerializer()
direction = serializers.IntegerField(min_value=-1, max_value=1)
class Meta:
model = app.models.Vote
fields = ('event', 'direction',)
class ChronosUserSerializer(serializers.ModelSerializer):
saved_events = EventIdSerializer(many=True)
reported_events = serializers.SerializerMethodField()
voted_events = serializers.SerializerMethodField()
class Meta:
model = app.models.ChronosUser
fields = ('id', 'first_name', 'last_name', 'username', 'email', 'userType', 'place_id', 'place_name', 'saved_events', 'reported_events', 'voted_events',)
def get_reported_events(self, obj):
return [SimpleReportSerializer(report).data for report in app.models.Reports.objects.filter(user=obj.id)]
def get_voted_events(self, obj):
return [SimpleVoteSerializer(v).data for v in app.models.Vote.objects.filter(user=obj.id)]
class ChronosUserRegisterSerializer(serializers.ModelSerializer):
class Meta:
model = app.models.ChronosUser
fields = ('id', 'username', 'password', 'email', 'first_name', 'last_name', 'userType', 'place_id', 'place_name')
def __init__(self, *args, **kwargs):
fields = kwargs.pop('fields', None)
        super(ChronosUserRegisterSerializer, self).__init__(*args, **kwargs)
if fields:
allowed = set(fields)
existing = set(self.fields.keys())
for field_name in existing - allowed:
self.fields.pop(field_name)
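    # This mirrors DRF's documented "dynamic fields" pattern: an optional
    # `fields` kwarg restricts output to the named subset, e.g.
    #   ChronosUserRegisterSerializer(user, fields=('id', 'username'))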
def create(self, validated_data):
user = app.models.ChronosUser.objects.create(**validated_data)
user.set_password(validated_data['password'])
user.save()
token, created = Token.objects.get_or_create(user=user)
return token, created, user
def validate_username(self, value):
"""
Ensure that the username doesn't already exist
"""
if app.models.ChronosUser.objects.filter(username=value).exists():
raise serializers.ValidationError("Username already exists")
return value
def validate_email(self, value):
"""
Ensure that the email doesn't already exist
"""
if app.models.ChronosUser.objects.filter(email=value).exists():
raise serializers.ValidationError("Email already exists")
return value
class ChronosUserUpdateSerializer(serializers.ModelSerializer):
password = serializers.CharField(max_length=128, required=False)
class Meta:
model = app.models.ChronosUser
fields = ('id', 'password', 'email', 'first_name', 'last_name', 'place_id', 'place_name')
def validate_email(self, value):
"""
Ensure that the email doesn't already exist
"""
if app.models.ChronosUser.objects.filter(email=value).exists():
raise serializers.ValidationError("Email already exists")
return value
def update(self, instance, validated_data):
if validated_data.get('password'):
            instance.set_password(validated_data['password'])
instance.email = validated_data.get('email', instance.email)
instance.first_name = validated_data.get('first_name', instance.first_name)
instance.last_name = validated_data.get('last_name', instance.last_name)
instance.place_id = validated_data.get('place_id', instance.place_id)
instance.place_name = validated_data.get('place_name', instance.place_name)
instance.save()
return instance
##############################
# --------- Other! --------- #
##############################
class TagSerializer(serializers.ModelSerializer):
class Meta:
model = app.models.Tag
fields = ('name','usage')
class TagEventSerializer(serializers.Serializer):
name = serializers.CharField(max_length=100)
class ImageUrlField(serializers.ImageField):
"""
Django Rest Framework returns the url to the image, but it assumes that
the called url is the top level url. So, it will append the MEDIA_URL to the
caller url rather than the top level domain as expected. This is due to the way
that request.build_absolute_uri works in Django at the moment.
To fix this, we need to rip out the top level domain, and rebuild the url the way
we want. To rip out the top level domain, I found the following code on
stack overflow: http://stackoverflow.com/questions/9626535/get-domain-name-from-url
"""
def to_representation(self, value):
invalid_url = super(serializers.ImageField, self).to_representation(value)
parsed_uri = urlparse(invalid_url)
domain = '{uri.scheme}://{uri.netloc}/'.format(uri=parsed_uri)
return domain + MEDIA_URL + str(value)
class ImageReadSeralizer(serializers.ModelSerializer):
image = ImageUrlField()
class Meta:
model = app.models.Image
fields = ('image', )
#https://medium.com/@jxstanford/django-rest-framework-file-upload-e4bc8de669c0
class ImageWriteSerializer(serializers.HyperlinkedModelSerializer):
owner = serializers.SlugRelatedField(read_only=True, slug_field='id')
image = ImageUrlField()
class Meta:
model = app.models.Image
fields = ('id', 'created', 'image', 'owner',)
##############################
# --------- Events! -------- #
##############################
class EventWriteSerializer(serializers.ModelSerializer):
tags = TagEventSerializer(many=True, required=False)
class Meta:
model = app.models.Events
fields = ('id', 'name', 'description', 'creator', 'picture', "create_date", "edit_date" , "start_date", "end_date", "report", "is_deleted", "place_id", "place_name", "tags")
def __init__(self, *args, **kwargs):
fields = kwargs.pop('fields', None)
        super(EventWriteSerializer, self).__init__(*args, **kwargs)
if fields:
allowed = set(fields)
existing = set(self.fields.keys())
for field_name in existing - allowed:
self.fields.pop(field_name)
def validate(self, data):
        if data['start_date'] >= data['end_date']:
            raise serializers.ValidationError('Start date must be before the end date')
return data
def create(self, validated_data):
        # Tags is a many-to-many field on the Event model, and therefore
        # cannot be set through the objects.create method
tags = validated_data["tags"]
validated_data.pop("tags", None)
event = app.models.Events.objects.create(**validated_data)
# Get all the tags that already exist
tag_names = [tag["name"] for tag in tags]
existing_tag_queryset = app.models.Tag.objects.filter(name__in=tag_names)
for tag in existing_tag_queryset:
tag.usage += 1
tag.save()
        # Get all the tags that don't exist in the DB yet, and create them in bulk
        existing_tag_names = set(e.name for e in existing_tag_queryset)
        missing_tag_names = [name for name in tag_names if name not in existing_tag_names]
#TODO: Attempt to get the bulk create working. It is inefficient to create in a list like this. The problem is that
# bulk_create will not call save, meaning all newly created Tags will not be in the database quite yet
#missing_tags = app.models.Tag.objects.bulk_create([app.models.Tag(name=missing_tag_name) for missing_tag_name in missing_tag_names])
missing_tags = [app.models.Tag.objects.create(name=missing_tag_name) for missing_tag_name in missing_tag_names]
# Add the tag object to our newly created event
tag_objs = list(existing_tag_queryset) + missing_tags
for tag_obj in tag_objs:
event.tags.add(tag_obj)
return event
def update(self, instance, validated_data):
"""
Only update the fields that are necessary
"""
instance.name = validated_data.get('name', instance.name)
instance.description = validated_data.get('description', instance.description)
instance.picture = validated_data.get('picture', instance.picture)
instance.start_date = validated_data.get('start_date', instance.start_date)
instance.end_date = validated_data.get('end_date', instance.end_date)
instance.edit_date = validated_data.get('edit_date', instance.edit_date)
instance.place_id = validated_data.get('place_id', instance.place_id)
instance.save()
return instance
class EventReadSerializer(serializers.ModelSerializer):
"""
This is because it's almost impossible to pull all the information about a tag only when doing a read operation
using one central event serializer. It became necessary to split them up into a write serializer, and a read serializer to
allow the writing of events using only the foreign keys of tags.
"""
tags = TagSerializer(many=True)
vote = serializers.SerializerMethodField()
creator = ChronosPublicUserSerializer()
picture = ImageReadSeralizer()
class Meta:
model = app.models.Events
fields = ('id', 'name', 'description', 'creator', "create_date", "edit_date" , "start_date", "end_date", "vote", "upvote", "downvote", "report", "is_deleted", "picture", "place_id", "place_name", "tags")
def get_vote(self, obj):
return obj.upvote - obj.downvote
class VoteEventSerializer(serializers.Serializer):
direction = serializers.IntegerField(min_value=-1, max_value=1)
class Meta:
model = app.models.Vote
fields = ('direction', 'event', 'user')
def create(self, validated_data):
vote = app.models.Vote.objects.create(**validated_data)
vote.save()
event = validated_data['event']
direction = validated_data['direction']
if direction == 1:
event.upvote += 1
elif direction == -1:
event.downvote += 1
event.save()
return vote
def update(self, instance, validated_data):
direction = validated_data.get('direction')
event = validated_data['event']
if direction is not None and instance.direction != direction:
if instance.direction == 1:
event.upvote -= 1
elif instance.direction == -1:
event.downvote -= 1
if direction == 1:
event.upvote += 1
elif direction == -1:
event.downvote += 1
event.save()
instance.direction = direction
instance.save()
return instance
class ReportEventSerializer(serializers.Serializer):
reason = serializers.CharField(max_length=20, required=True)
class Meta:
model = app.models.Reports
fields = ('reason', 'event', 'user',)
def create(self, validated_data):
report = app.models.Reports.objects.create(**validated_data)
report.save()
return report
def update(self, instance, validated_data):
reason = validated_data.get('reason')
if reason is not None and instance.reason != reason:
instance.reason = reason
instance.save()
return instance
class SimpleReportSerializer(serializers.ModelSerializer):
class Meta:
model = app.models.Reports
fields = ('reason', 'event')
##############################
# --------- Comments! ------ #
##############################
class RecursiveField(serializers.Serializer):
def to_representation(self, value):
serializer = self.parent.parent.__class__(value, context=self.context)
return serializer.data
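# Illustrative output shape (field values are hypothetical): for a comment
# with a single nested reply, CommentReadSerializer below produces
#   {'id': 1, 'content': 'parent', ..., 'children': [
#       {'id': 2, 'content': 'reply', ..., 'children': []}]}
# RecursiveField re-instantiates the parent serializer class for each child,
# so arbitrary nesting depth is supported.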
class CommentReadSerializer(serializers.ModelSerializer):
children = RecursiveField(many=True)
user = ChronosPublicUserSerializer()
class Meta:
model = app.models.Comments
fields = ('id', 'content', 'event', 'user', 'date', 'depth', 'path', 'children')
class CommentWriteSerializer(serializers.ModelSerializer):
class Meta:
model = app.models.Comments
fields = ('id', 'content', 'event', 'user', 'date', 'depth', 'path', 'parent')
def __init__(self, *args, **kwargs):
fields = kwargs.pop('fields', None)
        super(CommentWriteSerializer, self).__init__(*args, **kwargs)
if fields:
allowed = set(fields)
existing = set(self.fields.keys())
for field_name in existing - allowed:
self.fields.pop(field_name)
def create(self, validated_data):
comment = app.models.Comments.objects.create(**validated_data)
comment.save()
return comment
| {
"content_hash": "7a8f3f7305399e57644bccbdfab16d6c",
"timestamp": "",
"source": "github",
"line_count": 344,
"max_line_length": 211,
"avg_line_length": 38.81395348837209,
"alnum_prop": 0.6358597962852007,
"repo_name": "paulmoon/chronos",
"id": "a34e6199f26bd5f03084a12d27763528c8936df2",
"size": "13352",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/chronos/app/serializers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "24139"
},
{
"name": "CSS",
"bytes": "23956"
},
{
"name": "HTML",
"bytes": "53760"
},
{
"name": "JavaScript",
"bytes": "130515"
},
{
"name": "Python",
"bytes": "51754"
}
],
"symlink_target": ""
} |
"""
Computes auto-correlation function for irregularly sampled time series.
Uses the method proposed in:
Rehfeld, K., Marwan, N., Heitzig, J., & Kurths, J. (2011). Comparison of correlation analysis techniques for irregularly sampled time series. Nonlinear Processes in Geophysics, 18(3), 389-404.
This script provides analyses for this publication:
J.F. Donges, R.V. Donner, N. Marwan, S.F.M. Breitenbach, K. Rehfeld, and J. Kurths,
Nonlinear regime shifts in Holocene Asian monsoon variability: Potential impacts on cultural change and migratory patterns,
Climate of the Past 11, 709-741 (2015),
DOI: 10.5194/cp-11-709-2015
"""
#
# Imports
#
import sys
import numpy as np
import pylab
import progressbar
#
# Settings
#
# Filename
FILENAME_X = "../../data/raw_proxy_data/Dongge_DA.dat"
# Resolution of cross-correlation (units of time)
DELTA_LAG = 10 # Measured in years here
# Maximum lag index
MAX_LAG_INDEX = 100
# Toggle detrending
DETRENDING = True
DETRENDING_WINDOW_SIZE = 1000. # Measured in years here
#
# Functions
#
def detrend_time_series(time, data, window_size):
# Get length of data array
n = data.shape[0]
# Initialize a local copy of data array
detrended_data = np.empty(n)
# Detrend data
for j in xrange(n):
# Get lower and upper bound of window in time domain
lower_bound = time[j] - window_size / 2.
upper_bound = time[j] + window_size / 2.
# Get time indices lying within the window
window_indices = np.logical_and(time >= lower_bound, time <= upper_bound)
        # Subtract the window mean from the data point in the center
detrended_data[j] = data[j] - data[window_indices].mean()
return detrended_data
def gaussian(x, std):
    """
    Returns the value of a gaussian distribution with zero mean and
    standard deviation std, evaluated at x.
    """
    return 1 / (np.sqrt(2 * np.pi) * std) * np.exp(-x ** 2 / (2 * std ** 2))
def kernel_auto_correlation_est(x, time_diff, kernel_func, kernel_param,
delta_lag, max_lag_index):
"""
Estimates auto correlation using a kernel function.
"""
# Normalize time series
x -= x.mean()
x /= x.std()
# Initialize discrete auto-correlation function
auto_correlation = np.zeros(max_lag_index + 1)
# Loop over all positive lags and zero lag
for k in xrange(max_lag_index + 1):
# Calculate b matrix
b = kernel_func(k * delta_lag - time_diff, kernel_param)
        # Calculate numerator
        numerator = np.dot(x, np.dot(b, x.transpose()))
        # Calculate denominator
        denominator = b.sum()
        # Calculate auto-correlation
        auto_correlation[k] = numerator / denominator
lag_times = delta_lag * np.arange(max_lag_index + 1)
return (lag_times, auto_correlation)
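# Quick self-check sketch (illustrative only, not part of the analysis):
#   t = np.arange(100.)
#   y = np.sin(0.1 * t)
#   td = t[:, None] - t[None, :]
#   lags, acf = kernel_auto_correlation_est(y.copy(), td, gaussian, 0.25, 1., 10)
# After the internal normalization, acf[0] should be close to 1.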
#
# Main script
#
# Load record x
data_x = np.loadtxt(FILENAME_X, unpack=False, usecols=(0,1,), comments="#")
#data_x = np.fromfile(FILENAME_X, sep=" ")
time_x = data_x[:,0]
x = data_x[:,1]
# Detrending of time series using moving window averages
if DETRENDING:
x = detrend_time_series(time_x, x, DETRENDING_WINDOW_SIZE)
# Get length of records
N_x = len(time_x)
# Get recommended standard deviation of gaussian Kernel (Kira Rehfeld's
# NPG paper)
sigma = 0.25 * np.diff(time_x).mean()
print "Length of record x:", N_x
print "Mean sampling time x:", np.diff(time_x).mean()
print "Recommended standard deviation of gaussian Kernel:", sigma
# Calculate matrix of time differences as a vectorized outer difference
# (equivalent to the double loop time_diff[i, j] = time_x[i] - time_x[j])
time_diff = time_x[:, None] - time_x[None, :]
# Estimate auto-correlation function
(lag_times, auto_correlation) = kernel_auto_correlation_est(x=x.copy(), time_diff=time_diff, kernel_func=gaussian, kernel_param=sigma, delta_lag=DELTA_LAG, max_lag_index=MAX_LAG_INDEX)
#
# Save results
#
results = np.zeros((MAX_LAG_INDEX + 1, 2))
results[:,0] = lag_times
results[:,1] = auto_correlation
np.savetxt("kernel_acf_dongge.txt", results)
#
# Plot results
#
# Set plotting parameters (for Clim. Past paper)
params = { 'figure.figsize': (6.,6.),
'axes.labelsize': 12,
'text.fontsize': 12,
'xtick.labelsize': 12,
'ytick.labelsize': 12,
'legend.fontsize': 10,
'title.fontsize': 12,
'text.usetex': False,
'font': 'Helvetica',
'mathtext.bf': 'helvetica:bold',
'xtick.major.pad': 6,
'ytick.major.pad': 6,
'xtick.major.size': 5,
'ytick.major.size': 5,
'tick.labelsize': 'small'
}
#pylab.rcParams.update(params)
# Plot time series
pylab.figure(1)
pylab.plot(time_x, x)
pylab.xlabel("Age (y B.P.)")
pylab.ylabel("Normalized values")
pylab.figure(2)
pylab.plot(lag_times, auto_correlation, "k")
pylab.axhline(y=1 / np.e, color="red")
pylab.xlabel("Time delay [y]")
pylab.ylabel("ACF")
pylab.ylim(-0.5,1)
pylab.savefig("auto_corr_irregular.pdf")
pylab.show()
| {
"content_hash": "84120b463f0ccacaf555cdaed76ab5d9",
"timestamp": "",
"source": "github",
"line_count": 195,
"max_line_length": 192,
"avg_line_length": 25.866666666666667,
"alnum_prop": 0.6461141950832673,
"repo_name": "pik-copan/pyregimeshifts",
"id": "4df7f69f361443c28b6842f8f396c0647a92a172",
"size": "5228",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/auto_correlation_function/auto_correlation_irregular_sampling.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "114224"
},
{
"name": "Roff",
"bytes": "176"
}
],
"symlink_target": ""
} |
"""
WSGI config for get_a_room project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "get_a_room.settings")
application = get_wsgi_application()
| {
"content_hash": "20c9d333d397bc9869aced1c2e9142e8",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 78,
"avg_line_length": 24.8125,
"alnum_prop": 0.7632241813602015,
"repo_name": "ViktorMarinov/get-a-room",
"id": "b37f9e8c9cad598ea8fd74c0bf7b6463e3f501c8",
"size": "397",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "get_a_room/get_a_room/wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "45753"
},
{
"name": "Shell",
"bytes": "3103"
}
],
"symlink_target": ""
} |
"""Airflow module for email backend using AWS SES"""
from typing import List, Optional, Union
from airflow.providers.amazon.aws.hooks.ses import SesHook
def send_email(
from_email: str,
to: Union[List[str], str],
subject: str,
html_content: str,
files: Optional[List] = None,
cc: Optional[Union[List[str], str]] = None,
bcc: Optional[Union[List[str], str]] = None,
mime_subtype: str = 'mixed',
mime_charset: str = 'utf-8',
conn_id: str = 'aws_default',
**kwargs,
) -> None:
"""Email backend for SES."""
hook = SesHook(aws_conn_id=conn_id)
hook.send_email(
mail_from=from_email,
to=to,
subject=subject,
html_content=html_content,
files=files,
cc=cc,
bcc=bcc,
mime_subtype=mime_subtype,
mime_charset=mime_charset,
)
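# Usage sketch (illustrative): to route Airflow notification emails through
# SES, point the [email] section of airflow.cfg at this function, e.g.
#   [email]
#   email_backend = airflow.providers.amazon.aws.utils.emailer.send_email
# The AWS connection defaults to 'aws_default'; exact configuration keys can
# vary between Airflow versions.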
| {
"content_hash": "1148ec448ec8cc8817f48b269c7b7712",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 58,
"avg_line_length": 25.757575757575758,
"alnum_prop": 0.6011764705882353,
"repo_name": "mistercrunch/airflow",
"id": "33cca1a28e017380e5d8fa628783a78b010b30b8",
"size": "1637",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "airflow/providers/amazon/aws/utils/emailer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "36341"
},
{
"name": "HTML",
"bytes": "99243"
},
{
"name": "JavaScript",
"bytes": "891460"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "773270"
},
{
"name": "Shell",
"bytes": "5659"
}
],
"symlink_target": ""
} |
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'BuildConfiguration.build_script'
db.add_column(u'webui_buildconfiguration', 'build_script',
self.gf('django.db.models.fields.TextField')(null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'BuildConfiguration.build_script'
db.delete_column(u'webui_buildconfiguration', 'build_script')
models = {
u'webui.branch': {
'Meta': {'object_name': 'Branch'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'maintainer': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '300'})
},
u'webui.buildconfiguration': {
'Meta': {'object_name': 'BuildConfiguration'},
'auto_build': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'build_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'build_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'depends_list': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'git_branch': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'git_pass': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'git_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'git_user': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'install_root': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'last_build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_commit_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'pkg_branch': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['webui.Branch']"}),
'post_install_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'pre_install_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'version': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
},
u'webui.packagenamemapping': {
'Meta': {'object_name': 'PackageNameMapping'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'orig_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'to_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
}
}
    complete_apps = ['webui']
| {
"content_hash": "520c9e1b3f0975b79d022a5636da0403",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 128,
"avg_line_length": 59.810344827586206,
"alnum_prop": 0.5601037763044104,
"repo_name": "daniel-yavorovich/cpan2repo",
"id": "9b97f71d7607b034e46267426a99eee1fd79d32f",
"size": "3493",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webui/migrations/0005_auto__add_field_buildconfiguration_build_script.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "98"
},
{
"name": "JavaScript",
"bytes": "9402"
},
{
"name": "Perl",
"bytes": "558"
},
{
"name": "Python",
"bytes": "70320"
},
{
"name": "Shell",
"bytes": "1320"
}
],
"symlink_target": ""
} |
from rest_framework_3 import serializers
class PostSerializer(serializers.Serializer):
"""
Blog post serializer
"""
title = serializers.CharField(max_length=50)
| {
"content_hash": "7dd2a7927ab7cc2d79a21fa1e0d8acca",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 48,
"avg_line_length": 20,
"alnum_prop": 0.7166666666666667,
"repo_name": "coUrbanize/rest_framework_ember",
"id": "cc9005574b132225af987eb847268bf49a8fdd1a",
"size": "180",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "example/api/serializers/post.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "29620"
}
],
"symlink_target": ""
} |
class GuildUpgrade(object):
"""
https://wiki.guildwars2.com/wiki/API:2/guild/upgrades
"""
def __init__(self):
pass
def __repr__(self):
return ''
| {
"content_hash": "43d0227040ac8a68aaf249c3fda8356b",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 54,
"avg_line_length": 15.6,
"alnum_prop": 0.6282051282051282,
"repo_name": "anasazy/gw2api-python",
"id": "bbb42e218ef4b2dbc0c27a2253c92f405c3f93e7",
"size": "245",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/models/guild_upgrade.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "34010"
}
],
"symlink_target": ""
} |
"""
Copyright (c) 2012, Rui Carmo
Description: Docstring utility functions
License: MIT (see LICENSE.md for details)
"""
import os, sys, logging
import inspect
from bottle import app
log = logging.getLogger()
def docs():
"""Gather all docstrings related to routes and return them grouped by module"""
routes = []
modules = {}
for route in app().routes:
doc = inspect.getdoc(route.callback) or inspect.getcomments(route.callback)
if not doc:
doc = ''
module = inspect.getmodule(route.callback).__name__
item = {
'method': route.method,
'route': route.rule,
'function': route.callback.__name__,
'module': module,
'doc': inspect.cleandoc(doc)
}
if not module in modules:
modules[module] = []
modules[module].append(item)
    return modules
| {
"content_hash": "ee637d46ea05d3d8147b4ee2ac783c04",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 83,
"avg_line_length": 27.151515151515152,
"alnum_prop": 0.5959821428571429,
"repo_name": "Velmont/digital-signage-server",
"id": "5b8cd3da3d2b9adcc30867012bb38534162c1376",
"size": "943",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/utils/dockit.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "201702"
},
{
"name": "HTML",
"bytes": "31011"
},
{
"name": "JavaScript",
"bytes": "1251726"
},
{
"name": "Python",
"bytes": "302215"
},
{
"name": "Shell",
"bytes": "395"
},
{
"name": "Smarty",
"bytes": "5862"
}
],
"symlink_target": ""
} |
from blueman.plugins.AppletPlugin import AppletPlugin
import dbus
from gi.repository import GObject
from blueman.main.SignalTracker import SignalTracker
from blueman.gui.Notification import Notification
from blueman.Sdp import *
from blueman.Functions import get_icon, composite_icon
import weakref
class ConnectionHandler:
def __init__(self, parent, device, uuid, reply, err):
self.parent = parent
self.device = device
self.uuid = uuid
self.reply = reply
self.err = err
self.rfcomm_dev = None
self.timeout = None
self.signals = SignalTracker()
self.signals.Handle("dbus", self.parent.bus,
self.on_mm_device_added,
"DeviceAdded",
"org.freedesktop.ModemManager")
        # For some reason these handlers take a reference and don't give it
        # back, so I have to work around it :(
w = weakref.ref(self)
device.Services["serial"].Connect(uuid,
reply_handler=lambda *args: w() and w().on_connect_reply(*args),
error_handler=lambda *args: w() and w().on_connect_error(*args))
def __del__(self):
dprint("deleting")
def on_connect_reply(self, rfcomm):
self.rfcomm_dev = rfcomm
self.timeout = GObject.timeout_add(10000, self.on_timeout)
def on_connect_error(self, *args):
self.err(*args)
self.cleanup()
def cleanup(self):
if self.timeout:
GObject.source_remove(self.timeout)
self.signals.DisconnectAll()
del self.device
def on_mm_device_added(self, path):
dprint(path)
props = self.parent.bus.call_blocking("org.freedesktop.ModemManager",
path,
"org.freedesktop.DBus.Properties",
"GetAll",
"s",
["org.freedesktop.ModemManager.Modem"])
if self.rfcomm_dev and props["Driver"] == "bluetooth" and props["Device"] in self.rfcomm_dev:
dprint("It's our bluetooth modem!")
modem = get_icon("modem", 24)
blueman = get_icon("blueman", 48)
icon = composite_icon(blueman, [(modem, 24, 24, 255)])
Notification(_("Bluetooth Dialup"),
_("DUN connection on %s will now be available in Network Manager") % self.device.Alias,
pixbuf=icon,
status_icon=self.parent.Applet.Plugins.StatusIcon)
self.reply(self.rfcomm_dev)
self.cleanup()
def on_timeout(self):
self.timeout = None
self.err(dbus.DBusException(_("Modem Manager did not support the connection")))
self.cleanup()
class NMDUNSupport(AppletPlugin):
__depends__ = ["StatusIcon", "DBusService"]
__conflicts__ = ["PPPSupport", "NMIntegration"]
__icon__ = "modem"
__author__ = "Walmis"
__description__ = _("Provides support for Dial Up Networking (DUN) with ModemManager and NetworkManager 0.8")
__priority__ = 1
def on_load(self, applet):
self.bus = dbus.SystemBus()
def on_unload(self):
pass
def rfcomm_connect_handler(self, device, uuid, reply, err):
uuid16 = sdp_get_serial_type(device.Address, uuid)
if DIALUP_NET_SVCLASS_ID in uuid16:
ConnectionHandler(self, device, uuid, reply, err)
return True
else:
return False
| {
"content_hash": "74bc495d1b5128f95ff797bac2673caa",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 110,
"avg_line_length": 28.885714285714286,
"alnum_prop": 0.6752390372568414,
"repo_name": "nmercier/linux-cross-gcc",
"id": "84ab1f72e02358b2a3c06601b075668046d44553",
"size": "3800",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "linux/lib/python2.7/dist-packages/blueman/plugins/applet/NMDUNSupport.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1047092"
},
{
"name": "C++",
"bytes": "151335"
},
{
"name": "Makefile",
"bytes": "82796"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "Python",
"bytes": "29123266"
},
{
"name": "Shell",
"bytes": "14668"
}
],
"symlink_target": ""
} |
from rest_framework.views import APIView
from rest_framework.exceptions import APIException, NotFound
from rest_framework.response import Response
from rest_framework.authentication import TokenAuthentication
from rest_framework.permissions import IsAuthenticated
from returntoclinicstation.models import *
from datetime import *
from django.core import serializers
from django.http import HttpResponse, HttpResponseForbidden, HttpResponseBadRequest, HttpResponseServerError, HttpResponseNotFound
import numbers
from common.decorators import *
import json
import sys
class ReturnToClinicStationView(APIView):
authentication_classes = (TokenAuthentication,)
permission_classes = (IsAuthenticated,)
def stateToDb(self, state):
ret = None
if state == "created":
ret = '1'
elif state == "scheduled_dest":
ret = '2'
elif state == "checked_out_dest":
ret = '3'
elif state == "scheduled_return":
ret = '4'
return ret
def dbToState(self, db):
ret = None
if db == '1':
ret = "created"
elif db == '2':
ret = "scheduled_dest"
elif db == '3':
ret = "checked_out_dest"
elif db == '4':
ret = "scheduled_return"
return ret
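    # State codes as stored in the database (see stateToDb/dbToState above):
    # '1' created -> '2' scheduled_dest -> '3' checked_out_dest ->
    # '4' scheduled_return.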
@log_request
def get(self, request, returntoclinicstation_id=None, format=None):
notFound = False
badRequest = False
returntoclinicstation = None
if returntoclinicstation_id:
try:
returntoclinicstation = ReturnToClinicStation.objects.get(id = returntoclinicstation_id)
except:
returntoclinicstation = None
else:
kwargs = {}
try:
clinicid = request.GET.get('clinic', '')
if clinicid != '':
clinicid = int(clinicid)
try:
aClinic = Clinic.objects.get(id=clinicid)
kwargs["clinic"] = aClinic
except:
notFound = True
except:
pass
try:
patientid = request.GET.get('patient', '')
if patientid != '':
patientid = int(patientid)
try:
aPatient = Patient.objects.get(id=patientid)
kwargs["patient"] = aPatient
except:
notFound = True
except:
pass
try:
stationid = request.GET.get('station', '')
if stationid != '':
stationid = int(stationid)
try:
aStation = Station.objects.get(id=stationid)
kwargs["station"] = aStation
except:
notFound = True
except:
pass
try:
requestingclinicstationid = request.GET.get('requestingclinicstation', '')
if requestingclinicstationid != '':
requestingclinicstationid = int(requestingclinicstationid)
try:
aRequestingClinicStation = ClinicStation.objects.get(id=requestingclinicstationid)
kwargs["requestingclinicstation"] = aRequestingClinicStation
except:
notFound = True
except:
pass
try:
state = request.GET.get('state', '')
if state != '':
stateDb = self.stateToDb(state)
if stateDb == None:
badRequest = True
else:
kwargs["state"] = stateDb
except:
pass
if (not badRequest) and (not notFound) and (len(kwargs) == 0):
returntoclinicstation = ReturnToClinicStation.objects.all()
elif not badRequest and not notFound:
try:
returntoclinicstation = ReturnToClinicStation.objects.filter(**kwargs)
except:
returntoclinicstation = None
        if badRequest:
            return HttpResponseBadRequest()
        elif notFound or not returntoclinicstation:
            raise NotFound
elif returntoclinicstation_id:
ret = {}
x = returntoclinicstation
ret["clinic"] = x.clinic.id
ret["patient"] = x.patient.id
ret["station"] = x.station.id
ret["requestingclinicstation"] = x.requestingclinicstation.id
ret["createtime"] = x.createtime
ret["statechangetime"] = x.statechangetime
ret["state"] = self.dbToState(x.state)
ret["id"] = x.id
else:
ret = []
for x in returntoclinicstation:
m = {}
m["id"] = x.id
ret.append(m)
return Response(ret)
@log_request
def put(self, request, id=None, format=None):
badRequest = False
implError = False
notFound = False
state = None
data = json.loads(request.body)
try:
state = data["state"]
except:
pass
if state == None:
badRequest = True
stateDb = self.stateToDb(state)
if stateDb == None:
badRequest = True
if not badRequest:
returntoclinicstation = None
# see if the returntoclinicstation already exists
try:
returntoclinicstation = ReturnToClinicStation.objects.get(id=id)
except:
pass
if not returntoclinicstation:
notFound = True
else:
try:
returntoclinicstation.state=stateDb
returntoclinicstation.save()
except:
implError = True
implMsg = sys.exc_info()[0]
if badRequest:
return HttpResponseBadRequest()
if notFound:
return HttpResponseNotFound()
if implError:
return HttpResponseServerError(implMsg)
else:
return Response({})
@log_request
def post(self, request, format=None):
badRequest = False
notFound = False
implError = False
aClinic = None
aPatient = None
aStation = None
aRequestingClinicStation = None
state = None
data = json.loads(request.body)
try:
clinic = data["clinic"]
except:
badRequest = True
try:
patient = data["patient"]
except:
badRequest = True
try:
station = data["station"]
except:
badRequest = True
try:
requestingclinicstation = data["requestingclinicstation"]
except:
badRequest = True
if not badRequest:
try:
aClinic = Clinic.objects.get(id=clinic)
except:
aClinic = None
try:
aStation = Station.objects.get(id=station)
except:
aStation = None
try:
aRequestingClinicStation = ClinicStation.objects.get(id=requestingclinicstation)
except:
aRequestingClinicStation = None
try:
aPatient = Patient.objects.get(id=patient)
except:
aPatient = None
if not aClinic or not aStation or not aPatient or not aRequestingClinicStation:
notFound = True
if not badRequest and not notFound:
returntoclinicstation = None
# see if the returntoclinicstation already exists
try:
returntoclinicstation = ReturnToClinicStation.objects.filter(clinic=aClinic,
patient=aPatient,
station=aStation,
requestingclinicstation=aRequestingClinicStation)
if not returntoclinicstation or len(returntoclinicstation) == 0:
returntoclinicstation = None
except:
implMsg = "ReturnToClinicStation.objects.filter {} {}".format(sys.exc_info()[0], data)
implError = True
if not returntoclinicstation:
try:
returntoclinicstation = ReturnToClinicStation(clinic=aClinic,
patient=aPatient,
station=aStation,
requestingclinicstation=aRequestingClinicStation,
state='1')
if returntoclinicstation:
returntoclinicstation.save()
else:
implMsg = "Unable to create returntoclinicstation"
implError = True
except:
implMsg = "ReturnToClinicStation create {} {}".format(sys.exc_info()[0], data)
implError = True
if badRequest:
return HttpResponseBadRequest()
if notFound:
return HttpResponseNotFound()
if implError:
return HttpResponseServerError(implMsg)
else:
return Response({'id': returntoclinicstation.id})
@log_request
def delete(self, request, returntoclinicstation_id=None, format=None):
returntoclinicstation = None
# see if the returntoclinicstation resource exists
try:
returntoclinicstation = ReturnToClinicStation.objects.get(id=returntoclinicstation_id)
except:
returntoclinicstation = None
if not returntoclinicstation:
raise NotFound
else:
returntoclinicstation.delete()
return Response({})
| {
"content_hash": "a36f698589b735efd5723122ff0ad743",
"timestamp": "",
"source": "github",
"line_count": 309,
"max_line_length": 130,
"avg_line_length": 33.19093851132686,
"alnum_prop": 0.5113104524180967,
"repo_name": "slogan621/tscharts",
"id": "e429852217d595eec8d6ec6156c2f59bfd1f457a",
"size": "10867",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "returntoclinicstation/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "763"
},
{
"name": "Python",
"bytes": "1690774"
},
{
"name": "Shell",
"bytes": "2706"
}
],
"symlink_target": ""
} |
from django.core.cache import cache
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.translation import ugettext_lazy as _
from autoslug import AutoSlugField
from . import utils
class Category(models.Model):
name = models.CharField(max_length=200, db_index=True,
verbose_name=_('Name'))
slug = AutoSlugField(
always_update=True, populate_from='name', unique=True,
slugify=utils.slugify_, db_index=True)
parent_category = models.ForeignKey('self', null=True, blank=True)
image = models.ImageField(upload_to=utils.category_img_path,
blank=True, verbose_name=_('Image'))
class Meta:
ordering = ['name']
verbose_name = _('Category')
verbose_name_plural = _('Categories')
def __str__(self):
return self.name
def get_absolute_url(self):
key = 'category_{}_abs_url'.format(self.id)
abs_url = cache.get(key)
if abs_url is None:
abs_url = reverse('shop:product_list_by_category',
kwargs={'category_slug': self.slug})
cache.set(key, abs_url)
return abs_url
def products(self):
key = 'category_{}_products'.format(self.id)
products = cache.get(key)
if products is None:
products = self.category_products.all()
cache.set(key, products)
return products
def subcategories(self):
key = 'category_{}_subcategories'.format(self.id)
subcategories = cache.get(key)
if subcategories is None:
subcategories = Category.objects.filter(
parent_category__id=self.id)
cache.set(key, subcategories)
return subcategories
def has_parent_category(self):
key = 'category_{}_has_parent_category'.format(self.id)
cached_value = cache.get(key)
if cached_value is None:
value = True if self.parent_category else False
cache.set(key, 'True' if value else 'False')
else:
value = True if cached_value == 'True' else False
return value
@property
def full_name(self): # to show in admin panel
if not self.has_parent_category():
return self.name
cat = self
full_name = self.name
while cat.has_parent_category():
full_name = "{} / {}".format(cat.parent_category.name, full_name)
cat = cat.parent_category
return full_name
class Manufacturer(models.Model):
name = models.CharField(
max_length=200, db_index=True, verbose_name=_('Manufacturer'))
slug = AutoSlugField(
always_update=True, populate_from='name', unique=True,
slugify=utils.slugify_, db_index=True)
image = models.ImageField(upload_to=utils.manufacturer_img_path,
blank=True, verbose_name=_('Image'))
class Meta:
ordering = ['name']
verbose_name = _('Manufacturer')
verbose_name_plural = _('Manufacturers')
def __str__(self):
return self.name
def get_absolute_url(self):
key = 'manufacturer_{}_abs_url'.format(self.id)
abs_url = cache.get(key)
if abs_url is None:
abs_url = reverse('shop:product_list_by_manufacturer',
kwargs={'manufacturer_slug': self.slug})
cache.set(key, abs_url)
return abs_url
@property
def products(self):
key = 'manufacturer_{}_products'.format(self.id)
products = cache.get(key)
if products is None:
products = self.manufacturer_products.all()
cache.set(key, products)
return products
@property
def products_qty(self):
key = 'manufacturer_{}_products_qty'.format(self.id)
products_qty = cache.get(key)
if products_qty is None:
products_qty = self.products.count()
cache.set(key, products_qty)
return products_qty
class Product(models.Model):
category = models.ForeignKey(
Category, related_name='category_products', verbose_name=_('Category'))
manufacturer = models.ForeignKey(
Manufacturer, related_name='manufacturer_products',
verbose_name=_('Manufacturer'))
name = models.CharField(
max_length=200, db_index=True, verbose_name=_('Name'))
model_name = models.CharField(
max_length=200, blank=True, verbose_name=_('Model'))
slug = AutoSlugField(
always_update=True, populate_from=utils.base_for_product_slug,
unique=True, slugify=utils.slugify_, db_index=True)
main_image = models.ImageField(upload_to=utils.product_main_img_path,
blank=True, verbose_name=_('Image'))
description = models.TextField(
blank=True, verbose_name=_('Description'))
price = models.DecimalField(
max_digits=10, decimal_places=2, verbose_name=_('Price'))
stock = models.PositiveIntegerField(
verbose_name=_('Stock Qty'))
available = models.BooleanField(
default=True, verbose_name=_('Available'))
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
class Meta:
ordering = ['name']
verbose_name = _('Product')
verbose_name_plural = _('Products')
def __str__(self):
return self.name
def get_absolute_url(self):
key = 'product_{}_abs_url'.format(self.id)
abs_url = cache.get(key)
if abs_url is None:
abs_url = reverse('shop:product_detail',
kwargs={'slug': self.slug})
cache.set(key, abs_url)
return abs_url
class ProductImage(models.Model):
product = models.ForeignKey(Product, related_name='images')
image = models.ImageField(upload_to=utils.product_img_path,
blank=True, verbose_name=_('Image'))
| {
"content_hash": "0831109c8e1b07d7202331228248fe87",
"timestamp": "",
"source": "github",
"line_count": 169,
"max_line_length": 79,
"avg_line_length": 35.57988165680474,
"alnum_prop": 0.5983701979045402,
"repo_name": "samitnuk/online_shop",
"id": "eaede28f22fef53b7a25f547d6963f108a8057d4",
"size": "6013",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/shop/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "28580"
},
{
"name": "HTML",
"bytes": "32446"
},
{
"name": "JavaScript",
"bytes": "1111"
},
{
"name": "Python",
"bytes": "104680"
}
],
"symlink_target": ""
} |
import os
import csv
import math
rawdata_dir = 'rawdata_to_select/'
selected_dir = 'selected/'
filelist = os.listdir(rawdata_dir)
write_recurerror = open('ErrorList/RecursionErrorList.txt', 'a')
write_unexperror = open('ErrorList/UnexpectedErrorList.txt', 'a')
# 2D array declaration
rawdata = []
zerorow = []
image = []
size = 1024
height = 1024
width = 1024
# radius_cut is used for removing dark background - so as to classify sunspots.
radius_cut = 440 # cutting radius
# real radius of sun - used for calculation.
radius_real = 460
halfheight = int(height/2)
halfwidth = int(width/2)
# Calculation of latitude and longitude of sunspot on sun
def latitude(x):
return math.asin( (halfheight - x) / radius_real )
def longitude(x,y):
return math.asin( (y - halfwidth) / (radius_real * math.cos(latitude(x))) )
#######################################################
#### DFS range. Need to determine this taxi radius.####
dx = [0,1,1,1,0,-1,-1,-1]
dy = [1,1,0,-1,-1,-1,0,1]
#######################################################
# temporary variables
x_pixel_sum = 0
y_pixel_sum = 0
num_pixel = 0
total_num_of_row = 0
def dfs(x,y): # DFS(Depth First Search)
global x_pixel_sum
global y_pixel_sum
global num_pixel
image[x][y] = -1 # mark as already visited.
x_pixel_sum += x
y_pixel_sum += y
num_pixel += 1
for i in range(0,len(dx)):
if(image[x+dx[i]][y+dy[i]] == 1):
dfs(x+dx[i],y+dy[i])
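# Illustrative iterative alternative (not used below): the recursive dfs can
# exceed Python's recursion limit on large sunspots, which is why
# RecursionError is handled in the main loop. An explicit stack avoids the
# limit entirely:
#   def dfs_iterative(start_x, start_y):
#       global x_pixel_sum, y_pixel_sum, num_pixel
#       stack = [(start_x, start_y)]
#       while stack:
#           x, y = stack.pop()
#           if image[x][y] != 1:
#               continue  # already visited or background pixel
#           image[x][y] = -1
#           x_pixel_sum += x
#           y_pixel_sum += y
#           num_pixel += 1
#           for i in range(0, len(dx)):
#               stack.append((x + dx[i], y + dy[i]))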
for num in range(0,len(filelist)): # process all files in rawdata_dir
print ('Processing ' + filelist[num] + '...')
f = open(rawdata_dir + filelist[num], 'r')
result_raw = open(selected_dir + 'selected-raw_' + filelist[num], 'w')
result = open(selected_dir + 'selected_' + filelist[num], 'w')
csvReader = csv.reader(f)
for row in csvReader:
rawdata.append(row)
total_num_of_row += 1
f.close()
total_num_of_image = int(rawdata[len(rawdata)-1][0]) + 1
print('Total ' + str(total_num_of_image) + ' images detected.')
num_row = 0
for image_num in range(0,total_num_of_image):
try:
print('Image ' + str(image_num) + '...')
image = [[0 for a in range(size)] for b in range(size)]
# Make image array to perform DFS
while(int(rawdata[num_row][0]) == image_num):
current_time = float(rawdata[num_row][1])
x = int(rawdata[num_row][2])
y = int(rawdata[num_row][3])
image[x][y] = 1
num_row += 1
if(num_row == total_num_of_row):
break
print('DFS...')
for a in range(0,size):
for b in range(0,size):
if(image[a][b] == 1):
x_pixel_sum = 0
y_pixel_sum = 0
num_pixel = 0
dfs(a,b)
x_average = x_pixel_sum / num_pixel
y_average = y_pixel_sum / num_pixel
latit = latitude(x_average)
longi = longitude(x_average, y_average)
result_raw.write(str(image_num) + ',' + str(current_time) + ',' + str(format(x_average, '.3f')) + ',' + str(format(y_average, '.3f')) + '\n')
result.write(str(image_num) + ',' + str(current_time) + ',' + str(format(latit, '.3f')) + ',' + str(format(longi, '.3f')) + '\n')
        except RecursionError:
            print('RecursionError has occurred. This image number is saved to ErrorList/RecursionErrorList.txt.')
            write_recurerror.write('Image No.' + str(image_num) + ' of file ' + filelist[num] + '\n')
        except:
            print('Unexpected error has occurred. This image name is saved to ErrorList/UnexpectedErrorList.txt.')
            print('Skipping Image No.' + str(image_num) + ' of file ' + filelist[num] + '...')
            write_unexperror.write('Image No.' + str(image_num) + ' of file ' + filelist[num] + '\n')
| {
"content_hash": "14f551d1c41d5796e6d90ade0053d70c",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 165,
"avg_line_length": 37.77064220183486,
"alnum_prop": 0.5273257226135536,
"repo_name": "seungwonpark/SunSpotTracker",
"id": "e21b72b30450a80c47937a4df15653fd243b6374",
"size": "4222",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "selectdata.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "14243"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import os
import sys
import unittest
import tempfile
import shutil
import platform
import getpass
import time
import glob
import logging
import numpy
from analysis_server.server import start_server, stop_server
from analysis_server.client import Client
from analysis_server.arrwrapper import array2str, str2array
STARTDIR = os.getcwd()
class DOETestCase(unittest.TestCase):
""" Test AnalysisServer emulation for a model that uses MPI to run
a DOE in parallel.
"""
def setUp(self):
logging.info("---------------- Starting test %s" % self.id())
self.testdir = os.path.dirname(os.path.abspath(__file__))
self.tempdir = tempfile.mkdtemp(prefix='aserver-')
if not os.path.isdir(self.tempdir):
os.mkdir(self.tempdir)
shutil.copy(os.path.join(self.testdir, 'ASTestProb.py'),
os.path.join(self.tempdir))
shutil.copy(os.path.join(self.testdir, 'TestParDOEProblem.cfg'),
os.path.join(self.tempdir))
os.chdir(self.tempdir)
def tearDown(self):
try:
self.client.quit()
stop_server(self.server)
finally:
os.chdir(STARTDIR)
if not os.environ.get('OPENMDAO_KEEPDIRS'):
try:
shutil.rmtree(self.tempdir)
except OSError:
pass
def test_execute(self):
try:
self.server, self.port = start_server(args=['-c', 'TestParDOEProblem.cfg'])
self.client = Client(port=self.port)
except:
os.chdir(STARTDIR)
raise
reply = self.client.start('TestParDOEProblem', 'p')
# set some input cases [indep_var.a, indep_var.b, indep_var.c]
ncases = 20
cases = numpy.arange(3.0*ncases).reshape(ncases, 3)
self.client.set('p.driver.desvar_array', array2str(cases))
self.client.execute('p')
results = str2array(self.client.get('p.driver.response_array'))
for i in range(results.shape[0]):
for j in range(i + 1, results.shape[0]):
if results[i][0] == results[j][0]:
logging.info("*** indices %d and %d match" % (i, j))
# test to make sure that the set/get array -> str -> array conversion
# works.
numpy.testing.assert_array_almost_equal(cases,
str2array(self.client.get('p.driver.desvar_array')),
decimal=9)
# we registered our case inputs as responses (first 3 cols of results)
# so make sure their values haven't changed, and are in the same
# order as we sent them.
numpy.testing.assert_array_almost_equal(cases, results[:,:3],
decimal=9)
mult = numpy.array([2.0, 3.0, 1.5])
for i in range(cases.shape[0]):
numpy.testing.assert_array_almost_equal(results[i,3:],
cases[i]*mult, decimal=9)
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "ab83292e722874dde4623848c10d3074",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 87,
"avg_line_length": 32.666666666666664,
"alnum_prop": 0.5730229591836735,
"repo_name": "naylor-b/aserver",
"id": "4dccfd8876e683b50c4892c32fa04d9277777bb5",
"size": "3137",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "analysis_server/test/test_doeserver.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "233311"
}
],
"symlink_target": ""
} |
"""
helloworld.py
Author: <your name here>
Credit: <list sources used, if any>
Assignment:
Write and submit a Python program that prints the following:
Hello, world!
"""
| {
"content_hash": "1f8462210230e8f4ac20fd72de277a02",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 60,
"avg_line_length": 14.5,
"alnum_prop": 0.7241379310344828,
"repo_name": "HHS-IntroProgramming/Hello-world",
"id": "782a95aaa82188ac87f9042d55d4e2159831155b",
"size": "174",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "helloworld.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "174"
},
{
"name": "Shell",
"bytes": "194"
}
],
"symlink_target": ""
} |
import numpy as np
import pandas as pd
def get_msd(traj, dt, with_nan=True, with_std=False, label=None):
# Mean squared displacement of an (n, d) trajectory sampled every `dt`;
# rows containing NaN are dropped from each lag when `with_nan` is set.
shifts = np.arange(1, len(traj), dtype='int')
if with_std:
msd = np.empty((len(shifts), 3), dtype='float')
else:
msd = np.empty((len(shifts), 2), dtype='float')
msd[:] = np.nan
msd[:, 1] = shifts * dt
for i, shift in enumerate(shifts):
# Displacements over a lag of `shift` samples.
diffs = traj[:-shift] - traj[shift:]
if with_nan:
diffs = diffs[~np.isnan(diffs).any(axis=1)]
diffs = np.square(diffs).sum(axis=1)
if len(diffs) > 0:
msd[i, 0] = np.mean(diffs)
if with_std:
msd[i, 2] = np.std(diffs)
msd = pd.DataFrame(msd)
if with_std:
msd.columns = ["msd", "delay", "std"]
else:
msd.columns = ["msd", "delay"]
if label:
msd['label'] = label
msd.set_index(['label', 'delay'], drop=True, inplace=True)
else:
msd.set_index('delay', drop=True, inplace=True)
msd.dropna(inplace=True)
return msd
def get_homogenous_traj(trajs, side, coords=['x', 'y']):
times = trajs.index.get_level_values('t_stamp').unique().astype('int')
# Get the spindle center
spbA = trajs.loc[pd.IndexSlice[:, 'spb', 'A'], coords].values
spbB = trajs.loc[pd.IndexSlice[:, 'spb', 'B'], coords].values
spindle_center = (spbA + spbB) / 2
# Get KT
traj = trajs.loc[pd.IndexSlice[:, 'kt', side], coords].reset_index(['main_label', 'side'],
drop=True)
traj -= spindle_center
# Generate a single homogeneous array 'htraj' with NaN values for missing timepoints
htraj = np.empty((times[-1] + 1, traj.shape[1]))
htraj[:] = np.nan
htraj[times] = traj.values
return htraj
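# Illustrative usage (added sketch, not part of the original module): the MSD of
# an unbiased 2D random walk grows roughly linearly with the delay, which makes
# get_msd easy to sanity-check. The sampling interval dt=0.1 is hypothetical.
#
#     rng = np.random.RandomState(0)
#     demo_traj = np.cumsum(rng.randn(200, 2), axis=0)
#     print(get_msd(demo_traj, dt=0.1, with_std=True, label='demo').head())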
| {
"content_hash": "7232b7d8b32c0120e00b9a39dfe97418",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 94,
"avg_line_length": 28,
"alnum_prop": 0.5483516483516484,
"repo_name": "hadim/spindle_tracker",
"id": "685ff56a91a38cc9bd48a289a965100cea590ffd",
"size": "1820",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spindle_tracker/msd/_msd.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "315"
},
{
"name": "Python",
"bytes": "515124"
}
],
"symlink_target": ""
} |
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
app = Flask('rest_server')
app.config['SECRET_KEY'] = "NOT_FOR_PRODUCTION"
app.config.setdefault('SQLALCHEMY_DATABASE_URI', 'mysql://tmp:tmp@127.0.0.1/tmp')
#app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
def connect():
db = SQLAlchemy(app)
return db
############### sqlalchemy configs ############################################
# from sqlalchemy import create_engine
# from sqlalchemy.orm import scoped_session, sessionmaker
# from sqlalchemy.ext.declarative import declarative_base
#
#
#
# def connect():
#
# engine = create_engine('mysql://tmp:tmp@127.0.0.1/tmp', convert_unicode=True)
# db = scoped_session(sessionmaker(autocommit=False,
# autoflush=False,
# bind=engine))
# Base = declarative_base()
# Base.query = db.query_property()
#
# def init_db():
# # import all modules here that might define models so that
# # they will be registered properly on the metadata. Otherwise
# # you will have to import them first before calling init_db()
# from sql.models import *
# Base.metadata.create_all(bind=engine) | {
"content_hash": "f6cba595569dea46959bca44619fcde5",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 81,
"avg_line_length": 36.75,
"alnum_prop": 0.6137566137566137,
"repo_name": "devopsec/threatdetectionservice",
"id": "2d14ddaceb5519258964f5054dfe4f1eda35b02e",
"size": "1323",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "api/database.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "73"
},
{
"name": "CSS",
"bytes": "60463"
},
{
"name": "HTML",
"bytes": "73698"
},
{
"name": "JavaScript",
"bytes": "6500"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "166187"
},
{
"name": "Shell",
"bytes": "24573"
}
],
"symlink_target": ""
} |
"""Test for neg_log_likelihood.py."""
import logging
from absl.testing import absltest
from gpax import utils
from gpax.models import gp
from gpax.objectives import neg_log_likelihood as nll
from jax import numpy as jnp
from jax import random
class NLLTest(absltest.TestCase):
def test_objective(self):
key1 = random.PRNGKey(0)
dataset = [
utils.SubDataset(
random.uniform(key1, (8, 5)), random.uniform(key1, (8,))),
utils.SubDataset(
random.uniform(key1, (8, 5)), random.uniform(key1, (8,)))
]
model = gp.GaussianProcess()
params = model.init(key1, dataset[0].x)
objective = nll.objective(model, params, dataset)
self.assertFalse(jnp.isnan(objective))
self.assertEmpty(objective.shape)
logging.info(msg=f'nll = {objective}')
if __name__ == '__main__':
absltest.main()
| {
"content_hash": "0bf01e947b3dd996058d0bcbf3df8cc0",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 70,
"avg_line_length": 25.264705882352942,
"alnum_prop": 0.6658905704307334,
"repo_name": "google-research/gpax",
"id": "8cac3a97327cb66be9ed10231dd40125cb4c7c9c",
"size": "1452",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "GPax/objectives/neg_log_likelihood_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "42678"
}
],
"symlink_target": ""
} |
from django.conf.urls import url
from django.contrib import admin
from .views import (
PostDetailsAPIView,
PostCreateUpdateAPIView,
PostUpdateAPIView,
PostDeleteAPIView,
PostListAPIView
)
urlpatterns = [
url(r'^$', PostListAPIView.as_view(), name='list'),
url(r'^create/$', PostCreateUpdateAPIView.as_view(), name='create'),
url(r'^(?P<slug>[\w-]+)/$', PostDetailsAPIView.as_view(), name='detail'),
url(r'^(?P<slug>[\w-]+)/edit$', PostUpdateAPIView.as_view(), name='update'),
url(r'^(?P<slug>[\w-]+)/delete$', PostDeleteAPIView.as_view(), name='delete'),
]
| {
"content_hash": "261a6dd74f32af1204d166ac6886c55e",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 79,
"avg_line_length": 31.61111111111111,
"alnum_prop": 0.6906854130052724,
"repo_name": "fengxuangit/blog-api-view",
"id": "7f0f9c679d3e18a1651de549586571f54e429a2d",
"size": "569",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/posts/api/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "52162"
},
{
"name": "HTML",
"bytes": "15634"
},
{
"name": "JavaScript",
"bytes": "321792"
},
{
"name": "Python",
"bytes": "88612"
},
{
"name": "Shell",
"bytes": "3755"
}
],
"symlink_target": ""
} |
"""Wordcount exercise
Google's Python class
La función main() de abajo ya está definida y completa. Llama a las funciones
print_words() y print_top() que escribiste.
1. Para la bandera --count, implementar una función print_words(nombre_archivo) que cuenta
qué tan frecuentemente cada palabra aparece en el texto e imprime:
palabra1 cantidad1
palabra2 cantidad2
...
Imprimir la lista de arriba ordenadas por palabras (python ordenará para que la puntuación
venga antes de las letras -- no se preocupen por eso). Guardar todas las palabras en minúsculas,
así 'The' y 'the' cuentan como la misma palabra.
2. para la bandera --topcount, implementar una función print_top(nombre_archivo) que es
similar a print_words() pero imprime sólo las 20 palabras más comunes ordenadas
para que aparezca la palabra más común primero, luego la siguiente más común, y así.
Utilizar str.split() (sin argumentos) para dividir todo por los espacios en blanco.
Flujo de trabajo: no construyas todo el programa de una vez. Llega hasta un hito intermedio
e imprime tu estructura de datos y luego sys.exit(0).
Cuando eso funcione, intenta con el siguiente hito.
Opcional: defina una función de ayuda para evitar duplicar código dentro de
print_words() y print_top().
"""
import sys
def print_words(archword):
counts = dicc(archword)
for palabra in sorted(counts):
print palabra, counts[palabra]
def dicc(archword):
# Helper: build and return a word/count dictionary (lowercased words).
counts = {}
f = open(archword, 'rU')
for linea in f:
for palabra in linea.split():
palabra = palabra.lower()
counts[palabra] = counts.get(palabra, 0) + 1
f.close()
return counts
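# print_top() is called by main() below but was missing; this is a minimal
# sketch built on the same dicc() helper (the top-20 cutoff follows the
# assignment text above).
def print_top(archword):
counts = dicc(archword)
items = sorted(counts.items(), key=lambda par: par[1], reverse=True)
for palabra, cantidad in items[:20]:
print palabra, cantidad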
# Define the print_words(filename) and print_top(filename) functions.
# You can write a helper function that reads a file and builds and returns
# a word/count dictionary.
# Then print_words() and print_top() can call the helper function directly.
###
# This basic command-line argument parsing code is provided; it calls
# the print_words() and print_top() functions that you must define.
def main():
if len(sys.argv) != 3:
print 'usage: ./wordcount.py {--count | --topcount} file'
sys.exit(1)
option = sys.argv[1]
filename = sys.argv[2]
if option == '--count':
print_words(filename)
elif option == '--topcount':
print_top(filename)
else:
print 'unknown option: ' + option
sys.exit(1)
if __name__ == '__main__':
main()
| {
"content_hash": "72d6ec2d403d90e6a3097422e201dad9",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 96,
"avg_line_length": 32.357142857142854,
"alnum_prop": 0.7214128035320089,
"repo_name": "cursoweb/python-archivos",
"id": "34a79263ac5b3b2f31af723799f241030c06ed62",
"size": "2544",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Federico-Bigolotti/wordcount.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "35757"
}
],
"symlink_target": ""
} |
import numpy as np
import re
import matplotlib.pyplot as plt
file1 = '../alphaWhales/train/validation/train.csv'
file2 = '../alphaWhales/train/train/train.csv'
fileTrain = '../alphaWhales/train/train/train.csv'
data1 = np.genfromtxt(file1, skip_header=1, dtype=[('image', 'S13'), ('label', 'S11')], delimiter=',')
data2 = np.genfromtxt(file2, skip_header=1, dtype=[('image', 'S13'), ('label', 'S11')], delimiter=',')
dataTrain = np.genfromtxt(fileTrain, skip_header=1, dtype=[('image', 'S13'), ('label', 'S11')], delimiter=',')
labels1 = [int(re.search("whale_(\\d+)", x[1]).group(1)) for x in data1]
labels2 = [int(re.search("whale_(\\d+)", x[1]).group(1)) for x in data2]
labelsTrain = [int(re.search("whale_(\\d+)", x[1]).group(1)) for x in dataTrain]
labels = labels1 + labels2
counts = np.bincount(labels)
ii = np.unique(labelsTrain)
plt.bar(range(len(ii)), counts[ii])
plt.title("")
plt.xlabel("Whale")
plt.ylabel("Frequency")
plt.xticks(range(len(ii)), ii)
locs, labels = plt.xticks()
plt.setp(labels, rotation=80)
plt.show()
| {
"content_hash": "e85cc9efd4a5d37dda42021efcbb630a",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 110,
"avg_line_length": 32.5625,
"alnum_prop": 0.6641074856046065,
"repo_name": "andpol5/whaleDetector",
"id": "9d9a819ae52725728ba2f78f55ba2d3cfb571afb",
"size": "1042",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "histograms/hist.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Matlab",
"bytes": "3134"
},
{
"name": "Python",
"bytes": "241330"
}
],
"symlink_target": ""
} |
"""Unit test for cgroups module.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import io
import os
import shutil
import tempfile
import unittest
import mock
import treadmill
from treadmill import cgroups
from treadmill import cgutils
PROCCGROUPS = """#subsys_name hierarchy num_cgroups enabled
cpuset 4 1 0
ns 10 3 0
cpu 2 3 1
cpuacct 3 3 1
memory 7 3 1
devices 5 1 0
freezer 6 1 0
net_cls 8 1 0
blkio 1 1 0
perf_event 11 1 0
net_prio 9 1 0"""
class CGroupsTest(unittest.TestCase):
"""Tests for teadmill.cgroups."""
def setUp(self):
self.root = tempfile.mkdtemp()
def tearDown(self):
if self.root and os.path.isdir(self.root):
shutil.rmtree(self.root)
@mock.patch('treadmill.cgroups.get_data',
mock.Mock(side_effect=['2', '1\n2', '-1', '']))
def test_get_value(self):
"""Test cgroup value fetching"""
value = cgroups.get_value('memory', 'foo', 'memory.usage_in_bytes')
self.assertEqual(value, 2)
value = cgroups.get_value('memory', 'foo', 'memory.usage_in_bytes')
self.assertEqual(value, 1)
value = cgroups.get_value('memory', 'foo', 'memory.usage_in_bytes')
self.assertEqual(value, 0)
value = cgroups.get_value('memory', 'foo', 'memory.usage_in_bytes')
self.assertEqual(value, 0)
@mock.patch('treadmill.cgroups.get_mountpoint',
mock.Mock(return_value='/cgroups'))
@mock.patch('os.makedirs', mock.Mock())
def test_create(self):
"""Tests cgroup creation."""
group = os.path.join('treadmill', 'apps', 'test1')
cgroups.create('cpu', group)
cgroups.create('memory', group)
cgroups.create('cpuacct', group)
os.makedirs.assert_has_calls(
[mock.call('/cgroups/treadmill/apps/test1'),
mock.call('/cgroups/treadmill/apps/test1'),
mock.call('/cgroups/treadmill/apps/test1')])
@mock.patch('treadmill.cgroups.get_mountpoint', mock.Mock())
def test_extractpath(self):
""" test cgroup name from a cgroup path"""
treadmill.cgroups.get_mountpoint.return_value = '/fs/cgroup/memory'
cgrp = cgroups.extractpath('/fs/cgroup/memory/treadmill/core',
'memory')
self.assertEqual(cgrp, 'treadmill/core')
cgrp = cgroups.extractpath('/fs/cgroup/memory/treadmill/core/foo',
'memory', 'foo')
self.assertEqual(cgrp, 'treadmill/core')
with self.assertRaises(ValueError):
cgroups.extractpath('/cgroup/memory/treadmill/core', 'memory')
with self.assertRaises(ValueError):
cgroups.extractpath('/fs/cgroup/memory/treadmill/core/foo',
'cpu', 'bar')
@mock.patch('treadmill.cgroups.get_mountpoint', mock.Mock())
@mock.patch('os.rmdir', mock.Mock())
def test_delete(self):
"""Tests cgroup deletion."""
cgroups_dir = os.path.join(self.root, 'cgroups')
treadmill.cgroups.get_mountpoint.return_value = cgroups_dir
group = os.path.join('treadmill', 'apps', 'test1')
# Create a directory for the cgroup
os.makedirs(os.path.join(cgroups_dir, group))
cgroups.delete('cpu', group)
os.rmdir.assert_called_once_with(
os.path.join(cgroups_dir, group)
)
@mock.patch('treadmill.cgroups.get_mountpoint',
mock.Mock(return_value='/cgroups'))
@mock.patch('io.open', mock.mock_open())
def test_join(self):
"""Tests joining the cgroup."""
group = os.path.join('treadmill', 'apps', 'test1')
cgroups.join('cpu', group, '1234')
io.open.assert_called_once_with(
'/cgroups/treadmill/apps/test1/tasks', 'w')
io.open().write.assert_called_once_with('1234')
@mock.patch('treadmill.cgroups.mounted_subsystems',
mock.Mock(return_value={'cpu': '/cgroup/cpu'}))
@mock.patch('treadmill.cgroups.mount', mock.Mock())
def test_ensure_mounted_missing(self):
"""Checks that missing subsystem is mounted."""
cgroups.ensure_mounted(['cpu', 'memory'])
treadmill.cgroups.mount.assert_called_with('memory')
@mock.patch('io.open', mock.Mock(return_value=io.StringIO(PROCCGROUPS)))
def test_available_subsystems(self):
"""Test functions """
subsystems = cgroups.available_subsystems()
self.assertEqual(['cpu', 'cpuacct', 'memory'], subsystems)
@mock.patch('treadmill.cgroups.create', mock.Mock())
@mock.patch('treadmill.cgroups.set_value', mock.Mock())
@mock.patch('treadmill.cgroups.get_data',
mock.Mock(side_effect=['0', '0', '', '1024', '512']))
@mock.patch('treadmill.sysinfo.cpu_count',
mock.Mock(return_value=4))
def test_create_treadmill_cgroups(self):
"""Test the creation of core treadmill cgroups"""
system_cpu_shares = 50
treadmill_cpu_shares = 50
treadmill_core_cpu_shares = 10
treadmill_apps_cpu_shares = 90
treadmill_cpu_cores = 0
treadmill_mem = 1024
treadmill_core_mem = 512
treadmill_apps_mem = treadmill_mem - treadmill_core_mem
cgutils.create_treadmill_cgroups(system_cpu_shares,
treadmill_cpu_shares,
treadmill_core_cpu_shares,
treadmill_apps_cpu_shares,
treadmill_cpu_cores,
treadmill_mem,
treadmill_core_mem)
calls = [mock.call('cpu', 'system'),
mock.call('cpu', 'treadmill'),
mock.call('cpu', 'treadmill/core'),
mock.call('cpu', 'treadmill/apps'),
mock.call('cpuacct', 'system'),
mock.call('cpuacct', 'treadmill'),
mock.call('cpuacct', 'treadmill/core'),
mock.call('cpuacct', 'treadmill/apps'),
mock.call('cpuset', 'system'),
mock.call('cpuset', 'treadmill'),
mock.call('memory', 'system'),
mock.call('memory', 'treadmill'),
mock.call('memory', 'treadmill/core'),
mock.call('memory', 'treadmill/apps')]
treadmill.cgroups.create.assert_has_calls(calls)
calls = [mock.call('cpu', 'treadmill',
'cpu.shares', treadmill_cpu_shares),
mock.call('cpu', 'system',
'cpu.shares', system_cpu_shares),
mock.call('cpu', 'treadmill/core',
'cpu.shares', treadmill_core_cpu_shares),
mock.call('cpu', 'treadmill/apps',
'cpu.shares', treadmill_apps_cpu_shares),
mock.call('cpuset', 'system',
'cpuset.mems', 0),
mock.call('cpuset', 'treadmill',
'cpuset.mems', 0),
mock.call('cpuset', 'treadmill',
'cpuset.cpus', '0-3'),
mock.call('cpuset', 'system',
'cpuset.cpus', '0-3'),
mock.call('memory', 'system',
'memory.move_charge_at_immigrate', 1),
mock.call('memory', 'treadmill',
'memory.move_charge_at_immigrate', 1),
mock.call('memory', 'treadmill',
'memory.use_hierarchy', '1'),
mock.call('memory', 'treadmill',
'memory.limit_in_bytes', treadmill_mem),
mock.call('memory', 'treadmill',
'memory.memsw.limit_in_bytes', treadmill_mem),
mock.call('memory', 'treadmill',
'memory.oom_control', '0'),
mock.call('memory', 'treadmill/core',
'memory.move_charge_at_immigrate', 1),
mock.call('memory', 'treadmill/apps',
'memory.move_charge_at_immigrate', 1),
mock.call('memory', 'treadmill/core',
'memory.limit_in_bytes', treadmill_core_mem),
mock.call('memory', 'treadmill/core',
'memory.memsw.limit_in_bytes', treadmill_core_mem),
mock.call('memory', 'treadmill/core',
'memory.soft_limit_in_bytes', treadmill_core_mem),
mock.call('memory', 'treadmill/apps',
'memory.limit_in_bytes', treadmill_apps_mem),
mock.call('memory', 'treadmill/apps',
'memory.memsw.limit_in_bytes', treadmill_apps_mem)]
treadmill.cgroups.set_value.assert_has_calls(calls)
# TODO: Remove or fix
# @mock.patch('os.kill', mock.Mock())
# def test_kill_apps_in_cgroup(self):
# """Make sure we kill all the stale apps."""
# os.mkdir(os.path.join(self.root, 'a/b/c'))
# os.mkdir(os.path.join(self.root, 'a/b/c/XXX'))
# with open(os.path.join(self.root, 'a/b/c/tasks'), 'w+') as f:
# f.write('123\n231\n')
#
# cgutils.kill_apps_in_cgroup(self.root, 'a/b/c', delete_cgrp=True)
# os.kill.assert_has_calls([mock.call(123, signal.SIGKILL),
# mock.call(321, signal.SIGKILL)])
# self.assertFalse(os.path.exists(os.path.join(self.root, 'a/b/c')))
@mock.patch('treadmill.cgroups.set_value',
mock.Mock())
@mock.patch('treadmill.cgroups.get_value',
mock.Mock(return_value=512))
@mock.patch('treadmill.cgroups.makepath',
mock.Mock(return_value='/cgroup/memory/treadmill/apps'))
@mock.patch('treadmill.cgutils.total_soft_memory_limits',
mock.Mock(return_value=1024))
@mock.patch('os.listdir',
mock.Mock(return_value=['a', 'b']))
@mock.patch('os.path.isdir',
mock.Mock(return_value=True))
def test_reset_mem_limit_in_bytes(self):
"""Make sure we are setting hardlimits right"""
cgutils.reset_memory_limit_in_bytes()
mock_calls = [mock.call('memory',
'treadmill/apps',
'memory.limit_in_bytes'),
mock.call('memory',
'treadmill/apps/a',
'memory.soft_limit_in_bytes'),
mock.call('memory',
'treadmill/apps/b',
'memory.soft_limit_in_bytes')]
cgroups.get_value.assert_has_calls(mock_calls)
mock_calls = [mock.call('memory',
'treadmill/apps/a',
'memory.limit_in_bytes',
512),
mock.call('memory',
'treadmill/apps/a',
'memory.memsw.limit_in_bytes',
512),
mock.call('memory',
'treadmill/apps/b',
'memory.limit_in_bytes',
512),
mock.call('memory',
'treadmill/apps/b',
'memory.memsw.limit_in_bytes',
512)]
cgroups.set_value.assert_has_calls(mock_calls)
@mock.patch('treadmill.cgutils.set_memory_hardlimit', mock.Mock())
@mock.patch('treadmill.cgroups.get_value',
mock.Mock(return_value=512))
@mock.patch('treadmill.cgroups.makepath',
mock.Mock(return_value='/cgroup/memory/treadmill/apps'))
@mock.patch('treadmill.cgutils.total_soft_memory_limits',
mock.Mock(return_value=1024))
@mock.patch('os.listdir',
mock.Mock(return_value=['a']))
@mock.patch('os.path.isdir',
mock.Mock(return_value=True))
def test_reset_mem_limit_kill(self):
"""Make sure we kill groups when we cannot lower their hardlimits."""
treadmill.cgutils.set_memory_hardlimit.side_effect = \
cgutils.TreadmillCgroupError('test')
res = cgutils.reset_memory_limit_in_bytes()
self.assertEqual(res, ['a'])
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "f81139bec7b30d2c0bda94e7a64b9cd3",
"timestamp": "",
"source": "github",
"line_count": 301,
"max_line_length": 78,
"avg_line_length": 42.67774086378738,
"alnum_prop": 0.5224972754164721,
"repo_name": "captiosus/treadmill",
"id": "480300036c4352af7eb2dbe09ec6945e72ba4138",
"size": "12846",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/cgroups_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PowerShell",
"bytes": "570"
},
{
"name": "Python",
"bytes": "2598791"
},
{
"name": "Ruby",
"bytes": "3712"
},
{
"name": "Shell",
"bytes": "58099"
}
],
"symlink_target": ""
} |
from flask import Flask
from flask_restful import Api
# files
from iAS.uicontroller.uicontroller import *
from iAS.uicontroller.authentication import *
from iAS.uicontroller.sample_app_uicontroller import * | {
"content_hash": "4859afd6354b3486e740d62322cbc743",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 54,
"avg_line_length": 29.571428571428573,
"alnum_prop": 0.8309178743961353,
"repo_name": "CodeLankaHack/team---iAS",
"id": "5846651c4e10c15adaddf03c6f5d61190d6d3cf3",
"size": "816",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "iAS/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "6827"
},
{
"name": "HTML",
"bytes": "10160"
},
{
"name": "JavaScript",
"bytes": "38740"
},
{
"name": "Python",
"bytes": "33134"
}
],
"symlink_target": ""
} |
from .. import types, functions
from ... import utils
class InlineResult:
"""
Custom class that encapsulates a bot inline result providing
an abstraction to easily access some commonly needed features
(such as clicking a result to select it).
Attributes:
result (:tl:`BotInlineResult`):
The original :tl:`BotInlineResult` object.
"""
# tdlib types are the following (InlineQueriesManager::answer_inline_query @ 1a4a834):
# gif, article, audio, contact, file, geo, photo, sticker, venue, video, voice
#
# However, those documented in https://core.telegram.org/bots/api#inline-mode are different.
ARTICLE = 'article'
PHOTO = 'photo'
GIF = 'gif'
VIDEO = 'video'
VIDEO_GIF = 'mpeg4_gif'
AUDIO = 'audio'
DOCUMENT = 'document'
LOCATION = 'location'
VENUE = 'venue'
CONTACT = 'contact'
GAME = 'game'
def __init__(self, client, original, query_id=None, *, entity=None):
self._client = client
self.result = original
self._query_id = query_id
self._entity = entity
@property
def type(self):
"""
The always-present type of this result. It will be one of:
``'article'``, ``'photo'``, ``'gif'``, ``'mpeg4_gif'``, ``'video'``,
``'audio'``, ``'voice'``, ``'document'``, ``'location'``, ``'venue'``,
``'contact'``, ``'game'``.
You can access all of these constants through `InlineResult`,
such as `InlineResult.ARTICLE`, `InlineResult.VIDEO_GIF`, etc.
"""
return self.result.type
@property
def message(self):
"""
The always-present :tl:`BotInlineMessage` that
will be sent if `click` is called on this result.
"""
return self.result.send_message
@property
def title(self):
"""
The title for this inline result. It may be `None`.
"""
return self.result.title
@property
def description(self):
"""
The description for this inline result. It may be `None`.
"""
return self.result.description
@property
def url(self):
"""
The URL present in this inline result. If you want to "click"
this URL to open it in your browser, you should use Python's
`webbrowser.open(url)` for that task.
"""
if isinstance(self.result, types.BotInlineResult):
return self.result.url
@property
def photo(self):
"""
Returns either the :tl:`WebDocument` thumbnail for
normal results or the :tl:`Photo` for media results.
"""
if isinstance(self.result, types.BotInlineResult):
return self.result.thumb
elif isinstance(self.result, types.BotInlineMediaResult):
return self.result.photo
@property
def document(self):
"""
Returns either the :tl:`WebDocument` content for
normal results or the :tl:`Document` for media results.
"""
if isinstance(self.result, types.BotInlineResult):
return self.result.content
elif isinstance(self.result, types.BotInlineMediaResult):
return self.result.document
async def click(self, entity=None, reply_to=None, comment_to=None,
silent=False, clear_draft=False, hide_via=False,
background=None):
"""
Clicks this result and sends the associated `message`.
Args:
entity (`entity`):
The entity to which the message of this result should be sent.
reply_to (`int` | `Message <telethon.tl.custom.message.Message>`, optional):
If present, the sent message will reply to this ID or message.
comment_to (`int` | `Message <telethon.tl.custom.message.Message>`, optional):
Similar to ``reply_to``, but replies in the linked group of a
broadcast channel instead (effectively leaving a "comment to"
the specified message).
silent (`bool`, optional):
Whether the message should notify people with sound or not.
Defaults to `False` (send with a notification sound unless
the person has the chat muted). Set it to `True` to alter
this behaviour.
clear_draft (`bool`, optional):
Whether the draft should be removed after sending the
message from this result or not. Defaults to `False`.
hide_via (`bool`, optional):
Whether the "via @bot" should be hidden or not.
Only works with certain bots (like @bing or @gif).
background (`bool`, optional):
Whether the message should be sent in the background.
"""
if entity:
entity = await self._client.get_input_entity(entity)
elif self._entity:
entity = self._entity
else:
raise ValueError('You must provide the entity where the result should be sent to')
if comment_to:
entity, reply_id = await self._client._get_comment_data(entity, comment_to)
else:
reply_id = None if reply_to is None else utils.get_message_id(reply_to)
req = functions.messages.SendInlineBotResultRequest(
peer=entity,
query_id=self._query_id,
id=self.result.id,
silent=silent,
background=background,
clear_draft=clear_draft,
hide_via=hide_via,
reply_to_msg_id=reply_id
)
return self._client._get_response_message(
req, await self._client(req), entity)
async def download_media(self, *args, **kwargs):
"""
Downloads the media in this result (if there is a document, the
document will be downloaded; otherwise, the photo will if present).
This is a wrapper around `client.download_media
<telethon.client.downloads.DownloadMethods.download_media>`.
"""
if self.document or self.photo:
return await self._client.download_media(
self.document or self.photo, *args, **kwargs)
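# Usage sketch (illustrative, not part of the module; assumes an
# already-connected `client` and an inline bot such as @gif):
#
#     results = await client.inline_query('@gif', 'cute doggos')
#     await results[0].click('me', hide_via=True)
#     path = await results[0].download_media()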
| {
"content_hash": "21efbaa96d2307df546ee469e3297593",
"timestamp": "",
"source": "github",
"line_count": 176,
"max_line_length": 96,
"avg_line_length": 35.53409090909091,
"alnum_prop": 0.5887432043492165,
"repo_name": "LonamiWebs/Telethon",
"id": "15639aa5135776f6f1d9033626aafb82a5f5373e",
"size": "6254",
"binary": false,
"copies": "1",
"ref": "refs/heads/v1",
"path": "telethon/tl/custom/inlineresult.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "776"
},
{
"name": "CSS",
"bytes": "9611"
},
{
"name": "HTML",
"bytes": "8839"
},
{
"name": "JavaScript",
"bytes": "7489"
},
{
"name": "Makefile",
"bytes": "605"
},
{
"name": "Python",
"bytes": "1091881"
},
{
"name": "Shell",
"bytes": "352"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from __future__ import print_function
import os
import random
import re
import sys
import time
from happy.ReturnMsg import ReturnMsg
from happy.utils.IP import IP
from happy.HappyNode import HappyNode
from happy.HappyNetwork import HappyNetwork
from WeaveTest import WeaveTest
from six.moves import zip
options = {"quiet": False,
"mobile": None,
"device": None,
"server": None,
"tap": None,
"tier": None,
"username": None,
"password": None,
"devices_info": [],
'mobile_process_tag': "WEAVE-PAIRING-MOBILE",
'device_process_tag': "WEAVE-PAIRING-DEVICE",
'server_process_tag': "WEAVE-PAIRING-SERVER",
'mobile_node_id': None,
'server_node_id': None,
'register_cmd': None}
def option():
return options.copy()
class WeavePairing(HappyNode, HappyNetwork, WeaveTest):
"""
weave-pairing [-h --help] [-q --quiet] [-m --mobile <NAME>] [-d --device <NAME>] [-s --server <NAME>]
command to test pairing using a local mock server:
$ weave-pairing --mobile node01 --device node02 --server node03
command to test pairing using the default Weave ServiceProvisioning node-id over the internet:
$ weave-pairing --mobile node01 --device node02 --server service
command to test pairing using a custom Weave ServiceProvisioning server over the internet:
$ weave-pairing --mobile node01 --device node02 --server <ip address>
return:
0 success
1 failure
"""
def __init__(self, opts=options):
HappyNode.__init__(self)
HappyNetwork.__init__(self)
WeaveTest.__init__(self)
self.__dict__.update(opts)
def __pre_check(self):
device_node_id = None
# Check if Weave Pairing device node is given.
if self.devices is None:
emsg = "Missing name or address of the Weave Pairing device node."
self.logger.error("[localhost] WeavePairing: %s" % (emsg))
sys.exit(1)
# Set the produce resource that mock-device is paired to
resourceDictionaries = self.getResourceIds()
if len(self.devices) == 1 and self.devices[0] == "border_gateway":
resourceIndexList = ["nf1"]
else:
resourceIndexList = os.environ.get("RESOURCE_IDS", "thd1").split(" ")
self.resources = [resourceDictionaries[resourceIndex]
for resourceIndex in resourceIndexList]
# Check if Weave Pairing mobile node is given.
if self.mobile is None:
emsg = "Missing name or address of the Weave Pairing mobile node."
self.logger.error("[localhost] WeavePairing: %s" % (emsg))
sys.exit(1)
# Check if Weave Pairing server info is given.
if self.server is None:
emsg = "Missing name or address of the Weave Pairing server node."
self.logger.error("[localhost] WeavePairing: %s" % (emsg))
sys.exit(1)
# Make sure that fabric was created
if self.getFabricId() is None:
emsg = "Weave Fabric has not been created yet."
self.logger.error("[localhost] WeavePairing: %s" % (emsg))
sys.exit(1)
# Check if Weave Pairing mobile node exists.
if self._nodeExists(self.mobile):
self.mobile_node_id = self.mobile
# Check if mobile is provided in a form of IP address
if IP.isIpAddress(self.mobile):
self.mobile_node_id = self.getNodeIdFromAddress(self.mobile)
if self.mobile_node_id is None:
emsg = "Unknown identity of the mobile node."
self.logger.error("[localhost] WeavePairing: %s" % (emsg))
sys.exit(1)
# Find out whether to use a local mock server or a server
# reachable over the internet
if self._nodeExists(self.server):
self.server_node_id = self.server
self.server_ip = self.getNodeWeaveIPAddress(self.server_node_id)
self.server_weave_id = self.getWeaveNodeID(self.server_node_id)
elif IP.isIpAddress(self.server):
self.server_ip = self.server
self.server_weave_id = self.IPv6toWeaveId(self.server)
elif IP.isDomainName(self.server) or self.server == "service":
self.server_ip = self.getServiceWeaveIPAddress("ServiceProvisioning")
self.server_weave_id = self.IPv6toWeaveId(self.server_ip)
self.mobile_ip = self.getNodeWeaveIPAddress(self.mobile_node_id)
self.mobile_weave_id = self.getWeaveNodeID(self.mobile_node_id)
for device, resource in zip(self.devices, self.resources):
# Check if Weave Pairing device node exists.
if self._nodeExists(device):
device_node_id = device
# Check if device is provided in a form of IP address
if IP.isIpAddress(device):
device_node_id = self.getNodeIdFromAddress(device)
if device_node_id is None:
emsg = "Unknown identity of the device node."
self.logger.error("[localhost] WeavePairing: %s" % (emsg))
sys.exit(1)
device_ip = self.getNodeWeaveIPAddress(device_node_id)
if device_ip is None:
emsg = "Could not find IP address of the device node."
self.logger.error("[localhost] WeavePairing: %s" % (emsg))
sys.exit(1)
device_weave_id = self.getWeaveNodeID(device_node_id)
if device_weave_id is None:
emsg = "Could not find Weave node ID of the device node."
self.logger.error("[localhost] WeavePairing: %s" % (emsg))
sys.exit(1)
device_serial_num = self.getSerialNum(device_node_id)
if device_serial_num is None:
emsg = "Could not find serial number of the device node."
self.logger.error("[localhost] WeavePairing: %s" % (emsg))
sys.exit(1)
self.devices_info.append({'device': device,
'device_node_id': device_node_id,
'device_ip': device_ip,
'device_weave_id': device_weave_id,
'device_serial_num': device_serial_num,
'device_process_tag': device + "_" + self.device_process_tag,
'resource': resource})
if self.mobile_ip is None:
emsg = "Could not find IP address of the mobile node."
self.logger.error("[localhost] WeavePairing: %s" % (emsg))
sys.exit(1)
if self.mobile_weave_id is None:
emsg = "Could not find Weave node ID of the mobile node."
self.logger.error("[localhost] WeavePairing: %s" % (emsg))
sys.exit(1)
if self.server_ip is None:
emsg = "Could not find IP address of the server."
self.logger.error("[localhost] WeavePairing: %s" % (emsg))
sys.exit(1)
if self.server_weave_id is None:
emsg = "Could not find Weave node ID of the server."
self.logger.error("[localhost] WeavePairing: %s" % (emsg))
sys.exit(1)
def getResourceIds(self):
# (vendor_id, software_id, product_id) for each resource nickname.
specs = {
'np1': ('9050', '1.0', '8'),
'nf1': ('9050', '1.0', '12'),
'nan1': ('9050', '1.0', '22'),
'ntl2': ('9050', '1.0', '32'),
'ntb2': ('9050', '1.0', '33'),
'gn1': ('57600', '1.0', '1'),
'gv1': ('57600', '1.0', '3'),
'gm1': ('57600', '1.0', '4'),
'gsl1': ('57600', '1.0', '65024'),
'gsrbr1': ('57600', '1.0', '65025'),
'thd1': ('9050', '1.0', '65534'),
'tst1': ('9050', '1.0', '65024'),
}
return {rid: {'id': rid, 'vendor_id': vendor,
'software_id': software, 'product_id': product}
for rid, (vendor, software, product) in specs.items()}
def __start_server(self):
cmd = self.getWeaveMockDevicePath()
if not cmd:
return
cmd += " --node-addr " + self.server_ip
if self.tap:
cmd += " --tap-device " + self.tap
self.start_weave_process(
self.server_node_id,
cmd,
self.server_process_tag,
sync_on_output=self.ready_to_service_events_str)
def __start_mobile_side(self, device_info, mobile_process_tag):
os.environ['WEAVE_DEVICE_MGR_PATH'] = self.getWeaveDeviceMgrPath()
os.environ['WEAVE_DEVICE_MGR_LIB_PATH'] = self.getWeaveDeviceMgrLibPath()
cmd = "/usr/bin/env python3 " + \
os.path.dirname(os.path.realpath(__file__)) + "/../lib/WeaveDeviceManager.py"
if not cmd:
return
cmd += " " + device_info['device_ip'] + " " + device_info['device_weave_id']
cmd += " --pairing-code TEST"
if self.server is not None and not self.server_node_id:
if not self.register_cmd:
import ServiceAccountManager
options = ServiceAccountManager.option()
options["tier"] = self.tier
options["username"] = self.username
options["password"] = self.password
registration = ServiceAccountManager.ServiceAccountManager(self.logger, options)
self.register_cmd = registration.run()
if self.register_cmd:
cmd += self.register_cmd
else:
raise ValueError('register_cmd is empty')
if self.tap:
cmd += " --tap-device " + self.tap
self.start_weave_process(self.mobile_node_id, cmd, mobile_process_tag, env=os.environ)
def __start_device_side(self, device_info):
cmd = self.getWeaveMockDevicePath()
if not cmd:
return
cmd += " --node-addr " + device_info['device_ip'] + " --pairing-code TEST"
cmd += " --wdm-resp-mutual-sub --test-case 10 --total-count 0 --wdm-update-timing NoSub "
cmd += " --event-generator TestTrait --event-batch-size 500"
if self.server is not None:
cmd += " --pairing-server " + self.server_ip \
+ " --wrm-pairing" \
+ " --vendor-id " + device_info['resource']['vendor_id'] \
+ " --software-version " + '"' + device_info['resource']['software_id'] + '"' \
+ " --product-id " + device_info['resource']['product_id'] \
+ " --suppress-ac" \
+ " --serial-num " + device_info['device_serial_num']
if self.server_node_id is not None:
# if the server is a local mock, we need to override the default endpoint id
cmd += " --pairing-endpoint-id " + self.server_weave_id
if self.tap:
cmd += " --tap-device " + self.tap
self.start_weave_process(
device_info['device_node_id'],
cmd,
device_info['device_process_tag'],
sync_on_output=self.ready_to_service_events_str)
def __process_results(self, mobiles_output, devices_info):
result_list = []
for mobile_output in mobiles_output:
if "Shutdown complete" in mobile_output:
result_list.append(True)
else:
result_list.append(False)
for device_info, result in zip(devices_info, result_list):
print(" %s weave-pairing from mobile %s (%s) to device %s (%s) : " % \
("Success for" if result else "Fail for", self.mobile_node_id,
self.mobile_ip, device_info['device_node_id'], device_info['device_ip']))
return result_list
def __wait_for_mobile(self, mobile_process_tag):
self.wait_for_test_to_end(self.mobile_node_id, mobile_process_tag)
def __stop_device_side(self, device_info):
self.stop_weave_process(device_info['device_node_id'], device_info['device_process_tag'])
def __stop_server_side(self):
self.stop_weave_process(self.server_node_id, self.server_process_tag)
def run(self):
self.logger.debug("[localhost] WeavePairing: Run.")
self.__pre_check()
devices_output_data = []
devices_strace_data = []
mobiles_output_data = []
mobiles_strace_data = []
for device_info in self.devices_info:
self.__start_device_side(device_info)
# delay Execution
time.sleep(0.5)
emsg = "WeavePairing %s should be running." % (device_info['device_process_tag'])
self.logger.debug("[%s] WeavePairing: %s" % (device_info['device_node_id'], emsg))
if self.server_node_id:
self.__start_server()
mobile_process_tag = self.mobile_process_tag + device_info['device']
self.__start_mobile_side(device_info, mobile_process_tag)
self.__wait_for_mobile(mobile_process_tag)
mobile_output_value, mobile_output_data = \
self.get_test_output(self.mobile_node_id, mobile_process_tag, True)
mobile_strace_value, mobile_strace_data = \
self.get_test_strace(self.mobile_node_id, mobile_process_tag, True)
self.__stop_device_side(device_info)
device_output_value, device_output_data = \
self.get_test_output(
device_info['device_node_id'],
device_info['device_process_tag'],
True)
device_strace_value, device_strace_data = \
self.get_test_strace(
device_info['device_node_id'],
device_info['device_process_tag'],
True)
devices_output_data.append(device_output_data)
devices_strace_data.append(device_strace_data)
mobiles_output_data.append(mobile_output_data)
mobiles_strace_data.append(mobile_strace_data)
# delay execution
time.sleep(3)
server_output_value = None
server_output_data = None
server_strace_value = None
server_strace_data = None
if self.server_node_id:
self.__stop_server_side()
server_output_value, server_output_data = \
self.get_test_output(self.server_node_id, self.server_process_tag, True)
server_strace_value, server_strace_data = \
self.get_test_strace(self.server_node_id, self.server_process_tag, True)
result_list = self.__process_results(mobiles_output_data, self.devices_info)
data = {}
data["devices_output"] = devices_output_data
data["devices_strace"] = devices_strace_data
data["mobiles_output"] = mobiles_output_data
data["mobiles_strace"] = mobiles_strace_data
data["server_output"] = server_output_data
data["server_strace"] = server_strace_data
data["devices_info"] = self.devices_info
self.logger.debug("[localhost] WeavePairing: Done.")
return ReturnMsg(result_list, data)
| {
"content_hash": "ac730b51081bc7c15305d78b90e49092",
"timestamp": "",
"source": "github",
"line_count": 442,
"max_line_length": 105,
"avg_line_length": 39.52488687782805,
"alnum_prop": 0.5615912993703491,
"repo_name": "openweave/openweave-core",
"id": "51f0a7181251d7083c08651852dc2e04569cb667",
"size": "18297",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/test-apps/happy/test-templates/WeavePairing.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "664311"
},
{
"name": "C++",
"bytes": "13369518"
},
{
"name": "Java",
"bytes": "300780"
},
{
"name": "M4",
"bytes": "115889"
},
{
"name": "Makefile",
"bytes": "354863"
},
{
"name": "Objective-C",
"bytes": "126850"
},
{
"name": "Objective-C++",
"bytes": "302756"
},
{
"name": "Perl",
"bytes": "12136"
},
{
"name": "Python",
"bytes": "2029596"
},
{
"name": "Shell",
"bytes": "122005"
}
],
"symlink_target": ""
} |
from google.cloud import bigquery_storage_v1
async def sample_append_rows():
# Create a client
client = bigquery_storage_v1.BigQueryWriteAsyncClient()
# Initialize request argument(s)
request = bigquery_storage_v1.AppendRowsRequest(
write_stream="write_stream_value",
)
# This method expects an iterator which contains
# 'bigquery_storage_v1.AppendRowsRequest' objects
# Here we create a generator that yields a single `request` for
# demonstrative purposes.
requests = [request]
def request_generator():
for request in requests:
yield request
# Make the request
stream = await client.append_rows(requests=request_generator())
# Handle the response
async for response in stream:
print(response)
# [END bigquerystorage_v1_generated_BigQueryWrite_AppendRows_async]
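# To actually execute the coroutine above (illustrative, not part of the
# generated sample):
#
#     import asyncio
#     asyncio.run(sample_append_rows())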
| {
"content_hash": "8bdf0c9b5e53c6126954c009c27ff114",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 67,
"avg_line_length": 28,
"alnum_prop": 0.7016129032258065,
"repo_name": "googleapis/python-bigquery-storage",
"id": "34fb01321fd298f603e2446160990e66652c4b18",
"size": "2265",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "samples/generated_samples/bigquerystorage_v1_generated_big_query_write_append_rows_async.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "1136897"
},
{
"name": "Shell",
"bytes": "30690"
}
],
"symlink_target": ""
} |
from pyvips import ffi, glib_lib, vips_lib, gobject_lib, \
_to_string, _to_bytes, Error
def leak_set(leak):
"""Enable or disable libvips leak checking.
With this enabled, libvips will check for object and area leaks on exit.
Enabling this option will make libvips run slightly more slowly.
"""
return vips_lib.vips_leak_set(leak)
def version(flag):
"""Get the major, minor or micro version number of the libvips library.
Args:
flag (int): Pass flag 0 to get the major version number, flag 1 to
get minor, flag 2 to get micro.
Returns:
The version number.
Raises:
:class:`.Error`
"""
value = vips_lib.vips_version(flag)
if value < 0:
raise Error('unable to get library version')
return value
def get_suffixes():
"""Get a list of all the filename suffixes supported by libvips.
Returns:
[string]
"""
names = []
if at_least_libvips(8, 8):
array = vips_lib.vips_foreign_get_suffixes()
i = 0
while array[i] != ffi.NULL:
name = _to_string(array[i])
if name not in names:
names.append(name)
glib_lib.g_free(array[i])
i += 1
glib_lib.g_free(array)
return names
# we need to define this before we import the declarations: they need to know
# which bits to make
def at_least_libvips(x, y):
"""Is this at least libvips x.y?"""
major = version(0)
minor = version(1)
return major > x or (major == x and minor >= y)
def path_filename7(filename):
return _to_string(vips_lib.vips_path_filename7(_to_bytes(filename)))
def path_mode7(filename):
return _to_string(vips_lib.vips_path_mode7(_to_bytes(filename)))
def type_find(basename, nickname):
"""Get the GType for a name.
Looks up the GType for a nickname. Types below basename in the type
hierarchy are searched.
"""
return vips_lib.vips_type_find(_to_bytes(basename), _to_bytes(nickname))
def type_name(gtype):
"""Return the name for a GType."""
return _to_string(gobject_lib.g_type_name(gtype))
def nickname_find(gtype):
"""Return the nickname for a GType."""
return _to_string(vips_lib.vips_nickname_find(gtype))
def type_from_name(name):
"""Return the GType for a name."""
return gobject_lib.g_type_from_name(_to_bytes(name))
def type_map(gtype, fn):
"""Map fn over all child types of gtype."""
cb = ffi.callback('VipsTypeMap2Fn', fn)
return vips_lib.vips_type_map(gtype, cb, ffi.NULL, ffi.NULL)
def values_for_enum(gtype):
"""Get all values for a enum (gtype)."""
g_type_class = gobject_lib.g_type_class_ref(gtype)
g_enum_class = ffi.cast('GEnumClass *', g_type_class)
values = []
# -1 since we always have a "last" member.
for i in range(0, g_enum_class.n_values - 1):
value = _to_string(g_enum_class.values[i].value_nick)
values.append(value)
return values
__all__ = [
'leak_set',
'version',
'at_least_libvips',
'path_filename7',
'path_mode7',
'type_find',
'nickname_find',
'get_suffixes',
'type_name',
'type_map',
'type_from_name',
'values_for_enum'
]
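# Usage sketch (illustrative, not part of the module): report the linked
# libvips version and list the values of a well-known libvips enum.
#
#     print('libvips %d.%d.%d' % (version(0), version(1), version(2)))
#     print(values_for_enum(type_from_name('VipsInterpretation')))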
| {
"content_hash": "011ab3daa37bb6a4c9ee3fa8b71ba9b6",
"timestamp": "",
"source": "github",
"line_count": 143,
"max_line_length": 77,
"avg_line_length": 22.692307692307693,
"alnum_prop": 0.6172573189522342,
"repo_name": "jcupitt/pyvips",
"id": "b939ae5e53542cd0992391e884f7804b698fc3b4",
"size": "3276",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyvips/base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "115655"
},
{
"name": "Shell",
"bytes": "811"
}
],
"symlink_target": ""
} |
from pyjamas import DOM
from pyjamas import Factory
from pyjamas.ui.ButtonBase import ButtonBase
from pyjamas.ui import Event
from pyjamas.ui import Focus
_CheckBox_unique_id = 0
class CheckBox(ButtonBase):
_props = [("name", "Name", "Name", None),
]
def __init__(self, label=None, asHTML=False, **ka):
ka['StyleName'] = ka.get('StyleName', "gwt-CheckBox")
if label:
if asHTML:
ka['HTML'] = label
else:
ka['Text'] = label
element = ka.pop('Element', None) or DOM.createInputCheck()
self.initElement(element, **ka)
@classmethod
def _getProps(self):
return ButtonBase._getProps() + self._props
def initElement(self, element, **ka):
self.inputElem = element
self.labelElem = DOM.createLabel()
element = ka.pop('Element', None) or DOM.createSpan()
ButtonBase.__init__(self, element, **ka)
self.unsinkEvents(Event.FOCUSEVENTS | Event.ONCLICK)
DOM.sinkEvents(self.inputElem, Event.FOCUSEVENTS | Event.ONCLICK |
DOM.getEventsSunk(self.inputElem))
DOM.appendChild(self.getElement(), self.inputElem)
DOM.appendChild(self.getElement(), self.labelElem)
uid = "check%d" % self.getUniqueID()
DOM.setAttribute(self.inputElem, "id", uid)
DOM.setAttribute(self.labelElem, "htmlFor", uid)
# emulate a static counter shared by all CheckBox instances
def getUniqueID(self):
global _CheckBox_unique_id
_CheckBox_unique_id += 1
return _CheckBox_unique_id
def getHTML(self):
return DOM.getInnerHTML(self.labelElem)
def getName(self):
return DOM.getAttribute(self.inputElem, "name")
def getText(self):
return DOM.getInnerText(self.labelElem)
def setChecked(self, checked):
DOM.setBooleanAttribute(self.inputElem, "checked", checked)
DOM.setBooleanAttribute(self.inputElem, "defaultChecked", checked)
def isChecked(self):
""" XXX this function is deprecated: use getChecked
"""
return self.getChecked()
def getChecked(self):
if self.isAttached():
propName = "checked"
else:
propName = "defaultChecked"
return DOM.getBooleanAttribute(self.inputElem, propName)
def isEnabled(self):
""" XXX this function is deprecated: use getEnabled
"""
return self.getEnabled()
def getEnabled(self):
return not DOM.getBooleanAttribute(self.inputElem, "disabled")
def setEnabled(self, enabled):
DOM.setBooleanAttribute(self.inputElem, "disabled", not enabled)
def setFocus(self, focused):
if focused:
Focus.focus(self.inputElem)
else:
Focus.blur(self.inputElem)
def setHTML(self, html):
DOM.setInnerHTML(self.labelElem, html)
def setName(self, name):
DOM.setAttribute(self.inputElem, "name", name)
def setTabIndex(self, index):
Focus.setTabIndex(self.inputElem, index)
def setText(self, text):
DOM.setInnerText(self.labelElem, text)
def onDetach(self):
self.setChecked(self.isChecked())
ButtonBase.onDetach(self)
Factory.registerClass('pyjamas.ui.CheckBox', 'CheckBox', CheckBox)
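# Usage sketch (illustrative; assumes a running pyjamas application and the
# usual RootPanel entry point):
#
#     from pyjamas.ui.RootPanel import RootPanel
#     cb = CheckBox("Enable feature")
#     cb.setChecked(True)
#     RootPanel().add(cb)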
| {
"content_hash": "0379c5f3c7ca7af508079b23f033b10d",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 74,
"avg_line_length": 29.71171171171171,
"alnum_prop": 0.6276531231049121,
"repo_name": "anandology/pyjamas",
"id": "63b8594693484a58f73122d3b1f2f371bc2a8332",
"size": "3957",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "library/gwt/ui/CheckBox.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "325172"
},
{
"name": "PHP",
"bytes": "121841"
},
{
"name": "Python",
"bytes": "6383764"
},
{
"name": "Shell",
"bytes": "19448"
}
],
"symlink_target": ""
} |
import alignments
import re
import read
import binaryIO
import math
import os
import preprocess
import time
class Compressor:
aligned = None
# 0 - zlib
# 1 - lzma
# 2 - bz2
compressMethod = 0
covSize = 0
totalSize = 0
def __init__(self, frag_len_cutoff):
if self.compressMethod == 0:
self.zlib = __import__('zlib')
elif self.compressMethod == 1:
self.lzma = __import__('lzma')
elif self.compressMethod == 2:
self.bz2 = __import__('bz2')
if frag_len_cutoff:
print('Set fragment length cutoff to %d' % frag_len_cutoff)
self.frag_len_cutoff = frag_len_cutoff
def compress(self, samFilename, compressedFilename, gtf, min_filename, frag_len_z_cutoff, split_diff_strands, split_discordant):
''' Compress the alignments in samFilename, writing the compressed bundles
to compressedFilename and, if given, an uncompressed intermediate SAM
file to min_filename.
'''
self.p = preprocess.Preprocessor(samFilename, frag_len_z_cutoff, split_diff_strands)
if not self.frag_len_cutoff:
self.frag_len_cutoff = self.p.frag_len_cutoff
print('Using fragment length cutoff of ' + str(self.frag_len_cutoff))
if split_diff_strands:
print('Splitting mates on different strands')
else:
print('Not splitting mates on different strands')
if split_discordant:
print('Splitting discordant')
else:
print('Not splitting discordant')
# Reads on different strands that should be unpaired
self.diff_strand_unpaired = self.p.unpaired
del self.p
# Read header
header = ''
with open(samFilename, 'r') as f:
for line in f:
if line[0] == '@':
header += line
else:
break
self.chromosomes = self.parseSAMHeader(header)
self.aligned = alignments.Alignments(self.chromosomes, self.frag_len_cutoff, split_discordant)
if gtf:
self.aligned.gtf_exons = self.parseGTF(gtf, self.aligned.chromOffsets)
self.compressByBundle(samFilename, compressedFilename, min_filename)
#print('%d unmatched' % self.aligned.numUnmatched)
print('Approximately %d / %d = %f%% of compressed file is coverage' % (self.covSize, self.totalSize, 100.0*float(self.covSize)/float(self.totalSize)))
print('Finished compressing')
def compressByBundle(self, input_name, compressed_name, intermediate_name=None):
'''
Read a sorted SAM file and compress it in segments (bundles) determined
by clusters of overlapping reads.
:param input_name: sorted SAM file to compress
:param compressed_name: output file for the compressed bundles
:param intermediate_name: optional SAM file of the processed reads
'''
# If coverage is 0 for at least this many bases, treat it as the end of a potential gene (bundle)
overlapRadius = 50
spliced_index = []
bundles = []
first = True
bundle_id = 0
read_id = 0
diff_strand_unpaired_id = 0
num_diff_strand_unpaired = len(self.diff_strand_unpaired)
firstR = None
with open(input_name, 'r') as filehandle:
id = 0
start_id = 0
for line in filehandle:
# Check if header line
if line[0] == '@':
continue
row = line.strip().split('\t')
if row[2] == '*':
# HISAT includes unmapped reads at the end of the file; we just skip them
continue
if row[2] not in self.chromosomes[0]:
print('Error! Chromosome ' + str(row[2]) + ' not found!')
exit()
# Starting position of this read
start = self.aligned.chromOffsets[row[2]] + int(row[3])
if self.aligned.gene_bounds and start > (self.aligned.gene_bounds[-1] + overlapRadius):
# Compress most recent bundle
self.aligned.finalizeExons()
self.aligned.finalizeUnmatched()
self.aligned.finalize_cross_bundle_reads()
#if self.aligned.gene_bounds[0] < 100480943 and self.aligned.gene_bounds[1] > 100478955:
# print(bundle_id)
# print(self.aligned.gene_bounds)
# print(self.aligned.exons)
# print(self.aligned.gene_bounds[0] - self.aligned.chromOffsets['X'])
# print(self.aligned.gene_bounds[1] - self.aligned.chromOffsets['X'])
# exit()
bundle_id += 1
start_id = id
bundles.append(self.aligned.exons)
# Write to intermediate file
if intermediate_name:
if first:
# If it's the first bundle, write the header as well
with open(intermediate_name, 'w') as f1:
read_id = self.aligned.writeSAM(f1, self.aligned.unpaired, self.aligned.paired, True, False, read_id)
else:
with open(intermediate_name, 'a') as f1:
read_id = self.aligned.writeSAM(f1, self.aligned.unpaired, self.aligned.paired, False, False, read_id)
junctions, maxReadLen = self.aligned.computeBuckets()
self.sortedJuncs = sorted(junctions.keys())
# Compress bundle to temporary file
if first:
mode = 'wb'
else:
mode = 'ab'
with open('temp.bin', mode) as f:
l = self.compressBundle(junctions, maxReadLen, f)
spliced_index.append(l)
# Start new bundle
self.aligned.resetBundle()
self.aligned.exons.add(start)
first = False
# Process read
if row[5] == '*':
# HISAT occasionally prints * as the cigar string when it is identical to its mate
#print('No cigar string')
#print(row[0])
#exit()
exons = None
else:
exons = self.parseCigar(row[5], int(row[3]))
# find XS (strand) and NH values
strand = None
NH = 1
for r in row[11 : len(row)]:
if r[0:5] == 'XS:A:' or r[0:5] == 'XS:a:':
strand = r[5]
elif r[0:3] == 'NH:':
NH = int(r[5:])
flags = int(row[1])
if flags & 4:
# Read is unmapped
continue
r = read.Read(row[2], int(row[3]), exons, strand, NH)
#r.name = row[0]
if row[6] == '*' or (flags & 8):
paired = False
elif diff_strand_unpaired_id < num_diff_strand_unpaired and id == self.diff_strand_unpaired[diff_strand_unpaired_id]:
#if not row[6] == '*':
# print('\t'.join(row))
paired = False
diff_strand_unpaired_id += 1
else:
paired = True
r.bundle = bundle_id
r.pairOffset = int(row[7])
if row[6] == '=':
r.pairChrom = row[2]
else:
r.pairChrom = row[6]
self.aligned.processRead(row[0], r, paired)
id += 1
# Compress final cluster
self.aligned.finalizeExons()
self.aligned.finalizeUnmatched()
self.aligned.finalize_cross_bundle_reads()
bundle_id += 1
bundles.append(self.aligned.exons)
# Write to intermediate file
if intermediate_name:
if first:
# If it's the first bundle, write the header as well
with open(intermediate_name, 'w') as f1:
read_id = self.aligned.writeSAM(f1, self.aligned.unpaired, self.aligned.paired, True, False, read_id)
first = False
else:
with open(intermediate_name, 'a') as f1:
read_id = self.aligned.writeSAM(f1, self.aligned.unpaired, self.aligned.paired, False, False, read_id)
junctions, maxReadLen = self.aligned.computeBuckets()
self.sortedJuncs = sorted(junctions.keys())
# Compress bundle to temporary file
if first:
mode = 'wb'
else:
mode = 'ab'
with open('temp.bin', mode) as f:
l = self.compressBundle(junctions, maxReadLen, f)
spliced_index.append(l)
leftovers = 0
for k,v in self.aligned.cross_bundle_reads.items():
#if len(v) > 0:
# print(k)
# print(v)
# exit()
leftovers += len(v)
print('%d cross-bundle reads unmatched' % leftovers)
bundle_lens = [c[-1]-c[0] for c in bundles]
print('Minimum bundle length: %d' % min(bundle_lens))
print('Maximum bundle length: %d' % max(bundle_lens))
print('Average bundle length: %d' % (sum(bundle_lens) / len(bundle_lens)))
# Write index information and append spliced and unspliced files
with open(compressed_name, 'wb') as f:
s = binaryIO.writeChroms(self.chromosomes)
s += binaryIO.writeClusters(bundles)
s += binaryIO.writeList(spliced_index)
f.write(s)
# Compress bundle-spanning buckets
self.compressCrossBundle(self.aligned.cross_bundle_buckets, self.aligned.max_cross_bundle_read_len, bundle_id, f)
# Move contents of temporary file to output file
with open('temp.bin', 'rb') as f2:
f.write(f2.read())
os.remove('temp.bin')
def compressBundle(self, junctions, maxReadLen, filehandle):
# Determine the number of bytes for read lengths
readLenBytes = binaryIO.findNumBytes(maxReadLen)
cluster = binaryIO.valToBinary(1, readLenBytes)
cluster += binaryIO.writeJunctionsList(self.sortedJuncs, 2)
self.totalSize += len(cluster)
# TODO: No need for junc_lens?
junc_lens = []
junc_string = b''
for j in self.sortedJuncs:
s, c, t = binaryIO.writeJunction(readLenBytes, junctions[j])
self.covSize += c
self.totalSize += t
junc_lens.append(len(s))
junc_string += s
#cluster += binaryIO.writeList(junc_lens)
cluster += junc_string
# Write to file
start = filehandle.tell()
filehandle.write(self.compressString(cluster))
# return length of cluster in file
return filehandle.tell() - start
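# Layout sketch of one compressed bundle, as written by the method above
# (comment added for clarity, not in the original source): the cluster is built as
#   1 byte:  readLenBytes (bytes needed to encode the longest read length)
#   N bytes: the sorted junction list (binaryIO.writeJunctionsList, 2 bytes per value)
#   then the per-junction payloads concatenated in sorted-junction order,
# and the whole cluster is run through compressString() before being appended
# to the file; the returned value is its compressed length in the file.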
def compressCrossBundle(self, cross_bundle_buckets, maxReadLen, num_bundles, filehandle):
'''
Compress the bundle-spanning buckets
'''
readLenBytes = binaryIO.findNumBytes(maxReadLen)
bundleIdBytes = binaryIO.findNumBytes(num_bundles)
buckets_sorted = sorted(cross_bundle_buckets.keys())
if len(buckets_sorted) > 0:
print('%d cross-bundle buckets' % len(buckets_sorted))
pos = filehandle.tell()
chunk_size = 20
num_chunks = math.ceil(len(buckets_sorted) / chunk_size)
chunk_lens = [0] * num_chunks
index = binaryIO.valToBinary(4, len(buckets_sorted))
index += binaryIO.valToBinary(2, chunk_size)
index += binaryIO.valToBinary(1, readLenBytes)
index += binaryIO.writeCrossBundleBucketNames(bundleIdBytes, cross_bundle_buckets, buckets_sorted)
self.totalSize += len(index)
main = b''
chunk = b''
chunk_id = 0
for i in range(len(buckets_sorted)):
b = buckets_sorted[i]
ch, c, t = binaryIO.writeCrossBundleBucket(readLenBytes, cross_bundle_buckets[b])
chunk += ch
self.covSize += c
self.totalSize += t
if (i+1) % chunk_size == 0:
compressed = self.compressString(chunk)
chunk_lens[chunk_id] = len(compressed)
chunk_id += 1
main += compressed
chunk = b''
if len(chunk) > 0:
compressed = self.compressString(chunk)
chunk_lens[chunk_id] = len(compressed)
main += compressed
index += binaryIO.writeList(chunk_lens)
index = self.compressString(index)
length = len(index)
numBytes = binaryIO.findNumBytes(length)
binaryIO.writeVal(filehandle, 1, numBytes)
binaryIO.writeVal(filehandle, numBytes, length)
filehandle.write(index)
filehandle.write(main)
print('Compressed size: %d' % (filehandle.tell() - pos))
else:
binaryIO.writeVal(filehandle, 1, 1)
binaryIO.writeVal(filehandle, 1, 0)
def parseCigar(self, cigar, offset):
''' Parse the cigar string starting at the given index of the genome
Returns a list of offsets for each exonic region of the read [(start1, end1), (start2, end2), ...]
'''
exons = []
newExon = True
# Parse cigar string
match = re.search(r"\D", cigar)
while match:
index = match.start()
length = int(cigar[:index])
if cigar[index] == 'N':
# Separates contiguous exons, so set boolean to start a new one
newExon = True
elif cigar[index] == 'M':
# If in the middle of a contiguous exon, append the length to it, otherwise start a new exon
if newExon:
exons.append([offset, offset+length])
newExon = False
else:
exons[-1][1] += length
elif cigar[index] == 'D':
# If in the middle of a contiguous exon, append the deleted length to it
if not newExon:
exons[-1][1] += length
# Skip soft clipping
if not cigar[index] == 'S':
offset += length
cigar = cigar[index+1:]
match = re.search(r"\D", cigar)
return exons
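# Illustrative examples (added for clarity, not in the original source) of
# what parseCigar returns, derived from the loop above: 'N' starts a new exon
# block, 'M' starts or extends one, 'D' extends one, and soft clips ('S') do
# not advance the genome offset:
#
#   parseCigar('10M100N5M', 1000)  ->  [[1000, 1010], [1110, 1115]]
#   parseCigar('5S10M', 2000)      ->  [[2000, 2010]]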
def parseSAMHeader(self, header):
# In the order they appear in the header
chromNames = []
chromLens = []
# Dictionary contains chromosome lengths for lookup
for line in header.split('\n'):
if line[0:3] == '@SQ':
row = line.strip().split('\t')
chromNames.append(row[1][3:])
chromLens.append(int(row[2][3:]))
return [chromNames, chromLens]
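# Worked example (added for clarity, not in the original source): an '@SQ'
# header line such as '@SQ\tSN:chr1\tLN:248956422' contributes 'chr1' to
# chromNames and 248956422 to chromLens, so a two-chromosome header yields
# [['chr1', 'chr2'], [248956422, 242193529]].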
def parseGTF(self, gtf, chromOffsets):
exons = set()
with open(gtf, 'r') as f:
for line in f:
row = line.rstrip().split('\t')
if row[2] == 'exon':
exons.add(int(row[3]) + chromOffsets[row[0]])
exons.add(int(row[4]) + chromOffsets[row[0]])
return sorted(list(exons))
def compressString(self, s):
''' Use a predefined python library to compress the given string.
Return the compressed string '''
if self.compressMethod == 0:
return self.zlib.compress(s)
elif self.compressMethod == 1:
return self.lzma.compress(s)
elif self.compressMethod == 2:
return self.bz2.compress(s)
| {
"content_hash": "5c6d7313e8606a86be35e888b1dc9ba7",
"timestamp": "",
"source": "github",
"line_count": 441,
"max_line_length": 158,
"avg_line_length": 36.70748299319728,
"alnum_prop": 0.5169261181121818,
"repo_name": "jpritt/boiler",
"id": "8b175f7db934339e171858a403bdb58fb2024d98",
"size": "16188",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "compress.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "233739"
},
{
"name": "Shell",
"bytes": "493"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import deepserve.fileupload.models
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
('fileupload', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='File',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('filename', models.CharField(max_length=255)),
('sha1sum', models.CharField(max_length=40)),
('fileobj', models.FileField(upload_to=deepserve.fileupload.models.generate_file_name)),
('created_on', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
],
),
]
| {
"content_hash": "859da75d6b96be4b64dd3dccd889c6c9",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 114,
"avg_line_length": 32.074074074074076,
"alnum_prop": 0.5923787528868361,
"repo_name": "nathanhi/deepserve",
"id": "955cadad08f31565d7dd227c827b653c8d1e0cbb",
"size": "936",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "deepserve/fileupload/migrations/0002_file.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "575"
},
{
"name": "HTML",
"bytes": "3888"
},
{
"name": "JavaScript",
"bytes": "233"
},
{
"name": "Python",
"bytes": "14946"
}
],
"symlink_target": ""
} |
from abc import ABCMeta, abstractproperty
from charlesbot.slack.slack_base_object import SlackBaseObject
class SlackRoomJoined(SlackBaseObject, metaclass=ABCMeta):
def __init__(self, **kwargs):
super().__init__(**kwargs)
@abstractproperty
def compatibility_key(self):
pass
def load(self, object_dict):
for prop in self.properties:
default = getattr(self, prop)
setattr(self, prop, object_dict.get('channel', {}).get(prop, default)) # NOQA
self.type = object_dict.get('type')
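# Hypothetical sketch (not part of charlesbot): a concrete subclass only has
# to pin down compatibility_key; load() above then copies every property the
# class hierarchy declares out of the event's 'channel' dict. The class name
# and key below are illustrative.
class ExampleChannelJoined(SlackRoomJoined):
    @property
    def compatibility_key(self):
        return "channel_joined"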
| {
"content_hash": "776ef268be1a29fbdf9f005162971321",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 90,
"avg_line_length": 30.72222222222222,
"alnum_prop": 0.6528028933092225,
"repo_name": "marvinpinto/charlesbot",
"id": "7e9d3242f1196ccd788958cda15a5fa8bdc7b9d0",
"size": "553",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "charlesbot/slack/slack_room_joined.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "43"
},
{
"name": "Makefile",
"bytes": "1453"
},
{
"name": "Python",
"bytes": "76502"
}
],
"symlink_target": ""
} |
import unittest
import test
def main():
suites = test.all()
result = unittest.TextTestRunner(verbosity=2).run(suites)
if __name__ == '__main__':
main()
| {
"content_hash": "d973092e398b072997f63712f7da99b0",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 61,
"avg_line_length": 18.444444444444443,
"alnum_prop": 0.6325301204819277,
"repo_name": "zqqf16/clipboard",
"id": "3cc348ef63888159b2d073652c039af1762d4d0a",
"size": "213",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "clipboard/run_test.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "810"
},
{
"name": "Python",
"bytes": "5596"
}
],
"symlink_target": ""
} |
import re
import time
import json
from random import randint
from calvin.Tools import cscompiler as compiler
from calvin.runtime.north.appmanager import Deployer
from calvin.runtime.north import metering
from calvin.utilities.calvinlogger import get_logger
from calvin.utilities.calvin_callback import CalvinCB
from calvin.runtime.south.plugins.async import server_connection, async
from urlparse import urlparse
from calvin.requests import calvinresponse
from calvin.utilities.security import security_needed_check
from calvin.actorstore.store import DocumentationStore
from calvin.utilities import calvinuuid
_log = get_logger(__name__)
uuid_re = "[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}"
control_api_doc = ""
control_api_doc += \
"""
GET /actor_doc {path}
Get documentation in 'raw' format for actor or module at {path}
Path is formatted as '/{module}/{submodule}/ ... /{actor}'.
If {path} is empty return top-level documentation.
See DocumentStore help_raw() for details on data format.
Response status code: OK
Response: dictionary with documentation
"""
re_get_actor_doc = re.compile(r"GET /actor_doc(\S*)\sHTTP/1")
control_api_doc += \
"""
POST /log
Register for log events and set actor and event filter.
Body:
{
'user_id': <user_id> # Optional user id
'actors': [<actor-id>], # Actors to log, empty list for all
'events': [<event_type>] # Event types to log: actor_firing, action_result,
actor_new, actor_destroy, actor_migrate,
application_new, application_destroy
}
Response status code: OK or BAD_REQUEST
Response:
{
'user_id': <user_id>,
'epoch_year': <the year the epoch starts at Jan 1 00:00, e.g. 1970>
}
"""
re_post_log = re.compile(r"POST /log\sHTTP/1")
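# Illustrative client sketch (added; not part of this module), assuming the
# control API listens on http://localhost:5001. Registers a log session for
# actor_firing events using only the Python 2 standard library:
def _example_register_log():
    import json
    import urllib2
    body = json.dumps({'events': ['actor_firing'], 'actors': []})
    req = urllib2.Request('http://localhost:5001/log', body,
                          {'Content-Type': 'application/json'})
    # On OK the handler returns {'user_id': ..., 'epoch_year': ...}
    return json.loads(urllib2.urlopen(req).read())['user_id']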
control_api_doc += \
"""
DELETE /log/{user-id}
Unregister for trace data
Response status code: OK or NOT_FOUND
"""
re_delete_log = re.compile(r"DELETE /log/(TRACE_" + uuid_re + "|" + uuid_re + ")\sHTTP/1")
control_api_doc += \
"""
GET /log/{user-id}
Get streamed log events
Response status code: OK or NOT_FOUND
Content-Type: text/event-stream
data:
{
'timestamp': <timestamp>,
'node_id': <node_id>,
'type': <event_type>, # event types: actor_fire, actor_new, actor_destroy, actor_migrate, application_new, application_destroy
'actor_id', # included in: actor_fire, actor_new, actor_destroy, actor_migrate
'actor_name', # included in: actor_new
'is_shadow' # included in: actor_new
'action_method', # included in: actor_fire
'consumed', # included in: actor_fire
'produced' # included in: actor_fire
'action_result' # included in: actor_fire
'actor_type', # included in: actor_new
'dest_node_id', # included in: actor_migrate
'application_name', # included in: application_new
'application_id' # included in: application_new, application_destroy
}
"""
re_get_log = re.compile(r"GET /log/(TRACE_" + uuid_re + "|" + uuid_re + ")\sHTTP/1")
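# Illustrative sketch (added; not part of this module) of consuming the
# text/event-stream served for a registered user-id, again assuming a control
# API at http://localhost:5001. Each event arrives as a 'data: <json>' line:
def _example_follow_log(user_id):
    import json
    import urllib2
    stream = urllib2.urlopen('http://localhost:5001/log/' + user_id)
    while True:
        line = stream.readline()
        if not line:
            break
        line = line.strip()
        if line.startswith('data: '):
            print(json.loads(line[len('data: '):]))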
control_api_doc += \
"""
GET /id
Get id of this calvin node
Response status code: OK
Response: node-id
"""
re_get_node_id = re.compile(r"GET /id\sHTTP/1")
control_api_doc += \
"""
GET /nodes
List nodes in network (excluding self) known to self
Response status code: OK
Response: List of node-ids
"""
re_get_nodes = re.compile(r"GET /nodes\sHTTP/1")
control_api_doc += \
"""
GET /node/{node-id}
Get information on node node-id
Response status code: OK or NOT_FOUND
Response:
{
"attributes": {...},
"control_uri": "http://<address>:<controlport>",
"uri": "calvinip://<address>:<port>"
}
"""
re_get_node = re.compile(r"GET /node/(NODE_" + uuid_re + "|" + uuid_re + ")\sHTTP/1")
control_api_doc += \
"""
POST /peer_setup
Add calvin nodes to network
Body: {"peers: ["calvinip://<address>:<port>", ...] }
Response status code: OK or SERVICE_UNAVAILABLE
Response: {<peer control uri>: [<peer node id>, <per peer status>], ...}
"""
re_post_peer_setup = re.compile(r"POST /peer_setup\sHTTP/1")
control_api_doc += \
"""
GET /applications
Get applications launched from this node
Response status code: OK
Response: List of application ids
"""
re_get_applications = re.compile(r"GET /applications\sHTTP/1")
control_api_doc += \
"""
GET /application/{application-id}
Get information on application application-id
Response status code: OK or NOT_FOUND
Response:
{
"origin_node_id": <node id>,
"actors": <list of actor ids>
"name": <name or id of this application>
}
"""
re_get_application = re.compile(r"GET /application/(APP_" + uuid_re + "|" + uuid_re + ")\sHTTP/1")
control_api_doc += \
"""
DELETE /application/{application-id}
Stop application (only applications launched from this node)
Response status code: OK, NOT_FOUND, INTERNAL_ERROR
Response: none
"""
re_del_application = re.compile(r"DELETE /application/(APP_" + uuid_re + "|" + uuid_re + ")\sHTTP/1")
control_api_doc += \
"""
POST /actor
Create a new actor
Body:
{
"actor_type:" <type of actor>,
"args" : { "name": <name of actor>, <actor argument>:<value>, ... }
"deploy_args" : {"app_id": <application id>, "app_name": <application name>} (optional)
}
Response status code: OK or INTERNAL_ERROR
Response: {"actor_id": <actor-id>}
"""
re_post_new_actor = re.compile(r"POST /actor\sHTTP/1")
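# Illustrative sketch (added; not part of this module): creating an actor via
# the endpoint above, assuming the control API at http://localhost:5001. The
# actor type is a placeholder; deploy_args is passed explicitly (even as None)
# because the handler reads it unconditionally:
def _example_new_actor():
    import json
    import urllib2
    body = json.dumps({'actor_type': 'std.Identity',
                       'args': {'name': 'example'},
                       'deploy_args': None})
    req = urllib2.Request('http://localhost:5001/actor', body,
                          {'Content-Type': 'application/json'})
    return json.loads(urllib2.urlopen(req).read())['actor_id']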
control_api_doc += \
"""
GET /actors
Get list of actors on this runtime
Response status code: OK
Response: list of actor ids
"""
re_get_actors = re.compile(r"GET /actors\sHTTP/1")
control_api_doc += \
"""
GET /actor/{actor-id}
Get information on actor
Response status code: OK or NOT_FOUND
Response:
{
"inports": list inports
"node_id": <node-id>,
"type": <actor type>,
"name": <actor name>,
"outports": list of outports
}
"""
re_get_actor = re.compile(r"GET /actor/(ACTOR_" + uuid_re + "|" + uuid_re + ")\sHTTP/1")
control_api_doc += \
"""
DELETE /actor/{actor-id}
Delete actor
Response status code: OK or NOT_FOUND
Response: none
"""
re_del_actor = re.compile(r"DELETE /actor/(ACTOR_" + uuid_re + "|" + uuid_re + ")\sHTTP/1")
control_api_doc += \
"""
GET /actor/{actor-id}/report
Some actors store statistics on inputs and outputs; this endpoint reports them. Not always present.
Response status code: OK or NOT_FOUND
Response: Depends on actor
"""
re_get_actor_report = re.compile(r"GET /actor/(ACTOR_" + uuid_re + "|" + uuid_re + ")/report\sHTTP/1")
control_api_doc += \
"""
POST /actor/{actor-id}/migrate
Migrate actor to (other) node, either explicit node_id or by updated requirements
Body: {"peer_node_id": <node-id>}
Alternative body:
Body:
{
"requirements": [ {"op": "<matching rule name>",
"kwargs": {<rule param key>: <rule param value>, ...},
"type": "+" or "-" for set intersection or set removal, respectively
}, ...
],
"extend": True or False # defaults to False, i.e. replace current requirements
"move": True or False # defaults to False, i.e. when possible stay on the current node
}
For further details about requirements see application deploy.
Response status code: OK, BAD_REQUEST, INTERNAL_ERROR or NOT_FOUND
Response: none
"""
re_post_actor_migrate = re.compile(r"POST /actor/(ACTOR_" + uuid_re + "|" + uuid_re + ")/migrate\sHTTP/1")
control_api_doc += \
"""
POST /actor/{actor-id}/disable
DEPRECATED. Disables an actor
Response status code: OK or NOT_FOUND
Response: none
"""
re_post_actor_disable = re.compile(r"POST /actor/(ACTOR_" + uuid_re + "|" + uuid_re + ")/disable\sHTTP/1")
# control_api_doc += \
"""
GET /actor/{actor-id}/port/{port-id}
Get information on port {port-id} of actor {actor-id}
Response status code: OK or NOT_FOUND
"""
re_get_port = re.compile(
r"GET /actor/(ACTOR_" + uuid_re + "|" + uuid_re + ")/port/(PORT_" + uuid_re + "|" + uuid_re + ")\sHTTP/1")
# control_api_doc += \
"""
GET /actor/{actor-id}/port/{port-id}/state
Get port state {port-id} of actor {actor-id}
Response status code: OK or NOT_FOUND
"""
re_get_port_state = re.compile(
r"GET /actor/(ACTOR_" + uuid_re + "|" + uuid_re + ")/port/(PORT_" + uuid_re + "|" + uuid_re + ")/state\sHTTP/1")
control_api_doc += \
"""
POST /connect
Connect actor ports
Body:
{
"actor_id" : <actor-id>,
"port_name": <port-name>,
"port_dir": <in/out>,
"peer_node_id": <node-id>,
"peer_actor_id": <actor-id>,
"peer_port_name": <port-name>,
"peer_port_dir": <out/in>
}
Response status code: OK, BAD_REQUEST, INTERNAL_ERROR or NOT_FOUND
Response: {"peer_port_id": <peer port id>}
"""
re_post_connect = re.compile(r"POST /connect\sHTTP/1")
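# Illustrative sketch (added; not part of this module): connecting an inport
# to a peer outport on the same runtime. The ids are caller-supplied and the
# port name 'token' is a placeholder:
def _example_connect(actor_id, peer_actor_id):
    import json
    import urllib2
    body = json.dumps({'actor_id': actor_id,
                       'port_name': 'token',
                       'port_dir': 'in',
                       'peer_node_id': None,
                       'peer_actor_id': peer_actor_id,
                       'peer_port_name': 'token',
                       'peer_port_dir': 'out'})
    req = urllib2.Request('http://localhost:5001/connect', body,
                          {'Content-Type': 'application/json'})
    return json.loads(urllib2.urlopen(req).read())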
control_api_doc += \
"""
POST /set_port_property
Sets a property of the port.
Currently only fanout on outports is supported.
Body:
{
"actor_id" : <actor-id>,
"port_type": <in/out>,
"port_name": <port-name>,
"port_property": <property-name>
"value" : <property value>
}
Response status code: OK or NOT_FOUND
Response: none
"""
re_set_port_property = re.compile(r"POST /set_port_property\sHTTP/1")
control_api_doc += \
"""
POST /deploy
Compile and deploy a calvin script to this calvin node
Apply deployment requirements to actors of an application
and initiate migration of actors accordingly
Body:
{
"name": <application name>,
"script": <calvin script> # alternativly "app_info"
"app_info": <compiled script as app_info> # alternativly "script"
"sec_sign": {<cert hash>: <security signature of script>, ...} # optional and only with "script"
"sec_credentials": <security credentials of user> # optional
"deploy_info":
{"groups": {"<group 1 name>": ["<actor instance 1 name>", ...]}, # TODO not yet implemented
"requirements": {
"<actor instance 1 name>": [ {"op": "<matching rule name>",
"kwargs": {<rule param key>: <rule param value>, ...},
"type": "+" or "-" for set intersection or set removal, respectively
}, ...
],
...
}
}
}
Note that either a script or app_info must be supplied. Optionally, security
verification of the application script can be requested. User credentials can
also be supplied; some runtimes are configured to require credentials. The
credentials take, for example, the following form:
{"user": <username>,
"password": <password>,
"role": <role>,
"group": <group>,
...
}
The matching rules are implemented as plug-ins and are intended to be extended.
The type "+" "and"s rules together (actually the intersection of all
possible nodes returned by the rules). The type "-" explicitly removes
the nodes returned by this rule from the set of possible nodes. Note that
using only negative rules will result in no possible nodes, i.e. there is no
implied "all but these."
A special matching rule exist, to first form a union between matching
rules, i.e. alternative matches. This is useful for e.g. alternative
namings, ownerships or specifying either of two specific nodes.
{"op": "union_group",
"requirements": [list as above of matching rules but without type key]
"type": "+"
}
Other available matching rules are current_node, all_nodes and
node_attr_match; the latter takes an index param in attribute format,
e.g.
{"op": "node_attr_match",
"kwargs": {"index": ["node_name", {"organization": "org.testexample", "name": "testNode1"}]}
"type": "+"
}
Response status code: OK, CREATED, BAD_REQUEST, UNAUTHORIZED or INTERNAL_ERROR
Response: {"application_id": <application-id>,
"actor_map": {<actor name with namespace>: <actor id>, ...}
"placement": {<actor_id>: <node_id>, ...},
"requirements_fulfilled": True/False}
Failure response: {'errors': <compilation errors>,
'warnings': <compilation warnings>,
'exception': <exception string>}
"""
re_post_deploy = re.compile(r"POST /deploy\sHTTP/1")
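# Illustrative sketch (added; not part of this module): deploying a small
# calvin script through the endpoint above, assuming the control API at
# http://localhost:5001; the actor types in the script are placeholders:
def _example_deploy():
    import json
    import urllib2
    script = 'src : std.CountTimer()\nsnk : io.Print()\nsrc.integer > snk.token\n'
    body = json.dumps({'name': 'example_app', 'script': script})
    req = urllib2.Request('http://localhost:5001/deploy', body,
                          {'Content-Type': 'application/json'})
    return json.loads(urllib2.urlopen(req).read())['application_id']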
control_api_doc += \
"""
POST /application/{application-id}/migrate
Update deployment requirements of application application-id
and initiate migration of actors.
Body:
{
"deploy_info":
{"requirements": {
"<actor instance 1 name>": [ {"op": "<matching rule name>",
"kwargs": {<rule param key>: <rule param value>, ...},
"type": "+" or "-" for set intersection or set removal, respectively
}, ...
],
...
}
}
}
For more details on deployment information see application deploy.
Response status code: OK, INTERNAL_ERROR or NOT_FOUND
Response: none
"""
re_post_application_migrate = re.compile(r"POST /application/(APP_" + uuid_re + "|" + uuid_re + ")/migrate\sHTTP/1")
control_api_doc += \
"""
POST /disconnect
Disconnect a port.
If port fields are empty, all ports of the actor are disconnected
Body:
{
"actor_id": <actor-id>,
"port_name": <port-name>,
"port_dir": <in/out>,
"port_id": <port-id>
}
Response status code: OK, INTERNAL_ERROR or NOT_FOUND
Response: none
"""
re_post_disconnect = re.compile(r"POST /disconnect\sHTTP/1")
control_api_doc += \
"""
DELETE /node
Stop (this) calvin node
Response status code: ACCEPTED
Response: none
"""
re_delete_node = re.compile(r"DELETE /node\sHTTP/1")
control_api_doc += \
"""
POST /meter
Register for metering information
Body:
{
"user_id": <user-id> optional user id
}
Response status code: OK or BAD_REQUEST
Response:
{
"user_id": <user-id>,
"timeout": <seconds data is kept>,
"epoch_year": <the year the epoch starts at Jan 1 00:00, e.g. 1970>
}
"""
re_post_meter = re.compile(r"POST /meter\sHTTP/1")
control_api_doc += \
"""
DELETE /meter/{user-id}
Unregister for metering information
Response status code: OK or NOT_FOUND
"""
re_delete_meter = re.compile(r"DELETE /meter/(METERING_" + uuid_re + "|" + uuid_re + ")\sHTTP/1")
control_api_doc += \
"""
GET /meter/{user-id}/timed
Get timed metering information
Response status code: OK or NOT_FOUND
Response:
{
<actor-id>:
[
[<seconds since epoch>, <name of action>],
...
],
...
}
"""
re_get_timed_meter = re.compile(r"GET /meter/(METERING_" + uuid_re + "|" + uuid_re + ")/timed\sHTTP/1")
control_api_doc += \
"""
GET /meter/{user-id}/aggregated
Get aggregated metering information
Response status code: OK or NOT_FOUND
Response:
{
'activity':
{
<actor-id>:
{
<action-name>: <total fire count>,
...
},
...
},
'time':
{
<actor-id>: [<start time of counter>, <last modification time>],
...
}
}
"""
re_get_aggregated_meter = re.compile(r"GET /meter/(METERING_" + uuid_re + "|" + uuid_re + ")/aggregated\sHTTP/1")
control_api_doc += \
"""
GET /meter/{user-id}/metainfo
Get metering meta information on actors
Response status code: OK or NOT_FOUND
Response:
{
<actor-id>:
{
<action-name>:
{
'inports': {
<port-name> : <number of tokens per firing>,
...
},
'outports': {
<port-name> : <number of tokens per firing>,
...
}
},
...
}
}
"""
re_get_metainfo_meter = re.compile(r"GET /meter/(METERING_" + uuid_re + "|" + uuid_re + ")/metainfo\sHTTP/1")
control_api_doc += \
"""
POST /index/{key}
Store value under index key
Body:
{
"value": <string>
}
Response status code: OK or INTERNAL_ERROR
Response: none
"""
re_post_index = re.compile(r"POST /index/([0-9a-zA-Z\.\-/_]*)\sHTTP/1")
control_api_doc += \
"""
DELETE /index/{key}
Remove value from index key
Body:
{
"value": <string>
}
Response status code: OK or INTERNAL_ERROR
Response: none
"""
re_delete_index = re.compile(r"DELETE /index/([0-9a-zA-Z\.\-/_]*)\sHTTP/1")
control_api_doc += \
"""
GET /index/{key}
Fetch values under index key
Response status code: OK or NOT_FOUND
Response: {"result": <list of strings>}
"""
re_get_index = re.compile(r"GET /index/([0-9a-zA-Z\.\-/_]*)\sHTTP/1")
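# Illustrative sketch (added; not part of this module): storing and fetching
# a value under an index key, assuming the control API at http://localhost:5001.
# The key 'example/key' matches the [0-9a-zA-Z.\-/_]* pattern above:
def _example_index():
    import json
    import urllib2
    body = json.dumps({'value': 'example-value'})
    req = urllib2.Request('http://localhost:5001/index/example/key', body,
                          {'Content-Type': 'application/json'})
    urllib2.urlopen(req).read()
    resp = urllib2.urlopen('http://localhost:5001/index/example/key')
    return json.loads(resp.read())['result']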
control_api_doc += \
"""
GET /storage/{prefix-key}
Fetch value under prefix-key
Response status code: OK or NOT_FOUND
Response: {"result": <value>}
"""
re_get_storage = re.compile(r"GET /storage/([0-9a-zA-Z\.\-/_]*)\sHTTP/1")
control_api_doc += \
"""
POST /storage/{prefix-key}
Store value under prefix-key
Body:
{
"value": <string>
}
Response status code: OK or INTERNAL_ERROR
Response: none
"""
re_post_storage = re.compile(r"POST /storage/([0-9a-zA-Z\.\-/_]*)\sHTTP/1")
control_api_doc += \
"""
OPTIONS /url
Request for information about the communication options available on url
Response status code: OK
Response: Available communication options
"""
# re_options = re.compile(r"OPTIONS /[0-9a-z/-_.]*\sHTTP/1.1")
re_options = re.compile(r"OPTIONS /[^\s]*\sHTTP/1.1")
_calvincontrol = None
def get_calvincontrol():
""" Returns the CalvinControl singleton
"""
global _calvincontrol
if _calvincontrol is None:
_calvincontrol = CalvinControl()
return _calvincontrol
class Logger(object):
""" Log object
"""
def __init__(self, actors, events):
self.handle = None
self.connection = None
self.actors = actors
self.events = events
def set_connection(self, handle, connection):
self.handle = handle
self.connection = connection
class CalvinControl(object):
""" A HTTP REST API for calvin nodes
"""
LOG_ACTOR_FIRING = 0
LOG_ACTION_RESULT = 1
LOG_ACTOR_NEW = 2
LOG_ACTOR_DESTROY = 3
LOG_ACTOR_MIGRATE = 4
LOG_APPLICATION_NEW = 5
LOG_APPLICATION_DESTROY = 6
def __init__(self):
self.node = None
self.loggers = {}
self.routes = None
self.server = None
self.connections = {}
self.tunnel = None
self.host = None
self.tunnel_server = None
self.tunnel_client = None
self.metering = None
# Set routes for requests
self.routes = [
(re_get_actor_doc, self.handle_get_actor_doc),
(re_post_log, self.handle_post_log),
(re_delete_log, self.handle_delete_log),
(re_get_log, self.handle_get_log),
(re_get_node_id, self.handle_get_node_id),
(re_get_nodes, self.handle_get_nodes),
(re_get_node, self.handle_get_node),
(re_post_peer_setup, self.handle_peer_setup),
(re_get_applications, self.handle_get_applications),
(re_get_application, self.handle_get_application),
(re_del_application, self.handle_del_application),
(re_post_new_actor, self.handle_new_actor),
(re_get_actors, self.handle_get_actors),
(re_get_actor, self.handle_get_actor),
(re_del_actor, self.handle_del_actor),
(re_get_actor_report, self.handle_get_actor_report),
(re_post_actor_migrate, self.handle_actor_migrate),
(re_post_actor_disable, self.handle_actor_disable),
(re_get_port, self.handle_get_port),
(re_get_port_state, self.handle_get_port_state),
(re_post_connect, self.handle_connect),
(re_set_port_property, self.handle_set_port_property),
(re_post_deploy, self.handle_deploy),
(re_post_application_migrate, self.handle_post_application_migrate),
(re_delete_node, self.handle_quit),
(re_post_disconnect, self.handle_disconnect),
(re_post_meter, self.handle_post_meter),
(re_delete_meter, self.handle_delete_meter),
(re_get_timed_meter, self.handle_get_timed_meter),
(re_get_aggregated_meter, self.handle_get_aggregated_meter),
(re_get_metainfo_meter, self.handle_get_metainfo_meter),
(re_post_index, self.handle_post_index),
(re_delete_index, self.handle_delete_index),
(re_get_index, self.handle_get_index),
(re_get_storage, self.handle_get_storage),
(re_post_storage, self.handle_post_storage),
(re_options, self.handle_options)
]
def start(self, node, uri, tunnel=False):
""" If not tunnel, start listening on uri and handle http requests.
If tunnel, setup a tunnel to uri and handle requests.
"""
self.metering = metering.get_metering()
self.node = node
schema, _ = uri.split(':', 1)
if tunnel:
# Connect to tunnel server
self.tunnel_client = CalvinControlTunnelClient(uri, self)
else:
url = urlparse(uri)
self.port = int(url.port)
self.host = url.hostname
_log.info("Control API listening on: %s:%s" % (self.host, self.port))
self.server = server_connection.ServerProtocolFactory(self.handle_request, "http")
self.server.start(self.host, self.port)
# Create tunnel server
self.tunnel_server = CalvinControlTunnelServer(self.node)
def stop(self):
""" Stop """
self.server.stop()
if self.tunnel_server is not None:
self.tunnel_server.stop()
if self.tunnel_client is not None:
self.tunnel_client.stop()
def close_log_tunnel(self, handle):
""" Close log tunnel
"""
for user_id, logger in list(self.loggers.items()):
if logger.handle == handle:
del self.loggers[user_id]
def handle_request(self, actor_ids=None):
""" Handle incoming requests on socket
"""
if self.server.pending_connections:
addr, conn = self.server.accept()
self.connections[addr] = conn
for handle, connection in self.connections.items():
if connection.data_available:
command, headers, data = connection.data_get()
self.route_request(handle, connection, command, headers, data)
def route_request(self, handle, connection, command, headers, data):
found = False
for route in self.routes:
match = route[0].match(command)
if match:
if data:
data = json.loads(data)
_log.debug("Calvin control handles:%s\n%s\n---------------" % (command, data))
route[1](handle, connection, match, data, headers)
found = True
break
if not found:
_log.error("No route found for: %s\n%s" % (command, data))
self.send_response(handle, connection, None, status=404)
def send_response(self, handle, connection, data, status=200):
""" Send response header text/html
"""
header = "HTTP/1.0 " + \
str(status) + " " + calvinresponse.RESPONSE_CODES[status] + \
"\n" + ("" if data is None else "Content-Type: application/json\n") + \
"Access-Control-Allow-Methods: GET, POST, PUT, DELETE, OPTIONS\n" + \
"Access-Control-Allow-Origin: *\r\n" + "\n"
if connection is None:
msg = {"cmd": "httpresp", "msgid": handle, "header": header, "data": data}
self.tunnel_client.send(msg)
else:
if not connection.connection_lost:
connection.send(header)
if data:
connection.send(data)
connection.close()
del self.connections[handle]
def send_streamheader(self, handle, connection):
""" Send response header for text/event-stream
"""
response = "HTTP/1.0 200 OK\n" + "Content-Type: text/event-stream\n" + \
"Access-Control-Allow-Origin: *\r\n" + "\n"
if connection is not None:
if not connection.connection_lost:
connection.send(response)
elif self.tunnel_client is not None:
msg = {"cmd": "logresp", "msgid": handle, "header": response, "data": None}
self.tunnel_client.send(msg)
def storage_cb(self, key, value, handle, connection):
self.send_response(handle, connection, None if value is None else json.dumps(value),
status=calvinresponse.NOT_FOUND if value is None else calvinresponse.OK)
def handle_get_actor_doc(self, handle, connection, match, data, hdr):
""" Query ActorStore for documentation
"""
path = match.group(1)
what = '.'.join(path.strip('/').split('/'))
ds = DocumentationStore()
data = ds.help_raw(what)
self.send_response(handle, connection, json.dumps(data))
def handle_post_log(self, handle, connection, match, data, hdr):
""" Create log session
"""
status = calvinresponse.OK
actors = []
events = []
if data and 'user_id' in data:
user_id = data['user_id']
else:
user_id = calvinuuid.uuid("TRACE")
if user_id not in self.loggers:
if data and 'actors' in data and data['actors']:
actors = data['actors']
if data and 'events' in data:
events = []
for event in data['events']:
if event == 'actor_firing':
events.append(self.LOG_ACTOR_FIRING)
elif event == 'action_result':
events.append(self.LOG_ACTION_RESULT)
elif event == 'actor_new':
events.append(self.LOG_ACTOR_NEW)
elif event == 'actor_destroy':
events.append(self.LOG_ACTOR_DESTROY)
elif event == 'actor_migrate':
events.append(self.LOG_ACTOR_MIGRATE)
elif event == 'application_new':
events.append(self.LOG_APPLICATION_NEW)
elif event == 'application_destroy':
events.append(self.LOG_APPLICATION_DESTROY)
else:
status = calvinresponse.BAD_REQUEST
break
if status == calvinresponse.OK:
self.loggers[user_id] = Logger(actors=actors, events=events)
else:
status = calvinresponse.BAD_REQUEST
self.send_response(handle, connection,
json.dumps({'user_id': user_id, 'epoch_year': time.gmtime(0).tm_year})
if status == calvinresponse.OK else None,
status=status)
def handle_delete_log(self, handle, connection, match, data, hdr):
""" Delete log session
"""
if match.group(1) in self.loggers:
del self.loggers[match.group(1)]
status = calvinresponse.OK
else:
status = calvinresponse.NOT_FOUND
self.send_response(handle, connection, None, status=status)
def handle_get_log(self, handle, connection, match, data, hdr):
""" Get log stream
"""
if match.group(1) in self.loggers:
self.loggers[match.group(1)].set_connection(handle, connection)
self.send_streamheader(handle, connection)
else:
self.send_response(handle, connection, None, calvinresponse.NOT_FOUND)
def handle_get_node_id(self, handle, connection, match, data, hdr):
""" Get node id from this node
"""
self.send_response(handle, connection, json.dumps({'id': self.node.id}))
def handle_peer_setup(self, handle, connection, match, data, hdr):
_log.analyze(self.node.id, "+", data)
self.node.peersetup(data['peers'], cb=CalvinCB(self.handle_peer_setup_cb, handle, connection))
def handle_peer_setup_cb(self, handle, connection, status=None, peer_node_ids=None):
_log.analyze(self.node.id, "+", status.encode())
if peer_node_ids:
data = json.dumps({k: (v[0], v[1].status) for k, v in peer_node_ids.items()})
else:
data = None
self.send_response(handle, connection, data, status=status.status)
def handle_get_nodes(self, handle, connection, match, data, hdr):
""" Get active nodes
"""
self.send_response(handle, connection, json.dumps(self.node.network.list_links()))
def handle_get_node(self, handle, connection, match, data, hdr):
""" Get node information from id
"""
self.node.storage.get_node(match.group(1), CalvinCB(
func=self.storage_cb, handle=handle, connection=connection))
def handle_get_applications(self, handle, connection, match, data, hdr):
""" Get applications
"""
self.send_response(
handle, connection, json.dumps(self.node.app_manager.list_applications()))
def handle_get_application(self, handle, connection, match, data, hdr):
""" Get application from id
"""
self.node.storage.get_application(match.group(1), CalvinCB(
func=self.storage_cb, handle=handle, connection=connection))
def handle_del_application(self, handle, connection, match, data, hdr):
""" Delete application from id
"""
try:
self.node.app_manager.destroy(match.group(1), cb=CalvinCB(self.handle_del_application_cb,
handle, connection))
except:
_log.exception("Destroy application failed")
self.send_response(handle, connection, None, status=calvinresponse.INTERNAL_ERROR)
def handle_del_application_cb(self, handle, connection, status=None):
self.send_response(handle, connection, None, status=status.status)
def handle_new_actor(self, handle, connection, match, data, hdr):
""" Create actor
"""
try:
actor_id = self.node.new(actor_type=data['actor_type'], args=data[
'args'], deploy_args=data['deploy_args'])
status = calvinresponse.OK
except:
actor_id = None
status = calvinresponse.INTERNAL_ERROR
self.send_response(
handle, connection, None if actor_id is None else json.dumps({'actor_id': actor_id}), status=status)
def handle_get_actors(self, handle, connection, match, data, hdr):
""" Get actor list
"""
actors = self.node.am.list_actors()
self.send_response(
handle, connection, json.dumps(actors))
def handle_get_actor(self, handle, connection, match, data, hdr):
""" Get actor from id
"""
self.node.storage.get_actor(match.group(1), CalvinCB(
func=self.storage_cb, handle=handle, connection=connection))
def handle_del_actor(self, handle, connection, match, data, hdr):
""" Delete actor from id
"""
try:
self.node.am.destroy(match.group(1))
status = calvinresponse.OK
except:
_log.exception("Destroy actor failed")
status = calvinresponse.NOT_FOUND
self.send_response(handle, connection, None, status=status)
def handle_get_actor_report(self, handle, connection, match, data, hdr):
""" Get report from actor
"""
try:
report = self.node.am.report(match.group(1))
status = calvinresponse.OK
except:
_log.exception("Actor report failed")
report = None
status = calvinresponse.NOT_FOUND
self.send_response(
handle, connection, None if report is None else json.dumps(report), status=status)
def handle_actor_migrate(self, handle, connection, match, data, hdr):
""" Migrate actor
"""
status = calvinresponse.OK
if 'peer_node_id' in data:
try:
self.node.am.migrate(match.group(1), data['peer_node_id'],
callback=CalvinCB(self.actor_migrate_cb, handle, connection))
except:
_log.exception("Migration failed")
status = calvinresponse.INTERNAL_ERROR
elif 'requirements' in data:
try:
self.node.am.update_requirements(match.group(1), data['requirements'],
extend=data['extend'] if 'extend' in data else False,
move=data['move'] if 'move' in data else False,
callback=CalvinCB(self.actor_migrate_cb, handle, connection))
except:
_log.exception("Migration failed")
status = calvinresponse.INTERNAL_ERROR
else:
status = calvinresponse.BAD_REQUEST
if status != calvinresponse.OK:
self.send_response(handle, connection,
None, status=status)
def actor_migrate_cb(self, handle, connection, status, *args, **kwargs):
""" Migrate actor respons
"""
self.send_response(handle, connection,
None, status=status.status)
def handle_actor_disable(self, handle, connection, match, data, hdr):
try:
self.node.am.disable(match.group(1))
status = calvinresponse.OK
except:
status = calvinresponse.NOT_FOUND
self.send_response(handle, connection, None, status)
def handle_get_port(self, handle, connection, match, data, hdr):
""" Get port from id
"""
self.node.storage.get_port(match.group(2), CalvinCB(
func=self.storage_cb, handle=handle, connection=connection))
def handle_get_port_state(self, handle, connection, match, data, hdr):
""" Get port from id
"""
state = {}
try:
state = self.node.am.get_port_state(match.group(1), match.group(2))
status = calvinresponse.OK
except:
status = calvinresponse.NOT_FOUND
self.send_response(handle, connection, json.dumps(state), status)
def handle_connect(self, handle, connection, match, data, hdr):
""" Connect port
"""
self.node.connect(
actor_id=data.get("actor_id"),
port_name=data.get("port_name"),
port_dir=data.get("port_dir"),
port_id=data.get("port_id"),
peer_node_id=data.get("peer_node_id"),
peer_actor_id=data.get("peer_actor_id"),
peer_port_name=data.get("peer_port_name"),
peer_port_dir=data.get("peer_port_dir"),
peer_port_id=data.get("peer_port_id"),
cb=CalvinCB(self.handle_connect_cb, handle, connection))
def handle_connect_cb(self, handle, connection, **kwargs):
status = kwargs.get('status', None)
peer_port_id = kwargs.get('peer_port_id', None)
self.send_response(handle, connection, json.dumps({'peer_port_id': peer_port_id}) if status else None,
status=status.status)
def handle_set_port_property(self, handle, connection, match, data, hdr):
try:
self.node.am.set_port_property(
actor_id=data["actor_id"],
port_type=data["port_type"],
port_name=data["port_name"],
port_property=data["port_property"],
value=data["value"])
status = calvinresponse.OK
except:
status = calvinresponse.NOT_FOUND
self.send_response(handle, connection, None, status=status)
def handle_deploy(self, handle, connection, match, data, hdr):
# Initialized here so the except clause below can reference them even if compilation was never reached
errors = []
warnings = []
try:
_log.analyze(self.node.id, "+", data)
if 'app_info' not in data:
kwargs = {}
# Supply security verification data when available
if "sec_credentials" in data:
kwargs['credentials'] = data['sec_credentials']
if "sec_sign" in data:
kwargs['content'] = {
'file': data["script"],
'sign': {h: s.decode('hex_codec') for h, s in data['sec_sign'].iteritems()}}
app_info, errors, warnings = compiler.compile(data["script"], filename=data["name"],
verify=data["check"] if "check" in data else True, **kwargs)
if errors:
if any([e['reason'].startswith("401:") for e in errors]):
_log.error("Security verification of script failed")
self.send_response(handle, connection, None, status=calvinresponse.UNAUTHORIZED)
else:
_log.exception("Compilation failed")
self.send_response(handle, connection, json.dumps({'errors': errors, 'warnings': warnings}),
status=calvinresponse.BAD_REQUEST)
return
else:
# Supplying app_info is for backward compatibility, hence abort if the node has security configured.
# The main user is csruntime when deploying a script at the same time; some tests also use it
# via calvin.Tools.deployer (the Deployer below is the new one in appmanager).
# TODO rewrite these users to send the uncompiled script as cscontrol does.
if security_needed_check():
_log.error("Can't combine compiled script with runtime having security")
self.send_response(handle, connection, None, status=calvinresponse.UNAUTHORIZED)
return
app_info = data['app_info']
errors = [""]
warnings = [""]
_log.analyze(self.node.id, "+ COMPILED", {'app_info': app_info, 'errors': errors, 'warnings': warnings})
d = Deployer(deployable=app_info, deploy_info=data["deploy_info"] if "deploy_info" in data else None,
node=self.node, name=data["name"] if "name" in data else None,
credentials=data["sec_credentials"] if "sec_credentials" in data else None,
verify=data["check"] if "check" in data else True,
cb=CalvinCB(self.handle_deploy_cb, handle, connection))
_log.analyze(self.node.id, "+ Deployer instanciated", {})
d.deploy()
_log.analyze(self.node.id, "+ DEPLOYING", {})
except Exception as e:
_log.exception("Deployer failed")
self.send_response(handle, connection, json.dumps({'errors': errors, 'warnings': warnings,
'exception': str(e)}),
status=calvinresponse.BAD_REQUEST if errors else calvinresponse.INTERNAL_ERROR)
def handle_deploy_cb(self, handle, connection, status, deployer, **kwargs):
_log.analyze(self.node.id, "+ DEPLOYED", {'status': status.status})
if status:
self.send_response(handle, connection,
json.dumps({'application_id': deployer.app_id,
'actor_map': deployer.actor_map,
'placement': kwargs.get('placement', None),
'requirements_fulfilled': status.status == calvinresponse.OK}
) if deployer.app_id else None,
status=status.status)
else:
self.send_response(handle, connection, None, status=status.status)
def handle_post_application_migrate(self, handle, connection, match, data, hdr):
app_id = match.group(1)
try:
self.node.app_manager.migrate_with_requirements(app_id,
deploy_info=data["deploy_info"] if "deploy_info" in data else None,
move=data["move"] if "move" in data else False,
cb=CalvinCB(self.handle_post_application_migrate_cb, handle, connection))
except:
_log.exception("App migration failed")
self.send_response(handle, connection, None, status=calvinresponse.INTERNAL_ERROR)
def handle_post_application_migrate_cb(self, handle, connection, status, **kwargs):
_log.analyze(self.node.id, "+ MIGRATED", {'status': status.status})
self.send_response(handle, connection, None, status=status.status)
def handle_quit(self, handle, connection, match, data, hdr):
async.DelayedCall(.2, self.node.stop)
self.send_response(handle, connection, None, status=calvinresponse.ACCEPTED)
def handle_disconnect(self, handle, connection, match, data, hdr):
self.node.disconnect(
data['actor_id'], data['port_name'], data['port_dir'], data['port_id'],
cb=CalvinCB(self.handle_disconnect_cb, handle, connection))
def handle_disconnect_cb(self, handle, connection, **kwargs):
status = kwargs.get('status', None)
self.send_response(handle, connection, None, status=status.status)
def handle_post_meter(self, handle, connection, match, data, hdr):
try:
user_id = self.metering.register(data['user_id'] if data and 'user_id' in data else None)
timeout = self.metering.timeout
status = calvinresponse.OK
except:
_log.exception("handle_post_meter")
status = calvinresponse.BAD_REQUEST
self.send_response(handle, connection, json.dumps({ 'user_id': user_id,
'timeout': timeout,
'epoch_year': time.gmtime(0).tm_year})
if status == calvinresponse.OK else None, status=status)
def handle_delete_meter(self, handle, connection, match, data, hdr):
try:
self.metering.unregister(match.group(1))
status = calvinresponse.OK
except:
_log.exception("handle_delete_meter")
status = calvinresponse.NOT_FOUND
self.send_response(handle, connection, None, status=status)
def handle_get_timed_meter(self, handle, connection, match, data, hdr):
try:
data = self.metering.get_timed_meter(match.group(1))
status = calvinresponse.OK
except:
_log.exception("handle_get_timed_meter")
status = calvinresponse.NOT_FOUND
self.send_response(handle, connection,
json.dumps(data) if status == calvinresponse.OK else None, status=status)
def handle_get_aggregated_meter(self, handle, connection, match, data, hdr):
try:
data = self.metering.get_aggregated_meter(match.group(1))
status = calvinresponse.OK
except:
_log.exception("handle_get_aggregated_meter")
status = calvinresponse.NOT_FOUND
self.send_response(handle, connection,
json.dumps(data) if status == calvinresponse.OK else None, status=status)
def handle_get_metainfo_meter(self, handle, connection, match, data, hdr):
try:
data = self.metering.get_actors_info(match.group(1))
status = calvinresponse.OK
except:
_log.exception("handle_get_metainfo_meter")
status = calvinresponse.NOT_FOUND
self.send_response(handle, connection,
json.dumps(data) if status == calvinresponse.OK else None, status=status)
def handle_post_index(self, handle, connection, match, data, hdr):
""" Add to index
"""
self.node.storage.add_index(
match.group(1), data['value'], cb=CalvinCB(self.index_cb, handle, connection))
def handle_delete_index(self, handle, connection, match, data, hdr):
""" Remove from index
"""
self.node.storage.remove_index(
match.group(1), data['value'], cb=CalvinCB(self.index_cb, handle, connection))
def handle_get_index(self, handle, connection, match, data, hdr):
""" Get from index
"""
self.node.storage.get_index(
match.group(1), cb=CalvinCB(self.get_index_cb, handle, connection))
def index_cb(self, handle, connection, *args, **kwargs):
""" Index operation response
"""
_log.debug("index cb (in control) %s, %s" % (args, kwargs))
if 'value' in kwargs:
value = kwargs['value']
else:
value = None
self.send_response(handle, connection, None,
status=calvinresponse.INTERNAL_ERROR if value is None else calvinresponse.OK)
def get_index_cb(self, handle, connection, key, value, *args, **kwargs):
""" Index operation response
"""
_log.debug("get index cb (in control) %s, %s" % (key, value))
self.send_response(handle, connection, None if value is None else json.dumps({'result': value}),
status=calvinresponse.NOT_FOUND if value is None else calvinresponse.OK)
def handle_post_storage(self, handle, connection, match, data, hdr):
""" Store in storage
"""
self.node.storage.set("", match.group(1), data['value'], cb=CalvinCB(self.index_cb, handle, connection))
def handle_get_storage(self, handle, connection, match, data, hdr):
""" Get from storage
"""
self.node.storage.get("", match.group(1), cb=CalvinCB(self.get_index_cb, handle, connection))
def log_actor_firing(self, actor_id, action_method, tokens_produced, tokens_consumed, production):
""" Trace actor firing
"""
disconnected = []
for user_id, logger in self.loggers.iteritems():
if not logger.events or self.LOG_ACTOR_FIRING in logger.events:
if not logger.actors or actor_id in logger.actors:
data = {}
data['timestamp'] = time.time()
data['node_id'] = self.node.id
data['type'] = 'actor_fire'
data['actor_id'] = actor_id
data['action_method'] = action_method
data['produced'] = tokens_produced
data['consumed'] = tokens_consumed
if self.LOG_ACTION_RESULT in logger.events:
data['action_result'] = production
if logger.connection is not None:
if not logger.connection.connection_lost:
logger.connection.send("data: %s\n\n" % json.dumps(data))
else:
disconnected.append(user_id)
elif self.tunnel_client is not None and logger.handle is not None:
msg = {"cmd": "logevent", "msgid": logger.handle, "header": None, "data": "data: %s\n\n" % json.dumps(data)}
self.tunnel_client.send(msg)
for user_id in disconnected:
del self.loggers[user_id]
def log_actor_new(self, actor_id, actor_name, actor_type, is_shadow):
""" Trace actor new
"""
disconnected = []
for user_id, logger in self.loggers.iteritems():
if not logger.events or self.LOG_ACTOR_NEW in logger.events:
if not logger.actors or actor_id in logger.actors:
data = {}
data['timestamp'] = time.time()
data['node_id'] = self.node.id
data['type'] = 'actor_new'
data['actor_id'] = actor_id
data['actor_name'] = actor_name
data['actor_type'] = actor_type
data['is_shadow'] = is_shadow
if logger.connection is not None:
if not logger.connection.connection_lost:
logger.connection.send("data: %s\n\n" % json.dumps(data))
else:
disconnected.append(user_id)
elif self.tunnel_client is not None and logger.handle is not None:
msg = {"cmd": "logevent", "msgid": logger.handle, "header": None, "data": "data: %s\n\n" % json.dumps(data)}
self.tunnel_client.send(msg)
for user_id in disconnected:
del self.loggers[user_id]
def log_actor_destroy(self, actor_id):
""" Trace actor destroy
"""
disconnected = []
for user_id, logger in self.loggers.iteritems():
if not logger.events or self.LOG_ACTOR_DESTROY in logger.events:
if not logger.actors or actor_id in logger.actors:
data = {}
data['timestamp'] = time.time()
data['node_id'] = self.node.id
data['type'] = 'actor_destroy'
data['actor_id'] = actor_id
if logger.connection is not None:
if not logger.connection.connection_lost:
logger.connection.send("data: %s\n\n" % json.dumps(data))
else:
disconnected.append(user_id)
elif self.tunnel_client is not None and logger.handle is not None:
msg = {"cmd": "logevent", "msgid": logger.handle, "header": None, "data": "data: %s\n\n" % json.dumps(data)}
self.tunnel_client.send(msg)
for user_id in disconnected:
del self.loggers[user_id]
def log_actor_migrate(self, actor_id, dest_node_id):
""" Trace actor migrate
"""
disconnected = []
for user_id, logger in self.loggers.iteritems():
if not logger.events or self.LOG_ACTOR_MIGRATE in logger.events:
if not logger.actors or actor_id in logger.actors:
data = {}
data['timestamp'] = time.time()
data['node_id'] = self.node.id
data['type'] = 'actor_migrate'
data['actor_id'] = actor_id
data['dest_node_id'] = dest_node_id
if logger.connection is not None:
if not logger.connection.connection_lost:
logger.connection.send("data: %s\n\n" % json.dumps(data))
else:
disconnected.append(user_id)
elif self.tunnel_client is not None and logger.handle is not None:
msg = {"cmd": "logevent", "msgid": logger.handle, "header": None, "data": "data: %s\n\n" % json.dumps(data)}
self.tunnel_client.send(msg)
for user_id in disconnected:
del self.loggers[user_id]
def log_application_new(self, application_id, application_name):
""" Trace application new
"""
disconnected = []
for user_id, logger in self.loggers.iteritems():
if not logger.events or self.LOG_APPLICATION_NEW in logger.events:
data = {}
data['timestamp'] = time.time()
data['node_id'] = self.node.id
data['type'] = 'application_new'
data['application_id'] = application_id
data['application_name'] = application_name
if logger.connection is not None:
if not logger.connection.connection_lost:
logger.connection.send("data: %s\n\n" % json.dumps(data))
else:
disconnected.append(user_id)
elif self.tunnel_client is not None and logger.handle is not None:
msg = {"cmd": "logevent", "msgid": logger.handle, "header": None, "data": "data: %s\n\n" % json.dumps(data)}
self.tunnel_client.send(msg)
for user_id in disconnected:
del self.loggers[user_id]
def log_application_destroy(self, application_id):
""" Trace application destroy
"""
disconnected = []
for user_id, logger in self.loggers.iteritems():
if not logger.events or self.LOG_APPLICATION_DESTROY in logger.events:
data = {}
data['timestamp'] = time.time()
data['node_id'] = self.node.id
data['type'] = 'application_destroy'
data['application_id'] = application_id
if logger.connection is not None:
if not logger.connection.connection_lost:
logger.connection.send("data: %s\n\n" % json.dumps(data))
else:
disconnected.append(user_id)
elif self.tunnel_client is not None and logger.handle is not None:
msg = {"cmd": "logevent", "msgid": logger.handle, "header": None, "data": "data: %s\n\n" % json.dumps(data)}
self.tunnel_client.send(msg)
for user_id in disconnected:
del self.loggers[user_id]
def handle_options(self, handle, connection, match, data, hdr):
""" Handle HTTP OPTIONS requests
"""
response = "HTTP/1.1 200 OK\n"
""" Copy the content of Access-Control-Request-Headers to the response
"""
if 'access-control-request-headers' in hdr:
response += "Access-Control-Allow-Headers: " + \
hdr['access-control-request-headers'] + "\n"
response += "Content-Length: 0\n" \
"Access-Control-Allow-Origin: *\n" \
"Access-Control-Allow-Methods: GET, POST, PUT, DELETE, OPTIONS\n" \
"Content-Type: *\n" \
"\n\r\n"
if connection is None:
msg = {"cmd": "httpresp", "msgid": handle, "header": response, "data": None}
self.tunnel_client.send(msg)
else:
connection.send(response)
class CalvinControlTunnelServer(object):
""" A Calvin control tunnel server
"""
def __init__(self, node):
self.node = node
self.tunnels = {}
self.controltunnels = {}
# Register for incoming control proxy requests
self.node.proto.register_tunnel_handler("control", CalvinCB(self.tunnel_request_handles))
def stop(self):
for _, control in self.controltunnels.items():
control.close()
def tunnel_request_handles(self, tunnel):
""" Incoming tunnel request for storage proxy server
Start a socket server and update peer node with control uri
"""
# Register tunnel
self.tunnels[tunnel.peer_node_id] = tunnel
self.controltunnels[tunnel.peer_node_id] = CalvinControlTunnel(tunnel)
tunnel.register_tunnel_down(CalvinCB(self.tunnel_down, tunnel))
tunnel.register_tunnel_up(CalvinCB(self.tunnel_up, tunnel))
tunnel.register_recv(CalvinCB(self.tunnel_recv_handler, tunnel))
# We accept it by returning True
return True
def tunnel_down(self, tunnel):
""" Callback that the tunnel is not accepted or is going down """
self.controltunnels[tunnel.peer_node_id].close()
del self.tunnels[tunnel.peer_node_id]
del self.controltunnels[tunnel.peer_node_id]
# We should always return True which sends an ACK on the destruction of the tunnel
return True
def tunnel_up(self, tunnel):
""" Callback that the tunnel is working """
_log.analyze(self.node.id, "+ SERVER", {"tunnel_id": tunnel.id})
# We should always return True which sends an ACK on the tunnel setup
return True
def tunnel_recv_handler(self, tunnel, payload):
""" Gets called when a storage client request"""
self.controltunnels[tunnel.peer_node_id].handle_response(payload)
class CalvinControlTunnel(object):
""" A Calvin control socket to tunnel proxy
"""
def __init__(self, tunnel):
self.tunnel = tunnel
self.connections = {}
# Start a socket server on same interface as calvincontrol
self.host = get_calvincontrol().host
for x in range(0, 10):
try:
self.port = randint(5100, 5200)
self.server = server_connection.ServerProtocolFactory(self.handle_request, "http")
self.server.start(self.host, self.port)
_log.info("Control proxy for %s listening on: %s:%s" % (tunnel.peer_node_id, self.host, self.port))
break
except:
# Port already in use or bind failed; retry with another random port
pass
# Tell the peer node that we are listening and on which URI
msg = {"cmd": "started",
"controluri": "http://" + self.host + ":" + str(self.port)}
self.tunnel.send(msg)
def close(self):
self.server.stop()
def handle_request(self, actor_ids=None):
""" Handle connections and tunnel requests
"""
if self.server.pending_connections:
addr, conn = self.server.accept()
msg_id = calvinuuid.uuid("MSGID")
self.connections[msg_id] = conn
_log.debug("New connection msg_id: %s" % msg_id)
for msg_id, connection in self.connections.items():
if connection.data_available:
command, headers, data = connection.data_get()
_log.debug("CalvinControlTunnel handle_request msg_id: %s command: %s" % (msg_id, command))
msg = {"cmd": "httpreq",
"msgid": msg_id,
"command": command,
"headers": headers,
"data": data}
self.tunnel.send(msg)
def handle_response(self, payload):
""" Handle a tunnel response
"""
if "msgid" in payload:
msgid = payload["msgid"]
if msgid in self.connections:
if "cmd" in payload and "header" in payload and "data" in payload:
cmd = payload["cmd"]
if cmd == "httpresp":
self.send_response(msgid, payload["header"], payload["data"], True)
return
elif cmd == "logresp":
self.send_response(msgid, payload["header"], payload["data"], False)
return
elif cmd == "logevent":
result = self.send_response(msgid, payload["header"], payload["data"], False)
if not result:
msg = {"cmd": "logclose"}
self.tunnel.send(msg)
return
_log.error("Unknown control proxy response %s" % payload)
def send_response(self, msgid, header, data, closeConnection):
""" Send response header text/html
"""
connection = self.connections[msgid]
if not connection.connection_lost:
if header is not None:
connection.send(str(header))
if data is not None:
connection.send(str(data))
if closeConnection:
connection.close()
del self.connections[msgid]
return True
del self.connections[msgid]
return False
class CalvinControlTunnelClient(object):
""" A Calvin control tunnel client
"""
def __init__(self, uri, calvincontrol):
self.uri = uri
self.calvincontrol = calvincontrol
self.tunnel = None
self.calvincontrol.node.network.join([uri], CalvinCB(self._start_link_cb))
def stop(self):
pass
def _start_link_cb(self, status, uri, peer_node_id):
if status == "NACK":
return
# Got link set up tunnel
master_id = peer_node_id
self.tunnel = self.calvincontrol.node.proto.tunnel_new(master_id, 'control', {})
self.tunnel.register_tunnel_down(CalvinCB(self.tunnel_down))
self.tunnel.register_tunnel_up(CalvinCB(self.tunnel_up))
self.tunnel.register_recv(self.tunnel_recv_handler)
def tunnel_down(self):
""" Callback that the tunnel is not accepted or is going down """
if not self.tunnel:
return True
self.tunnel = None
# We should always return True which sends an ACK on the destruction of the tunnel
return True
def tunnel_up(self):
""" Callback that the tunnel is working """
if not self.tunnel:
return True
# We should always return True which sends an ACK that the tunnel is up
return True
def tunnel_recv_handler(self, payload):
""" Gets called when a control proxy replies"""
if "cmd" in payload:
if payload["cmd"] == "httpreq":
try:
self.calvincontrol.route_request(
payload["msgid"], None, payload["command"], payload["headers"], payload["data"])
except:
_log.exception("FIXME! Caught exception in calvincontrol when tunneling.")
self.calvincontrol.send_response(payload["msgid"], None, None, status=calvinresponse.INTERNAL_ERROR)
elif payload["cmd"] == "started":
self.calvincontrol.node.control_uri = payload["controluri"]
self.calvincontrol.node.storage.add_node(self.calvincontrol.node)
return
elif payload["cmd"] == "logclose":
self.calvincontrol.close_log_tunnel(payload["msg_id"])
return
_log.error("Tunnel client received unknown command %s" % payload['cmd'] if 'cmd' in payload else "")
def send(self, msg):
if self.tunnel:
self.tunnel.send(msg)
else:
_log.error("No tunnel connected")
| {
"content_hash": "243fe8b96dd55fef35fbf96e9d3499f6",
"timestamp": "",
"source": "github",
"line_count": 1610,
"max_line_length": 134,
"avg_line_length": 39.74596273291925,
"alnum_prop": 0.5603444234345455,
"repo_name": "les69/calvin-base",
"id": "2836e0cacae30e6fd49512236507336ff68a46bb",
"size": "64601",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "calvin/runtime/north/calvincontrol.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1727"
},
{
"name": "HTML",
"bytes": "7958"
},
{
"name": "JavaScript",
"bytes": "59355"
},
{
"name": "Python",
"bytes": "1614514"
},
{
"name": "Shell",
"bytes": "18513"
}
],
"symlink_target": ""
} |
"""
You are given a list of 999,998 integers, which include all the integers between 1 and 1,000,000 (inclusive on both
ends) in some unknown order, with the exception of two numbers which have been removed. By making only one pass through
the data and using only a constant amount of memory (i.e. O(1) memory usage), can you figure out what two numbers have
been excluded?
Note that since you are only allowed one pass through the data, you are not allowed to sort the list!
EDIT: clarified problem
Thanks to Cosmologicon for suggesting this problem at /r/dailyprogrammer_ideas! Do you have a problem you think
would be good for us? Why not head over there and suggest it?
"""
from random import shuffle
from math import sqrt
def main():
""" inspired by a genius solution from the reddit post """
# Totals for the full range: sum and sum of squares of 1..1,000,000
sums = sum(n for n in range(1, 1000000 + 1))
sq_sums = sum(n * n for n in range(1, 1000000 + 1))
# Build the test data: shuffle the full range and drop two numbers
test = list(range(1, 1000000 + 1))
shuffle(test)
test = test[2:]
# One pass, O(1) memory: after subtracting every observed value, what is
# left is x+y and x^2+y^2 of the two missing numbers
for t in test:
sums -= t
sq_sums -= t * t
# x and y are the roots of t^2 - (x+y)*t + x*y = 0, where
# x*y = ((x+y)^2 - (x^2+y^2)) / 2; with a = 1 the roots are (-b +/- sqrt(b^2 - 4c)) / 2
b = -sums
c = (sums ** 2 - sq_sums) // 2
disc = sqrt((b ** 2) - (4 * c))
print(int((-b + disc) / 2), int((-b - disc) / 2))
if __name__ == "__main__":
main()
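# Sanity check on a tiny range (a sketch): drop 3 and 7 from 1..10. The pass
# leaves x+y = 10 and x^2+y^2 = 58, so x*y = (10**2 - 58) // 2 = 21, and the
# roots of t^2 - 10*t + 21 = 0 are exactly 3 and 7.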
| {
"content_hash": "b21bb64ed3c912dd0870d58cd03e9151",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 119,
"avg_line_length": 31.3,
"alnum_prop": 0.6445686900958466,
"repo_name": "DayGitH/Python-Challenges",
"id": "801a4961f64abe1cf55a02cbdef9d0ed7a674aff",
"size": "1252",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "DailyProgrammer/DP20120620B.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "OpenEdge ABL",
"bytes": "5002"
},
{
"name": "Python",
"bytes": "2471582"
}
],
"symlink_target": ""
} |
import cv2
import sys
faceCascade = cv2.CascadeClassifier('haarcascade_frontalface_alt.xml')
video_capture = cv2.VideoCapture(0)
while True:
# Capture frame-by-frame
ret, frame = video_capture.read()
if not ret:
# Frame grab failed (camera unavailable); exit instead of crashing in cvtColor
break
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
faces = faceCascade.detectMultiScale(
gray,
scaleFactor=1.1,
minNeighbors=5,
minSize=(30, 30),
flags=cv2.cv.CV_HAAR_SCALE_IMAGE  # OpenCV 2.x name; renamed in OpenCV 3+
)
# Draw a rectangle around the faces
for (x, y, w, h) in faces:
cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 255, 0), 2)
# Display the resulting frame
cv2.imshow('Video', frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# When everything is done, release the capture
video_capture.release()
cv2.destroyAllWindows()
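# On OpenCV 3+ the same script needs two small changes (a sketch; verify
# against the installed version): the flag constant was renamed and the
# cascade files ship with the package under cv2.data.haarcascades.
#
#   faceCascade = cv2.CascadeClassifier(
#       cv2.data.haarcascades + 'haarcascade_frontalface_alt.xml')
#   faces = faceCascade.detectMultiScale(gray, scaleFactor=1.1,
#       minNeighbors=5, minSize=(30, 30), flags=cv2.CASCADE_SCALE_IMAGE)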
| {
"content_hash": "e2b7812f9c9d3cf787e2499783cdd81c",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 70,
"avg_line_length": 23.470588235294116,
"alnum_prop": 0.6365914786967418,
"repo_name": "creativcoder/opencv_exp",
"id": "59f91377a457550c19a05f5608b8abf1de2de6a0",
"size": "798",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/vdetect.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "611"
},
{
"name": "C++",
"bytes": "682"
},
{
"name": "Python",
"bytes": "4252"
}
],
"symlink_target": ""
} |
import array
import ImpactPacket
import dot11
import IP6
import ICMP6
import IP6_Extension_Headers
from cdp import CDP
from Dot11Crypto import RC4
from impacket import wps, eap, dhcp
from impacket.dot11 import Dot11WEPData
from impacket import LOG
"""Classes to convert from raw packets into a hierarchy of
ImpactPacket derived objects.
The protocol of the outermost layer must be known in advance, and the
packet must be fed to the corresponding decoder. From there it will
try to decode the raw data into a hierarchy of ImpactPacket derived
objects; if a layer's protocol is unknown, all the remaining data will
be wrapped into a ImpactPacket.Data object.
"""
class Decoder:
__decoded_protocol = None
def decode(self, aBuffer):
pass
def set_decoded_protocol(self, protocol):
self.__decoded_protocol = protocol
def get_protocol(self, aprotocol):
protocol = self.__decoded_protocol
while protocol:
if protocol.__class__ == aprotocol:
break
protocol=protocol.child()
return protocol
def __str__(self):
protocol = self.__decoded_protocol
i=0
out=''
while protocol:
tabline=' '*i+'+-'+str(protocol.__class__)
out+="%s"%tabline+'\n'
protocol=protocol.child()
i+=1
return out
class EthDecoder(Decoder):
def __init__(self):
pass
def decode(self, aBuffer):
e = ImpactPacket.Ethernet(aBuffer)
self.set_decoded_protocol( e )
off = e.get_header_size()
if e.get_ether_type() == ImpactPacket.IP.ethertype:
self.ip_decoder = IPDecoder()
packet = self.ip_decoder.decode(aBuffer[off:])
elif e.get_ether_type() == IP6.IP6.ethertype:
self.ip6_decoder = IP6Decoder()
packet = self.ip6_decoder.decode(aBuffer[off:])
elif e.get_ether_type() == ImpactPacket.ARP.ethertype:
self.arp_decoder = ARPDecoder()
packet = self.arp_decoder.decode(aBuffer[off:])
elif e.get_ether_type() == eap.DOT1X_AUTHENTICATION:
self.eapol_decoder = EAPOLDecoder()
packet = self.eapol_decoder.decode(aBuffer[off:])
# LLC ?
elif e.get_ether_type() < 1500:
self.llc_decoder = LLCDecoder()
packet = self.llc_decoder.decode(aBuffer[off:])
else:
self.data_decoder = DataDecoder()
packet = self.data_decoder.decode(aBuffer[off:])
e.contains(packet)
return e
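# A minimal usage sketch (the frame bytes are hypothetical test data): a
# 14-byte Ethernet header with a length-style EtherType (46 < 1500) routes
# the payload through the LLC decoder and, lacking SNAP, ends in a Data leaf.
#
#   raw = b'\xff' * 6 + b'\x00' * 6 + b'\x00\x2e' + b'\x00' * 46
#   eth = EthDecoder().decode(raw)
#   layer = eth
#   while layer:
#       print(layer.__class__)   # Ethernet, then LLC, then Data
#       layer = layer.child()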
# Linux "cooked" capture encapsulation.
# Used, for instance, for packets returned by the "any" interface.
class LinuxSLLDecoder(Decoder):
def __init__(self):
pass
def decode(self, aBuffer):
e = ImpactPacket.LinuxSLL(aBuffer)
self.set_decoded_protocol( e )
off = 16
if e.get_ether_type() == ImpactPacket.IP.ethertype:
self.ip_decoder = IPDecoder()
packet = self.ip_decoder.decode(aBuffer[off:])
elif e.get_ether_type() == ImpactPacket.ARP.ethertype:
self.arp_decoder = ARPDecoder()
packet = self.arp_decoder.decode(aBuffer[off:])
elif e.get_ether_type() == eap.DOT1X_AUTHENTICATION:
self.eapol_decoder = EAPOLDecoder()
packet = self.eapol_decoder.decode(aBuffer[off:])
else:
self.data_decoder = DataDecoder()
packet = self.data_decoder.decode(aBuffer[off:])
e.contains(packet)
return e
class IPDecoder(Decoder):
def __init__(self):
pass
def decode(self, aBuffer):
i = ImpactPacket.IP(aBuffer)
self.set_decoded_protocol ( i )
off = i.get_header_size()
end = i.get_ip_len()
# If ip_len == 0 we might be facing TCP segmentation offload, let's calculate the right len
if end == 0:
LOG.warning('IP len reported as 0, most probably because of TCP segmentation offload. Attempting to fix its size')
i.set_ip_len(len(aBuffer))
end = i.get_ip_len()
if i.get_ip_p() == ImpactPacket.UDP.protocol:
self.udp_decoder = UDPDecoder()
packet = self.udp_decoder.decode(aBuffer[off:end])
elif i.get_ip_p() == ImpactPacket.TCP.protocol:
self.tcp_decoder = TCPDecoder()
packet = self.tcp_decoder.decode(aBuffer[off:end])
elif i.get_ip_p() == ImpactPacket.ICMP.protocol:
self.icmp_decoder = ICMPDecoder()
packet = self.icmp_decoder.decode(aBuffer[off:end])
elif i.get_ip_p() == ImpactPacket.IGMP.protocol:
self.igmp_decoder = IGMPDecoder()
packet = self.igmp_decoder.decode(aBuffer[off:end])
else:
self.data_decoder = DataDecoder()
packet = self.data_decoder.decode(aBuffer[off:end])
i.contains(packet)
return i
class IP6MultiProtocolDecoder(Decoder):
def __init__(self, a_protocol_id):
self.protocol_id = a_protocol_id
def decode(self, buffer):
if self.protocol_id == ImpactPacket.UDP.protocol:
self.udp_decoder = UDPDecoder()
packet = self.udp_decoder.decode(buffer)
elif self.protocol_id == ImpactPacket.TCP.protocol:
self.tcp_decoder = TCPDecoder()
packet = self.tcp_decoder.decode(buffer)
elif self.protocol_id == ICMP6.ICMP6.protocol:
self.icmp6_decoder = ICMP6Decoder()
packet = self.icmp6_decoder.decode(buffer)
else:
# IPv6 Extension Headers lookup
extension_headers = IP6_Extension_Headers.IP6_Extension_Header.get_extension_headers()
if buffer and self.protocol_id in extension_headers:
extension_header_decoder_class = extension_headers[self.protocol_id].get_decoder()
self.extension_header_decoder = extension_header_decoder_class()
packet = self.extension_header_decoder.decode(buffer)
else:
self.data_decoder = DataDecoder()
packet = self.data_decoder.decode(buffer)
return packet
class IP6Decoder(Decoder):
def __init__(self):
pass
def decode(self, buffer):
ip6_packet = IP6.IP6(buffer)
self.set_decoded_protocol(ip6_packet)
start_pos = ip6_packet.get_header_size()
end_pos = ip6_packet.get_payload_length() + start_pos
contained_protocol = ip6_packet.get_next_header()
multi_protocol_decoder = IP6MultiProtocolDecoder(contained_protocol)
child_packet = multi_protocol_decoder.decode(buffer[start_pos:end_pos])
ip6_packet.contains(child_packet)
return ip6_packet
class HopByHopDecoder(Decoder):
def __init__(self):
pass
def decode(self, buffer):
hop_by_hop = IP6_Extension_Headers.Hop_By_Hop(buffer)
self.set_decoded_protocol(hop_by_hop)
start_pos = hop_by_hop.get_header_size()
contained_protocol = hop_by_hop.get_next_header()
multi_protocol_decoder = IP6MultiProtocolDecoder(contained_protocol)
child_packet = multi_protocol_decoder.decode(buffer[start_pos:])
hop_by_hop.contains(child_packet)
return hop_by_hop
class DestinationOptionsDecoder(Decoder):
def __init__(self):
pass
def decode(self, buffer):
destination_options = IP6_Extension_Headers.Destination_Options(buffer)
self.set_decoded_protocol(destination_options)
start_pos = destination_options.get_header_size()
contained_protocol = destination_options.get_next_header()
multi_protocol_decoder = IP6MultiProtocolDecoder(contained_protocol)
child_packet = multi_protocol_decoder.decode(buffer[start_pos:])
destination_options.contains(child_packet)
return destination_options
class RoutingOptionsDecoder(Decoder):
def __init__(self):
pass
def decode(self, buffer):
routing_options = IP6_Extension_Headers.Routing_Options(buffer)
self.set_decoded_protocol(routing_options)
start_pos = routing_options.get_header_size()
contained_protocol = routing_options.get_next_header()
multi_protocol_decoder = IP6MultiProtocolDecoder(contained_protocol)
child_packet = multi_protocol_decoder.decode(buffer[start_pos:])
routing_options.contains(child_packet)
return routing_options
class ICMP6Decoder(Decoder):
def __init__(self):
pass
def decode(self, buffer):
icmp6_packet = ICMP6.ICMP6(buffer)
self.set_decoded_protocol(icmp6_packet)
start_pos = icmp6_packet.get_header_size()
self.data_decoder = DataDecoder()
child_packet = self.data_decoder.decode(buffer[start_pos:])
icmp6_packet.contains(child_packet)
return icmp6_packet
class ARPDecoder(Decoder):
def __init__(self):
pass
def decode(self, aBuffer):
arp = ImpactPacket.ARP(aBuffer)
self.set_decoded_protocol( arp )
off = arp.get_header_size()
self.data_decoder = DataDecoder()
packet = self.data_decoder.decode(aBuffer[off:])
arp.contains(packet)
return arp
class UDPDecoder(Decoder):
def __init__(self):
pass
def decode(self, aBuffer):
u = ImpactPacket.UDP(aBuffer)
self.set_decoded_protocol( u )
off = u.get_header_size()
self.data_decoder = DataDecoder()
packet = self.data_decoder.decode(aBuffer[off:])
u.contains(packet)
return u
class TCPDecoder(Decoder):
def __init__(self):
pass
def decode(self, aBuffer):
t = ImpactPacket.TCP(aBuffer)
self.set_decoded_protocol( t )
off = t.get_header_size()
self.data_decoder = DataDecoder()
packet = self.data_decoder.decode(aBuffer[off:])
t.contains(packet)
return t
class IGMPDecoder(Decoder):
def __init__(self):
pass
def decode(self, aBuffer):
ig = ImpactPacket.IGMP(aBuffer)
off = ig.get_header_size()
self.data_decoder = DataDecoder()
packet = self.data_decoder.decode(aBuffer[off:])
ig.contains(packet)
return ig
class IPDecoderForICMP(Decoder):
"""This class was added to parse the IP header of ICMP unreachables packets
If you use the "standard" IPDecoder, it might crash (see bug #4870) ImpactPacket.py
because the TCP header inside the IP header is incomplete"""
def __init__(self):
pass
def decode(self, aBuffer):
i = ImpactPacket.IP(aBuffer)
self.set_decoded_protocol( i )
off = i.get_header_size()
if i.get_ip_p() == ImpactPacket.UDP.protocol:
self.udp_decoder = UDPDecoder()
packet = self.udp_decoder.decode(aBuffer[off:])
else:
self.data_decoder = DataDecoder()
packet = self.data_decoder.decode(aBuffer[off:])
i.contains(packet)
return i
class ICMPDecoder(Decoder):
def __init__(self):
pass
def decode(self, aBuffer):
ic = ImpactPacket.ICMP(aBuffer)
self.set_decoded_protocol( ic )
off = ic.get_header_size()
if ic.get_icmp_type() == ImpactPacket.ICMP.ICMP_UNREACH:
self.ip_decoder = IPDecoderForICMP()
packet = self.ip_decoder.decode(aBuffer[off:])
else:
self.data_decoder = DataDecoder()
packet = self.data_decoder.decode(aBuffer[off:])
ic.contains(packet)
return ic
class DataDecoder(Decoder):
def decode(self, aBuffer):
d = ImpactPacket.Data(aBuffer)
self.set_decoded_protocol( d )
return d
class BaseDot11Decoder(Decoder):
def __init__(self, key_manager=None):
self.set_key_manager(key_manager)
def set_key_manager(self, key_manager):
self.key_manager = key_manager
def find_key(self, bssid):
try:
key = self.key_manager.get_key(bssid)
except:
return False
return key
class RadioTapDecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
def decode(self, aBuffer):
rt = dot11.RadioTap(aBuffer)
self.set_decoded_protocol( rt )
self.do11_decoder = Dot11Decoder()
self.do11_decoder.set_key_manager(self.key_manager)
flags=rt.get_flags()
if flags is not None:
fcs=flags&dot11.RadioTap.RTF_FLAGS.PROPERTY_FCS_AT_END
self.do11_decoder.FCS_at_end(fcs)
packet = self.do11_decoder.decode(rt.get_body_as_string())
rt.contains(packet)
return rt
class Dot11Decoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
self.__FCS_at_end = True
def FCS_at_end(self, fcs_at_end=True):
self.__FCS_at_end=not not fcs_at_end
def decode(self, aBuffer):
d = dot11.Dot11(aBuffer, self.__FCS_at_end)
self.set_decoded_protocol( d )
type = d.get_type()
if type == dot11.Dot11Types.DOT11_TYPE_CONTROL:
dot11_control_decoder = Dot11ControlDecoder()
packet = dot11_control_decoder.decode(d.body_string)
elif type == dot11.Dot11Types.DOT11_TYPE_DATA:
dot11_data_decoder = Dot11DataDecoder(self.key_manager)
dot11_data_decoder.set_dot11_hdr(d)
packet = dot11_data_decoder.decode(d.body_string)
elif type == dot11.Dot11Types.DOT11_TYPE_MANAGEMENT:
dot11_management_decoder = Dot11ManagementDecoder()
dot11_management_decoder.set_subtype(d.get_subtype())
packet = dot11_management_decoder.decode(d.body_string)
else:
data_decoder = DataDecoder()
packet = data_decoder.decode(d.body_string)
d.contains(packet)
return d
class Dot11ControlDecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
self.__FCS_at_end = True
def FCS_at_end(self, fcs_at_end=True):
self.__FCS_at_end=not not fcs_at_end
def decode(self, aBuffer):
d = dot11.Dot11(aBuffer, self.__FCS_at_end)
self.set_decoded_protocol(d)
self.subtype = d.get_subtype()
if self.subtype is dot11.Dot11Types.DOT11_SUBTYPE_CONTROL_CLEAR_TO_SEND:
self.ctrl_cts_decoder = Dot11ControlFrameCTSDecoder()
packet = self.ctrl_cts_decoder.decode(d.body_string)
elif self.subtype is dot11.Dot11Types.DOT11_SUBTYPE_CONTROL_ACKNOWLEDGMENT:
self.ctrl_ack_decoder = Dot11ControlFrameACKDecoder()
packet = self.ctrl_ack_decoder.decode(d.body_string)
elif self.subtype is dot11.Dot11Types.DOT11_SUBTYPE_CONTROL_REQUEST_TO_SEND:
self.ctrl_rts_decoder = Dot11ControlFrameRTSDecoder()
packet = self.ctrl_rts_decoder.decode(d.body_string)
elif self.subtype is dot11.Dot11Types.DOT11_SUBTYPE_CONTROL_POWERSAVE_POLL:
self.ctrl_pspoll_decoder = Dot11ControlFramePSPollDecoder()
packet = self.ctrl_pspoll_decoder.decode(d.body_string)
elif self.subtype is dot11.Dot11Types.DOT11_SUBTYPE_CONTROL_CF_END:
self.ctrl_cfend_decoder = Dot11ControlFrameCFEndDecoder()
packet = self.ctrl_cfend_decoder.decode(d.body_string)
elif self.subtype is dot11.Dot11Types.DOT11_SUBTYPE_CONTROL_CF_END_CF_ACK:
self.ctrl_cfendcfack_decoder = Dot11ControlFrameCFEndCFACKDecoder()
packet = self.ctrl_cfendcfack_decoder.decode(d.body_string)
else:
data_decoder = DataDecoder()
packet = data_decoder.decode(d.body_string)
d.contains(packet)
return d
class Dot11ControlFrameCTSDecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
def decode(self, aBuffer):
p = dot11.Dot11ControlFrameCTS(aBuffer)
self.set_decoded_protocol(p)
return p
class Dot11ControlFrameACKDecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
def decode(self, aBuffer):
p = dot11.Dot11ControlFrameACK(aBuffer)
self.set_decoded_protocol(p)
return p
class Dot11ControlFrameRTSDecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
def decode(self, aBuffer):
p = dot11.Dot11ControlFrameRTS(aBuffer)
self.set_decoded_protocol(p)
return p
class Dot11ControlFramePSPollDecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
def decode(self, aBuffer):
p = dot11.Dot11ControlFramePSPoll(aBuffer)
self.set_decoded_protocol(p)
return p
class Dot11ControlFrameCFEndDecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
def decode(self, aBuffer):
p = dot11.Dot11ControlFrameCFEnd(aBuffer)
self.set_decoded_protocol(p)
return p
class Dot11ControlFrameCFEndCFACKDecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
def decode(self, aBuffer):
p = dot11.Dot11ControlFrameCFEndCFACK(aBuffer)
self.set_decoded_protocol(p)
return p
class Dot11DataDecoder(BaseDot11Decoder):
def __init__(self, key_manager):
BaseDot11Decoder.__init__(self, key_manager)
def set_dot11_hdr(self, dot11_obj):
self.dot11 = dot11_obj
def decode(self, aBuffer):
if self.dot11.get_fromDS() and self.dot11.get_toDS():
if self.dot11.is_QoS_frame():
p = dot11.Dot11DataAddr4QoSFrame(aBuffer)
else:
p = dot11.Dot11DataAddr4Frame(aBuffer)
elif self.dot11.is_QoS_frame():
p = dot11.Dot11DataQoSFrame(aBuffer)
else:
p = dot11.Dot11DataFrame(aBuffer)
self.set_decoded_protocol( p )
if not self.dot11.get_protectedFrame():
self.llc_decoder = LLCDecoder()
packet = self.llc_decoder.decode(p.body_string)
else:
if not self.dot11.get_fromDS() and self.dot11.get_toDS():
bssid = p.get_address1()
elif self.dot11.get_fromDS() and not self.dot11.get_toDS():
bssid = p.get_address2()
elif not self.dot11.get_fromDS() and not self.dot11.get_toDS():
bssid = p.get_address3()
else:
# WDS, this is the RA
bssid = p.get_address1()
wep_decoder = Dot11WEPDecoder(self.key_manager)
wep_decoder.set_bssid(bssid)
packet = wep_decoder.decode(p.body_string)
if packet is None:
wpa_decoder = Dot11WPADecoder()
packet = wpa_decoder.decode(p.body_string)
if packet is None:
wpa2_decoder = Dot11WPA2Decoder()
packet = wpa2_decoder.decode(p.body_string)
if packet is None:
data_decoder = DataDecoder()
packet = data_decoder.decode(p.body_string)
p.contains(packet)
return p
class Dot11WEPDecoder(BaseDot11Decoder):
def __init__(self, key_manager):
BaseDot11Decoder.__init__(self, key_manager)
self.bssid = None
def set_bssid(self, bssid):
self.bssid = bssid
def decode(self, aBuffer):
wep = dot11.Dot11WEP(aBuffer)
self.set_decoded_protocol( wep )
if wep.is_WEP() is False:
return None
key = self.find_key(self.bssid)
if key:
decoded_string=wep.get_decrypted_data(key)
wep_data = Dot11WEPDataDecoder()
packet = wep_data.decode(decoded_string)
else:
data_decoder = DataDecoder()
packet = data_decoder.decode(wep.body_string)
wep.contains(packet)
return wep
def decrypt_data(self, key_string):
'Return \'WEP Data\' decrypted'
# Needs to be at least 8 bytes of payload
if len(self.body_string)<8:
return self.body_string
# initialize the first bytes of the key from the IV
# and copy rest of the WEP key (the secret part)
key=self.get_iv()+key_string
rc4=RC4(key)
out=rc4.decrypt(self.body_string)
dwd=Dot11WEPData(out)
if dwd.check_icv():
# Keep the decrypted payload only when the ICV verifies
return dwd
else:
return self.body_string
class Dot11WEPDataDecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
def decode(self, aBuffer):
wep_data = dot11.Dot11WEPData(aBuffer)
if not wep_data.check_icv():
# TODO: Do something when the icv is not correct
pass
self.set_decoded_protocol( wep_data )
llc_decoder = LLCDecoder()
packet = llc_decoder.decode(wep_data.body_string)
wep_data.contains(packet)
return wep_data
class Dot11WPADecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
def decode(self, aBuffer, key=None):
wpa = dot11.Dot11WPA(aBuffer)
self.set_decoded_protocol( wpa )
if wpa.is_WPA() is False:
return None
if key:
decoded_string=wpa.get_decrypted_data()
wpa_data = Dot11WPADataDecoder()
packet = wpa_data.decode(decoded_string)
else:
data_decoder = DataDecoder()
packet = data_decoder.decode(wpa.body_string)
wpa.contains(packet)
return wpa
class Dot11WPADataDecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
def decode(self, aBuffer):
wpa_data = dot11.Dot11WPAData(aBuffer)
self.set_decoded_protocol( wpa_data )
self.llc_decoder = LLCDecoder()
packet = self.llc_decoder.decode(wpa_data.body_string)
wpa_data.contains(packet)
return wpa_data
class Dot11WPA2Decoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
def decode(self, aBuffer, key=None):
wpa2 = dot11.Dot11WPA2(aBuffer)
self.set_decoded_protocol( wpa2 )
if wpa2.is_WPA2() is False:
return None
if key:
decoded_string=wpa2.get_decrypted_data()
wpa2_data = Dot11WPA2DataDecoder()
packet = wpa2_data.decode(decoded_string)
else:
data_decoder = DataDecoder()
packet = data_decoder.decode(wpa2.body_string)
wpa2.contains(packet)
return wpa2
class Dot11WPA2DataDecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
def decode(self, aBuffer):
wpa2_data = dot11.Dot11WPA2Data(aBuffer)
self.set_decoded_protocol( wpa2_data )
self.llc_decoder = LLCDecoder()
packet = self.llc_decoder.decode(wpa2_data.body_string)
wpa2_data.contains(packet)
return wpa2_data
class LLCDecoder(Decoder):
def __init__(self):
pass
def decode(self, aBuffer):
d = dot11.LLC(aBuffer)
self.set_decoded_protocol( d )
if d.get_DSAP()==dot11.SAPTypes.SNAP:
if d.get_SSAP()==dot11.SAPTypes.SNAP:
if d.get_control()==dot11.LLC.DLC_UNNUMBERED_FRAMES:
snap_decoder = SNAPDecoder()
packet = snap_decoder.decode(d.body_string)
d.contains(packet)
return d
# Only SNAP is implemented
data_decoder = DataDecoder()
packet = data_decoder.decode(d.body_string)
d.contains(packet)
return d
class SNAPDecoder(Decoder):
def __init__(self):
pass
def decode(self, aBuffer):
s = dot11.SNAP(aBuffer)
self.set_decoded_protocol( s )
if s.get_OUI()==CDP.OUI and s.get_protoID()==CDP.Type:
dec = CDPDecoder()
packet = dec.decode(s.body_string)
elif s.get_OUI()!=0x000000:
# We don't know how to handle other than OUI=0x000000 (EtherType)
self.data_decoder = DataDecoder()
packet = self.data_decoder.decode(s.body_string)
elif s.get_protoID() == ImpactPacket.IP.ethertype:
self.ip_decoder = IPDecoder()
packet = self.ip_decoder.decode(s.body_string)
elif s.get_protoID() == ImpactPacket.ARP.ethertype:
self.arp_decoder = ARPDecoder()
packet = self.arp_decoder.decode(s.body_string)
elif s.get_protoID() == eap.DOT1X_AUTHENTICATION:
self.eapol_decoder = EAPOLDecoder()
packet = self.eapol_decoder.decode(s.body_string)
else:
self.data_decoder = DataDecoder()
packet = self.data_decoder.decode(s.body_string)
s.contains(packet)
return s
class CDPDecoder(Decoder):
def __init__(self):
pass
def decode(self, aBuffer):
s = CDP(aBuffer)
self.set_decoded_protocol( s )
return s
class Dot11ManagementDecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
self.subtype = None
def set_subtype(self, subtype):
self.subtype=subtype
def decode(self, aBuffer):
p = dot11.Dot11ManagementFrame(aBuffer)
self.set_decoded_protocol( p )
if self.subtype is dot11.Dot11Types.DOT11_SUBTYPE_MANAGEMENT_BEACON:
self.mgt_beacon_decoder = Dot11ManagementBeaconDecoder()
packet = self.mgt_beacon_decoder.decode(p.body_string)
elif self.subtype is dot11.Dot11Types.DOT11_SUBTYPE_MANAGEMENT_PROBE_REQUEST:
self.mgt_probe_request_decoder = Dot11ManagementProbeRequestDecoder()
packet = self.mgt_probe_request_decoder.decode(p.body_string)
elif self.subtype is dot11.Dot11Types.DOT11_SUBTYPE_MANAGEMENT_PROBE_RESPONSE:
self.mgt_probe_response_decoder = Dot11ManagementProbeResponseDecoder()
packet = self.mgt_probe_response_decoder.decode(p.body_string)
elif self.subtype is dot11.Dot11Types.DOT11_SUBTYPE_MANAGEMENT_DEAUTHENTICATION:
self.mgt_deauthentication_decoder = Dot11ManagementDeauthenticationDecoder()
packet = self.mgt_deauthentication_decoder.decode(p.body_string)
elif self.subtype is dot11.Dot11Types.DOT11_SUBTYPE_MANAGEMENT_AUTHENTICATION:
self.mgt_Authentication_decoder = Dot11ManagementAuthenticationDecoder()
packet = self.mgt_Authentication_decoder.decode(p.body_string)
elif self.subtype is dot11.Dot11Types.DOT11_SUBTYPE_MANAGEMENT_DISASSOCIATION:
self.mgt_disassociation_decoder = Dot11ManagementDisassociationDecoder()
packet = self.mgt_disassociation_decoder.decode(p.body_string)
elif self.subtype is dot11.Dot11Types.DOT11_SUBTYPE_MANAGEMENT_ASSOCIATION_REQUEST:
self.mgt_association_request_decoder = Dot11ManagementAssociationRequestDecoder()
packet = self.mgt_association_request_decoder.decode(p.body_string)
elif self.subtype is dot11.Dot11Types.DOT11_SUBTYPE_MANAGEMENT_ASSOCIATION_RESPONSE:
self.mgt_association_response_decoder = Dot11ManagementAssociationResponseDecoder()
packet = self.mgt_association_response_decoder.decode(p.body_string)
elif self.subtype is dot11.Dot11Types.DOT11_SUBTYPE_MANAGEMENT_REASSOCIATION_REQUEST:
self.mgt_reassociation_request_decoder = Dot11ManagementReassociationRequestDecoder()
packet = self.mgt_reassociation_request_decoder.decode(p.body_string)
elif self.subtype is dot11.Dot11Types.DOT11_SUBTYPE_MANAGEMENT_REASSOCIATION_RESPONSE:
self.mgt_reassociation_response_decoder = Dot11ManagementReassociationResponseDecoder()
packet = self.mgt_reassociation_response_decoder.decode(p.body_string)
else:
data_decoder = DataDecoder()
packet = data_decoder.decode(p.body_string)
p.contains(packet)
return p
class Dot11ManagementBeaconDecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
def decode(self, aBuffer):
p = dot11.Dot11ManagementBeacon(aBuffer)
self.set_decoded_protocol( p )
return p
class Dot11ManagementProbeRequestDecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
def decode(self, aBuffer):
p = dot11.Dot11ManagementProbeRequest(aBuffer)
self.set_decoded_protocol( p )
return p
class Dot11ManagementProbeResponseDecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
def decode(self, aBuffer):
p = dot11.Dot11ManagementProbeResponse(aBuffer)
self.set_decoded_protocol( p )
return p
class Dot11ManagementDeauthenticationDecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
def decode(self, aBuffer):
p = dot11.Dot11ManagementDeauthentication(aBuffer)
self.set_decoded_protocol( p )
return p
class Dot11ManagementAuthenticationDecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
def decode(self, aBuffer):
p = dot11.Dot11ManagementAuthentication(aBuffer)
self.set_decoded_protocol(p)
return p
class Dot11ManagementDisassociationDecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
def decode(self, aBuffer):
p = dot11.Dot11ManagementDisassociation(aBuffer)
self.set_decoded_protocol(p)
return p
class Dot11ManagementAssociationRequestDecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
def decode(self, aBuffer):
p = dot11.Dot11ManagementAssociationRequest(aBuffer)
self.set_decoded_protocol(p)
return p
class Dot11ManagementAssociationResponseDecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
def decode(self, aBuffer):
p = dot11.Dot11ManagementAssociationResponse(aBuffer)
self.set_decoded_protocol(p)
return p
class Dot11ManagementReassociationRequestDecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
def decode(self, aBuffer):
p = dot11.Dot11ManagementReassociationRequest(aBuffer)
self.set_decoded_protocol(p)
return p
class Dot11ManagementReassociationResponseDecoder(BaseDot11Decoder):
def __init__(self):
BaseDot11Decoder.__init__(self)
def decode(self, aBuffer):
p = dot11.Dot11ManagementReassociationResponse(aBuffer)
self.set_decoded_protocol(p)
return p
class BaseDecoder(Decoder):
def decode(self, buff):
packet = self.klass(buff)
self.set_decoded_protocol(packet)
cd = self.child_decoders.get(self.child_key(packet), DataDecoder())
packet.contains(cd.decode(packet.get_body_as_string()))
return packet
class SimpleConfigDecoder(BaseDecoder):
child_decoders = {}
klass = wps.SimpleConfig
child_key = lambda s,p: None
def decode(self, buff):
sc = BaseDecoder.decode(self, buff)
ary = array.array('B', sc.child().get_packet())
sc.unlink_child()
tlv = wps.SimpleConfig.build_tlv_container()
tlv.from_ary(ary)
sc.contains(tlv)
return sc
class EAPExpandedDecoder(BaseDecoder):
child_decoders = {
(eap.EAPExpanded.WFA_SMI, eap.EAPExpanded.SIMPLE_CONFIG): SimpleConfigDecoder(),
}
klass = eap.EAPExpanded
child_key = lambda s,p: (p.get_vendor_id(), p.get_vendor_type())
class EAPRDecoder(BaseDecoder):
child_decoders = {
eap.EAPR.EXPANDED:EAPExpandedDecoder()
}
klass = eap.EAPR
child_key = lambda s, p: p.get_type()
class EAPDecoder(BaseDecoder):
child_decoders = {
eap.EAP.REQUEST: EAPRDecoder(),
eap.EAP.RESPONSE: EAPRDecoder(),
}
klass = eap.EAP
child_key = lambda s, p: p.get_code()
class EAPOLDecoder(BaseDecoder):
child_decoders = {
eap.EAPOL.EAP_PACKET: EAPDecoder()
}
klass = eap.EAPOL
child_key = lambda s, p: p.get_packet_type()
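# Extending the table-driven BaseDecoder pattern (a sketch; MyProto and
# MYPROTO_DATA are hypothetical names): point klass at the packet class and
# map child_key results to child decoders; unknown keys fall back to
# DataDecoder in BaseDecoder.decode.
#
#   class MyProtoDecoder(BaseDecoder):
#       child_decoders = {MYPROTO_DATA: DataDecoder()}
#       klass = MyProto
#       child_key = lambda s, p: p.get_type()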
class BootpDecoder(Decoder):
def __init__(self):
pass
def decode(self, aBuffer):
d = dhcp.BootpPacket(aBuffer)
self.set_decoded_protocol( d )
off = len(d.getData())
if dhcp.DhcpPacket(aBuffer[off:])['cookie'] == dhcp.DhcpPacket.MAGIC_NUMBER:
self.data_decoder = DHCPDecoder()
packet = self.data_decoder.decode(aBuffer[off:])
d.contains(packet)
return d
class DHCPDecoder(Decoder):
def __init__(self):
pass
def decode(self, aBuffer):
d = dhcp.DhcpPacket(aBuffer)
self.set_decoded_protocol( d )
return d
| {
"content_hash": "ddac2c3f62fc29de43a5f82e9ace86bd",
"timestamp": "",
"source": "github",
"line_count": 985,
"max_line_length": 126,
"avg_line_length": 34.387817258883246,
"alnum_prop": 0.6133384506376949,
"repo_name": "tholum/PiBunny",
"id": "eb475e58755dcf0cbf891b018a7d24dba3d24ba9",
"size": "34289",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "system.d/library/tools_installer/tools_to_install/impacket/impacket/ImpactDecoder.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3527"
},
{
"name": "HTML",
"bytes": "195334"
},
{
"name": "JavaScript",
"bytes": "1156309"
},
{
"name": "PowerShell",
"bytes": "5359"
},
{
"name": "Python",
"bytes": "6368546"
},
{
"name": "Shell",
"bytes": "40720"
},
{
"name": "Visual Basic",
"bytes": "5660"
}
],
"symlink_target": ""
} |
"""
EasyBuild support for building and installing foofoo, implemented as an easyblock
@author: Kenneth Hoste (Ghent University)
"""
from easybuild.easyblocks.foo import EB_foo
from easybuild.framework.easyconfig import CUSTOM, MANDATORY
class EB_foofoo(EB_foo):
"""Support for building/installing foofoo."""
@staticmethod
def extra_options():
"""Custom easyconfig parameters for foofoo."""
extra_vars = {
'foofoo_extra1': [None, "first foofoo-specific easyconfig parameter (mandatory)", MANDATORY],
'foofoo_extra2': ['FOOFOO', "second foofoo-specific easyconfig parameter", CUSTOM],
}
return EB_foo.extra_options(extra_vars)
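# In an easyconfig for software named 'foofoo' (which selects this easyblock
# by name), the custom parameters are set like any other; a sketch with
# illustrative values:
#
#   name = 'foofoo'
#   version = '1.0'
#   foofoo_extra1 = 'required-value'   # MANDATORY: the build fails if absent
#   foofoo_extra2 = 'BARBAR'           # optional, defaults to 'FOOFOO'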
| {
"content_hash": "b595bb57e720e80ccac23d54a3f11f9c",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 105,
"avg_line_length": 33.23809523809524,
"alnum_prop": 0.6919770773638968,
"repo_name": "ULHPC/modules",
"id": "94ec86ef89924b9c92359ad9cc45e9091fa0e1c7",
"size": "1756",
"binary": false,
"copies": "9",
"ref": "refs/heads/devel",
"path": "easybuild/easybuild-framework/test/framework/sandbox/easybuild/easyblocks/foofoo.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Groff",
"bytes": "36174"
},
{
"name": "Perl",
"bytes": "34780"
},
{
"name": "Python",
"bytes": "2711250"
},
{
"name": "Ruby",
"bytes": "932"
},
{
"name": "Shell",
"bytes": "51560"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, print_function
from salt.ext.tornado.ioloop import IOLoop
from salt.ext.tornado.netutil import ThreadedResolver
# When this module is imported, it runs getaddrinfo on a thread. Since
# the hostname is unicode, getaddrinfo attempts to import encodings.idna
# but blocks on the import lock. Verify that ThreadedResolver avoids
# this deadlock.
resolver = ThreadedResolver()
IOLoop.current().run_sync(lambda: resolver.resolve(u'localhost', 80))
| {
"content_hash": "d755ebbe792604c01a0e03347262757b",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 72,
"avg_line_length": 44.90909090909091,
"alnum_prop": 0.7975708502024291,
"repo_name": "saltstack/salt",
"id": "111a7269e0e42a2a95d7cea4c0d8b2f695259b8d",
"size": "514",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "salt/ext/tornado/test/resolve_test_helper.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "14911"
},
{
"name": "C",
"bytes": "1571"
},
{
"name": "Cython",
"bytes": "1458"
},
{
"name": "Dockerfile",
"bytes": "184"
},
{
"name": "Groovy",
"bytes": "12318"
},
{
"name": "HCL",
"bytes": "257"
},
{
"name": "HTML",
"bytes": "8031"
},
{
"name": "Jinja",
"bytes": "45598"
},
{
"name": "Makefile",
"bytes": "713"
},
{
"name": "NSIS",
"bytes": "76572"
},
{
"name": "PowerShell",
"bytes": "75891"
},
{
"name": "Python",
"bytes": "41444811"
},
{
"name": "Rich Text Format",
"bytes": "6242"
},
{
"name": "Roff",
"bytes": "191"
},
{
"name": "Ruby",
"bytes": "961"
},
{
"name": "SaltStack",
"bytes": "35856"
},
{
"name": "Scheme",
"bytes": "895"
},
{
"name": "Scilab",
"bytes": "1147"
},
{
"name": "Shell",
"bytes": "524917"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Suggest',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('suggest', models.TextField(max_length=200, verbose_name='意见')),  # '意见': "suggestion"
('suggest_time', models.DateTimeField(auto_now_add=True, verbose_name='提出时间')),  # '提出时间': "time submitted"
],
),
]
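# The model that generates this migration looks roughly like the following
# (a sketch reconstructed from the operations above; the verbose_name
# strings are the Chinese originals, translated in the trailing comments):
#
#   from django.db import models
#
#   class Suggest(models.Model):
#       suggest = models.TextField(max_length=200, verbose_name='意见')  # "suggestion"
#       suggest_time = models.DateTimeField(auto_now_add=True, verbose_name='提出时间')  # "time submitted"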
| {
"content_hash": "927da850988e9339b88744a258f23f69",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 114,
"avg_line_length": 29.047619047619047,
"alnum_prop": 0.5754098360655737,
"repo_name": "tomming233/blog",
"id": "fbebfa00f31a5e1516224c2d5dbe65cfd09ce223",
"size": "695",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/migrations/0002_suggest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "390"
},
{
"name": "HTML",
"bytes": "19251"
},
{
"name": "Python",
"bytes": "29776"
}
],
"symlink_target": ""
} |
import collections
import uuid
import mock
import webob
from ooi.api import helpers
from ooi.api import network_link as network_link_api
from ooi import exception
from ooi.occi.core import collection
from ooi.occi.infrastructure import compute
from ooi.occi.infrastructure import network
from ooi.openstack import network as os_network
from ooi.tests import base
from ooi.tests import fakes
class TestNetworkLinkController(base.TestController):
def setUp(self):
super(TestNetworkLinkController, self).setUp()
self.controller = network_link_api.Controller(mock.MagicMock(), None)
def _build_req(self, tenant_id, path="/whatever", **kwargs):
m = mock.MagicMock()
m.user.project_id = tenant_id
environ = {"keystone.token_auth": m}
kwargs["base_url"] = self.application_url
return webob.Request.blank(path, environ=environ, **kwargs)
@mock.patch.object(helpers.OpenStackHelper, "get_floating_ips")
def test_index(self, mock_floating_ips):
for tenant in fakes.tenants.values():
ips = fakes.floating_ips[tenant["id"]]
mock_floating_ips.return_value = ips
ret = self.controller.index(None)
self.assertIsInstance(ret, collection.Collection)
if tenant["name"] == "baz":
for idx, ip in enumerate(ips):
if ip["instance_id"]:
self.assertIsInstance(ret.resources[idx],
os_network.OSNetworkInterface)
else:
self.assertEqual([], ret.resources)
mock_floating_ips.assert_called_with(None)
@mock.patch.object(network_link_api.Controller, "_get_interface_from_id")
def test_delete_invalid(self, mock_get):
class FakeNetworkLink(object):
target = collections.namedtuple("Target", ["id"])("fixed")
server_id = uuid.uuid4().hex
server_addr = "192.168.253.1"
link_id = "%s_%s" % (server_id, server_addr)
mock_get.return_value = FakeNetworkLink()
self.assertRaises(exception.Invalid,
self.controller.delete, None, link_id)
mock_get.assert_called_with(None, link_id)
@mock.patch.object(helpers.OpenStackHelper, "release_floating_ip")
@mock.patch.object(helpers.OpenStackHelper, "remove_floating_ip")
@mock.patch.object(network_link_api.Controller, "_get_interface_from_id")
def test_delete(self, mock_get, mock_remove, mock_release):
class FakeNetworkLink(object):
target = collections.namedtuple("Target", ["id"])("floating")
source = collections.namedtuple("Source", ["id"])(uuid.uuid4().hex)
address = "192.168.253.1"
id = "%s_%s" % (source.id, address)
ip_id = "foo"
link = FakeNetworkLink()
mock_get.return_value = link
mock_release.return_value = None
mock_remove.return_value = None
ret = self.controller.delete(None, link.id)
self.assertEqual([], ret)
mock_get.assert_called_with(None, link.id)
mock_remove.assert_called_with(None, link.source.id, link.address)
mock_release.assert_called_with(None, link.ip_id)
@mock.patch.object(network_link_api.Controller, "_get_interface_from_id")
def test_show(self, mock_get):
mock_get.return_value = "foo"
ret = self.controller.show(None, "bar")
self.assertEqual(["foo"], ret)
mock_get.assert_called_with(None, "bar")
def test_get_interface_from_id_invalid(self):
self.assertRaises(exception.LinkNotFound,
self.controller._get_interface_from_id,
None,
"foobarbaz")
@mock.patch.object(helpers.OpenStackHelper, "get_server")
def test_get_interface_from_id_invalid_no_matching_server(self, mock_get):
mock_get.return_value = {"addresses": {"foo": [{"addr": "1.1.1.2"}]}}
self.assertRaises(exception.LinkNotFound,
self.controller._get_interface_from_id,
None,
"%s_1.1.1.1" % uuid.uuid4().hex)
@mock.patch.object(network_link_api.Controller, "_get_os_network_ip")
@mock.patch.object(helpers.OpenStackHelper, "get_server")
def test_get_interface_from_id(self, mock_get_server, mock_get_ip):
server_id = uuid.uuid4().hex
server_addr = "1.1.1.1"
link_id = "%s_%s" % (server_id, server_addr)
c = compute.ComputeResource(title="Compute", id=server_id)
mock_get_server.return_value = {"addresses": {"foo": [
{"addr": server_addr}]}}
mock_get_ip.return_value = ("bar", "baz")
a = os_network.OSNetworkInterface(c, "bar", "mac", server_addr, "baz")
ret = self.controller._get_interface_from_id(None, link_id)
self.assertIsInstance(ret, os_network.OSNetworkInterface)
self.assertEqual(a.ip_id, ret.ip_id)
self.assertEqual(c.id, ret.source.id)
mock_get_server.assert_called_with(None, server_id)
mock_get_ip.assert_called_with(None, {"addr": server_addr})
def test_get_os_network_ip_fixed(self):
addr = {"addr": "1.1.1.1",
"OS-EXT-IPS:type": "fixed",
"OS-EXT-IPS-MAC:mac_addr": "1234"}
ret = self.controller._get_os_network_ip(None, addr)
self.assertIsInstance(ret, tuple)
self.assertIsInstance(ret[0], network.NetworkResource)
self.assertIsInstance(ret[1], type(None))
@mock.patch.object(helpers.OpenStackHelper, "get_floating_ips")
def test_get_os_network_ip(self, mock_floating_ips):
ips = fakes.floating_ips[fakes.tenants["baz"]["id"]]
for ip in ips:
addr = {"addr": ip["ip"],
"OS-EXT-IPS:type": "floating",
"OS-EXT-IPS-MAC:mac_addr": "1234"}
mock_floating_ips.return_value = ips
ret = self.controller._get_os_network_ip(None, addr)
self.assertIsInstance(ret, tuple)
self.assertIsInstance(ret[0], network.NetworkResource)
self.assertEqual(ip["id"], ret[1])
@mock.patch.object(helpers.OpenStackHelper, "get_floating_ips")
def test_get_os_network_ip_invalid(self, mock_floating_ips):
addr = {"addr": "1.1.1.1",
"OS-EXT-IPS:type": "floating",
"OS-EXT-IPS-MAC:mac_addr": "1234"}
for tenant in fakes.tenants.values():
ips = fakes.floating_ips[tenant["id"]]
mock_floating_ips.return_value = ips
self.assertRaises(exception.NetworkNotFound,
self.controller._get_os_network_ip,
None,
addr)
@mock.patch("ooi.occi.validator.Validator")
def test_create_invalid(self, mock_validator):
tenant = fakes.tenants["foo"]
req = self._build_req(tenant["id"])
net_id = "fixed"
server_id = uuid.uuid4().hex
obj = {
"attributes": {
"occi.core.target": net_id,
"occi.core.source": server_id,
}
}
req.get_parser = mock.MagicMock()
req.get_parser.return_value.return_value.parse.return_value = obj
mock_validator.validate.return_value = True
self.assertRaises(exception.Invalid,
self.controller.create, req, None)
@mock.patch("ooi.occi.validator.Validator")
def test_create_invalid_net_id(self, mock_validator):
tenant = fakes.tenants["foo"]
req = self._build_req(tenant["id"])
net_id = "foobarbaz"
server_id = uuid.uuid4().hex
obj = {
"attributes": {
"occi.core.target": net_id,
"occi.core.source": server_id,
}
}
req.get_parser = mock.MagicMock()
req.get_parser.return_value.return_value.parse.return_value = obj
mock_validator.validate.return_value = True
self.assertRaises(exception.NetworkPoolFound,
self.controller.create, req, None)
@mock.patch.object(helpers.OpenStackHelper, "associate_floating_ip")
@mock.patch.object(helpers.OpenStackHelper, "allocate_floating_ip")
@mock.patch("ooi.occi.validator.Validator")
def test_create(self, mock_validator, mock_allocate, mock_associate):
tenant = fakes.tenants["foo"]
req = self._build_req(tenant["id"])
pool_name = "public"
net_id = '/'.join(["floating", pool_name])
server_id = uuid.uuid4().hex
obj = {
"attributes": {
"occi.core.target": net_id,
"occi.core.source": server_id,
}
}
ips = fakes.floating_ips[fakes.tenants["baz"]["id"]]
for ip in ips:
req.get_parser = mock.MagicMock()
req.get_parser.return_value.return_value.parse.return_value = obj
mock_validator.validate.return_value = True
mock_allocate.return_value = ip
mock_associate.return_value = None
ret = self.controller.create(req, None)
link = ret.resources.pop()
self.assertIsInstance(link, os_network.OSNetworkInterface)
self.assertIsInstance(link.source, compute.ComputeResource)
self.assertIsInstance(link.target, network.NetworkResource)
self.assertEqual(net_id, link.target.id)
self.assertEqual(server_id, link.source.id)
mock_allocate.assert_called_with(mock.ANY, pool_name)
mock_associate.assert_called_with(mock.ANY, server_id, ip["id"])
| {
"content_hash": "a517d699db8e4ab699d55f70217e9cf3",
"timestamp": "",
"source": "github",
"line_count": 236,
"max_line_length": 79,
"avg_line_length": 41.26271186440678,
"alnum_prop": 0.5949887040460053,
"repo_name": "orviz/ooi",
"id": "fc230b0f8c7136c5c13fa4e83f16d756bf07179c",
"size": "10362",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ooi/tests/controllers/test_network_links.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "351580"
}
],
"symlink_target": ""
} |
import tensorflow as tf
import argparse
import sys
import time
FLAGS = None
def main(_):
ps_hosts = FLAGS.ps_hosts.split(',')
worker_hosts = FLAGS.worker_hosts.split(',')
cluster = tf.train.ClusterSpec({"ps":ps_hosts, "worker":worker_hosts})
# start a server for a specific task
server = tf.train.Server(
cluster,
job_name=FLAGS.job_name,
task_index=FLAGS.task_index)
# config
batch_size = 100
learning_rate = 0.0005
training_epochs = 20
logs_path = "/tmp/train_logs"
# load mnist data set
from tensorflow.examples.tutorials.mnist import input_data
# mnist = input_data.read_data_sets('MNIST_data', one_hot=True)
if FLAGS.job_name == "ps":
server.join()
elif FLAGS.job_name == "worker":
# Between-graph replication
with tf.device(tf.train.replica_device_setter(
worker_device="/job:worker/task:%d" % FLAGS.task_index,
cluster=cluster)):
mnist = input_data.read_data_sets('MNIST_data', one_hot=True)
# count the number of updates
global_step = tf.get_variable(
'global_step',
[],
initializer = tf.constant_initializer(0),
trainable = False)
# input images
with tf.name_scope('input'):
# None -> batch size can be any size, 784 -> flattened mnist image
x = tf.placeholder(tf.float32, shape=[None, 784], name="x-input")
# target 10 output classes
y_ = tf.placeholder(tf.float32, shape=[None, 10], name="y-input")
# model parameters will change during training so we use tf.Variable
tf.set_random_seed(1)
with tf.name_scope("weights"):
W1 = tf.Variable(tf.random_normal([784, 100]))
W2 = tf.Variable(tf.random_normal([100, 10]))
# bias
with tf.name_scope("biases"):
b1 = tf.Variable(tf.zeros([100]))
b2 = tf.Variable(tf.zeros([10]))
# implement model
with tf.name_scope("softmax"):
# y is our prediction
z2 = tf.add(tf.matmul(x,W1),b1)
a2 = tf.nn.sigmoid(z2)
z3 = tf.add(tf.matmul(a2,W2),b2)
y = tf.nn.softmax(z3)
# specify cost function
with tf.name_scope('cross_entropy'):
# this is our cost
cross_entropy = tf.reduce_mean(
-tf.reduce_sum(y_ * tf.log(y), reduction_indices=[1]))
# specify optimizer
with tf.name_scope('train'):
# optimizer is an "operation" which we can execute in a session
grad_op = tf.train.GradientDescentOptimizer(learning_rate)
'''
rep_op = tf.train.SyncReplicasOptimizer(
grad_op,
replicas_to_aggregate=len(workers),
replica_id=FLAGS.task_index,
total_num_replicas=len(workers),
use_locking=True)
train_op = rep_op.minimize(cross_entropy, global_step=global_step)
'''
train_op = grad_op.minimize(cross_entropy, global_step=global_step)
'''
init_token_op = rep_op.get_init_tokens_op()
chief_queue_runner = rep_op.get_chief_queue_runner()
'''
with tf.name_scope('Accuracy'):
# accuracy
correct_prediction = tf.equal(tf.argmax(y,1), tf.argmax(y_,1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
# create a summary for our cost and accuracy
tf.summary.scalar("cost", cross_entropy)
tf.summary.scalar("accuracy", accuracy)
# merge all summaries into a single "operation" which we can execute in a session
summary_op = tf.summary.merge_all()
init_op = tf.global_variables_initializer()
print("Variables initialized ...")
sv = tf.train.Supervisor(is_chief=(FLAGS.task_index == 0),
global_step=global_step,
init_op=init_op)
begin_time = time.time()
frequency = 100
with sv.prepare_or_wait_for_session(server.target) as sess:
'''
# is chief
if FLAGS.task_index == 0:
sv.start_queue_runners(sess, [chief_queue_runner])
sess.run(init_token_op)
'''
# create log writer object (this will log on every machine)
writer = tf.summary.FileWriter(logs_path, graph=tf.get_default_graph())
# perform training cycles
start_time = time.time()
for epoch in range(training_epochs):
# number of batches in one epoch
batch_count = int(mnist.train.num_examples/batch_size)
count = 0
for i in range(batch_count):
batch_x, batch_y = mnist.train.next_batch(batch_size)
# perform the operations we defined earlier on batch
_, cost, summary, step = sess.run(
[train_op, cross_entropy, summary_op, global_step],
feed_dict={x: batch_x, y_: batch_y})
writer.add_summary(summary, step)
count += 1
if count % frequency == 0 or i+1 == batch_count:
elapsed_time = time.time() - start_time
start_time = time.time()
print("Step: %d," % (step+1),
" Epoch: %2d," % (epoch+1),
" Batch: %3d of %3d," % (i+1, batch_count),
" Cost: %.4f," % cost,
" AvgTime: %3.2fms" % float(elapsed_time*1000/frequency))
count = 0
print("Test-Accuracy: %2.2f" % sess.run(accuracy, feed_dict={x: mnist.test.images, y_: mnist.test.labels}))
print("Total Time: %3.2fs" % float(time.time() - begin_time))
print("Final Cost: %.4f" % cost)
sv.stop()
print("done")
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.register("type", "bool", lambda v: v.lower() == "true")
parser.add_argument(
"--ps_hosts",
type=str,
default="",
help="Comma-separated list of hostname:port pairs"
)
parser.add_argument(
"--worker_hosts",
type=str,
default="",
help="Comma-separated list of hostname:port pairs"
)
parser.add_argument(
"--job_name",
type=str,
default="",
help="One of 'ps', 'worker'"
)
parser.add_argument(
"--task_index",
type=int,
default=0,
help="Index of task within the job"
)
FLAGS, unparsed = parser.parse_known_args()
tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
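# To launch the cluster (a sketch; hostnames and ports are illustrative),
# start one process per cluster member with matching host lists — the ps
# task blocks in server.join() while the workers run the training loop:
#
#   python example.py --ps_hosts=localhost:2222 --worker_hosts=localhost:2223,localhost:2224 --job_name=ps --task_index=0
#   python example.py --ps_hosts=localhost:2222 --worker_hosts=localhost:2223,localhost:2224 --job_name=worker --task_index=0
#   python example.py --ps_hosts=localhost:2222 --worker_hosts=localhost:2223,localhost:2224 --job_name=worker --task_index=1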
| {
"content_hash": "6c94395427885e4ef70f9c76b9be4b10",
"timestamp": "",
"source": "github",
"line_count": 197,
"max_line_length": 119,
"avg_line_length": 38.45177664974619,
"alnum_prop": 0.486996699669967,
"repo_name": "pozhijisi/Distributed-TensorFlow-Using-MPI",
"id": "75c7e5014be5c9e8765288f4e355bb039e6eb43f",
"size": "7575",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "example.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "10711"
},
{
"name": "Shell",
"bytes": "521"
}
],
"symlink_target": ""
} |
import argparse
def parse(args):
parser = argparse.ArgumentParser()
parser.add_argument(
"--debug", action="store_true", default=False, help="Enable debug logging"
)
parser.add_argument("--xml")
parser.add_argument(
"--module", action="store_true", default=False, help="Trace a module"
)
parser.add_argument("progname", help="file to run as main program")
parser.add_argument(
"arguments", nargs=argparse.REMAINDER, help="arguments to the program"
)
return parser.parse_args(args)
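# Example (a sketch): everything after the traced program name lands in the
# REMAINDER bucket.
#
#   opts = parse(["--xml", "out.xml", "prog.py", "arg1", "arg2"])
#   # opts.progname == "prog.py"; opts.arguments == ["arg1", "arg2"]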
| {
"content_hash": "9afb576b55a0ac63f3db2127ad691502",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 82,
"avg_line_length": 25.09090909090909,
"alnum_prop": 0.6521739130434783,
"repo_name": "github/codeql",
"id": "8e909367c32716b4ef71d73dcfaf9407c3211a9b",
"size": "552",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "python/tools/recorded-call-graph-metrics/src/cg_trace/cmdline.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP.NET",
"bytes": "3739"
},
{
"name": "Batchfile",
"bytes": "3534"
},
{
"name": "C",
"bytes": "410440"
},
{
"name": "C#",
"bytes": "21146000"
},
{
"name": "C++",
"bytes": "1352639"
},
{
"name": "CMake",
"bytes": "1809"
},
{
"name": "CodeQL",
"bytes": "32583145"
},
{
"name": "Dockerfile",
"bytes": "496"
},
{
"name": "EJS",
"bytes": "1478"
},
{
"name": "Emacs Lisp",
"bytes": "3445"
},
{
"name": "Go",
"bytes": "697562"
},
{
"name": "HTML",
"bytes": "58008"
},
{
"name": "Handlebars",
"bytes": "1000"
},
{
"name": "Java",
"bytes": "5417683"
},
{
"name": "JavaScript",
"bytes": "2432320"
},
{
"name": "Kotlin",
"bytes": "12163740"
},
{
"name": "Lua",
"bytes": "13113"
},
{
"name": "Makefile",
"bytes": "8631"
},
{
"name": "Mustache",
"bytes": "17025"
},
{
"name": "Nunjucks",
"bytes": "923"
},
{
"name": "Perl",
"bytes": "1941"
},
{
"name": "PowerShell",
"bytes": "1295"
},
{
"name": "Python",
"bytes": "1649035"
},
{
"name": "RAML",
"bytes": "2825"
},
{
"name": "Ruby",
"bytes": "299268"
},
{
"name": "Rust",
"bytes": "234024"
},
{
"name": "Shell",
"bytes": "23973"
},
{
"name": "Smalltalk",
"bytes": "23"
},
{
"name": "Starlark",
"bytes": "27062"
},
{
"name": "Swift",
"bytes": "204309"
},
{
"name": "Thrift",
"bytes": "3020"
},
{
"name": "TypeScript",
"bytes": "219623"
},
{
"name": "Vim Script",
"bytes": "1949"
},
{
"name": "Vue",
"bytes": "2881"
}
],
"symlink_target": ""
} |
"""
* Copyright 2007 Fred Sauer
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http:#www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
"""
from pyjamas import DOM
"""*
* {@link com.allen_sauer.gwt.dnd.client.util.DOMUtil} implementation for
* Webkit/Safari.
"""
class DOMUtilImplSafari(DOMUtilImplStandard):
def cancelAllDocumentSelections(self):
JS("""
// While all Safari/Webkit release appear to define the 'collapse' function,
// this function results in "Error: TYPE_MISMATCH_ERR: DOM Exception 17"
// on Safari 3.0.4 (5523.10) on Mac OS X Version 10.5 (Leopard)
// So, newer Safari use 'removeAllRanges', older Safari fall back to 'collapse'
var s = $wnd.getSelection();
if (s.removeAllRanges) {
s.removeAllRanges();
} else {
s.collapse();
}
""")
def getBorderLeft(self, elem):
JS("""
var computedStyle = $doc.defaultView.getComputedStyle(elem, null);
if (computedStyle != null) {
var borderLeftWidth = computedStyle.getPropertyValue("border-left-width");
return borderLeftWidth.indexOf("px") == -1 ? 0 : parseInt(borderLeftWidth.substr(0, borderLeftWidth.length - 2));
} else {
// When elem is hidden
return 0;
}
""")
def getBorderTop(self, elem):
JS("""
var computedStyle = $doc.defaultView.getComputedStyle(elem, null);
if (computedStyle != null) {
var borderTopWidth = computedStyle.getPropertyValue("border-top-width");
return borderTopWidth.indexOf("px") == -1 ? 0 : parseInt(borderTopWidth.substr(0, borderTopWidth.length - 2));
} else {
// When elem is hidden
return 0;
}
""")
def getClientHeight(self, elem):
JS("""
return elem.clientHeight || 0;
""")
def getClientWidth(self, elem):
JS("""
return elem.clientWidth || 0;
""")
def isOrContains(self, parent, child):
# While Safari 1.3.2 / Safari 2.0.4 support the 'contains' method on DOM
# elements, the method does not appear to return valid results in all cases.
# Revert to a DOM walk from DOM.isOrHasChild instead.
return DOM.isOrHasChild(parent, child)
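# Editor's note (assumption about the pyjamas toolchain): the JS("""...""")
# bodies above are emitted verbatim as JavaScript when the module is
# translated for the browser; under plain CPython they are inert strings.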
| {
"content_hash": "2c4adb202dae7c401976febbb67290a0",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 125,
"avg_line_length": 32.81395348837209,
"alnum_prop": 0.6169383416017009,
"repo_name": "jaredly/pyjamas",
"id": "207d48f3629b7391259b4b94741d06b3319a04ed",
"size": "2822",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "library/pyjamas/dnd/util/impl/DOMUtilImplSafari.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "411613"
},
{
"name": "PHP",
"bytes": "121841"
},
{
"name": "Python",
"bytes": "4243623"
},
{
"name": "Shell",
"bytes": "14781"
}
],
"symlink_target": ""
} |
'''
Dependency checker for CAmkES.
'''
# This script can only import parts of the Python standard library, or it
# becomes useless as a dependency checker.
import abc, argparse, importlib, os, shutil, subprocess, sys, tempfile
class CheckDepException(Exception):
pass
class Package(object):
__metaclass__ = abc.ABCMeta
def __init__(self, name, description):
self.name = name
self.description = description
@abc.abstractmethod
def exists(self):
'''
Whether this package is available.
'''
raise NotImplementedError
class Binary(Package):
def exists(self):
with open(os.devnull, 'w') as f:
return subprocess.call(['which', self.name], stdout=f, stderr=f) \
== 0
class PythonModule(Package):
def exists(self):
try:
importlib.import_module(self.name)
return True
except ImportError:
return False
class PythonModuleWith(PythonModule):
def __init__(self, name, description, attr):
super(PythonModuleWith, self).__init__(name, description)
self.attr = attr
def exists(self):
if not super(PythonModuleWith, self).exists():
return False
mod = importlib.import_module(self.name)
if not hasattr(mod, self.attr):
raise CheckDepException('module exists, but %s.%s not found '
'(upgrade required?)' % (self.name, self.attr))
return True
class CLibrary(Package):
def exists(self):
with open(os.devnull, 'w') as f:
return subprocess.call(['pkg-config', '--cflags', self.name],
stdout=f, stderr=f) == 0
class HaskellModule(Package):
def __init__(self, name, description, import_target):
super(HaskellModule, self).__init__(name, description)
self.import_target = import_target
def exists(self):
# If only GHCI would exit with non-zero on error, the below shenanigans
# would not be necessary.
tmp = tempfile.mkdtemp()
try:
source = os.path.join(tmp, 'Main.hs')
with open(source, 'w') as f:
f.write('import %s\nmain = return 0' % self.import_target)
with open(os.devnull, 'w') as f:
return subprocess.call(['ghc', 'Main.hs'], cwd=tmp, stdout=f,
stderr=f) == 0
finally:
shutil.rmtree(tmp)
class Or(Package):
def __init__(self, *packages):
self.name = ' or '.join(p.name for p in packages)
self.description = '...'
self.packages = packages
def exists(self):
return any(p.exists() for p in self.packages)
def green(string):
return '\033[32;1m%s\033[0m' % string
def red(string):
return '\033[31;1m%s\033[0m' % string
def yellow(string):
return '\033[33m%s\033[0m' % string
DEPENDENCIES = {
'CAmkES runner':(PythonModule('jinja2', 'Python templating module'),
PythonModule('ply', 'Python parsing module'),
PythonModule('elftools', 'Python ELF parsing module'),
PythonModuleWith('six', 'Python 2/3 compatibility layer', 'assertCountEqual')),
'seL4':(Binary('gcc', 'C compiler'),
PythonModule('tempita', 'Python templating module'),
Binary('xmllint', 'XML validator'),
Binary('bash', 'shell'),
Binary('make', 'GNU Make build tool'),
Binary('cpio', 'CPIO file system tool')),
'CapDL translator':(Binary('ghc', 'Haskell compiler'),
HaskellModule('parsec', 'Haskell parsing module', 'Text.ParserCombinators.Parsec'),
HaskellModule('mtl', 'Haskell monad transformers module', 'Control.Monad.State'),
HaskellModule('containers', 'Haskell containers module', 'Data.Set'),
HaskellModule('MissingH', 'Haskell extras module', 'Data.String.Utils'),
HaskellModule('split', 'Haskell split utilities', 'Data.List.Split'),
HaskellModule('array', 'Haskell arrays module', 'Data.Array.IO'),
HaskellModule('pretty', 'Haskell pretty printing module', 'Text.PrettyPrint'),
HaskellModule('filepath', 'Haskell file paths module', 'System.FilePath.Posix'),
Binary('cabal', 'Haskell package manager')),
'CAmkES test suite':(Binary('expect', 'automation utility'),
Binary('pylint', 'Python linter'),
Binary('qemu-system-arm', 'ARM emulator'),
Binary('qemu-system-i386', 'IA32 emulator')),
}
EXTRAS = frozenset((
(Binary('sponge', 'input coalescer from moreutils'),
'installing this will give a marginal improvement in compilation times'),
(Binary('qemu-system-arm', 'ARM emulator'),
'this is required to simulate ARM systems'),
(Binary('qemu-system-i386', 'IA32 emulator'),
'this is required to simulate IA32 systems'),
(Binary('ccache', 'C compiler accelerator'),
'installing this will speed up your C compilation times'),
(Binary('clang-format', 'Clang code reformatter'),
'installing this will reflow generated C code to make it more readable'),
(CLibrary('ncurses', 'terminal menus library'),
'you will need to install this if you want to run menuconfig'),
(Or(Binary('arm-none-eabi-gcc', 'ARM C compiler'),
Binary('arm-linux-gnueabi-gcc', 'ARM C compiler')),
'you will need one of these if you want to target ARM systems'),
(Binary('pandoc', 'document format translator'),
'you will need this if you want to build the CAmkES documentation'),
(Binary('astyle', 'code reformater'),
'installing this will allow you to use the "style" Makefile targets to reformat C code'),
(Binary('c-parser', 'NICTA C-to-Simpl parser'),
'you will need this installed if you want to validate code for verification'),
(Or(Binary('arm-none-eabi-objdump', 'ARM disassembler'),
Binary('arm-linux-gnueabi-objdump', 'ARM disassembler')),
'installing one of these will speed up CapDL generation times for ARM builds'),
(Binary('objdump', 'disassembler'),
'installing this will speed up CapDL generation times for IA32 builds'),
(Binary('VBoxManage', 'VirtualBox administration tool'),
'you will need this installed if you want to build VMWare images'),
(Binary('syslinux', 'Linux bootloader tool'),
'you will need this installed if you want to build QEMU images for IA32'),
(Binary('mpartition', 'partitioning tool for MSDOS disks'),
'you will need this installed if you want to build QEMU images for IA32'),
(Binary('mformat', 'formatting tool for MSDOS disks'),
'you will need this installed if you want to build QEMU images for IA32'),
(Binary('mcopy', 'copying tool for MSDOS disks'),
'you will need this installed if you want to build QEMU images for IA32'),
))
def main(argv):
parser = argparse.ArgumentParser(description='CAmkES dependency checker')
parser.add_argument('--component', '-c', action='append',
choices=DEPENDENCIES.keys(), help='component whose dependencies should '
'be checked (default: all)')
options = parser.parse_args(argv[1:])
ret = 0
for k, v in sorted(DEPENDENCIES.items()):
if options.component is not None and k not in options.component:
continue
ok = True
sys.stdout.write('Dependencies of %s\n' % k)
for p in v:
            sys.stdout.write(' %s (%s)... ' % (p.name, p.description))
            # CheckDepException (e.g. from PythonModuleWith) was previously
            # uncaught here and aborted the whole run with a traceback.
            try:
                found = p.exists()
            except CheckDepException as e:
                found = False
                sys.stdout.write(yellow('%s ' % e))
            if found:
                sys.stdout.write(green('Found\n'))
            else:
                ok = False
                ret = -1
                sys.stdout.write(red('Not found\n'))
if not ok:
sys.stdout.write(red('You will not be able to build/run this component\n'))
sys.stdout.write('\n')
printed_header = False
for p, note in EXTRAS:
if not p.exists():
if not printed_header:
sys.stdout.write('Suggestions:\n')
printed_header = True
sys.stdout.write(yellow(' %s (%s): %s\n' % (p.name, p.description, note)))
return ret
if __name__ == '__main__':
sys.exit(main(sys.argv))
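# Editor's sketch (hypothetical extension, not in the original file): a new
# check follows the same Package protocol used above. DEPENDENCIES values
# are tuples, so extra checks are appended by tuple concatenation before
# main() runs, e.g.:
#
#   DEPENDENCIES['CAmkES runner'] += (Binary('dot', 'Graphviz layout tool'),)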
| {
"content_hash": "401847668dba196e18f49252bb9918b7",
"timestamp": "",
"source": "github",
"line_count": 200,
"max_line_length": 107,
"avg_line_length": 42.115,
"alnum_prop": 0.6016858601448415,
"repo_name": "smaccm/camkes-tool",
"id": "7a3684ce6843ee77ff6b106d717d7b60a87530c0",
"size": "8694",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/check_deps.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "Assembly",
"bytes": "3407"
},
{
"name": "C",
"bytes": "383162"
},
{
"name": "C++",
"bytes": "740"
},
{
"name": "Isabelle",
"bytes": "242975"
},
{
"name": "Makefile",
"bytes": "38834"
},
{
"name": "Python",
"bytes": "229476"
},
{
"name": "Shell",
"bytes": "3298"
},
{
"name": "VimL",
"bytes": "3143"
}
],
"symlink_target": ""
} |
revision = '67fb47689b'
down_revision = '619369f92a'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('text_version', sa.Column('title', sa.Text(), nullable=True))
op.execute("UPDATE text_version SET title=''")
op.alter_column('text_version', 'title', nullable=False)
op.alter_column('text_version', 'content', nullable=False)
op.alter_column('text_version', 'more_content', nullable=False)
def downgrade():
op.alter_column('text_version', 'more_content', nullable=True)
op.alter_column('text_version', 'content', nullable=True)
op.drop_column('text_version', 'title')
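# Editor's note (sketch): this revision is applied or reverted with the
# standard Alembic CLI, e.g.
#   alembic upgrade 67fb47689b
#   alembic downgrade 619369f92a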
| {
"content_hash": "e78f8e57f73474535fcb60e175676977",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 79,
"avg_line_length": 33.31578947368421,
"alnum_prop": 0.6951026856240127,
"repo_name": "mgax/mptracker",
"id": "1764ebe827f3ce5e08bc234c39ed946714f16dfd",
"size": "633",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "alembic/versions/67fb47689b_text_version_title.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "388319"
},
{
"name": "HTML",
"bytes": "146505"
},
{
"name": "JavaScript",
"bytes": "300815"
},
{
"name": "Makefile",
"bytes": "85"
},
{
"name": "Mako",
"bytes": "268"
},
{
"name": "Python",
"bytes": "438347"
},
{
"name": "Shell",
"bytes": "1893"
}
],
"symlink_target": ""
} |
__author__ = 'steffenfb'
import os
import re
import pickle
import datetime
import time
import hashlib
import operator
def localtest(userList, fromhour):
#os.chdir('/Users/steffenfb/Documents/facelogs')
logpath= 'facelogs'
files = os.listdir(logpath)
files.pop(0)
mydict={}
#Adding users to the list
for user in userList:
mydict[user]=[]
mydict['ref']=[]
today = datetime.datetime.fromtimestamp(time.time()).date()
for file in files:
try:
filetime = re.search('\d+', file)
filetime = datetime.datetime.fromtimestamp(int(file[filetime.start():filetime.end()]))
#time[time.start():time.end()]
fileDate = filetime.date()
filehour = filetime.time().hour
#print 'this is filedate '+str(fileDate)
#To create time reference
#mydict['ref'].append(datetime.datetime.fromtimestamp(int(file[filetime.start():filetime.end()])).time())
        except:
            print str(filetime)+' could not find time '
            # Skip this file: fileDate/filehour were never set, so the
            # checks below would raise NameError.
            continue
#Only add the files today
if(fileDate == today and filehour > fromhour ):
#if( filehour > 8 ):
mydict['ref'].append(filetime.time())
for user in userList:
found = False
data = pickle.load( open( logpath+'/'+file, "rb" ) )
#Insert name here
#hash = hashlib.md5('').hexdigest()
                user_hash = user  # renamed from 'hash' to avoid shadowing the builtin
                for username in data:
                    if(user_hash in username):
#print 'found on at '+str(time)
found = True
mydict[user].append(1)
if(not found):
mydict[user].append(0)
#print '\t not active at '+str(time)
return mydict
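# Editor's sketch (hypothetical user hashes, not in the original file):
#   users = ['<md5-of-user-a>', '<md5-of-user-b>']
#   activity = localtest(users, 8)       # activity since 08:00 today
#   activity['ref']      -> sample times of the matching log files
#   activity[users[0]]   -> per-sample 0/1 presence flags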
#Only return on last three hours
def last_x_hours(userList, backtrack_hours):
logpath= 'facelogs'
files = os.listdir(logpath)
files.pop(0)
mydict={}
#Adding users to the list
for user in userList:
mydict[user]=[]
mydict['ref']=[]
today = datetime.datetime.fromtimestamp(time.time()).date()
hournow = datetime.datetime.fromtimestamp(time.time()).hour
for file in files:
try:
filetime = re.search('\d+', file)
filetime = datetime.datetime.fromtimestamp(int(file[filetime.start():filetime.end()]))
#time[time.start():time.end()]
fileDate = filetime.date()
filehour = filetime.time().hour
#print 'this is filedate '+str(fileDate)
#To create time reference
#mydict['ref'].append(datetime.datetime.fromtimestamp(int(file[filetime.start():filetime.end()])).time())
        except:
            print str(filetime)+' could not find time '
            # Skip this file: fileDate/filehour were never set, so the
            # checks below would raise NameError.
            continue
#Only add the files today
if(fileDate == today and (hournow-filehour) <= backtrack_hours ):
mydict['ref'].append(filetime.time())
for user in userList:
found = False
data = pickle.load( open( logpath+'/'+file, "rb" ) )
#Insert name here
#hash = hashlib.md5('').hexdigest()
                user_hash = user  # renamed from 'hash' to avoid shadowing the builtin
                for username in data:
                    if(user_hash in username):
#print 'found on at '+str(time)
found = True
mydict[user].append(1)
if(not found):
mydict[user].append(0)
#print '\t not active at '+str(time)
namelist = sort_on_most_active(mydict)
return mydict,namelist
def sort_on_most_active(user_dict):
count_dict = {}
    for key, value in user_dict.iteritems():
        if key == 'ref':
            continue  # 'ref' holds sample times, not 0/1 activity flags
        total_counter = 0
for element in value:
if(element) : total_counter +=1
count_dict[key] = total_counter
count_dict = sorted(count_dict.items(), key=operator.itemgetter(1))
count_dict.reverse()
sorted_name_list = []
for i in count_dict:
sorted_name_list.append(i[0])
    return sorted_name_list
| {
"content_hash": "3abb8ff746a9d734deaef79b86a7aac3",
"timestamp": "",
"source": "github",
"line_count": 147,
"max_line_length": 117,
"avg_line_length": 28.244897959183675,
"alnum_prop": 0.5411849710982659,
"repo_name": "Steffb/facelogger",
"id": "449d3d8f922957bea1ff13854ab80fbe8d8cdcbd",
"size": "4152",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "analyzer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "7484"
},
{
"name": "JavaScript",
"bytes": "130483"
},
{
"name": "Python",
"bytes": "11460"
}
],
"symlink_target": ""
} |
import webbrowser
class ScriptHandler:
def __init__(self):
self.commands = ['google ([\w ]+)', 'search ([\w ]+)']
    def runScript(self, args):
        # 'args' is taken to be a regex match against one of self.commands;
        # the original `args(1)` called the match object and always failed.
        searchTerm = str(args.group(1))
        terms = searchTerm.split()
        first = True
        # An explicit scheme keeps webbrowser.open() from treating the
        # target as a relative path on some platforms.
        url = 'https://www.google.com/search?q='
for term in terms:
if first:
url += term
first = False
else:
url += '+' + term
webbrowser.open(url)
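# Editor's sketch (hypothetical dispatcher): driving the handler with a
# regex match against one of its registered commands.
#   import re
#   handler = ScriptHandler()
#   m = re.match(handler.commands[0], 'google python tutorials')
#   if m: handler.runScript(m)   # opens a Google search for the term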
| {
"content_hash": "678320333b12ff3cc7fc29417b4dba12",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 62,
"avg_line_length": 26.555555555555557,
"alnum_prop": 0.47280334728033474,
"repo_name": "edbrown23/Chives",
"id": "9e5bf1eb662469ee0170662a655158535520fd68",
"size": "478",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/googleScript.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "10232"
}
],
"symlink_target": ""
} |
"""A non-blocking, single-threaded TCP server."""
from __future__ import absolute_import, division, print_function, with_statement
import errno
import os
import socket
from hurray.server.log import app_log
from hurray.server.ioloop import IOLoop
from hurray.server.iostream import IOStream, SSLIOStream
from hurray.server.netutil import bind_sockets, add_accept_handler, ssl_wrap_socket
from hurray.server import process
from hurray.server.util import errno_from_exception
try:
import ssl
except ImportError:
# ssl is not available on Google App Engine.
ssl = None
class TCPServer(object):
r"""A non-blocking, single-threaded TCP server.
To use `TCPServer`, define a subclass which overrides the `handle_stream`
method. For example, a simple echo server could be defined like this::
from hurray.server.tcpserver import TCPServer
from hurray.server.iostream import StreamClosedError
from hurray.server import gen
class EchoServer(TCPServer):
@gen.coroutine
def handle_stream(self, stream, address):
while True:
try:
data = yield stream.read_until(b"\n")
yield stream.write(data)
except StreamClosedError:
break
To make this server serve SSL traffic, send the ``ssl_options`` keyword
argument with an `ssl.SSLContext` object. For compatibility with older
versions of Python ``ssl_options`` may also be a dictionary of keyword
arguments for the `ssl.wrap_socket` method.::
ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
ssl_ctx.load_cert_chain(os.path.join(data_dir, "mydomain.crt"),
os.path.join(data_dir, "mydomain.key"))
TCPServer(ssl_options=ssl_ctx)
`TCPServer` initialization follows one of three patterns:
1. `listen`: simple single-process::
server = TCPServer()
server.listen(8888)
IOLoop.current().start()
2. `bind`/`start`: simple multi-process::
server = TCPServer()
server.bind(8888)
server.start(0) # Forks multiple sub-processes
IOLoop.current().start()
When using this interface, an `.IOLoop` must *not* be passed
to the `TCPServer` constructor. `start` will always start
the server on the default singleton `.IOLoop`.
3. `add_sockets`: advanced multi-process::
sockets = bind_sockets(8888)
hurray.server.process.fork_processes(0)
server = TCPServer()
server.add_sockets(sockets)
IOLoop.current().start()
The `add_sockets` interface is more complicated, but it can be
used with `hurray.server.process.fork_processes` to give you more
flexibility in when the fork happens. `add_sockets` can
also be used in single-process servers if you want to create
your listening sockets in some way other than
`~hurray.server.netutil.bind_sockets`.
.. versionadded:: 3.1
The ``max_buffer_size`` argument.
"""
def __init__(self, io_loop=None, ssl_options=None, max_buffer_size=None,
read_chunk_size=None):
self.io_loop = io_loop
self.ssl_options = ssl_options
self._sockets = {} # fd -> socket object
self._pending_sockets = []
self._started = False
self.max_buffer_size = max_buffer_size
self.read_chunk_size = read_chunk_size
# Verify the SSL options. Otherwise we don't get errors until clients
# connect. This doesn't verify that the keys are legitimate, but
# the SSL module doesn't do that until there is a connected socket
# which seems like too much work
if self.ssl_options is not None and isinstance(self.ssl_options, dict):
# Only certfile is required: it can contain both keys
if 'certfile' not in self.ssl_options:
raise KeyError('missing key "certfile" in ssl_options')
if not os.path.exists(self.ssl_options['certfile']):
raise ValueError('certfile "%s" does not exist' %
self.ssl_options['certfile'])
if ('keyfile' in self.ssl_options and
not os.path.exists(self.ssl_options['keyfile'])):
raise ValueError('keyfile "%s" does not exist' %
self.ssl_options['keyfile'])
def listen(self, port, address=""):
"""Starts accepting connections on the given port.
This method may be called more than once to listen on multiple ports.
`listen` takes effect immediately; it is not necessary to call
`TCPServer.start` afterwards. It is, however, necessary to start
the `.IOLoop`.
"""
sockets = bind_sockets(port, address=address)
self.add_sockets(sockets)
def add_sockets(self, sockets):
"""Makes this server start accepting connections on the given sockets.
The ``sockets`` parameter is a list of socket objects such as
those returned by `~hurray.server.netutil.bind_sockets`.
`add_sockets` is typically used in combination with that
method and `hurray.server.process.fork_processes` to provide greater
control over the initialization of a multi-process server.
"""
if self.io_loop is None:
self.io_loop = IOLoop.current()
for sock in sockets:
self._sockets[sock.fileno()] = sock
add_accept_handler(sock, self._handle_connection,
io_loop=self.io_loop)
def add_socket(self, socket):
"""Singular version of `add_sockets`. Takes a single socket object."""
self.add_sockets([socket])
def bind(self, port, address=None, family=socket.AF_UNSPEC, backlog=128,
reuse_port=False):
"""Binds this server to the given port on the given address.
To start the server, call `start`. If you want to run this server
in a single process, you can call `listen` as a shortcut to the
sequence of `bind` and `start` calls.
Address may be either an IP address or hostname. If it's a hostname,
the server will listen on all IP addresses associated with the
name. Address may be an empty string or None to listen on all
available interfaces. Family may be set to either `socket.AF_INET`
or `socket.AF_INET6` to restrict to IPv4 or IPv6 addresses, otherwise
both will be used if available.
The ``backlog`` argument has the same meaning as for
`socket.listen <socket.socket.listen>`. The ``reuse_port`` argument
has the same meaning as for `.bind_sockets`.
This method may be called multiple times prior to `start` to listen
on multiple ports or interfaces.
.. versionchanged:: 4.4
Added the ``reuse_port`` argument.
"""
sockets = bind_sockets(port, address=address, family=family,
backlog=backlog, reuse_port=reuse_port)
if self._started:
self.add_sockets(sockets)
else:
self._pending_sockets.extend(sockets)
def start(self, num_processes=1):
"""Starts this server in the `.IOLoop`.
By default, we run the server in this process and do not fork any
additional child process.
If num_processes is ``None`` or <= 0, we detect the number of cores
available on this machine and fork that number of child
processes. If num_processes is given and > 1, we fork that
specific number of sub-processes.
Since we use processes and not threads, there is no shared memory
between any server code.
Note that multiple processes are not compatible with the autoreload
module.
When using multiple processes, no IOLoops can be created or
referenced until after the call to ``TCPServer.start(n)``.
"""
assert not self._started
self._started = True
if num_processes != 1:
process.fork_processes(num_processes)
sockets = self._pending_sockets
self._pending_sockets = []
self.add_sockets(sockets)
def stop(self):
"""Stops listening for new connections.
Requests currently in progress may still continue after the
server is stopped.
"""
for fd, sock in self._sockets.items():
self.io_loop.remove_handler(fd)
sock.close()
def handle_stream(self, stream, address):
"""Override to handle a new `.IOStream` from an incoming connection.
This method may be a coroutine; if so any exceptions it raises
asynchronously will be logged. Accepting of incoming connections
will not be blocked by this coroutine.
If this `TCPServer` is configured for SSL, ``handle_stream``
may be called before the SSL handshake has completed. Use
`.SSLIOStream.wait_for_handshake` if you need to verify the client's
certificate or use NPN/ALPN.
.. versionchanged:: 4.2
Added the option for this method to be a coroutine.
"""
raise NotImplementedError()
def _handle_connection(self, connection, address):
if self.ssl_options is not None:
assert ssl, "Python 2.6+ and OpenSSL required for SSL"
try:
connection = ssl_wrap_socket(connection,
self.ssl_options,
server_side=True,
do_handshake_on_connect=False)
except ssl.SSLError as err:
if err.args[0] == ssl.SSL_ERROR_EOF:
return connection.close()
else:
raise
except socket.error as err:
# If the connection is closed immediately after it is created
# (as in a port scan), we can get one of several errors.
# wrap_socket makes an internal call to getpeername,
# which may return either EINVAL (Mac OS X) or ENOTCONN
# (Linux). If it returns ENOTCONN, this error is
# silently swallowed by the ssl module, so we need to
# catch another error later on (AttributeError in
# SSLIOStream._do_ssl_handshake).
# To test this behavior, try nmap with the -sT flag.
# https://github.com/tornadoweb/tornado/pull/750
if errno_from_exception(err) in (errno.ECONNABORTED, errno.EINVAL):
return connection.close()
else:
raise
try:
if self.ssl_options is not None:
stream = SSLIOStream(connection, io_loop=self.io_loop,
max_buffer_size=self.max_buffer_size,
read_chunk_size=self.read_chunk_size)
else:
stream = IOStream(connection, io_loop=self.io_loop,
max_buffer_size=self.max_buffer_size,
read_chunk_size=self.read_chunk_size)
future = self.handle_stream(stream, address)
if future is not None:
self.io_loop.add_future(future, lambda f: f.result())
except Exception:
app_log.error("Error in connection callback", exc_info=True)
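# Editor's sketch (not part of this module): the smallest concrete server,
# wired up with initialization pattern 1 (`listen`) from the class docstring.
#
#   class Discard(TCPServer):
#       def handle_stream(self, stream, address):
#           stream.close()
#
#   # Discard().listen(8888); IOLoop.current().start()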
| {
"content_hash": "fcc22674e3947ce384d3af2688e688d4",
"timestamp": "",
"source": "github",
"line_count": 275,
"max_line_length": 83,
"avg_line_length": 42.36,
"alnum_prop": 0.6065756717314791,
"repo_name": "meteotest/hurray",
"id": "50015d86a60c39109e736dc960a60d27d7c602a8",
"size": "12287",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hurray/server/tcpserver.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "391421"
},
{
"name": "Shell",
"bytes": "291"
}
],
"symlink_target": ""
} |
from setuptools import setup
setup(
name='azure-mgmt-common',
version='0.20.0rc1',
description='Microsoft Azure Resource Management Client Library for Python (Common)',
long_description=open('README.rst', 'r').read(),
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://github.com/Azure/azure-sdk-for-python',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'License :: OSI Approved :: Apache Software License',
],
zip_safe=False,
packages=[
'azure',
'azure.mgmt',
'azure.mgmt.common',
],
install_requires=[
'azure-common',
'azure-mgmt-nspkg',
],
)
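# Editor's note (sketch): a source/wheel distribution is built from this
# directory with the setuptools commands of the era (wheel package assumed
# installed):
#   python setup.py sdist bdist_wheel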
| {
"content_hash": "bdd0532b54fc61e6e720bd625797b8df",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 89,
"avg_line_length": 31.59375,
"alnum_prop": 0.5974282888229476,
"repo_name": "ParallaxIT/azure-sdk-for-python",
"id": "e67ddea6ba7f962e1935452e7e23094ed99c5c70",
"size": "1777",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "azure-mgmt-common/setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4201"
},
{
"name": "PowerShell",
"bytes": "918"
},
{
"name": "Python",
"bytes": "2818576"
},
{
"name": "Shell",
"bytes": "860"
}
],
"symlink_target": ""
} |
from feti.models.national_qualifications_framework import \
NationalQualificationsFramework
from haystack import indexes
__author__ = 'Rizky Maulana Nugraha "lucernae" <lana.pcfre@gmail.com>'
__date__ = '24/04/15'
class NationalQualificationsFrameworkIndex(indexes.SearchIndex,
indexes.Indexable):
text = indexes.EdgeNgramField(document=True, use_template=True)
certification = indexes.CharField(model_attr='certification')
class Meta:
app_label = 'feti'
def get_model(self):
return NationalQualificationsFramework
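    # Editor's sketch (hypothetical query, not in the original file): once
    # built, this index is searched through Haystack's queryset API, e.g.
    #   from haystack.query import SearchQuerySet
    #   SearchQuerySet().models(NationalQualificationsFramework) \
    #                   .autocomplete(text='engineering')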
def index_queryset(self, using=None):
"""Used to reindex model"""
        return self.get_model().objects.all()
| {
"content_hash": "971ae1b1e31b2931d53f518fa90734be",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 70,
"avg_line_length": 32.86363636363637,
"alnum_prop": 0.681881051175657,
"repo_name": "lucernae/feti",
"id": "cb04a1bacc84c4d626bd8d46ad9b5c5c38f0be2a",
"size": "738",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_project/feti/indexes/national_qualification_framework.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "22931"
},
{
"name": "HTML",
"bytes": "15830"
},
{
"name": "JavaScript",
"bytes": "110104"
},
{
"name": "Makefile",
"bytes": "11616"
},
{
"name": "PLpgSQL",
"bytes": "9784393"
},
{
"name": "Python",
"bytes": "291603"
},
{
"name": "SQLPL",
"bytes": "21594"
},
{
"name": "Shell",
"bytes": "1330"
}
],
"symlink_target": ""
} |
from pygenius import *
#Bambu's Golden Era Shower
#s = song.song(489522)
#Cool Calm Pete's 2 A.M.
#s = song.song(255949)
#Blue Scholar's Sagab
s = song(3893)
#prints the song title
print s.name
#prints the lyrics
for i in s.lyrics:
print i
#concatenate the lyrics into one string
lyrics_text = cat_lyrics(s)  # renamed from 'str' to avoid shadowing the builtin
print lyrics_text
#prints primary artist
print s.primary_artist
#prints featured artists
for i in s.featured_artists:
print i
print "----------"
#Bambu
#a = artist.artist(12246)
#Cool Calm Pete
#a = artist.artist(12283)
#Blue Scholars
a = artist(544)
#prints artist's name
print a.name
#prints artist's description if available
print a.description
#prints the artist's songs
for i in a.songs:
print i
print "----------"
#search for the song "Weight is Gone by Intuition & Equalibrum"
result = search_song("weight is gone")
print result.name
print result.primary_artist
| {
"content_hash": "de9c78144651be36f192030beb9e2547",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 63,
"avg_line_length": 15.892857142857142,
"alnum_prop": 0.7112359550561798,
"repo_name": "aorti017/pygenius",
"id": "2806d731aba8cddb6c696986d5f9480e5b62051f",
"size": "890",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "example.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "5026"
}
],
"symlink_target": ""
} |
"""Set up pytradfri."""
from pathlib import Path
from setuptools import find_packages, setup
PROJECT_DIR = Path(__file__).parent.resolve()
README_FILE = PROJECT_DIR / "README.md"
LONG_DESCRIPTION = README_FILE.read_text(encoding="utf-8")
VERSION = (PROJECT_DIR / "pytradfri" / "VERSION").read_text().strip()
GITHUB_URL = "https://github.com/home-assistant-libs/pytradfri"
DOWNLOAD_URL = f"{GITHUB_URL}/archive/{VERSION}.zip"
EXTRAS_REQUIRE = {"async": ["aiocoap==0.4.1", "DTLSSocket==0.1.12"]}
PACKAGES = find_packages(exclude=["tests", "tests.*"])
setup(
name="pytradfri",
packages=PACKAGES,
python_requires=">=3.7",
version=VERSION,
description="IKEA Trådfri/Tradfri API. Control and observe your "
"lights from Python.",
long_description=LONG_DESCRIPTION,
author="balloob, lwis, ggravlingen",
author_email="no@email.com",
long_description_content_type="text/markdown",
url=GITHUB_URL,
include_package_data=True,
license="MIT",
keywords="ikea tradfri api iot light homeautomation",
download_url=DOWNLOAD_URL,
extras_require=EXTRAS_REQUIRE,
)
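# Editor's note (sketch): the "async" extra declared above is pulled in with
#   pip install "pytradfri[async]"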
| {
"content_hash": "59cbf6c5275d7b837a4aa05fcb6b089d",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 69,
"avg_line_length": 30.97222222222222,
"alnum_prop": 0.694170403587444,
"repo_name": "rubenbe/pytradfri",
"id": "d954d857343af96a372f952593fefbf857717785",
"size": "1139",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "339"
},
{
"name": "Python",
"bytes": "116785"
},
{
"name": "Shell",
"bytes": "1330"
}
],
"symlink_target": ""
} |
import collections
import pecan
from barbican import api
from barbican.api import controllers
from barbican.common import hrefs
from barbican.common import utils
from barbican.common import validators
from barbican import i18n as u
from barbican.model import repositories as repo
LOG = utils.getLogger(__name__)
def _secret_metadata_not_found():
"""Throw exception indicating secret metadata not found."""
pecan.abort(404, u._('Secret metadata not found.'))
class SecretMetadataController(controllers.ACLMixin):
"""Handles SecretMetadata requests by a given secret id."""
def __init__(self, secret):
LOG.debug('=== Creating SecretMetadataController ===')
super().__init__()
self.secret = secret
self.secret_project_id = self.secret.project.external_id
self.secret_repo = repo.get_secret_repository()
self.user_meta_repo = repo.get_secret_user_meta_repository()
self.metadata_validator = validators.NewSecretMetadataValidator()
self.metadatum_validator = validators.NewSecretMetadatumValidator()
@pecan.expose(generic=True)
def index(self, **kwargs):
pecan.abort(405) # HTTP 405 Method Not Allowed as default
@index.when(method='GET', template='json')
@utils.allow_all_content_types
@controllers.handle_exceptions(u._('Secret metadata retrieval'))
@controllers.enforce_rbac('secret_meta:get')
def on_get(self, external_project_id, **kwargs):
"""Handles retrieval of existing secret metadata requests."""
LOG.debug('Start secret metadata on_get '
'for secret-ID %s:', self.secret.id)
resp = self.user_meta_repo.get_metadata_for_secret(self.secret.id)
pecan.response.status = 200
return {"metadata": resp}
@index.when(method='PUT', template='json')
@controllers.handle_exceptions(u._('Secret metadata creation'))
@controllers.enforce_rbac('secret_meta:put')
@controllers.enforce_content_types(['application/json'])
def on_put(self, external_project_id, **kwargs):
"""Handles creation/update of secret metadata."""
data = api.load_body(pecan.request, validator=self.metadata_validator)
LOG.debug('Start secret metadata on_put...%s', data)
self.user_meta_repo.create_replace_user_metadata(self.secret.id,
data)
url = hrefs.convert_user_meta_to_href(self.secret.id)
LOG.debug('URI to secret metadata is %s', url)
pecan.response.status = 201
return {'metadata_ref': url}
@index.when(method='POST', template='json')
@controllers.handle_exceptions(u._('Secret metadatum creation'))
@controllers.enforce_rbac('secret_meta:post')
@controllers.enforce_content_types(['application/json'])
def on_post(self, external_project_id, **kwargs):
"""Handles creation of secret metadatum."""
data = api.load_body(pecan.request, validator=self.metadatum_validator)
key = data.get('key')
value = data.get('value')
metadata = self.user_meta_repo.get_metadata_for_secret(self.secret.id)
if key in metadata:
pecan.abort(409, u._('Conflict. Key in request is already in the '
'secret metadata'))
LOG.debug('Start secret metadatum on_post...%s', metadata)
self.user_meta_repo.create_replace_user_metadatum(self.secret.id,
key, value)
url = hrefs.convert_user_meta_to_href(self.secret.id)
LOG.debug('URI to secret metadata is %s', url)
pecan.response.status = 201
pecan.response.headers['Location'] = url + '/' + key
return {'key': key, 'value': value}
class SecretMetadatumController(controllers.ACLMixin):
def __init__(self, secret):
LOG.debug('=== Creating SecretMetadatumController ===')
super().__init__()
self.user_meta_repo = repo.get_secret_user_meta_repository()
self.secret = secret
self.metadatum_validator = validators.NewSecretMetadatumValidator()
@pecan.expose(generic=True)
def index(self, **kwargs):
pecan.abort(405) # HTTP 405 Method Not Allowed as default
@index.when(method='GET', template='json')
@controllers.handle_exceptions(u._('Secret metadatum retrieval'))
@controllers.enforce_rbac('secret_meta:get')
def on_get(self, external_project_id, remainder, **kwargs):
"""Handles retrieval of existing secret metadatum."""
LOG.debug('Start secret metadatum on_get '
'for secret-ID %s:', self.secret.id)
metadata = self.user_meta_repo.get_metadata_for_secret(self.secret.id)
if remainder in metadata:
pecan.response.status = 200
pair = {'key': remainder, 'value': metadata[remainder]}
return collections.OrderedDict(sorted(pair.items()))
else:
_secret_metadata_not_found()
@index.when(method='PUT', template='json')
@utils.allow_all_content_types
@controllers.handle_exceptions(u._('Secret metadatum update'))
@controllers.enforce_rbac('secret_meta:put')
@controllers.enforce_content_types(['application/json'])
def on_put(self, external_project_id, remainder, **kwargs):
"""Handles update of existing secret metadatum."""
metadata = self.user_meta_repo.get_metadata_for_secret(self.secret.id)
data = api.load_body(pecan.request, validator=self.metadatum_validator)
key = data.get('key')
value = data.get('value')
if remainder not in metadata:
_secret_metadata_not_found()
elif remainder != key:
            # The second string literal used to be a dangling statement,
            # silently truncating the message.
            msg = ('Key in request data does not match key in the '
                   'request url.')
            pecan.abort(409, msg)
else:
LOG.debug('Start secret metadatum on_put...%s', metadata)
self.user_meta_repo.create_replace_user_metadatum(self.secret.id,
key, value)
pecan.response.status = 200
pair = {'key': key, 'value': value}
return collections.OrderedDict(sorted(pair.items()))
@index.when(method='DELETE', template='json')
@controllers.handle_exceptions(u._('Secret metadatum removal'))
@controllers.enforce_rbac('secret_meta:delete')
def on_delete(self, external_project_id, remainder, **kwargs):
"""Handles removal of existing secret metadatum."""
self.user_meta_repo.delete_metadatum(self.secret.id,
remainder)
msg = 'Deleted secret metadatum: %s for secret %s' % (remainder,
self.secret.id)
pecan.response.status = 204
LOG.info(msg)
| {
"content_hash": "6fa2f0fef2ed575613d4630ea680c05d",
"timestamp": "",
"source": "github",
"line_count": 169,
"max_line_length": 79,
"avg_line_length": 40.70414201183432,
"alnum_prop": 0.6278528855938363,
"repo_name": "openstack/barbican",
"id": "8f31a8224214886ff5f6558d684adc93f6375fa3",
"size": "7433",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "barbican/api/controllers/secretmeta.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "API Blueprint",
"bytes": "1586"
},
{
"name": "Mako",
"bytes": "979"
},
{
"name": "Python",
"bytes": "2626403"
},
{
"name": "Shell",
"bytes": "43567"
}
],
"symlink_target": ""
} |
"""Tests for tensorflow.ops.resource_variable_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import gc
import os
import pickle
import numpy as np
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import momentum
from tensorflow.python.training import saver
from tensorflow.python.training import training_util
from tensorflow.python.util import compat
class ResourceVariableOpsTest(test_util.TensorFlowTestCase):
def tearDown(self):
gc.collect()
# This will only contain uncollectable garbage, i.e. reference cycles
# involving objects with __del__ defined.
self.assertEqual(0, len(gc.garbage))
def testHandleDtypeShapeMatch(self):
with self.cached_session():
handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[])
with self.assertRaises(ValueError):
resource_variable_ops.assign_variable_op(
handle, constant_op.constant(0.0, dtype=dtypes.float32)).run()
with self.assertRaises(ValueError):
resource_variable_ops.assign_variable_op(handle,
constant_op.constant(
[0],
dtype=dtypes.int32)).run()
resource_variable_ops.assign_variable_op(handle,
constant_op.constant(
0,
dtype=dtypes.int32)).run()
def testGPUInt64(self):
if not context.context().num_gpus():
return
with context.eager_mode(), context.device("gpu:0"):
v = resource_variable_ops.ResourceVariable(1, dtype=dtypes.int64)
self.assertAllEqual(1, v.numpy())
def testEagerNameNotIdentity(self):
with context.eager_mode():
v0 = resource_variable_ops.ResourceVariable(1.0, name="a")
v1 = resource_variable_ops.ResourceVariable(2.0, name="a")
self.assertAllEqual(v0.numpy(), 1.0)
self.assertAllEqual(v1.numpy(), 2.0)
def testEagerNameNotNeeded(self):
with context.eager_mode():
v0 = resource_variable_ops.ResourceVariable(1.0)
self.assertAllEqual(v0.numpy(), 1.0)
def testReadVariableDtypeMismatchEager(self):
with context.eager_mode():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1], name="foo")
resource_variable_ops.assign_variable_op(handle, 1)
with self.assertRaisesRegexp(errors.InvalidArgumentError,
"Trying to read variable with wrong dtype. "
"Expected float got int32."):
_ = resource_variable_ops.read_variable_op(handle, dtype=dtypes.float32)
def testEagerInitializedValue(self):
with context.eager_mode():
variable = resource_variable_ops.ResourceVariable(1.0, name="eager-init")
self.assertAllEqual(variable.numpy(), 1.0)
self.assertAllEqual(variable.initialized_value().numpy(), 1.0)
def testEagerBool(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable(False, name="bool_test")
self.assertAllEqual(bool(v), False)
def testEagerDeepCopy(self):
with context.eager_mode():
init_value = np.ones((4, 4, 4))
variable = resource_variable_ops.ResourceVariable(init_value,
name="init")
copied_variable = copy.deepcopy(variable)
copied_variable.assign(4 * np.ones((4, 4, 4)))
# Copying the variable should create a new underlying tensor with distinct
# values.
self.assertFalse(np.allclose(variable.numpy(), copied_variable.numpy()))
def testGraphDeepCopy(self):
with self.cached_session():
init_value = np.ones((4, 4, 4))
variable = resource_variable_ops.ResourceVariable(init_value,
name="init")
with self.assertRaises(NotImplementedError):
copy.deepcopy(variable)
@test_util.run_in_graph_and_eager_modes
def testStridedSliceAssign(self):
v = resource_variable_ops.ResourceVariable([1.0, 2.0])
self.evaluate(variables.global_variables_initializer())
self.evaluate(v[0].assign(2.0))
self.assertAllEqual(self.evaluate(v), [2.0, 2.0])
def testDifferentAssignGraph(self):
with ops.Graph().as_default():
v = resource_variable_ops.ResourceVariable(1.0)
ops.reset_default_graph()
v.assign(2.0) # Note: this fails if we run convert_to_tensor on not the
# variable graph.
def testFetchHandle(self):
with self.cached_session():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1], name="foo")
self.assertGreater(len(handle.eval()), 0)
def testCachedValueReadBeforeWrite(self):
with self.cached_session() as sess:
v = resource_variable_ops.ResourceVariable(0.0, caching_device="cpu:0")
sess.run(v.initializer)
value, _ = sess.run([v, v.assign_add(1.0)])
self.assertAllEqual(value, 0.0)
def testAssignVariableDtypeMismatchEager(self):
with context.eager_mode():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1], name="foo")
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([1]))
with self.assertRaisesRegexp(errors.InvalidArgumentError,
"Trying to assign variable with wrong "
"dtype. Expected int32 got float."):
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([1.], dtype=dtypes.float32))
def testUnprintableHandle(self):
with context.eager_mode():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1], name="foo")
self.assertIn("<unprintable>", str(handle))
self.assertIn("<unprintable>", repr(handle))
@test_util.run_in_graph_and_eager_modes
def testDtypeSurvivesIdentity(self):
handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[])
id_handle = array_ops.identity(handle)
self.evaluate(resource_variable_ops.assign_variable_op(
id_handle, constant_op.constant(0, dtype=dtypes.int32)))
def testUnreadOpName(self):
v = resource_variable_ops.ResourceVariable(1.0)
self.assertNotEqual(v.name, v.assign_add(1.0).name)
@test_util.run_in_graph_and_eager_modes
def testCreateRead(self):
handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[])
self.evaluate(resource_variable_ops.assign_variable_op(
handle, constant_op.constant(1, dtype=dtypes.int32)))
value = self.evaluate(
resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32))
self.assertAllEqual(1, value)
@test_util.run_in_graph_and_eager_modes
def testManyAssigns(self):
handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[])
create = resource_variable_ops.assign_variable_op(
handle, constant_op.constant(1, dtype=dtypes.int32))
with ops.control_dependencies([create]):
first_read = resource_variable_ops.read_variable_op(
handle, dtype=dtypes.int32)
with ops.control_dependencies([first_read]):
write = resource_variable_ops.assign_variable_op(
handle, constant_op.constant(2, dtype=dtypes.int32))
with ops.control_dependencies([write]):
second_read = resource_variable_ops.read_variable_op(
handle, dtype=dtypes.int32)
f, s = self.evaluate([first_read, second_read])
self.assertEqual(f, 1)
self.assertEqual(s, 2)
@test_util.run_in_graph_and_eager_modes
def testAssignAdd(self):
handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[])
self.evaluate(resource_variable_ops.assign_variable_op(
handle, constant_op.constant(1, dtype=dtypes.int32)))
self.evaluate(resource_variable_ops.assign_add_variable_op(
handle, constant_op.constant(1, dtype=dtypes.int32)))
read = self.evaluate(
resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32))
self.assertEqual(read, 2)
@test_util.run_in_graph_and_eager_modes
def testScatterAdd(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_add(
handle, [0], constant_op.constant([[2]], dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[3]])
@test_util.run_in_graph_and_eager_modes
def testScatterSub(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_sub(
handle, [0], constant_op.constant([[2]], dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[-1]])
@test_util.run_in_graph_and_eager_modes
def testScatterMul(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_mul(
handle, [0], constant_op.constant([[5]], dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[5]])
def testEagerPickle(self):
with context.eager_mode():
tmp_dir = self.get_temp_dir()
fname = os.path.join(tmp_dir, "var.pickle")
with open(fname, "wb") as f:
v = resource_variable_ops.ResourceVariable(10.0)
pickle.dump(v, f)
with open(fname, "rb") as f:
v = pickle.load(f)
self.assertAllEqual(v.numpy(), 10.0)
@test_util.run_in_graph_and_eager_modes
def testScatterDiv(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[6]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_div(
handle, [0], constant_op.constant([[3]], dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[2]])
def testUseResource(self):
v = variables.VariableV1(1.0, use_resource=True)
self.assertTrue(isinstance(v, resource_variable_ops.ResourceVariable))
def testEagerNoUseResource(self):
with context.eager_mode():
v = variables.Variable(1.0)
self.assertTrue(isinstance(v, resource_variable_ops.ResourceVariable))
@test_util.run_in_graph_and_eager_modes
def testScatterMin(self):
with ops.device("cpu:0"):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(handle,
constant_op.constant(
[[6]],
dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_min(handle, [0],
constant_op.constant(
[[3]],
dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[3]])
def testMetagraph(self):
with ops.Graph().as_default():
with variable_scope.variable_scope("foo", use_resource=True):
a = variable_scope.get_variable("a", initializer=10.0)
momentum.MomentumOptimizer(
learning_rate=0.001, momentum=0.1).minimize(
a,
colocate_gradients_with_ops=True,
global_step=training_util.get_or_create_global_step())
graph = ops.get_default_graph()
meta_graph_def = saver.export_meta_graph(graph=graph)
with ops.Graph().as_default():
saver.import_meta_graph(meta_graph_def, import_scope="")
meta_graph_two = saver.export_meta_graph(graph=graph)
self.assertEqual(meta_graph_def, meta_graph_two)
@test_util.run_in_graph_and_eager_modes
def testScatterMax(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[6]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_max(
handle, [0], constant_op.constant([[3]], dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[6]])
@test_util.run_in_graph_and_eager_modes
def testScatterAddScalar(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_add(
handle, [0], constant_op.constant(2, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[3]])
@test_util.run_in_graph_and_eager_modes
def testScatterSubScalar(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_sub(
handle, [0], constant_op.constant(2, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[-1]])
@test_util.run_in_graph_and_eager_modes
def testScatterMulScalar(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_mul(
handle, [0], constant_op.constant(5, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[5]])
@test_util.run_in_graph_and_eager_modes
def testScatterDivScalar(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[6]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_div(
handle, [0], constant_op.constant(3, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[2]])
@test_util.run_in_graph_and_eager_modes
def testScatterMinScalar(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[6]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_min(
handle, [0], constant_op.constant(3, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[3]])
@test_util.run_in_graph_and_eager_modes
def testScatterMaxScalar(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[6]], dtype=dtypes.int32)))
self.evaluate(
resource_variable_ops.resource_scatter_max(
handle, [0], constant_op.constant(3, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[6]])
def testScatterUpdateString(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.string, shape=[1, 1])
self.evaluate(resource_variable_ops.assign_variable_op(
handle, constant_op.constant([["a"]], dtype=dtypes.string)))
self.evaluate(resource_variable_ops.resource_scatter_update(
handle, [0], constant_op.constant([["b"]], dtype=dtypes.string)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.string)
self.assertEqual(compat.as_bytes(self.evaluate(read)[0][0]),
compat.as_bytes("b"))
def testScatterUpdateStringScalar(self):
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.string, shape=[1, 1])
self.evaluate(
resource_variable_ops.assign_variable_op(handle,
constant_op.constant(
[["a"]],
dtype=dtypes.string)))
self.evaluate(
resource_variable_ops.resource_scatter_update(handle, [0],
constant_op.constant(
"b",
dtype=dtypes.string)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.string)
self.assertEqual(
compat.as_bytes(self.evaluate(read)[0][0]), compat.as_bytes("b"))
# TODO(alive): get this to work in Eager mode.
def testGPU(self):
with self.test_session(use_gpu=True):
abc = variable_scope.get_variable(
"abc",
shape=[1],
initializer=init_ops.ones_initializer(),
use_resource=True)
self.evaluate(variables.global_variables_initializer())
self.assertEqual(
self.evaluate(
resource_variable_ops.var_is_initialized_op(abc.handle)),
True)
def testScatterBool(self):
with context.eager_mode():
ref = resource_variable_ops.ResourceVariable(
[False, True, False], trainable=False)
indices = math_ops.range(3)
updates = constant_op.constant([True, True, True])
state_ops.scatter_update(ref, indices, updates)
self.assertAllEqual(ref.read_value(), [True, True, True])
@test_util.run_in_graph_and_eager_modes
def testConstraintArg(self):
constraint = lambda x: x
v = resource_variable_ops.ResourceVariable(
initial_value=lambda: 1, constraint=constraint, name="var0")
self.assertEqual(v.constraint, constraint)
constraint = 0
with self.assertRaises(ValueError):
v = resource_variable_ops.ResourceVariable(
initial_value=lambda: 1, constraint=constraint, name="var1")
# TODO(alive): how should this work in Eager mode?
def testInitFn(self):
with self.cached_session():
v = resource_variable_ops.ResourceVariable(
initial_value=lambda: 1, dtype=dtypes.float32)
self.assertEqual(v.handle.op.colocation_groups(),
v.initializer.inputs[1].op.colocation_groups())
def testHandleNumpy(self):
with context.eager_mode():
with self.assertRaises(ValueError):
resource_variable_ops.ResourceVariable(
1.0, name="handle-numpy").handle.numpy()
def testCountUpTo(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable(0, name="upto")
self.assertAllEqual(v.count_up_to(1), 0)
with self.assertRaises(errors.OutOfRangeError):
v.count_up_to(1)
def testCountUpToFunction(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable(0, name="upto")
self.assertAllEqual(state_ops.count_up_to(v, 1), 0)
with self.assertRaises(errors.OutOfRangeError):
state_ops.count_up_to(v, 1)
@test_util.run_in_graph_and_eager_modes
def testInitFnDtype(self):
v = resource_variable_ops.ResourceVariable(
initial_value=lambda: 1, dtype=dtypes.float32, name="var0")
self.assertEqual(dtypes.float32, v.value().dtype)
@test_util.run_in_graph_and_eager_modes
def testInitFnNoDtype(self):
v = resource_variable_ops.ResourceVariable(initial_value=lambda: 1,
name="var2")
self.assertEqual(dtypes.int32, v.value().dtype)
@test_util.run_in_graph_and_eager_modes
def testInitializeAllVariables(self):
v = resource_variable_ops.ResourceVariable(1, dtype=dtypes.float32,
name="var0")
self.evaluate(variables.global_variables_initializer())
self.assertEqual(1.0, self.evaluate(v.value()))
@test_util.run_in_graph_and_eager_modes
def testOperatorOverload(self):
v = resource_variable_ops.ResourceVariable(1.0, name="var0")
self.evaluate(variables.global_variables_initializer())
self.assertEqual(2.0, self.evaluate(v + v))
@test_util.run_in_graph_and_eager_modes
def testAssignMethod(self):
v = resource_variable_ops.ResourceVariable(1.0, name="var0")
self.evaluate(variables.global_variables_initializer())
self.evaluate(v.assign(2.0))
self.assertEqual(2.0, self.evaluate(v.value()))
# Tests for the 'read_value' argument:
assign_with_read = v.assign(3.0, read_value=True)
self.assertEqual(3.0, self.evaluate(assign_with_read))
assign_without_read = v.assign(4.0, read_value=False)
if context.executing_eagerly():
self.assertIsNone(assign_without_read)
else:
self.assertIsInstance(assign_without_read, ops.Operation)
self.evaluate(assign_without_read)
self.assertEqual(4.0, self.evaluate(v.value()))
@test_util.run_in_graph_and_eager_modes
def testLoad(self):
v = resource_variable_ops.ResourceVariable(1.0, name="var0")
self.evaluate(variables.global_variables_initializer())
v.load(2.0)
self.assertEqual(2.0, self.evaluate(v.value()))
def testToFromProtoCachedValue(self):
with ops.Graph().as_default():
v_def = resource_variable_ops.ResourceVariable(
initial_value=constant_op.constant(3.0)).to_proto()
v_prime = resource_variable_ops.ResourceVariable(variable_def=v_def)
      self.assertIsNone(getattr(v_prime, "_cached_value", None))
other_v_def = resource_variable_ops.ResourceVariable(
caching_device="cpu:0",
initial_value=constant_op.constant(3.0)).to_proto()
other_v_prime = resource_variable_ops.ResourceVariable(
variable_def=other_v_def)
      self.assertIsNotNone(other_v_prime._cached_value)
def testVariableDefInitializedInstances(self):
with ops.Graph().as_default(), self.cached_session() as sess:
v_def = resource_variable_ops.ResourceVariable(
initial_value=constant_op.constant(3.0)).to_proto()
with ops.Graph().as_default(), self.cached_session() as sess:
# v describes a VariableDef-based variable without an initial value.
v = resource_variable_ops.ResourceVariable(variable_def=v_def)
self.assertEqual(3.0, sess.run(v.initialized_value()))
# initialized_value should not rerun the initializer_op if the variable
# has already been initialized elsewhere.
sess.run(v.assign(1.0))
self.assertEqual(1.0, v.initialized_value().eval())
v_def.ClearField("initial_value_name")
with ops.Graph().as_default(), self.cached_session() as sess:
# Restoring a legacy VariableDef proto that does not have
# initial_value_name set should still work.
v = resource_variable_ops.ResourceVariable(variable_def=v_def)
# We should also be able to re-export the variable to a new meta graph.
self.assertProtoEquals(v_def, v.to_proto())
# But attempts to use initialized_value will result in errors.
with self.assertRaises(ValueError):
sess.run(v.initialized_value())
def testTrainableInProto(self):
with ops.Graph().as_default():
non_trainable_variable = resource_variable_ops.ResourceVariable(
trainable=False,
initial_value=constant_op.constant(10.0))
      self.assertFalse(
          resource_variable_ops.ResourceVariable(
              variable_def=non_trainable_variable.to_proto()).trainable)
trainable_variable = resource_variable_ops.ResourceVariable(
trainable=True,
initial_value=constant_op.constant(10.0))
      self.assertTrue(
          resource_variable_ops.ResourceVariable(
              variable_def=trainable_variable.to_proto()).trainable)
@test_util.run_in_graph_and_eager_modes
def testSparseRead(self):
init_value = np.reshape(np.arange(np.power(4, 3)), (4, 4, 4))
v = resource_variable_ops.ResourceVariable(
constant_op.constant(init_value, dtype=dtypes.int32), name="var3")
self.evaluate(variables.global_variables_initializer())
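    # sparse_read gathers rows along the first axis, mirroring the NumPy fancy
    # indexing used in the assertion below.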
value = self.evaluate(v.sparse_read([0, 3, 1, 2]))
self.assertAllEqual(init_value[[0, 3, 1, 2], ...], value)
def testToFromProto(self):
with self.cached_session():
v = resource_variable_ops.ResourceVariable(1.0)
variables.global_variables_initializer().run()
w = resource_variable_ops.ResourceVariable.from_proto(v.to_proto())
      self.assertEqual(2, math_ops.add(w, 1).eval())
      self.assertEqual(v._handle, w._handle)
      self.assertEqual(v._graph_element, w._graph_element)
@test_util.run_in_graph_and_eager_modes
def testAssignAddMethod(self):
v = resource_variable_ops.ResourceVariable(1.0, name="var0")
self.evaluate(variables.global_variables_initializer())
self.evaluate(v.assign_add(1.0))
self.assertEqual(2.0, self.evaluate(v.value()))
# Tests for the 'read_value' argument:
assign_with_read = v.assign_add(1.0, read_value=True)
self.assertEqual(3.0, self.evaluate(assign_with_read))
assign_without_read = v.assign_add(1.0, read_value=False)
if context.executing_eagerly():
self.assertIsNone(assign_without_read)
else:
self.assertIsInstance(assign_without_read, ops.Operation)
self.evaluate(assign_without_read)
self.assertEqual(4.0, self.evaluate(v.value()))
@test_util.run_in_graph_and_eager_modes
def testAssignSubMethod(self):
v = resource_variable_ops.ResourceVariable(3.0, name="var0")
self.evaluate(variables.global_variables_initializer())
self.evaluate(v.assign_sub(1.0))
self.assertEqual(2.0, self.evaluate(v.value()))
# Tests for the 'read_value' argument:
assign_with_read = v.assign_sub(1.0, read_value=True)
self.assertEqual(1.0, self.evaluate(assign_with_read))
assign_without_read = v.assign_sub(1.0, read_value=False)
if context.executing_eagerly():
self.assertIsNone(assign_without_read)
else:
self.assertIsInstance(assign_without_read, ops.Operation)
self.evaluate(assign_without_read)
self.assertEqual(0.0, self.evaluate(v.value()))
@test_util.run_in_graph_and_eager_modes
def testDestroyResource(self):
v = resource_variable_ops.ResourceVariable(3.0, name="var0")
self.evaluate(variables.global_variables_initializer())
self.assertEqual(3.0, self.evaluate(v.value()))
self.evaluate(resource_variable_ops.destroy_resource_op(v.handle))
with self.assertRaises(errors.FailedPreconditionError):
self.evaluate(v.value())
# Handle to a resource not actually created.
handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[])
# Should raise no exception
self.evaluate(resource_variable_ops.destroy_resource_op(
handle, ignore_lookup_error=True))
def testAssignDifferentShapes(self):
with self.cached_session() as sess, variable_scope.variable_scope(
"foo", use_resource=True):
var = variable_scope.get_variable("x", shape=[1, 1], dtype=dtypes.float32)
placeholder = array_ops.placeholder(dtypes.float32)
assign = var.assign(placeholder)
sess.run(
[assign],
feed_dict={placeholder: np.zeros(shape=[2, 2], dtype=np.float32)})
def testAssignDifferentShapesEager(self):
with context.eager_mode():
with variable_scope.variable_scope("foo"):
var = variable_scope.get_variable("x", shape=[1, 1],
dtype=dtypes.float32)
with self.assertRaisesRegexp(ValueError,
"Shapes.*and.*are incompatible"):
assign = var.assign(np.zeros(shape=[2, 2]))
self.evaluate(assign)
def testDtypeAfterFromProto(self):
v = resource_variable_ops.ResourceVariable(2.0)
w = resource_variable_ops.ResourceVariable.from_proto(v.to_proto())
self.assertIsInstance(w.dtype, dtypes.DType)
self.assertEqual(v.dtype, w.dtype)
# TODO(alive): get caching to work in eager mode.
def testCachingDevice(self):
with ops.device("/job:server/task:1"):
v = resource_variable_ops.ResourceVariable(
2.0, caching_device="/job:localhost")
self.assertEqual("/job:localhost", v.value().device)
with self.assertRaises(ValueError):
_ = v.value().op.get_attr("_class")
with ops.colocate_with(v.op):
w = resource_variable_ops.ResourceVariable(
2.0, caching_device="/job:localhost")
self.assertEqual("/job:localhost", w.value().device)
with self.assertRaises(ValueError):
_ = w.value().op.get_attr("_class")
def testSharedName(self):
with self.cached_session():
v = resource_variable_ops.ResourceVariable(300.0, name="var4")
variables.global_variables_initializer().run()
w = resource_variable_ops.var_handle_op(
dtype=v.dtype.base_dtype, shape=v.get_shape(), shared_name="var4",
# Needed in Eager since we get a unique container name by default.
container=ops.get_default_graph()._container)
w_read = resource_variable_ops.read_variable_op(w, v.dtype.base_dtype)
self.assertEqual(300.0, self.evaluate(w_read))
x = resource_variable_ops.var_handle_op(
dtype=v.dtype.base_dtype, shape=v.get_shape(), shared_name="var5",
container=ops.get_default_graph()._container)
with self.assertRaisesOpError("Resource .*/var5/.* does not exist"):
resource_variable_ops.read_variable_op(x, v.dtype.base_dtype).eval()
def testSharedNameWithNamescope(self):
with self.cached_session():
with ops.name_scope("foo"):
v = resource_variable_ops.ResourceVariable(300.0, name="var6")
self.assertEqual("foo/var6", v._shared_name) # pylint: disable=protected-access
self.assertEqual("foo/var6:0", v.name)
self.evaluate(variables.global_variables_initializer())
w = resource_variable_ops.var_handle_op(
dtype=v.dtype.base_dtype, shape=v.get_shape(), shared_name="foo/var6",
# Needed in Eager since we get a unique container name by default.
container=ops.get_default_graph()._container)
w_read = resource_variable_ops.read_variable_op(w, v.dtype.base_dtype)
self.assertEqual(300.0, self.evaluate(w_read))
@test_util.run_in_graph_and_eager_modes
def testShape(self):
v = resource_variable_ops.ResourceVariable(
name="var4", initial_value=array_ops.ones(shape=[10, 20, 35]))
self.assertEqual("(10, 20, 35)", str(v.shape))
self.assertEqual("(10, 20, 35)", str(v.get_shape()))
self.assertEqual("(10, 20, 35)", str(v.value().shape))
self.assertEqual("(3, 20, 35)", str(v.sparse_read([0, 1, 2]).shape))
if not context.executing_eagerly():
self.assertEqual(
"<unknown>",
str(v.sparse_read(array_ops.placeholder(dtypes.int32)).shape))
def testSetInitialValue(self):
with self.cached_session():
# Initialize variable with a value different from the initial value passed
# in the constructor.
v = resource_variable_ops.ResourceVariable(2.0)
v.initializer.run(feed_dict={v.initial_value: 3.0})
self.assertEqual(3.0, v.value().eval())
def testControlFlowInitialization(self):
"""Expects an error if an initializer is in a control-flow scope."""
def cond(i, _):
return i < 10
def body(i, _):
zero = array_ops.zeros([], dtype=dtypes.int32)
v = resource_variable_ops.ResourceVariable(initial_value=zero)
return (i + 1, v.read_value())
with self.assertRaisesRegexp(ValueError, "inside a control-flow"):
control_flow_ops.while_loop(cond, body, [0, 0])
def testVariableEager(self):
with context.eager_mode():
init = array_ops.ones(shape=[10, 20, 35], dtype=dtypes.int32)
constraint = lambda x: x
with ops.name_scope("foo"):
v = resource_variable_ops.ResourceVariable(
name="var7",
initial_value=init,
caching_device="cpu:0",
constraint=constraint)
# Test properties
self.assertEqual(dtypes.int32, v.dtype)
self.assertEqual("foo/var7:0", v.name)
self.assertAllEqual([10, 20, 35], v.shape.as_list())
      self.assertIsInstance(v.handle, ops.EagerTensor)
self.assertEqual(constraint, v.constraint)
self.assertAllEqual(init.numpy(), v.read_value().numpy())
self.assertAllEqual(init.numpy(), v.value().numpy())
# Callable init.
callable_init = lambda: init * 2
v2 = resource_variable_ops.ResourceVariable(
initial_value=callable_init, name="var7")
self.assertEqual("var7:0", v2.name)
self.assertAllEqual(2 * init.numpy(), v2.read_value().numpy())
# Test assign_add.
new_v2_val = v2.assign_add(v.read_value())
self.assertAllEqual(v.read_value().numpy() * 3, new_v2_val.numpy())
# Test assign_sub.
new_v2_val = v2.assign_sub(v.read_value())
self.assertAllEqual(v.read_value().numpy() * 2, new_v2_val.numpy())
# Test assign.
v2.assign(v.read_value())
self.assertAllEqual(v.read_value().numpy(), v2.read_value().numpy())
# Test load
v2.load(2 * v.read_value())
self.assertAllEqual(2 * v.read_value().numpy(), v2.read_value().numpy())
# Test convert_to_tensor
t = ops.convert_to_tensor(v)
self.assertAllEqual(t.numpy(), v.read_value().numpy())
# Test operations
self.assertAllEqual((v * 2).numpy(), (v + v).numpy())
def testContainerEager(self):
with context.eager_mode():
v1 = resource_variable_ops.ResourceVariable(initial_value=lambda: 1,
name="same")
with ops.container("different"):
v2 = resource_variable_ops.ResourceVariable(initial_value=lambda: 0,
name="same")
v2.assign(2)
self.assertEqual(1, v1.read_value().numpy())
self.assertEqual(2, v2.read_value().numpy())
def testDestruction(self):
with context.eager_mode():
var = resource_variable_ops.ResourceVariable(initial_value=1.0,
name="var8")
var_handle = var._handle
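      # Dropping the last reference destroys the backing resource in eager
      # mode, so destroying the stale handle afterwards raises NotFoundError.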
del var
with self.assertRaisesRegexp(errors.NotFoundError,
r"Resource .* does not exist."):
resource_variable_ops.destroy_resource_op(var_handle,
ignore_lookup_error=False)
def testScatterUpdate(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable([1.0, 2.0], name="update")
state_ops.scatter_update(v, [1], [3.0])
self.assertAllEqual([1.0, 3.0], v.numpy())
def testScatterAddStateOps(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable([1.0, 2.0], name="add")
state_ops.scatter_add(v, [1], [3])
self.assertAllEqual([1.0, 5.0], v.numpy())
def testScatterSubStateOps(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable([1.0, 2.0], name="sub")
state_ops.scatter_sub(v, [1], [3])
self.assertAllEqual([1.0, -1.0], v.numpy())
def testScatterNdAddStateOps(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable(
[1, 1, 1, 1, 1, 1, 1, 1], dtype=dtypes.float32, name="add")
indices = constant_op.constant([[4], [3], [1], [7]], dtype=dtypes.int32)
updates = constant_op.constant([9, 10, 11, 12], dtype=dtypes.float32)
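      # scatter_nd_add adds each update at its row index:
      # v[4] += 9, v[3] += 10, v[1] += 11, v[7] += 12.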
expected = np.array([1, 12, 1, 11, 10, 1, 1, 13])
state_ops.scatter_nd_add(v, indices, updates)
self.assertAllClose(expected, v.numpy())
def testScatterUpdateCast(self):
with context.eager_mode():
v = resource_variable_ops.ResourceVariable([1.0, 2.0], name="update")
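      # The integer updates are cast to the variable's float32 dtype before
      # the scatter is applied.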
state_ops.scatter_update(v, [1], [3])
self.assertAllEqual([1.0, 3.0], v.numpy())
@test_util.run_in_graph_and_eager_modes
def testScatterUpdateInvalidArgs(self):
v = resource_variable_ops.ResourceVariable([0, 1, 2, 3], name="update")
# The exact error and message differ between graph construction (where the
# error is realized during shape inference at graph construction time) and
# eager execution (where the error is realized during kernel execution).
with self.assertRaisesRegexp(Exception, r"shape.*2.*3"):
state_ops.scatter_update(v, [0, 1], [0, 1, 2])
@test_util.run_in_graph_and_eager_modes
def testAssignIncompatibleShape(self):
v = resource_variable_ops.ResourceVariable([0, 1, 2, 3])
self.evaluate(v.initializer)
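    # The pattern drops the leading letter so it matches both "Shapes" (graph
    # construction) and "shapes" (eager kernel) error messages.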
with self.assertRaisesRegexp(Exception, r"hapes must be equal"):
self.assertAllEqual(self.evaluate(v.assign_add(1)), [1, 2, 3, 4])
@test_util.run_in_graph_and_eager_modes
def testCopyToGraphUninitialized(self):
v = resource_variable_ops.ResourceVariable([0, 1, 2, 3])
copy_to_graph = ops.Graph()
with copy_to_graph.as_default(): # Intentionally testing v1 behavior
copied = resource_variable_ops.copy_to_graph_uninitialized(v)
self.assertEqual(v.name, copied.name)
with self.session(copy_to_graph) as session:
with self.assertRaises(errors.InvalidArgumentError):
session.run(copied.initializer)
class _MixedPrecisionVariableTest(test_util.TensorFlowTestCase):
@test_util.run_in_graph_and_eager_modes()
def test_dense_var_to_tensor_read_dtype_same_as_var_dtype(self):
# read_dtype is same as dtype
v = resource_variable_ops.ResourceVariable(1.0, dtype=dtypes.float32)
v = resource_variable_ops._MixedPrecisionVariable(v, dtypes.float32)
if not context.executing_eagerly():
v.initializer.run()
# dtype is not read_dtype, return NotImplemented
self.assertEqual(
NotImplemented, v._dense_var_to_tensor(dtype=dtypes.float16))
self.assertEqual(NotImplemented,
v._dense_var_to_tensor(dtype=dtypes.float16, as_ref=True))
# as_ref is False
t = v._dense_var_to_tensor(as_ref=False)
    self.assertIsInstance(t, ops.Tensor)
self.assertEqual(t.dtype, dtypes.float32)
self.assertEqual(self.evaluate(t), 1.0)
t = v._dense_var_to_tensor(dtype=dtypes.float32, as_ref=False)
    self.assertIsInstance(t, ops.Tensor)
self.assertEqual(t.dtype, dtypes.float32)
self.assertEqual(self.evaluate(t), 1.0)
# as_ref is True
self.assertEqual(NotImplemented, v._dense_var_to_tensor(as_ref=True))
self.assertEqual(NotImplemented,
v._dense_var_to_tensor(dtype=dtypes.float32, as_ref=True))
@test_util.run_in_graph_and_eager_modes()
def test_dense_var_to_tensor_read_dtype_different_from_var_dtype(self):
# read_dtype is different from dtype
v = resource_variable_ops.ResourceVariable(1.0, dtype=dtypes.float32)
v = resource_variable_ops._MixedPrecisionVariable(v, dtypes.float16)
if not context.executing_eagerly():
v.initializer.run()
# as_ref is False
t = v._dense_var_to_tensor(as_ref=False)
    self.assertIsInstance(t, ops.Tensor)
self.assertEqual(t.dtype, dtypes.float16)
self.assertEqual(self.evaluate(t), 1.0)
t = v._dense_var_to_tensor(dtype=dtypes.float16, as_ref=False)
    self.assertIsInstance(t, ops.Tensor)
self.assertEqual(t.dtype, dtypes.float16)
self.assertEqual(self.evaluate(t), 1.0)
# as_ref is True
self.assertEqual(NotImplemented, v._dense_var_to_tensor(as_ref=True))
self.assertEqual(NotImplemented,
v._dense_var_to_tensor(dtype=dtypes.float16, as_ref=True))
if __name__ == "__main__":
test.main()
| {
"content_hash": "9713d0e79a23dd45d3b9aedc714de587",
"timestamp": "",
"source": "github",
"line_count": 988,
"max_line_length": 88,
"avg_line_length": 42.60931174089069,
"alnum_prop": 0.6543066178915863,
"repo_name": "hehongliang/tensorflow",
"id": "e85b04469b1f97ebad98d24e28aba86970b17a88",
"size": "42787",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/python/kernel_tests/resource_variable_ops_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7666"
},
{
"name": "C",
"bytes": "194748"
},
{
"name": "C++",
"bytes": "26947133"
},
{
"name": "CMake",
"bytes": "174938"
},
{
"name": "Go",
"bytes": "908627"
},
{
"name": "Java",
"bytes": "323804"
},
{
"name": "Jupyter Notebook",
"bytes": "1833659"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "37293"
},
{
"name": "Objective-C",
"bytes": "7056"
},
{
"name": "Objective-C++",
"bytes": "63210"
},
{
"name": "Protocol Buffer",
"bytes": "249901"
},
{
"name": "PureBasic",
"bytes": "24932"
},
{
"name": "Python",
"bytes": "22872386"
},
{
"name": "Ruby",
"bytes": "327"
},
{
"name": "Shell",
"bytes": "336334"
}
],
"symlink_target": ""
} |
"""
Context manager for switching the current working directory
"""
from __future__ import unicode_literals
from __future__ import absolute_import, division, print_function
__author__ = "Graham Klyne (GK@ACM.ORG)"
__copyright__ = "Copyright 2011-2014, University of Oxford"
__license__ = "MIT (http://opensource.org/licenses/MIT)"
import os
class ChangeCurrentDir:
"""
    Context handler class that switches the current working directory for some controlled code.
"""
def __init__(self, usecwd):
self._usecwd = usecwd
self._oldcwd = None
return
def __enter__(self):
self._oldcwd = os.getcwd()
os.chdir(os.path.join(self._oldcwd, self._usecwd))
return
def __exit__(self, exctype, excval, exctraceback):
if self._oldcwd:
os.chdir(self._oldcwd)
return False
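
# A minimal usage sketch (the file name is hypothetical, not part of this
# module): the previous working directory is restored even if the body raises.
#
#   with ChangeCurrentDir("subdir"):
#       data = open("notes.txt").read()   # resolved relative to "subdir"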
if __name__ == "__main__":
print(os.getcwd())
oldcwd = os.getcwd()
with ChangeCurrentDir("test"):
print(os.getcwd())
        assert os.getcwd() == os.path.join(oldcwd, "test")
print(os.getcwd())
assert os.getcwd() == oldcwd
# End.
| {
"content_hash": "beb3765f2397e8d5c136f6b87d9ec148",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 94,
"avg_line_length": 25.818181818181817,
"alnum_prop": 0.6091549295774648,
"repo_name": "gklyne/annalist",
"id": "f15b4d6986ec80e2fae8b7e984287191e1e646bd",
"size": "1154",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/annalist_root/utils/SetcwdContext.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "295504"
},
{
"name": "Dockerfile",
"bytes": "2276"
},
{
"name": "HTML",
"bytes": "160550"
},
{
"name": "Haskell",
"bytes": "8403"
},
{
"name": "JavaScript",
"bytes": "3127"
},
{
"name": "Makefile",
"bytes": "3312"
},
{
"name": "Python",
"bytes": "4767305"
},
{
"name": "Shell",
"bytes": "71836"
},
{
"name": "TeX",
"bytes": "131682"
}
],
"symlink_target": ""
} |