| repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
ProkopHapala/SimpleSimulationEngine | python/pySimE/space/exp/OrbitalTransferOpt/OrbOpt_map/OrbitalTransfer_non_uniform.py | Python | mit | 8,038 | 0.046778 | #!/usr/bin/env python
from pylab import *
from Simplex_optimization import Simplex
from ClampedCubicSpline import *
from Random_optimization import MCBias_Run,MCBias2_Run
nnodes = 12
nsplines = nnodes + 1
perNode = 10
nsamp = (nsplines)*perNode
Gen= [0.0]*2*nnodes
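# Genome layout (inferred from gen2controPoints below): Gen[0:nnodes] holds the
# radial control points r_i and Gen[nnodes:2*nnodes] holds the accumulated
# phase angles, so each node contributes one (radius, phase) pair in polar coordinates.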
def R2omega(R):
return sqrt(1.0/R**3)
T = 3.0
R0 = 1.0
R1 = 0.2
v0=R2omega(R0)
v1=R2omega(R1)
P0=array( [R0,0] ); V0=array( [2,v0] );
vo = v0
ph = 0.0
ti = zeros(nnodes+2)
for i in range(nsplines):
tt=(i+1)/float(nsplines)
ti[i+1]= 0.5*( 3*tt - tt**2 ) * T
print " len(ti) ",len(ti)
print " ti: ",ti
for i in range(nnodes):
w=(i+1)/float(nsplines); mw= 1.0-w
ri = mw*R0 + w*R1
Gen[i] = ri
vi = R2omega(ri)
dt = ti[i+1]-ti[i]
ph += 0.5*(vo+vi)*dt
Gen[i+nnodes] = ph
vo = vi
vi = R2omega(R1)
dt = ti[nnodes+1]-ti[nnodes]
ph += 0.5*(vo+vi)*dt
P1=array( [R1,ph] ); V1=array( [0,v1] );
print " P0 ", P0
print " P1 ", P1
print " V0 ", V0
print " V1 ", V1
ts = array( range(nsamp) )*T/float(nsamp)
def gen2controPoints( Gen ):
n = (len(Gen)/2)
ri = [0]*(n+2); oi = [0]*(n+2);
#print " n ri oi ",n, len(oi), len(ri)
ri[0] =P0[0]; oi[0] =P0[1];
ri[n+1]=P1[0]; oi[n+1]=P1[1];
for i in range(0,n):
ri[i+1]=Gen[i ]
oi[i+1]=Gen[i+n]
return ri,oi
def evalGen ( ti, Gen ):
#print len(ti),len(ri)
ri,oi = gen2controPoints( Gen )
#print " len ti,ri,oi: ",len(ti),len(ri),len(oi)
SR, arn = Spline4_clamped( ti, ri, V0[0], V1[0])
SO, aon = Spline4_clamped( ti, oi, V0[1], V1[1])
return evalPolarForces( T, SR, SO, perNode)
maxThrust = 2.0
def fitnesFunc( Fs ):
fsum = 0
tsum = 0
#print "len(Fs) ", len(Fs[4])," len(ts) ", len(ts)
for i in range(len(Fs[4])-1):
dt=(ts[i+1]-ts[i])
#df=0.5*(Fs[4][i+1]+Fs[4][i])
df=0.5*(Fs[4][i+1]**2+Fs[4][i]**2)
fsum+=df*dt
tsum+=dt
#df_over = df-maxThrust
#if(df_over>0):
# fsum+= (df_over**2) * dt # penalty for overloading engine
return -sqrt(fsum/tsum)
#return -T* sqrt((Fs[4]**2).sum()) /len(ts)
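# fitnesFunc therefore returns the negative root-mean-square of the thrust
# profile Fs[4] (a trapezoidal integral of F^2 over time, normalised by the
# total time), so a larger, less negative fitness means a lower RMS thrust;
# the commented block above sketches an optional penalty for exceeding maxThrust.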
def evalFitness( Gen ):
global Os,Rs,Fs
ts,Os,Rs,Fs = evalGen ( ti, Gen )
fitness = fitnesFunc(Fs)
return -fitness
def plotTrj( Os,Rs,Fs, i, clr="k" ):
#subplot(2,5,1+5*i); plot( Os[0], Rs[0], '.-' ); grid()
ri,oi= gen2controPoints( Gen)
subplot(2,5,1, polar=True); plot( Os[0], Rs[0], '.-'+clr); plot( oi, ri, 'o'+clr);
subplot(2,5,2);
print len(ti),len(ri),len(oi)
plot( ts, Rs[0],'-'+clr ); plot( ts, Os[0], '--'+clr );
plot( ti, ri,'o'+clr ); plot( ti, oi, 'x'+clr );
grid()
subplot(2,5,3); plot( ts, Rs[1],'-'+clr ); plot( ts, Os[1], '--'+clr ); grid()
subplot(2,5,5+i);
plot( ts, Rs[2],'r--' ); plot( ts, Os[2], 'b--' );
plot( ts, Fs[1],'r-' ); plot( ts, Fs[0], 'b-' );
plot( ts, Fs[2],'g-'); # G
plot( ts, Fs[3],'m-'); # FTR
plot( ts, Fs[4],'k.-' ); # FT
grid()
def map2D( X, U1, U2, f1, f2, n1, n2 ):
#print " X: ",X
M = zeros((2*n1+1,2*n2+1))
for i in range(-n1,n1+1):
d1 = array(U1)*(i*f1/n1)
for j in range(-n2,n2+1):
d2 = array(U2)*(j*f2/n2)
M[i+n1,j+n2] = evalFitness( array(X)+d1 +d2 )
return M
def plotMaps(irow,nrow, Gen):
for i in range(nnodes):
U1 = zeros(2*nnodes); U1[i ]=1.0
U2 = zeros(2*nnodes); U2[i+nnodes]=1.0
print " maping node",i," U1: ",U1," U2: ", U2
subplot(nrow, nnodes, nnodes*irow+i+1 )
mapa = map2D( Gen, U1, U2, 0.1, 0.1, 3, 3 )
imshow(mapa, interpolation='bicubic', cmap='jet'); colorbar( )
CS = contour(mapa, colors="g"); clabel(CS, inline=0.5, fontsize=8)
def TryNew( GenBest, fitnessBest, stepSize ):
hit = False
GenNew = GenBest[:] + (rand(nnodes*2)[:]-0.5)*stepSize
ts,Os,Rs,Fs = evalGen ( ti, GenNew )
fitnessNew = fitnesFunc(Fs)
#fitnessNew = evalFitness( GenNew )
if(fitnessNew > fitnessBest ):
hit = True
GenBest = GenNew
fitnessBest = fitnessNew
#print " Better is ",GenBest," fitness = ",fitnessBest,
#print " fitness: ",fitnessBest, " stepSize: ", stepSize
subplot(2,5,5); plot( ts, Fs[4], '-', lw=0.25 ); grid()
return GenBest, fitnessBest,hit
def MC_Run( Gen, stepSize, accuracy, stopAfter, missRatio, GenHistory ):
fitness = -100000000
ts,Os,Rs,Fs = evalGen(ti, Gen)
fitness = fitnesFunc(Fs)
print
print " ========= MC Optimization ================= "
print "stopAfter, missRatio: ", stopAfter, missRatio
badInRow = 0
fromMajor = 0
fitnessMajor = fitness
for i in range(10000):
Gen,fitness,hit = TryNew( Gen, fitness, stepSize )
if(hit):
#print " fitness: ",fitnessBest, " stepSize: ", stepSize
badInRow = 0
stepSize *= 1.2
print " fitness: ",fitness," step ",stepSize, " i: ", i, " bad ",badInRow," fromMajor",fromMajor
GenHistory.append(Gen)
if(fitness-fitnessMajor)>accuracy:
fitnessMajor = fitness
fromMajor = 0
if badInRow>missRatio:
stepSize *= 0.5
badInRow = 0
print " stepSize down to ", stepSize
if fromMajor>stopAfter:
print " Not able to improve => Exiting .... "
break
badInRow += 1
fromMajor += 1
return Gen
def Simplex_Run(Gen,steps, GenHistory):
print
print " ========= Simplex Optimization ================= "
Simp = Simplex(evalFitness, Gen, steps )
#values, err, niter = SimplexOpt.minimize()
old_low = 10000000000
lastImprovement = 0
for i in range(0, 10000):
converged, err,low,hi = Simp.simplexStep( 0.00001 )
if converged:
print " converged in ",i," steps "
break;
if(low < old_low):
lastImprovement = i
old_low = low
subplot(2,5,5); plot( ts, Fs[4], '-', lw=0.25 ); grid()
GenHistory.append(list(Simp.simplex[Simp.lowest]))
print " new_low : ", low, " iter: ", i, " err ", err
if(i-lastImprovement)>(nnodes*16):
print " Not able to improve => Exiting .... "
break;
print Simp.simplex[Simp.lowest]
return Simp.simplex[Simp.lowest]
# ================ MAIN PROGRAM BODY =========================
figure(num=None, figsize=(20, 10))
GenHistory = []
print " Initial Gen : ", Gen
ts, Os,Rs,Fs = evalGen( ti, Gen)
plotTrj( Os,Rs,Fs, 1, "r" )
Gen0 = array(Gen).copy()
#Gen = MC_Run( Gen, 10.0, 0.01, 4*8*nnodes, 8*nnodes , GenHistory )
#Gen = MC_Run( Gen, 10.0, 0.05, 8*4*nnodes, 4*nnodes , GenHistory )
#Gen = MCBias_Run( evalFitness, Gen, 0.1, 0.001, 4*4*nnodes, 2*nnodes, 5000, GenHistory )
Gen = MCBias2_Run( evalFitness, Gen, 0.1, 0.0001, 8*4*nnodes, 2*nnodes, 5000, GenHistory )
GenRnd = array(Gen).copy()
ts, Os,Rs,Fs = evalGen( ti, Gen)
plotTrj( Os,Rs,Fs, 2, "g" )
steps = ones(nnodes*2)*0.05
Gen = Simplex_Run(Gen,steps, GenHistory)
ts, Os,Rs,Fs = evalGen( ti, Gen)
plotTrj( Os,Rs,Fs, 3, "k" )
if len(GenHistory)>2:
GenHistory = transpose(array(GenHistory ))
subplot(2,5,10);
for i in range(nnodes):
plot( GenHistory[i ]-Gen0[i ], 'r-' );
plot( GenHistory[i+nnodes]-Gen0[i+nnodes], 'b-' );
#legend( bbox_to_anchor=(0.5, 1.00, 1., 0.000) )
ts, Os,Rs,Fs = evalGen( ti, Gen)
subplot(2,5,5); plot( ts, Fs[4], 'k-', lw=2 ); grid()
ts, Os,Rs,Fs = evalGen( ti, Gen0)
subplot(2,5,5); plot( ts, Fs[4], 'r-', lw=2 ); grid()
ts, Os,Rs,Fs = evalGen( ti, GenRnd)
subplot(2,5,5); plot( ts, Fs[4], 'g-', lw=2 ); grid()
print " Initial gen ", Gen0
print " final gen ", Gen
savefig("plost.png", bbox_inches='tight')
'''
figure(num=None, figsize=(20, 5))
plotMaps(0,2, Gen0)
plotMaps(1,2, Gen )
savefig("valley.png", bbox_inches='tight')
'''
show()
|
annoviko/pyclustering | pyclustering/cluster/tests/unit/ut_xmeans.py | Python | gpl-3.0 | 23,984 | 0.004128 | """!
@brief Unit-tests for X-Means algorithm.
@authors Andrei Novikov (pyclustering@yandex.ru)
@date 2014-2020
@copyright BSD-3-Clause
"""
import unittest
# Generate images without having a window appear.
import matplotlib
matplotlib.use('Agg')
from pyclustering.cluster.tests.xmeans_templates import XmeansTestTemplates
from pyclustering.cluster.xmeans import xmeans, splitting_type
from pyclustering.samples.definitions import SIMPLE_SAMPLES, FCPS_SAMPLES
from pyclustering.utils import read_sample
from pyclustering.utils.metric import distance_metric, type_metric
class XmeansUnitTest(unittest.TestCase):
def testBicClusterAllocationSampleSimple1(self):
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [[3.7, 5.5], [6.7, 7.5]], [5, 5], splitting_type.BAYESIAN_INFORMATION_CRITERION, 20, False)
def testBicClusterAllocationSampleSimple1Repeat(self):
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [[3.7, 5.5], [6.7, 7.5]], [5, 5], splitting_type.BAYESIAN_INFORMATION_CRITERION, 20, False, repeat=5)
def testBicSampleSimple1WithoutInitialCenters(self):
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, None, [5, 5], splitting_type.BAYESIAN_INFORMATION_CRITERION, 20, False)
def testBicSampleSimple1WithoutInitialCentersRepeat(self):
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, None, [5, 5], splitting_type.BAYESIAN_INFORMATION_CRITERION, 20, False, repeat=3)
def testBicSampleSimple1MaxLessReal(self):
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [[3.7, 5.5]], None, splitting_type.BAYESIAN_INFORMATION_CRITERION, 1, False)
def testBicSampleSimple1MaxLessRealRepeat(self):
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [[3.7, 5.5]], None, splitting_type.BAYESIAN_INFORMATION_CRITERION, 1, False, repeat=5)
def testBicClusterAllocationSampleSimple1MetricEuclidean(self):
metric = distance_metric(type_metric.EUCLIDEAN)
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [[3.7, 5.5], [6.7, 7.5]], [5, 5], splitting_type.BAYESIAN_INFORMATION_CRITERION, 20, False, metric=metric)
def testBicClusterAllocationSampleSimple1MetricEuclideanSquare(self):
metric = distance_metric(type_metric.EUCLIDEAN_SQUARE)
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [[3.7, 5.5], [6.7, 7.5]], [5, 5], splitting_type.BAYESIAN_INFORMATION_CRITERION, 20, False, metric=metric)
def testBicClusterAllocationSampleSimple1MetricManhattan(self):
metric = distance_metric(type_metric.MANHATTAN)
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [[3.7, 5.5], [6.7, 7.5]], [5, 5], splitting_type.BAYESIAN_INFORMATION_CRITERION, 20, False, metric=metric)
def testBicClusterAllocationSampleSimple1MetricChebyshev(self):
metric = distance_metric(type_metric.CHEBYSHEV)
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [[3.7, 5.5], [6.7, 7.5]], [5, 5], splitting_type.BAYESIAN_INFORMATION_CRITERION, 20, False, metric=metric)
def testBicClusterAllocationSampleSimple1MetricMinkowski2(self):
metric = distance_metric(type_metric.MINKOWSKI, degree=2)
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [[3.7, 5.5], [6.7, 7.5]], [5, 5], splitting_type.BAYESIAN_INFORMATION_CRITERION, 20, False, metric=metric)
def testBicClusterAllocationSampleSimple1MetricMinkowski4(self):
metric = distance_metric(type_metric.MINKOWSKI, degree=4)
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [[3.7, 5.5], [6.7, 7.5]], [5, 5], splitting_type.BAYESIAN_INFORMATION_CRITERION, 20, False, metric=metric)
def testBicClusterAllocationSampleSimple1MetricCanberra(self):
metric = distance_metric(type_metric.CANBERRA)
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [[3.7, 5.5], [6.7, 7.5]], [5, 5], splitting_type.BAYESIAN_INFORMATION_CRITERION, 20, False, metric=metric)
def testBicClusterAllocationSampleSimple1MetricChiSquare(self):
metric = distance_metric(type_metric.CHI_SQUARE)
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [[3.7, 5.5], [6.7, 7.5]], [5, 5], splitting_type.BAYESIAN_INFORMATION_CRITERION, 20, False, metric=metric)
def testBicClusterAllocationSampleSimple1MetricGower(self):
metric = distance_metric(type_metric.GOWER, data=read_sample(SIMPLE_SAMPLES.SAMPLE_SIMPLE1))
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [[3.7, 5.5], [6.7, 7.5]], [5, 5], splitting_type.BAYESIAN_INFORMATION_CRITERION, 20, False, metric=metric)
def testMndlClusterAllocationSampleSimple1MetricEuclidean(self):
metric = distance_metric(type_metric.EUCLIDEAN)
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [[3.7, 5.5], [6.7, 7.5]], [5, 5], splitting_type.MINIMUM_NOISELESS_DESCRIPTION_LENGTH, 20, False, metric=metric)
def testMndlClusterAllocationSampleSimple1MetricEuclideanSquare(self):
metric = distance_metric(type_metric.EUCLIDEAN_SQUARE)
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [[3.7, 5.5], [6.7, 7.5]], [5, 5], splitting_type.MINIMUM_NOISELESS_DESCRIPTION_LENGTH, 20, False, metric=metric, alpha=0.1, beta=0.1)
def testMndlClusterAllocationSampleSimple1MetricManhattan(self):
metric = distance_metric(type_metric.MANHATTAN)
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [[3.7, 5.5], [6.7, 7.5]], [5, 5], splitting_type.MINIMUM_NOISELESS_DESCRIPTION_LENGTH, 20, False, metric=metric)
def testMndlClusterAllocationSampleSimple1MetricChebyshev(self):
metric = distance_metric(type_metric.CHEBYSHEV)
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [[3.7, 5.5], [6.7, 7.5]], [5, 5], splitting_type.MINIMUM_NOISELESS_DESCRIPTION_LENGTH, 20, False, metric=metric)
def testMndlClusterAllocationSampleSimple1MetricMinkowski2(self):
metric = distance_metric(type_metric.MINKOWSKI, degree=2)
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [[3.7, 5.5], [6.7, 7.5]], [5, 5], splitting_type.MINIMUM_NOISELESS_DESCRIPTION_LENGTH, 20, False, metric=metric)
def testMndlClusterAllocationSampleSimple1MetricMinkowski4(self):
metric = distance_metric(type_metric.MINKOWSKI, degree=4)
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [[3.7, 5.5], [6.7, 7.5]], [5, 5], splitting_type.MINIMUM_NOISELESS_DESCRIPTION_LENGTH, 20, False, metric=metric)
def testMndlClusterAllocationSampleSimple1MetricCanberra(self):
metric = distance_metric(type_metric.CANBERRA)
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [[3.7, 5.5], [6.7, 7.5]], [5, 5], splitting_type.MINIMUM_NOISELESS_DESCRIPTION_LENGTH, 20, False, metric=metric)
def testMndlClusterAllocationSampleSimple1MetricChiSquare(self):
metric = distance_metric(type_metric.CHI_SQUARE)
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [[3.7, 5.5], [6.7, 7.5]], [5, 5], splitting_type.MINIMUM_NOISELESS_DESCRIPTION_LENGTH, 20, False, metric=metric, alpha=0.1, beta=0.1, random_state=1000)
def testMndlClusterAllocationSampleSimple1MetricGower(self):
metric = distance_metric(type_metric.GOWER, data=read_sample(SIMPLE_SAMPLES.SAMPLE_SIMPLE1))
XmeansTestTemplates.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [[3.7, 5.5], [6.7, 7.5]], [5, 5], splitting_type.MINIMUM_NOISELESS_DESCRIPTION_LENGTH, 20, False, metric=metric)
def testBicWrongStartClusterAllocationSampleSimple1(self):
XmeansTestTemplates.templateLen |
BillGuard/cabot | cabot/cabotapp/graphite.py | Python | mit | 2,507 | 0.000399 | from django.conf import settings
import requests
import logging
graphite_api = settings.GRAPHITE_API
user = settings.GRAPHITE_USER
password = settings.GRAPHITE_PASS
graphite_from = settings.GRAPHITE_FROM
auth = (user, password)
def get_data(target_pattern):
resp = requests.get(
graphite_api + 'render', auth=auth,
params={
'target': target_pattern,
'format': 'json',
'from': graphite_from
}
)
resp.raise_for_status()
return resp.json
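# Note: the render endpoint returns a list of series objects shaped like
# [{"target": "...", "datapoints": [[value, timestamp], ...]}, ...],
# which is how parse_metric() below consumes the result of get_data().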
def get_matching_metrics(pattern):
print 'Getting metrics matching %s' % pattern
resp = requests.get(
graphite_api + 'metrics/find/', auth=auth,
params={
'query': pattern,
'format': 'completer'
},
headers={
'accept': 'application/json'
}
)
resp.raise_for_status()
return resp.json
def get_all_metrics(limit=None):
"""Grabs all metrics by navigating find API recursively"""
metrics = []
def get_leafs_of_node(nodepath):
for obj in get_matching_metrics(nodepath)['metrics']:
if int(obj['is_leaf']) == 1:
metrics.append(obj['path'])
else:
get_leafs_of_node(obj['path'])
get_leafs_of_node('')
return metrics
def parse_metric(metric, mins_to_check=5):
"""
Returns dict with:
- num_series_with_data: Number of series with data
- num_series_no_data: Number of total series
- max
- min
- average_value
"""
ret = {
'num_series_with_data': 0,
'num_series_no_data': 0,
'error': None,
'all_values': [],
'raw': ''
}
try:
data = get_data(metric)
except requests.exceptions.RequestException, e:
ret['error'] = 'Error getting data from Graphite: %s' % e
ret['raw'] = ret['error']
logging.error('Error getting data from Graphite: %s' % e)
return ret
all_values = []
for target in data:
values = [float(t[0])
for t in target['datapoints'][-mins_to_check:] if t[0] is not None]
if values:
ret['num_series_with_data'] += 1
else:
ret['num_series_no_data'] += 1
all_values.extend(values)
if all_values:
ret['max'] = max(all_values)
ret['min'] = min(all_values)
ret['average_value'] = sum(all_values) / len(all_values)
ret['all_values'] = all_values
ret['raw'] = data
return ret
|
rzhxeo/youtube-dl | youtube_dl/extractor/__init__.py | Python | unlicense | 17,033 | 0.000059 | from __future__ import unicode_literals
from .abc import ABCIE
from .abc7news import Abc7NewsIE
from .academicearth import AcademicEarthCourseIE
from .addanime import AddAnimeIE
from .adobetv import AdobeTVIE
from .adultswim import AdultSwimIE
from .aftonbladet import AftonbladetIE
from .aljazeera import AlJazeeraIE
from .alphaporno import AlphaPornoIE
from .anitube import AnitubeIE
from .anysex import AnySexIE
from .aol import AolIE
from .allocine import AllocineIE
from .aparat import AparatIE
from .appletrailers import AppleTrailersIE
from .archiveorg import ArchiveOrgIE
from .ard import ARDIE, ARDMediathekIE
from .arte import (
ArteTvIE,
ArteTVPlus7IE,
ArteTVCreativeIE,
ArteTVConcertIE,
ArteTVFutureIE,
ArteTVDDCIE,
ArteTVEmbedIE,
)
from .atresplayer import AtresPlayerIE
from .atttechchannel import ATTTechChannelIE
from .audiomack import AudiomackIE, AudiomackAlbumIE
from .azubu import AzubuIE
from .bambuser import BambuserIE, BambuserChannelIE
from .bandcamp import BandcampIE, BandcampAlbumIE
from .bbccouk import BBCCoUkIE
from .beeg import BeegIE
from .behindkink import BehindKinkIE
from .bet import BetIE
from .bild import BildIE
from .bilibili import BiliBiliIE
from .blinkx import BlinkxIE
from .bliptv import BlipTVIE, BlipTVUserIE
from .bloomberg import BloombergIE
from .bpb import BpbIE
from .br import BRIE
from .breakcom import BreakIE
from .brightcove import BrightcoveIE
from .buzzfeed import BuzzFeedIE
from .byutv import BYUtvIE
from .c56 import C56IE
from .canal13cl import Canal13clIE
from .canalplus import CanalplusIE
from .canalc2 import Canalc2IE
from .cbs import CBSIE
from .cbsnews import CBSNewsIE
from .ceskatelevize import CeskaTelevizeIE
from .channel9 import Channel9IE
from .chilloutzone import ChilloutzoneIE
from .cinchcast import CinchcastIE
from .clipfish import ClipfishIE
from .cliphunter import CliphunterIE
from .clipsyndicate import ClipsyndicateIE
from .cloudy import CloudyIE
from .clubic import ClubicIE
from .cmt import CMTIE
from .cnet import CNETIE
from .cnn import (
CNNIE,
CNNBlogsIE,
CNNArticleIE,
)
from .collegehumor import CollegeHumorIE
from .collegerama import CollegeRamaIE
from .comedycentral import ComedyCentralIE, ComedyCentralShowsIE
from .comcarcoff import ComCarCoffIE
from .commonmistakes import CommonMistakesIE
from .condenast import CondeNastIE
from .cracked import CrackedIE
from .criterion import CriterionIE
from .crunchyroll import (
CrunchyrollIE,
CrunchyrollShowPlaylistIE
)
from .cspan import CSpanIE
from .dailymotion import (
DailymotionIE,
DailymotionPlaylistIE,
DailymotionUserIE,
)
from .daum import DaumIE
from .dbtv import DBTVIE
from .deezer import DeezerPlaylistIE
from .dfb import DFBIE
from .dotsub import DotsubIE
from .dreisat import DreiSatIE
from .drbonanza import DRBonanzaIE
from .drtuber import DrTuberIE
from .drtv import DRTVIE
from .dvtv import DVTVIE
from .dump import DumpIE
from .defense import DefenseGouvFrIE
from .discovery import DiscoveryIE
from .divxstage import DivxStageIE
from .dropbox import DropboxIE
from .ebaumsworld import EbaumsWorldIE
from .echomsk import EchoMskIE
from .ehow import EHowIE
from .eighttracks import EightTracksIE
from .einthusan import EinthusanIE
from .eitb import EitbIE
from .ellentv import (
EllenTVIE,
EllenTVClipsIE,
)
from .elpais import ElPaisIE
from .empflix import EMPFlixIE
from .engadget import EngadgetIE
from .eporner import EpornerIE
from .eroprofile import EroProfileIE
from .escapist import EscapistIE
from .everyonesmixtape import EveryonesMixtapeIE
from .exfm import ExfmIE
from .expotv import ExpoTVIE
from .extremetube import ExtremeTubeIE
from .facebook import FacebookIE
from .faz import FazIE
from .fc2 import FC2IE
from .firedrive import FiredriveIE
from .firstpost import FirstpostIE
from .firsttv import FirstTVIE
from .fivemin import FiveMinIE
from .fktv import (
FKTVIE,
FKTVPosteckeIE,
)
from .flickr import FlickrIE
from .folketinget import FolketingetIE
from .fourtube import FourTubeIE
from .foxgay import FoxgayIE
from .foxnews import FoxNewsIE
from .franceculture import FranceCultureIE
from .franceinter import FranceInterIE
from .francetv import (
PluzzIE,
FranceTvInfoIE,
FranceTVIE,
GenerationQuoiIE,
CultureboxIE,
)
from .freesound import FreesoundIE
from .freespeech import FreespeechIE
from .freevideo import FreeVideoIE
from .funnyordie import FunnyOrDieIE
from .gamekings import GamekingsIE
from .gameone import (
GameOneIE,
GameOnePlaylistIE,
)
from .gamespot import GameSpotIE
from .gamestar import GameStarIE
from .gametrailers import GametrailersIE
from .gdcvault import GDCVaultIE
from .generic import GenericIE
from .giantbomb import GiantBombIE
from .giga import GigaIE
from .glide import GlideIE
from .globo import GloboIE
from .godtube import GodTubeIE
from .goldenmoustache import GoldenMoustacheIE
from .golem import GolemIE
from .googleplus import GooglePlusIE
from .googlesearch import GoogleSearchIE
from .gorillavid import GorillaVidIE
from .goshgay import GoshgayIE
from .grooveshark import GroovesharkIE
from .groupon import GrouponIE
from .hark import HarkIE
from .hearthisat import HearThisAtIE
from .heise import HeiseIE
from .hellporno import HellPornoIE
from .helsinki import HelsinkiIE
from .hentaistigma import HentaiStigmaIE
from .hitbox import HitboxIE, HitboxLiveIE
from .hornbunny import HornBunnyIE
from .hostingbulk import HostingBulkIE
from .hotnewhiphop import HotNewHipHopIE
from .howcast import HowcastIE
from .howstuffworks import HowStuffWorksIE
from .huffpost import HuffPostIE
from .hypem import HypemIE
from .iconosquare import IconosquareIE
from .ign import IGNIE, OneUPIE
from .imdb import (
ImdbIE,
ImdbListIE
)
from .ina import InaIE
from .infoq import InfoQIE
from .instagram import InstagramIE, InstagramUserIE
from .internetvideoarchive import InternetVideoArchiveIE
from .iprima import IPrimaIE
from .ivi import (
IviIE,
IviCompilationIE
)
from .izlesene import IzleseneIE
from .jadorecettepub import JadoreCettePubIE
from .jeuxvideo import JeuxVideoIE
from .jove import JoveIE
from .jukebox import JukeboxIE
from .jpopsukitv import JpopsukiIE
from .kankan import KankanIE
from .karaoketv import KaraoketvIE
from .keezmovies import KeezMoviesIE
from .khanacademy import KhanAcademyIE
from .kickstarter import KickStarterIE
from .keek import KeekIE
from .kontrtube import KontrTubeIE
from .krasview import KrasViewIE
from .ku6 import Ku6IE
from .la7 import LA7IE
from .laola1tv import Laola1TvIE
from .lifenews import LifeNewsIE
from .liveleak import LiveLeakIE
from .livestream import (
LivestreamIE,
LivestreamOriginalIE,
LivestreamShortenerIE,
)
from .lnkgo import LnkGoIE
from .lrt import LRTIE
from .lynda import (
LyndaIE,
LyndaCourseIE
)
from .m6 import M6IE
from .macgamestore import MacGameStoreIE
from .mailru import MailRuIE
from .malemotion import MalemotionIE
from .mdr import MDRIE
from .metacafe import MetacafeIE
from .metacritic import MetacriticIE
from .mgoon import MgoonIE
from .minhateca import MinhatecaIE
from .ministrygrid import MinistryGridIE
from .mit import TechTVMITIE, MITIE, OCWMITIE
from .mitele import MiTeleIE
from .mixcloud import MixcloudIE
from .mlb import MLBIE
from .mpora import MporaIE
from .moevideo import MoeVideoIE
from .mofosex import MofosexIE
from .mojvideo import MojvideoIE
from .moniker import MonikerIE
from .mooshare import MooshareIE
from .morningstar import MorningstarIE
from .motherless import MotherlessIE
from .motorsport import MotorsportIE
from .movieclips import MovieClipsIE
from .moviezine import MoviezineIE
from .movshare import MovShareIE
from .mtv import (
MTVIE,
MTVServicesEmbeddedIE,
MTVIggyIE,
)
from .muenchentv import MuenchenTVIE
from .musicplayon import MusicPlayOnIE
from .musicvault import MusicVaultIE
from .muzu import MuzuTVIE
from .myspace import MySpaceIE, MySpaceAlbumIE
from .myspass import MySpassIE
from .myvideo import MyVideoIE
from .myvidster import MyVidsterIE
from .naver import NaverIE
from .nba import NBAIE
from .nbc import (
NBCIE,
NBCNewsIE,
)
from .ndr import NDRIE
from .ndtv import |
looker-open-source/sdk-codegen | examples/python/cloud-function-user-provision/main.py | Python | mit | 4,360 | 0.016743 | """This Cloud Function leverages the Looker Python SDK to manage user provisioning.
It takes an email address as an input, then checks whether this email is
associated with an existing Looker user. If an existing user is found, an email
to reset the password is sent. Otherwise, a new user is created and a setup
email is sent.
The `main` function is triggered through an HTTP request. Two example approaches
are provided below:
main(request): takes a POST request in the form of {"email":"test@test.com"}
and reads the email value from the request body
main_gsheet(request): takes a GET request and reads the email value from a cell
inside an existing Google Sheet.
HTTP Cloud Functions: https://cloud.google.com/functions/docs/writing/http#sample_usage"""
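# A hypothetical invocation of the deployed function (the URL is illustrative
# and not part of this example):
# curl -X POST https://REGION-PROJECT.cloudfunctions.net/main \
#      -H "Content-Type: application/json" -d '{"email":"test@test.com"}'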
# If not using Google Sheet, removing Google modules here and in `requirements.txt`
from googleapiclient.discovery import build
import google.auth
import looker_sdk
sdk = looker_sdk.init40()
# [START main(request)]
def main(request):
"""Take email from JSON body of a POST request, and use the email value
as an input for looker_user_provision() function"""
try:
request_json = request.get_json()
email = request_json["email"]
result = looker_user_provision(email=email)
return result
except:
return 'Please provide JSON in the format of {"email":"test@test.com"}'
# [END main(request)]
# [START main_gsheet(request)]
def main_gsheet(request):
"""Take email from a cell inside an existing Google Sheet"""
try:
email = get_email_from_sheet()
result = looker_user_provision(email=email)
return result
except:
return 'An error occurred.'
def get_email_from_sheet():
""" Authenticate to an existing Google Sheet using the default runtime
service account and extract the email address from a cell inside the sheet.
Refer to Google Sheet API Python Quickstart for details:
https://developers.google.com/sheets/api/quickstart/python
"""
# Get the key of an existing Google Sheet from the URL.
# Example: https://docs.google.com/spreadsheets/d/[KEY HERE]/edit#gid=111
SAMPLE_SPREADSHEET_ID = "foo"
# Google Sheet Range: https://developers.google.com/sheets/api/samples/reading
SAMPLE_RANGE_NAME = "Sheet1!A:A"
creds, _proj_id = google.auth.default()
service = build("sheets", "v4", credentials=creds)
sheet = service.spreadsheets()
result = sheet.values().get(spreadsheetId=SAMPLE_SPREADSHEET_ID,
range=SAMPLE_RANGE_NAME).execute()
# `values` will be a list of lists (i.e.: [['email1'], ['email2']])
# and we can access value 'email' using index
values = result.get('values', [])
email = values[0][0]
return email
# [END main_gsheet(request)]
# [START looker_user_provision]
def looker_user_provision(email):
user_id = search_users_by_email(email=email)
if user_id is not None:
sdk.send_user_credentials_email_password_reset(user_id=user_id)
return f'A user with this email: {email} already existed; Password reset sent.'
else:
create_users(email=email)
return f'New user created; Setup/Welcome email sent to {email}.'
def search_users_by_email(email):
"""An email can only be assigned to one user in a Looker instance.
Therefore, search_user(email=test@test.com) will result in either
an empty dictionary, or a dictionary containing one user at index 0"""
users = sdk.search_users(email=email)
if len(users) == 0:
return None
else:
return users[0]["id"]
def create_users(email):
new_user = sdk.create_user(
body=looker_sdk.models40.WriteUser(
credentials_email=looker_sdk.models40.WriteCredentialsEmail(
email=email,
forced_password_reset_at_next_login=False
),
is_disabled=False,
models_dir_validated=False
)
)
# Create email credentials for the new user
sdk.create_user_credentials_email(
user_id=new_user.id,
body=looker_sdk.models40.WriteCredentialsEmail(
email=email,
forced_password_reset_at_next_login=False
))
# Send a welcome/setup email
sdk.send_user_credentials_email_password_reset(user_id=new_user["id"])
# [END looker_user_provision]
|
HopeFOAM/HopeFOAM | ThirdParty-0.1/ParaView-5.0.1/VTK/Rendering/Core/Testing/Python/CamBlur.py | Python | gpl-3.0 | 1,969 | 0.00965 | #!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# Create the RenderWindow, Renderer and both Actors
#
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# create the pipeline, ball and spikes
sphere = vtk.vtkSphereSource()
sphere.SetThetaResolution(7)
sphere.SetPhiResolution(7)
sphereMapper = vtk.vtkPolyDataMapper()
sphereMapper.SetInputConnection(sphere.GetOutputPort())
sphereActor = vtk.vtkActor()
sphereActor.SetMapper(sphereMapper)
sphereActor2 = vtk.vtkActor()
sphereActor2.SetMapper(sphereMapper)
cone = vtk.vtkConeSource()
cone.SetResolution(5)
glyph = vtk.vtkGlyph3D()
glyph.SetInputConnection(sphere.GetOutputPort())
glyph.SetSourceConnection(cone.GetOutputPort())
glyph.SetVectorModeToUseNormal()
glyph.SetScaleModeToScaleByVector()
glyph.SetScaleFactor(0.25)
spikeMapper = vtk.vtkPolyDataMapper()
spikeMapper.SetInputConnection(glyph.GetOutputPort())
spikeActor = vtk.vtkActor()
spikeActor.SetMapper(spikeMapper)
spikeActor2 = vtk.vtkActor()
spikeActor2.SetMapper(spikeMapper)
# set the actors position and scale
spikeActor.SetPosition(0,0.7,0)
sphereActor.SetPosition(0,0.7,0)
spikeActor2.SetPosition(0,-1,-10)
sphereActor2.SetPosition(0,-1,-10)
spikeActor2.SetScale(1.5,1.5,1.5)
sphereActor2.SetScale(1.5,1.5,1.5)
ren1.AddActor(sphereActor)
ren1.AddActor(spikeActor)
ren1.AddActor(sphereActor2)
ren1.AddActor(spikeActor2)
ren1.SetBackground(0.1,0.2,0.4)
renWin.SetSize(200,200)
# do the first render and then zoom in a little
renWin.Render()
ren1.GetActiveCamera().SetFocalPoint(0,0,0)
ren1.GetActiveCamera().Zoom(1.8)
ren1.GetActiveCamera().SetFocalDisk(0.05)
renWin.SetFDFrames(11)
renWin.Render()
iren.Initialize()
#renWin SetFileName CamBlur.tcl.ppm
#renWin SaveImageAsPPM
# prevent the tk window from showing up then start the event loop
# --- end of script --
|
metamx/Diamond | src/collectors/elasticsearch/elasticsearch.py | Python | mit | 11,323 | 0.001148 | # coding=utf-8
"""
Collect the elasticsearch stats for the local node
#### Dependencies
* urllib2
"""
import urllib2
import re
try:
import json
json # workaround for pyflakes issue #13
except ImportError:
import simplejson as json
import diamond.collector
RE_LOGSTASH_INDEX = re.compile(r'^(.*)-\d\d\d\d\.\d\d\.\d\d$')
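# Example (matching the naming documented in get_default_config_help below):
# RE_LOGSTASH_INDEX.match('logstash-adm-syslog-2014.01.03').group(1)
# evaluates to 'logstash-adm-syslog'.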
class ElasticSearchCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(ElasticSearchCollector,
self).get_default_config_help()
config_help.update({
'host': "",
'port': "",
'stats': "Available stats: \n"
+ " - jvm (JVM information) \n"
+ " - thread_pool (Thread pool information) \n"
+ " - indices (Individual index stats)\n",
'logstash_mode': "If 'indices' stats are gathered, remove "
+ "the YYYY.MM.DD suffix from the index name "
+ "(e.g. logstash-adm-syslog-2014.01.03) and use that "
+ "as a bucket for all 'day' index stats.",
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(ElasticSearchCollector, self).get_default_config()
config.update({
'host': '127.0.0.1',
'port': 9200,
'path': 'elasticsearch',
'stats': ['jvm', 'thread_pool', 'indices'],
'logstash_mode': False,
})
return config
def _get(self, path):
url = 'http://%s:%i/%s' % (
self.config['host'], int(self.config['port']), path)
try:
response = urllib2.urlopen(url)
except Exception, err:
self.log.error("%s: %s", url, err)
return False
try:
return json.load(response)
except (TypeError, ValueError):
self.log.error("Unable to parse response from elasticsearch as a"
+ " json object")
return False
def _copy_one_level(self, metrics, prefix, data, filter=lambda key: True):
for key, value in data.iteritems():
if filter(key):
metric_path = '%s.%s' % (prefix, key)
self._set_or_sum_metric(metrics, metric_path, value)
def _copy_two_level(self, metrics, prefix, data, filter=lambda key: True):
for key1, d1 in data.iteritems():
self._copy_one_level(metrics, '%s.%s' % (prefix, key1), d1, filter)
def _index_metrics(self, metrics, prefix, index):
if self.config['logstash_mode']:
"""Remove the YYYY.MM.DD bit from logstash indices.
This way we keep using the same metric naming and not polute
our metrics system (e.g. Graphite) with new metrics every day."""
m = RE_LOGSTASH_INDEX.match(prefix)
if m:
prefix = m.group(1)
# keep a tally of the number of indexes
self._set_or_sum_metric(metrics,
'%s.indexes_in_group' % prefix, 1)
self._add_metric(metrics, '%s.docs.count' % prefix, index,
['docs', 'count'])
self._add_metric(metrics, '%s.docs.deleted' % prefix, index,
['docs', 'deleted'])
self._add_metric(metrics, '%s.datastore.size' % prefix, index,
['store', 'size_in_bytes'])
# publish all 'total' and 'time_in_millis' stats
self._copy_two_level(
metrics, prefix, index,
lambda key: key.endswith('total') or key.endswith('time_in_millis'))
def _add_metric(self, metrics, metric_path, data, data_path):
"""If the path specified by data_path (a list) exists in data,
add to metrics. Use when the data path may not be present"""
current_item = data
for path_element in data_path:
current_item = current_item.get(path_element)
if current_item is None:
return
self._set_or_sum_metric(metrics, metric_path, current_item)
def _set_or_sum_metric(self, metrics, metric_path, value):
"""If we already have a datapoint for this metric, lets add
the value. This is used when the logstash mode is enabled."""
if metric_path in metrics:
metrics[metric_path] += value
else:
metrics[metric_path] = value
def collect(self):
if json is None:
self.log.error('Unable to import json')
return {}
result = self._get('_nodes/_local/stats?all=true')
if not result:
return
metrics = {}
node = result['nodes'].keys()[0]
data = result['nodes'][node]
#
# http connections to ES
metrics['http.current'] = data['http']['current_open']
#
# indices
indices = data['indices']
metrics['indices.docs.count'] = indices['docs']['count']
metrics['indices.docs.deleted'] = indices['docs']['deleted']
metrics['indices.datastore.size'] = indices['store']['size_in_bytes']
transport = data['transport']
metrics['transport.rx.count'] = transport['rx_count']
metrics['transport.rx.size'] = transport['rx_size_in_bytes']
metrics['transport.tx.count'] = transport['tx_count']
metrics['transport.tx.size'] = transport['tx_size_in_bytes']
# elasticsearch < 0.90RC2
if 'cache' in indices:
cache = indices['cache']
self._add_metric(metrics, 'cache.bloom.size', cache,
['bloom_size_in_bytes'])
self._add_metric(metrics, 'cache.field.evictions', cache,
['field_evictions'])
self._add_metric(metrics, 'cache.field.size', cache,
['field_size_in_bytes'])
metrics['cache.filter.count'] = cache['filter_count']
metrics['cache.filter.evictions'] = cache['filter_evictions']
metrics['cache.filter.size'] = cache['filter_size_in_bytes']
self._add_metric(metrics, 'cache.id.size', cache,
['id_cache_size_in_bytes'])
# elasticsearch >= 0.90RC2
if 'filter_cache' in indices:
cache = indices['filter_cache']
metrics['cache.filter.evictions'] = cache['evictions']
metrics['cache.filter.size'] = cache['memory_size_in_bytes']
self._add_metric(metrics, 'cache.filter.count', cache, ['count'])
# elasticsearch >= 0.90RC2
if 'id_cache' in indices:
cache = indices['id_cache']
self._add_metric(metrics, 'cache.id.size', cache,
['memory_size_in_bytes'])
# elasticsearch >= 0.90
if 'fielddata' in indices:
fielddata = indices['fielddata']
self._add_metric(metrics, 'fielddata.size', fielddata,
['memory_size_in_bytes'])
self._add_metric(metrics, 'fielddata.evictions', fielddata,
['evictions'])
#
# process mem/cpu (may not be present, depending on access restrictions)
self._add_metric(metrics, 'process.cpu.percent', data,
['process', 'cpu', 'percent'])
self._add_metric(metrics, 'process.mem.resident', data,
['process', 'mem', 'resident_in_bytes'])
self._add_metric(metrics, 'process.mem.share', data,
['process', 'mem', 'share_in_bytes'])
self._add_metric(metrics, 'process.mem.virtual', data,
['process', 'mem', 'total_virtual_in_bytes'])
#
# filesystem (may not be present, depending on access restrictions)
if 'fs' in data and 'data' in data['fs'] and data['fs']['data']:
fs_data = data['fs']['data'][0]
self._add_metric(metrics, 'disk.reads.count', fs_data,
['disk_reads'])
self._add_metric(m |
Dangetsu/vnr | Frameworks/Sakura/py/libs/lingoes/lingoesparse.py | Python | gpl-3.0 | 6,775 | 0.020245 | # coding: utf8
# lingoesparse.py
# 1/15/2013 jichi
#
# LD2 and LDX
# http://code.google.com/p/lingoes-extractor/source/browse/trunk/src/cn/kk/extractor/lingoes/LingoesLd2Extractor.java
# https://code.google.com/p/dict4cn/source/browse/trunk/importer/src/LingoesLd2Reader.java
# http://devchina.wordpress.com/2012/03/01/lingoes%E7%81%B5%E6%A0%BC%E6%96%AF%E7%94%B5%E5%AD%90%E8%AF%8D%E5%85%B8ld2ldf%E6%96%87%E4%BB%B6%E8%A7%A3%E6%9E%90java/
# http://www.cnblogs.com/SuperBrothers/archive/2012/11/24/2785971.html
#
# StarDict
# https://code.google.com/p/dict4cn/source/browse/trunk/importer/src/LingoesLd2Reader.java
#
# LD2
# - 0:3: string '?LD2'
# - 0x18: short version (2)
# - 0x1c: long id
#
# - 0x5c: int offset - 0x60
__all__ = 'parse', 'parsefile'
if __name__ == '__main__':
import sys
sys.path.append("..")
import zlib
from sakurakit.skdebug import dprint, dwarn
import byteutil
def parsefile(path, *args, **kwargs):
try:
with open(path, 'rb') as f:
return parse(f.read(), *args, **kwargs)
except Exception, e:
dwarn(e)
def parse(data, *args, **kwargs):
"""
@param data string
@param inenc string
@param outenc string
@param* callback function(unicode word, [unicode] xmls)
@return None or [(unicode word, [unicode] xmls]
"""
if data:
try:
filetype = data[:4] # string
assert filetype == '?LD2'
version = byteutil.toshort(data, 0x18)
assert version == 2
#fileId = byteutil.tolong(data, 0x1c) # not used
# Intro
dataOffset = byteutil.toint(data, 0x5c) + 0x60
introType = byteutil.toint(data, dataOffset)
introOffset = byteutil.toint(data, dataOffset + 4) + dataOffset + 12
if introType == 3: # without additional information
return _parsedict(data, dataOffset, *args, **kwargs)
elif len(data) > introOffset - 0x1c:
return _parsedict(data, introOffset, *args, **kwargs) # skip the intro
else:
dwarn("invalid or corrupted file")
except Exception, e:
dwarn(e)
def _parsedict(data, offset, *args, **kwargs): # str, int; may raise out-of-bound exception
# Dictionary type
dictType = byteutil.toint(data, offset) # int
assert dictType == 3
#System.out.println("词典类型:0x" + Integer.toHexString(data.getInt(offset)))
limit = byteutil.toint(data, offset + 4) + offset + 8 # int
offsetIndex = offset + 0x1c
offsetCompressedDataHeader = byteutil.toint(data, offset + 8) + offsetIndex
inflatedWordsIndexLength = byteutil.toint(data, offset + 12)
inflatedWordsLength = byteutil.toint(data, offset + 16)
inflatedXmlLength = byteutil.toint(data, offset + 20)
definitions = (offsetCompressedDataHeader - offsetIndex) / 4 # for debugging only
dprint("number of definitions = %i" % definitions)
#data.position(offsetCompressedDataHeader + 8)
pos = offsetCompressedDataHeader + 8
it = byteutil.toint(data, pos)
pos += 4 # skip the index offset
#deflateStreams = byteutil.iterint32(data, indexOffset, limit) # [int]
deflateStreams = [] # [int]
while (it + pos) < limit:
it = byteutil.toint(data, pos)
pos += 4
deflateStreams.append(it)
inflatedBytes = _inflate(data, deflateStreams, pos) # [byte]
if inflatedBytes:
return _extract(inflatedBytes, inflatedWordsIndexLength, inflatedWordsIndexLength + inflatedWordsLength, *args, **kwargs)
#final int offsetCompressedData = data.position()
#System.out.println("索引词组数目:" + definitions)
#System.out.println("索引地址/大小:0x" + Integer.toHexString(offsetIndex) + " / " + (offsetCompressedDataHeader - offsetIndex) + " B")
#System.out.println("压缩数据地址/大小:0x" + Integer.toHexString(offsetCompressedData) + " / " + (limit - offsetCompressedData) + " B")
#System.out.println("词组索引地址/大小(解压缩后):0x0 / " + inflatedWordsIndexLength + " B")
#System.out.println("词组地址/大小(解压缩后):0x" + Integer.toHexString(inflatedWordsIndexLength) + " / " + inflatedWordsLength + " B")
#System.out.println("XML地址/大小(解压缩后):0x" + Integer.toHexString(inflatedWordsIndexLength + inflatedWordsLength) + " / " + inflatedXmlLength + " B")
#System.out.println("文件大小(解压缩后):" + ((inflatedWordsIndexLength + inflatedWordsLength + inflatedXmlLength) / 1024) + " KB")
def _inflate(data, deflateStreams, dataOffset): # str, [int], int -> str; may raise
offset = -1
lastOffset = dataOffset
ret = ''
for relativeOffset in deflateStreams:
offset = dataOffset + relativeOffset
ret += zlib.decompress(data[lastOffset:offset]) # could raise zlib.error
lastOffset = offset
return ret
def _extract(inflatedBytes, offsetDefs, offsetXml, *args, **kwargs): # str, int, int
dataLen = 10
defTotal = (offsetDefs / dataLen) - 1
callback = kwargs.get('callback') # function
pos = 8
failCounter = 0
ret = []
for i in xrange(defTotal):
t = _parseentry(inflatedBytes, offsetDefs, offsetXml, dataLen, i, *args, **kwargs)
if t:
if callback:
callback(*t)
else:
ret.append(t)
return ret
def _getIdxData(dataRawBytes, position): # str, int -> [int]
return [
byteutil.toint(dataRawBytes, position),
byteutil.toint(dataRawBytes, position + 4),
byteutil.tobyte(dataRawBytes, position + 8),
byteutil.tobyte(dataRawBytes, position + 9),
byteutil.toint(dataRawBytes, position + 10),
byteutil.toint(dataRawBytes, position + 14),
]
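# Index record layout, as consumed by _parseentry() below: [0] word offset,
# [1] xml offset, [3] reference count, and [4]/[5] the word/xml offsets of the
# next record (entries are dataLen=10 bytes apart, so the reads at +10 and +14
# peek into the following entry to obtain the current entry's end offsets).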
def _parseentry(inflatedBytes, offsetWords, offsetXml, dataLen, idx, inenc, outenc, **kwargs): # str, int, int, int, int; may raise encoding error
wordIdxData = _getIdxData(inflatedBytes, dataLen * idx)
lastWordPos = wordIdxData[0]
lastXmlPos = wordIdxData[1]
refs = wordIdxData[3]
currentWordOffset = wordIdxData[4]
currentXmlOffset = wordIdxData[5]
#print currentXmlOffset - lastXmlPos > offsetXml + lastXmlPos
xmls = []
if currentXmlOffset > lastXmlPos:
t = inflatedBytes[offsetXml + lastXmlPos : offsetXml+ currentXmlOffset]
t = t.decode(outenc)
xmls.append(t)
for i in range(refs): # use range since refs is usually within 2
ref = byteutil.toint(inflatedBytes, offsetWords + lastWordPos)
wordIdxData = _getIdxData(inflatedBytes, dataLen * ref)
lastXmlPos = wordIdxData[1]
currentXmlOffset = wordIdxData[5]
if currentXmlOffset > lastXmlPos:
t = inflatedBytes[offsetXml + lastXmlPos: currentXmlOffset + offsetXml]
t = t.decode(outenc)
xmls.append(t)
lastWordPos += 4
if xmls:
#word = inflatedBytes[offsetWords + lastWordPos + 4: currentWordOffset + offsetWords]
word = inflatedBytes[offsetWords + lastWordPos: currentWordOffset + offsetWords]
word = word.decode(inenc)
return word, xmls
# EOF
|
covrom/django_sample | mysite/mysite/settings.py | Python | mit | 3,237 | 0.002162 | """
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 1.10.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '6b978%jjtj196kc$7c95g=7zdviq27i4%c7w8m914_%8qp9hak'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
#'material',
#'material.frontend',
#'material.admin',
'polls.apps.PollsConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'ru-ru'
TIME_ZONE = 'Europe/Moscow'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
|
s0lst1c3/eaphammer | local/hostapd-eaphammer/tests/hwsim/test_wpas_ctrl.py | Python | gpl-3.0 | 96,304 | 0.00216 | # wpa_supplicant control interface
# Copyright (c) 2014, Qualcomm Atheros, Inc.
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
from remotehost import remote_compatible
import logging
logger = logging.getLogger()
import os
import socket
import subprocess
import time
import binascii
import hostapd
import hwsim_utils
from hwsim import HWSimRadio
from wpasupplicant import WpaSupplicant
from utils import alloc_fail, fail_test
from test_wpas_ap import wait_ap_ready
@remote_compatible
def test_wpas_ctrl_network(dev):
"""wpa_supplicant ctrl_iface network set/get"""
id = dev[0].add_network()
if "FAIL" not in dev[0].request("SET_NETWORK " + str(id)):
raise Exception("Unexpected success for invalid SET_NETWORK")
if "FAIL" not in dev[0].request("SET_NETWORK " + str(id) + " name"):
raise Exception("Unexpected success for invalid SET_NETWORK")
if "FAIL" not in dev[0].request("SET_NETWORK " + str(id + 1) + " proto OPEN"):
raise Exception("Unexpected success for invalid network id")
if "FAIL" not in dev[0].request("GET_NETWORK " + str(id)):
raise Exception("Unexpected success for invalid GET_NETWORK")
if "FAIL" not in dev[0].request("GET_NETWORK " + str(id + 1) + " proto"):
raise Exception("Unexpected success for invalid network id")
if "OK" not in dev[0].request("SET_NETWORK " + str(id) + " proto \t WPA2 "):
raise Exception("Unexpected failure for SET_NETWORK proto")
res = dev[0].request("GET_NETWORK " + str(id) + " proto")
if res != "RSN":
raise Exception("Unexpected SET_NETWORK/GET_NETWORK conversion for proto: " + res)
if "OK" not in dev[0].request("SET_NETWORK " + str(id) + " key_mgmt \t WPA-PSK "):
raise Exception("Unexpected success for SET_NETWORK key_mgmt")
res = dev[0].request("GET_NETWORK " + str(id) + " key_mgmt")
if res != "WPA-PSK":
raise Exception("Unexpected SET_NETWORK/GET_NETWORK conversion for key_mgmt: " + res)
if "OK" not in dev[0].request("SET_NETWORK " + str(id) + " auth_alg \t OPEN "):
raise Exception("Unexpected failure for SET_NETWORK auth_alg")
res = dev[0].request("GET_NETWORK " + str(id) + " auth_alg")
if res != "OPEN":
raise Exception("Unexpected SET_NETWORK/GET_NETWORK conversion for auth_alg: " + res)
if "OK" not in dev[0].request("SET_NETWORK " + str(id) + " eap \t TLS "):
raise Exception("Unexpected failure for SET_NETWORK eap")
res = dev[0].request("GET_NETWORK " + str(id) + " eap")
if res != "TLS":
raise Exception("Unexpected SET_NETWORK/GET_NETWORK conversion for eap: " + res)
tests = ("bssid foo", "key_mgmt foo", "key_mgmt ", "group NONE")
for t in tests:
if "FAIL" not in dev[0].request("SET_NETWORK " + str(id) + " " + t):
raise Exception("Unexpected success for invalid SET_NETWORK: " + t)
tests = [("key_mgmt", "WPA-PSK WPA-EAP IEEE8021X NONE WPA-NONE FT-PSK FT-EAP WPA-PSK-SHA256 WPA-EAP-SHA256"),
("pairwise", "CCMP-256 GCMP-256 CCMP GCMP TKIP"),
("group", "CCMP-256 GCMP-256 CCMP GCMP TKIP"),
("auth_alg", "OPEN SHARED LEAP"),
("scan_freq", "1 2 3 4 5 6 7 8 9 10 11 12 13 14 15"),
("freq_list", "2412 2417"),
| ("scan_ssid", "1"),
("bssid", "00:11:22:33:44:55"),
("proto", "WPA RSN OSEN"),
("eap", "TLS"),
("go_p2p_dev_addr", "22:33:44:55:66:aa"),
("p2p_client_list", "22:33:44:55:66:bb 02:11:22:33:44:55")]
if "SAE" not in dev[0].get_capabil | ity("auth_alg"):
tests.append(("key_mgmt", "WPS OSEN"))
else:
tests.append(("key_mgmt", "WPS SAE FT-SAE OSEN"))
dev[0].set_network_quoted(id, "ssid", "test")
for field, value in tests:
dev[0].set_network(id, field, value)
res = dev[0].get_network(id, field)
if res != value:
raise Exception("Unexpected response for '" + field + "': '" + res + "'")
try:
value = "WPA-EAP-SUITE-B WPA-EAP-SUITE-B-192"
dev[0].set_network(id, "key_mgmt", value)
res = dev[0].get_network(id, "key_mgmt")
if res != value:
raise Exception("Unexpected response for key_mgmt")
except Exception as e:
if str(e).startswith("Unexpected"):
raise
else:
pass
q_tests = (("identity", "hello"),
("anonymous_identity", "foo@nowhere.com"))
for field, value in q_tests:
dev[0].set_network_quoted(id, field, value)
res = dev[0].get_network(id, field)
if res != '"' + value + '"':
raise Exception("Unexpected quoted response for '" + field + "': '" + res + "'")
get_tests = (("foo", None), ("ssid", '"test"'))
for field, value in get_tests:
res = dev[0].get_network(id, field)
if res != value:
raise Exception("Unexpected response for '" + field + "': '" + res + "'")
if dev[0].get_network(id, "password"):
raise Exception("Unexpected response for 'password'")
dev[0].set_network_quoted(id, "password", "foo")
if dev[0].get_network(id, "password") != '*':
raise Exception("Unexpected response for 'password' (expected *)")
dev[0].set_network(id, "password", "hash:12345678901234567890123456789012")
if dev[0].get_network(id, "password") != '*':
raise Exception("Unexpected response for 'password' (expected *)")
dev[0].set_network(id, "password", "NULL")
if dev[0].get_network(id, "password"):
raise Exception("Unexpected response for 'password'")
if "FAIL" not in dev[0].request("SET_NETWORK " + str(id) + " password hash:12"):
raise Exception("Unexpected success for invalid password hash")
if "FAIL" not in dev[0].request("SET_NETWORK " + str(id) + " password hash:123456789012345678x0123456789012"):
raise Exception("Unexpected success for invalid password hash")
dev[0].set_network(id, "identity", "414243")
if dev[0].get_network(id, "identity") != '"ABC"':
raise Exception("Unexpected identity hex->text response")
dev[0].set_network(id, "identity", 'P"abc\ndef"')
if dev[0].get_network(id, "identity") != "6162630a646566":
raise Exception("Unexpected identity printf->hex response")
if "FAIL" not in dev[0].request("SET_NETWORK " + str(id) + ' identity P"foo'):
raise Exception("Unexpected success for invalid identity string")
if "FAIL" not in dev[0].request("SET_NETWORK " + str(id) + ' identity 12x3'):
raise Exception("Unexpected success for invalid identity string")
for i in range(0, 4):
if "FAIL" in dev[0].request("SET_NETWORK " + str(id) + ' wep_key' + str(i) + ' aabbccddee'):
raise Exception("Unexpected wep_key set failure")
if dev[0].get_network(id, "wep_key" + str(i)) != '*':
raise Exception("Unexpected wep_key get failure")
if "FAIL" in dev[0].request("SET_NETWORK " + str(id) + ' psk_list P2P-00:11:22:33:44:55-0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef'):
raise Exception("Unexpected failure for psk_list string")
if "FAIL" not in dev[0].request("SET_NETWORK " + str(id) + ' psk_list 00:11:x2:33:44:55-0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef'):
raise Exception("Unexpected success for invalid psk_list string")
if "FAIL" not in dev[0].request("SET_NETWORK " + str(id) + ' psk_list P2P-00:11:x2:33:44:55-0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef'):
raise Exception("Unexpected success for invalid psk_list string")
if "FAIL" not in dev[0].request("SET_NETWORK " + str(id) + ' psk_list P2P-00:11:22:33:44:55+0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef'):
raise Exception("Unexpected success for invalid psk_list string")
if "FAIL" not in dev[0].request("SET_NETWORK " + str(id) + ' psk_list P2P-00:11:22:33:44:55-0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcde'):
raise Exception("Unexpected success for invalid psk_list string")
|
great-expectations/great_expectations | great_expectations/datasource/simple_sqlalchemy_datasource.py | Python | apache-2.0 | 3,666 | 0.002182 | import copy
import logging
from great_expectations.datasource.data_connector.configured_asset_sql_data_connector import (
ConfiguredAssetSqlDataConnector,
)
from great_expectations.datasource.new_datasource import BaseDatasource
logger = logging.getLogger(__name__)
class SimpleSqlalchemyDatasource(BaseDatasource):
"""A specialized Datasource for SQL backends
SimpleSqlalchemyDatasource is designed to minimize boilerplate configuration and new concepts
"""
def __init__(
self,
name: str,
connection_string: str = None,
url: str = None,
credentials: dict = None,
engine=None, # sqlalchemy.engine.Engine
introspection: dict = None,
tables: dict = None,
**kwargs
):
introspection = introspection or {}
tables = tables or {}
self._execution_engine_config = {
"class_name": "SqlAlchemyExecutionEngine",
"connection_string": connection_string,
"url": url,
"credentials": credentials,
"engine": engine,
}
self._execution_engine_config.update(**kwargs)
super().__init__(name=name, execution_engine=self._execution_engine_config)
self._data_connectors = {}
self._init_data_connectors(
introspection_configs=introspection,
table_configs=tables,
)
# NOTE: Abe 20201111 : This is incorrect. Will need to be fixed when we reconcile all the configs.
self._datasource_config = {}
# noinspection PyMethodOverriding
# Note: This method is meant to overwrite Datasource._init_data_connectors (despite signature mismatch).
def _init_data_connectors(
self,
introspection_configs: dict,
table_configs: dict,
):
# First, build DataConnectors for introspected assets
for name, config in introspection_configs.items():
data_connector_config: dict = dict(
**{
"class_name": "InferredAssetSqlDataConnector",
"name": name,
},
**config
)
self._build_data_connector_from_config(
name,
data_connector_config,
)
# Second, build DataConnectors for tables. They will map to configured assets
for table_name, table_config in table_configs.items():
for partitioner_name, partitioner_config in table_config[
"partitioners"
].items():
data_connector_name: str = partitioner_name
if data_connector_name not in self.data_connectors:
data_connector_config: dict = {
"class_name": "ConfiguredAssetSqlDataConnector",
"assets": {},
}
self._build_data_connector_from_config(
data_connector_name, data_connector_config
)
data_connector: ConfiguredAssetSqlDataConnector = self.data_connectors[
data_connector_name
]
data_asset_config: dict = copy.deepcopy(partitioner_config)
data_asset_config["table_name"] = table_name
data_asset_name: str = table_name
# If config contains any prefix, suffix or schema_name values,
# they will be handled at the ConfiguredAssetSqlDataConnector-level
data_connector.add_data_asset(
data_asset_name,
data_asset_config,
)
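# Illustrative usage sketch (an addition, not part of the upstream module).
# The name, connection string, table and partitioner names below are
# assumptions made purely for illustration; running it needs sqlalchemy and
# great_expectations installed.
if __name__ == "__main__":
    datasource = SimpleSqlalchemyDatasource(
        name="demo_sql_datasource",  # hypothetical name
        connection_string="sqlite:///example.db",  # hypothetical connection string
        introspection={"whole_table": {}},  # -> one InferredAssetSqlDataConnector
        tables={
            "events": {  # hypothetical table
                "partitioners": {
                    "whole_table": {},  # -> ConfiguredAssetSqlDataConnector asset
                },
            },
        },
    )
    print(list(datasource.data_connectors.keys()))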
|
leppa/home-assistant | homeassistant/components/hp_ilo/sensor.py | Python | apache-2.0 | 6,330 | 0.000316 | """Support for information from HP iLO sensors."""
from datetime import timedelta
import logging
import hpilo
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_HOST,
CONF_MONITORED_VARIABLES,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_SENSOR_TYPE,
CONF_UNIT_OF_MEASUREMENT,
CONF_USERNAME,
CONF_VALUE_TEMPLATE,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "HP ILO"
DEFAULT_PORT = 443
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=300)
SENSOR_TYPES = {
"server_name": ["Server Name", "get_server_name"],
"server_fqdn | ": ["Server FQDN", "get_server_fqdn"],
"server_host_data": ["Server Host Data", "get_host_data"],
"server_oa_info": ["Server Onboard Administrator Info", "get_oa_info"],
"server_power_status": ["Server Power state", "get_host_power_status"],
"server_power_readings": ["Server Power readings", "get_power_readings"],
"server_power_on_time": ["Server Power On time", "get_server_power_on_time"],
"server_asset_tag": ["Server Asset Tag", "get_asset | _tag"],
"server_uid_status": ["Server UID light", "get_uid_status"],
"server_health": ["Server Health", "get_embedded_health"],
"network_settings": ["Network Settings", "get_network_settings"],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_MONITORED_VARIABLES, default=[]): vol.All(
cv.ensure_list,
[
vol.Schema(
{
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_SENSOR_TYPE): vol.All(
cv.string, vol.In(SENSOR_TYPES)
),
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
}
)
],
),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the HP iLO sensors."""
hostname = config.get(CONF_HOST)
port = config.get(CONF_PORT)
login = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
monitored_variables = config.get(CONF_MONITORED_VARIABLES)
# Create a data fetcher to support all of the configured sensors. Then make
# the first call to init the data and confirm we can connect.
try:
hp_ilo_data = HpIloData(hostname, port, login, password)
except ValueError as error:
_LOGGER.error(error)
return
# Initialize and add all of the sensors.
devices = []
for monitored_variable in monitored_variables:
new_device = HpIloSensor(
hass=hass,
hp_ilo_data=hp_ilo_data,
sensor_name="{} {}".format(
config.get(CONF_NAME), monitored_variable[CONF_NAME]
),
sensor_type=monitored_variable[CONF_SENSOR_TYPE],
sensor_value_template=monitored_variable.get(CONF_VALUE_TEMPLATE),
unit_of_measurement=monitored_variable.get(CONF_UNIT_OF_MEASUREMENT),
)
devices.append(new_device)
add_entities(devices, True)
class HpIloSensor(Entity):
"""Representation of a HP iLO sensor."""
def __init__(
self,
hass,
hp_ilo_data,
sensor_type,
sensor_name,
sensor_value_template,
unit_of_measurement,
):
"""Initialize the HP iLO sensor."""
self._hass = hass
self._name = sensor_name
self._unit_of_measurement = unit_of_measurement
self._ilo_function = SENSOR_TYPES[sensor_type][1]
self.hp_ilo_data = hp_ilo_data
if sensor_value_template is not None:
sensor_value_template.hass = hass
self._sensor_value_template = sensor_value_template
self._state = None
self._state_attributes = None
_LOGGER.debug("Created HP iLO sensor %r", self)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement of the sensor."""
return self._unit_of_measurement
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_state_attributes(self):
"""Return the device state attributes."""
return self._state_attributes
def update(self):
"""Get the latest data from HP iLO and updates the states."""
# Call the API for new data. Each sensor will re-trigger this
# same exact call, but that's fine. Results should be cached for
# a short period of time to prevent hitting API limits.
self.hp_ilo_data.update()
ilo_data = getattr(self.hp_ilo_data.data, self._ilo_function)()
if self._sensor_value_template is not None:
ilo_data = self._sensor_value_template.render(ilo_data=ilo_data)
self._state = ilo_data
class HpIloData:
"""Gets the latest data from HP iLO."""
def __init__(self, host, port, login, password):
"""Initialize the data object."""
self._host = host
self._port = port
self._login = login
self._password = password
self.data = None
self.update()
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from HP iLO."""
try:
self.data = hpilo.Ilo(
hostname=self._host,
login=self._login,
password=self._password,
port=self._port,
)
except (
hpilo.IloError,
hpilo.IloCommunicationError,
hpilo.IloLoginFailed,
) as error:
raise ValueError(f"Unable to init HP ILO, {error}")
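# A hedged example of the configuration.yaml snippet that would drive
# setup_platform() above; the host address and credentials are placeholders,
# and each sensor_type must be one of the SENSOR_TYPES keys:
#   sensor:
#     - platform: hp_ilo
#       host: 192.168.1.8
#       username: admin
#       password: !secret hp_ilo_password
#       monitored_variables:
#         - name: Power state
#           sensor_type: server_power_status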
|
Tooblippe/pandapower_gui | resources/ui/builder.py | Python | bsd-3-clause | 35,136 | 0.00296 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'builder.ui'
#
# Created: Mon May 22 10:30:45 2017
# by: pyside-uic 0.2.15 running on PySide 1.2.4
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_pandapower(object):
def setupUi(self, pandapower):
pandapower.setObjectName("pandapower")
pandapower.resize(1023, 702)
self.centralwidget = QtGui.QWidget(pandapower)
self.centralwidget.setObjectName("centralwidget")
self.verticalLayoutWidget = QtGui.QWidget(self.centralwidget)
self.verticalLayoutWidget.setGeometry(QtCore.QRect(0, 0, 1011, 661))
self.verticalLayoutWidget.setObjectName("verticalLayoutWidget")
self.verticalLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget)
self.verticalLayout.setContentsMargins(0, 0, 0, 0)
self.verticalLayout.setObjectName("verticalLayout")
self.tabWidget = QtGui.QTabWidget(self.verticalLayoutWidget)
self.tabWidget.setObjectName("tabWidget")
self.main = QtGui.QWidget()
self.main.setObjectName("main")
self.main_save = QtGui.QPushButton(self.main)
self.main_save.setGeometry(QtCore.QRect(10, 90, 91, 31))
self.main_save.setObjectName("main_save")
self.main_losses = QtGui.QPushButton(self.main)
self.main_losses.setGeometry(QtCore.QRect(10, 170, 91, 31))
self.main_losses.setObjectName("main_losses")
self.main_load = QtGui.QPushButton(self.main)
self.main_load.setGeometry(QtCore.QRect(10, 50, 91, 31))
self.main_load.setObjectName("main_load")
self.main_solve = QtGui.QPushButton(self.main)
self.main_solve.setGeometry(QtCore.QRect(10, 130, 91, 31))
self.main_solve.setObjectName("main_solve")
self.main_message = QtGui.QTextBrowser(self.main)
self.main_message.setGeometry(QtCore.QRect(110, 10, 861, 581))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 85, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 170, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(63, 127, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 42, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 56, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 170, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 170, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(63, 127, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 42, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 56, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shado | w, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 170, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
bru | sh = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 42, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 170, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(63, 127, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disab |
astroML/astroML | astroML/datasets/moving_objects.py | Python | bsd-2-clause | 4,948 | 0 | import os
from gzip import GzipFile
from io import BytesIO
import numpy as np
from .tools import download_with_progress_bar
from . import get_data_home
DATA_URL = ('https://github.com/astroML/astroML-data/raw/main/datasets/'
'ADR3.dat.gz')
ARCHIVE_FILE = 'moving_objects.npy'
ADR4_dtype = [('moID', 'a6'),
('sdss_run', 'i4'),
('sdss_col', 'i4'),
('sdss_field', 'i4'),
('sdss_obj', 'i4'),
('rowc', 'f4'),
('colc', 'f4'),
('mjd', 'f8'),
('ra', 'f8'),
('dec', 'f8'),
('lambda', 'f8'),
('beta', 'f8'),
('phi', 'f8'),
('vmu', 'f4'),
('vmu_err', 'f4'),
('vnu', 'f4'),
('vnu_err', 'f4'),
('vlambda', 'f4'),
('vbeta', 'f4'),
('mag_u', 'f4'),
('err_u', 'f4'),
('mag_g', 'f4'),
('err_g', 'f4'),
('mag_r', 'f4'),
('err_r', 'f4'),
('mag_i', 'f4'),
('err_i', 'f4'),
('mag_z', 'f4'),
('err_z', 'f4'),
('mag_a', 'f4'),
('err_a', 'f4'),
('mag_V', 'f4'),
('mag_B', 'f4'),
('ast_flag', 'i4'),
('ast_num', 'i8'),
('ast_designation', 'a17'),
('ast_det_count', 'i4'),
('ast_det_total', 'i4'),
('ast_flags', 'i8'),
('ra_comp', 'f8'),
('dec_comp', 'f8'),
('mag_comp', 'f4'),
('r_helio', 'f4'),
('r_geo', 'f4'),
('phase', 'f4'),
('cat_id', 'a15'),
('H', 'f4'),
('G', 'f4'),
('Arc', 'f4'),
('Epoch', 'f8'),
('a', 'f8'),
('e', 'f8'),
('i', 'f8'),
('asc_node', 'f8'),
('arg_peri', 'f8'),
('M', 'f8'),
('PEcat_id', 'a17'),
('aprime', 'f8'),
('eprime', 'f8'),
('sin_iprime', 'f8')]
def fetch_moving_objects(data_home=None, download_if_missing=True,
Parker2008_cuts=False):
"""Loader for SDSS moving objects datasets
Parameters
----------
data_home : optional, default=None
Specify another download and cache folder for the datasets. By default
all astroML data is stored in '~/astroML_data'.
download_if_missing : optional, default=True
If False, raise a IOError if the data is not locally available
instead of trying to download the data from the source site.
Parker2008_cuts : bool (optional)
If true, apply cuts on magnitudes and orbital parameters u | sed in
Parker et al. 2008
Returns
-------
data : recarray, shape = (??,)
record array containing 60 values for each item
Notes
-----
See http://www.astro.washington.edu/users/ivezic/sdssmoc/sdssmoc3.html
Columns 0, 3 | 5, 45, and 56 are left out of the fetch: they are string
parameters. Only columns with known orbital parameters are saved.
Examples
--------
>>> from astroML.datasets import fetch_moving_objects
>>> data = fetch_moving_objects() # doctest: +IGNORE_OUTPUT +REMOTE_DATA
>>> # number of objects
>>> print(len(data)) # doctest: +REMOTE_DATA
43424
>>> # first five u-g colors of the dataset
>>> u_g = data['mag_u'] - data['mag_g'] # doctest: +REMOTE_DATA
>>> print(u_g[:5]) # doctest: +REMOTE_DATA
[1.4899998 1.7800007 1.6500015 2.0100002 1.8199997]
"""
data_home = get_data_home(data_home)
archive_file = os.path.join(data_home, ARCHIVE_FILE)
if not os.path.exists(archive_file):
if not download_if_missing:
raise IOError('data not present on disk. '
'set download_if_missing=True to download')
print("downloading moving object catalog from %s to %s"
% (DATA_URL, data_home))
zipped_buf = download_with_progress_bar(DATA_URL, return_buffer=True)
gzf = GzipFile(fileobj=zipped_buf, mode='rb')
print("uncompressing file...")
extracted_buf = BytesIO(gzf.read())
data = np.loadtxt(extracted_buf, dtype=ADR4_dtype)
# Select unique sources with known orbital elements
flag = (data['ast_flag'] == 1) & (data['ast_det_count'] == 1)
data = data[flag]
np.save(archive_file, data)
else:
data = np.load(archive_file)
if Parker2008_cuts:
i_z = data['mag_i'] - data['mag_z']
flag = ((data['aprime'] >= 0.01) & (data['aprime'] <= 100) &
(data['mag_a'] <= 0.4) & (data['mag_a'] >= -0.3) &
(i_z <= 0.6) & (i_z >= -0.8))
data = data[flag]
return data
|
diegoguimaraes/django | tests/utils_tests/test_dateparse.py | Python | bsd-3-clause | 2,232 | 0.003584 | from __future__ import unicode_literals
from datetime import date, time, datetime
import unittest
from django.utils.dateparse import parse_date, parse_time, parse_datetime
from django.utils.timezone import get_fixed_timezone
class DateParseTests(unittest.TestCase):
def test_parse_date(self):
# Valid inputs
self.assertEqual(parse_date('2012-04-23'), date(2012, 4, 23))
self.assertEqual(parse_date('2012-4-9'), date(2012, 4, 9))
# Invalid inputs
self.assertEqual(parse_date('20120423'), None)
self.assertRaises(ValueError, parse_date, '2012-04-56')
def test_parse_time(self):
# Valid inputs
self.assertEqual(parse_time('09:15:00'), time(9, 15))
self.assertEqual(parse_time('10:10'), time(10, 10))
self.assertEqual(parse_time('10:20:30.400'), time(10, 20, 30, 400000))
self.assertEqual(parse_time('4:8:16'), time(4, 8, 16))
# Invalid inputs
self.assertEqual(parse_time('091500'), None)
self.assertRaises(ValueError, parse_time, '09:15:90')
def test_parse_datetime(self):
# Valid inputs
self.assertEqual(parse_datetime('2012-04-23T09:15:00'),
datetime(2012, 4, 23, 9, 15))
self.assertEqual(parse_datetime('2012-4-9 4:8:16'),
datetime(2012, 4, 9, 4, 8, 16))
self.assertEqual(parse_datetime('2012-04-23T09:15:00Z'),
datetime(2012, 4, 23, 9, 15, 0, 0, get_fixed_timezone(0)))
self.assertEqual(parse_datetime('2012-4-9 4:8:16-0320'),
datetime(2012, 4, 9, 4, 8, 16, 0, get_fixed_timezone(-200)))
self.assertEqual(parse_datetime('2012-04-23T10:20:30.400+02:30'),
datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(150)))
self.assertEqual(parse_datetime('2012-04-23T10:20:30.400+02'),
datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(120)))
self.assertEqual(parse_datetime('2012-04-23T10:20:30.400-02'),
datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(-120)))
# Invalid inputs
self.assertEqu | al(parse_datetime('20120423091500'), None) |
self.assertRaises(ValueError, parse_datetime, '2012-04-56T09:15:90')
|
carojasq/Evaluaciones-bases-de-datos-2 | models/administrador.py | Python | mit | 3,310 | 0.038671 | from config import Config
from models.usuario import Usuario
class Administrador:
tabla = "administradores"
def __init__(self, identificador):
self.id = identificador
self.privilegios = Usuario.getTipo(self.id)
'''
    #update (an administrator can be updated by updating it as a regular user)
def save(self):
query = "UPDATE %s SET nombre_completo='%s', usuario='%s', contrasena='%s', e_mail='%s', d | r_ciudad_id =%s WHERE id=%s" % (Usuario.tabla, self.nombre_completo, self.usuario, self.contrasena,self.e_mail,self.dr_ciudad_id,self.id)
cursor = Config.getCursor()
try:
cursor.execute(query)
except Exception, e:
print e
print "No es posible actualizar el registro"
return None
return self
'''
    #insert (adds the user's primary key as a foreign key in administradores)
@staticmethod
def create(identificador): #del usuario
        if Usuario.getById(identificador) == None:
            print "The user does not exist; the administrator cannot be created"
            return None
else:
query = " INSERT INTO %s (id) VALUES (%s) RETURNING id " % (Administrador.tabla, str(int(identificador)))
cursor = Config.getCursor()
try:
cursor.execute(query)
except Exception, e:
print e
print "No es posible guardar objeto"
id = cursor.fetchone()
return Administrador(id[0])
    #query by id
@staticmethod
def getById(id):
cursor = Config.getCursor()
query = " SELECT * FROM %s JOIN %s ON %s.id = %s.id WHERE %s.id=%d" % (Administrador.tabla,Usuario.tabla,Administrador.tabla,Usuario.tabla,Administrador.tabla,id)
try:
cursor.execute(query)
row = cursor.fetchone()
except Exception, e:
print e
print "No es posible ejecutar query o no hay resultados validos"
return None
        if row == None: #if no record is found
return None
return Usuario(row['id'], row['nombre_completo'], row['usuario'], row['contrasena'], row['e_mail'],row['direccion_residencia'], row['dr_ciudad_id'])
    #query all
@staticmethod
def getAll():
cursor = Config.getCursor()
query = "SELECT * FROM %s JOIN %s ON %s.id = %s.id" % (Administrador.tabla,Usuario.tabla,Usuario.tabla, Administrador.tabla)
try:
cursor.execute(query)
except Exception, e:
print e
print "No es posible ejecutar query o no hay resultados validos"
return []
usuarios = []
rows = cursor.fetchall()
for row in rows:
usuarios.append(Usuario(row['id'], row['nombre_completo'], row['usuario'], row['contrasena'], row['e_mail'],row['direccion_residencia'], row['dr_ciudad_id']))
return usuarios
    #query by usuario, e_mail or nombre_completo (search criterion, search text)
@staticmethod
def getAllWith(criterio,entrada):
cursor = Config.getCursor()
query = "SELECT * FROM %s JOIN %s ON %s.id = %s.id WHERE %s.%s='%s'" % (Administrador.tabla,Usuario.tabla,Administrador.tabla,Usuario.tabla,Usuario.tabla,criterio,entrada)
try:
cursor.execute(query)
except Exception, e:
print e
print "No es posible ejecutar query o no hay resultados validos"
return []
usuarios = []
rows = cursor.fetchall()
for row in rows:
usuarios.append(Usuario(row['id'], row['nombre_completo'], row['usuario'], row['contrasena'], row['e_mail'],row['direccion_residencia'], row['dr_ciudad_id']))
return usuarios
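# A minimal, hypothetical usage sketch for the class above; the id and the
# search value are illustrative only and assume the database connection that
# Config provides.
if __name__ == "__main__":
    admin = Administrador.create(1)  # promote the (assumed existing) user id 1
    if admin is not None:
        print Administrador.getById(admin.id)
        print Administrador.getAllWith('usuario', 'jdoe')  # hypothetical username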
|
v-iam/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_03_01/models/subnet_association.py | Python | mit | 1,260 | 0 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Micr | osoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class SubnetAssociation(Model):
"""Network interface a | nd its custom security rules.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Subnet ID.
:vartype id: str
:param security_rules: Collection of custom security rules.
:type security_rules: list of :class:`SecurityRule
<azure.mgmt.network.v2017_03_01.models.SecurityRule>`
"""
_validation = {
'id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'security_rules': {'key': 'securityRules', 'type': '[SecurityRule]'},
}
def __init__(self, security_rules=None):
self.id = None
self.security_rules = security_rules
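# Hypothetical construction for illustration; `id` is read-only and remains
# None until populated by the service, and real SecurityRule objects would
# come from the same generated models package:
# assoc = SubnetAssociation(security_rules=[])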
|
SamuelTM/univapi | stm/univapi/modelos/boleto.py | Python | mit | 357 | 0.002801 | class Boleto:
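    """Represents a tuition payment slip (boleto); best-effort translations of
    the Portuguese field names: ano_mes = year/month, vencimento = due date,
    mensalidade = monthly fee, dependencia = retake-course charge,
    desconto = discount, liquido = net amount, situacao = status."""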
def __init__ | (self, ano_mes, vencimento, mensalidade, dependencia, desconto, liquido, situacao):
self.ano_mes = ano_mes
self.vencimento = vencimento
self.mensalidade = | mensalidade
self.dependencia = dependencia
self.desconto = desconto
self.liquido = liquido
self.situacao = situacao
|
wrouesnel/ansible | lib/ansible/modules/cloud/ovirt/ovirt_hosts.py | Python | gpl-3.0 | 22,692 | 0.001719 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ovirt_hosts
short_description: Module to manage hosts in oVirt/RHV
version_added: "2.3"
author: "Ondra Machacek (@machacekondra)"
description:
- "Module to manage hosts in oVirt/RHV"
options:
name:
description:
- "Name of the host to manage."
required: true
state:
description:
- "State which should a host to be in after successful completion."
- "I(iscsilogin) and I(iscsidiscover) are supported since version 2.4."
choices: [
'pres | ent', 'absent', 'maintenance', 'upgraded', 'started',
'restarted', 'stopped', 'reinstalled', 'iscsidiscover', 'iscsilogin'
| ]
default: present
comment:
description:
- "Description of the host."
cluster:
description:
- "Name of the cluster, where host should be created."
address:
description:
- "Host address. It can be either FQDN (preferred) or IP address."
password:
description:
- "Password of the root. It's required in case C(public_key) is set to I(False)."
public_key:
description:
- "I(True) if the public key should be used to authenticate to host."
- "It's required in case C(password) is not set."
default: False
aliases: ['ssh_public_key']
kdump_integration:
description:
- "Specify if host will have enabled Kdump integration."
choices: ['enabled', 'disabled']
default: enabled
spm_priority:
description:
- "SPM priority of the host. Integer value from 1 to 10, where higher number means higher priority."
override_iptables:
description:
- "If True host iptables will be overridden by host deploy script."
- "Note that C(override_iptables) is I(false) by default in oVirt/RHV."
force:
description:
- "If True host will be forcibly moved to desired state."
default: False
override_display:
description:
- "Override the display address of all VMs on this host with specified address."
kernel_params:
description:
- "List of kernel boot parameters."
- "Following are most common kernel parameters used for host:"
- "Hostdev Passthrough & SR-IOV: intel_iommu=on"
- "Nested Virtualization: kvm-intel.nested=1"
- "Unsafe Interrupts: vfio_iommu_type1.allow_unsafe_interrupts=1"
- "PCI Reallocation: pci=realloc"
- "C(Note:)"
- "Modifying kernel boot parameters settings can lead to a host boot failure.
Please consult the product documentation before doing any changes."
- "Kernel boot parameters changes require host deploy and restart. The host needs
to be I(reinstalled) suceesfully and then to be I(rebooted) for kernel boot parameters
to be applied."
hosted_engine:
description:
- "If I(deploy) it means this host should deploy also hosted engine
components."
- "If I(undeploy) it means this host should un-deploy hosted engine
components and this host will not function as part of the High
Availability cluster."
power_management_enabled:
description:
- "Enable or disable power management of the host."
- "For more comprehensive setup of PM use C(ovirt_host_pm) module."
version_added: 2.4
activate:
description:
- "If C(state) is I(present) activate the host."
- "This parameter is good to disable, when you don't want to change
the state of host when using I(present) C(state)."
default: True
version_added: 2.4
iscsi:
description:
- "If C(state) is I(iscsidiscover) it means that the iscsi attribute is being
used to discover targets"
- "If C(state) is I(iscsilogin) it means that the iscsi attribute is being
used to login to the specified targets passed as part of the iscsi attribute"
version_added: "2.4"
check_upgrade:
description:
- "If I(true) and C(state) is I(upgraded) run check for upgrade
action before executing upgrade action."
default: True
version_added: 2.4
extends_documentation_fragment: ovirt
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Add host with username/password supporting SR-IOV.
# Note that override_iptables is false by default in oVirt/RHV:
- ovirt_hosts:
cluster: Default
name: myhost
address: 10.34.61.145
password: secret
override_iptables: true
kernel_params:
- intel_iommu=on
# Add host using public key
- ovirt_hosts:
public_key: true
cluster: Default
name: myhost2
address: 10.34.61.145
override_iptables: true
# Deploy hosted engine host
- ovirt_hosts:
cluster: Default
name: myhost2
password: secret
address: 10.34.61.145
override_iptables: true
hosted_engine: deploy
# Maintenance
- ovirt_hosts:
state: maintenance
name: myhost
# Restart host using power management:
- ovirt_hosts:
state: restarted
name: myhost
# Upgrade host
- ovirt_hosts:
state: upgraded
name: myhost
# discover iscsi targets
- ovirt_hosts:
state: iscsidiscover
name: myhost
iscsi:
username: iscsi_user
password: secret
address: 10.34.61.145
port: 3260
# login to iscsi targets
- ovirt_hosts:
state: iscsilogin
name: myhost
iscsi:
username: iscsi_user
password: secret
address: 10.34.61.145
target: "iqn.2015-07.com.mlipchuk2.redhat:444"
port: 3260
# Reinstall host using public key
- ovirt_hosts:
state: reinstalled
name: myhost
public_key: true
# Remove host
- ovirt_hosts:
state: absent
name: myhost
force: True
'''
RETURN = '''
id:
description: ID of the host which is managed
returned: On success if host is found.
type: str
sample: 7de90f31-222c-436c-a1ca-7e655bd5b60c
host:
description: "Dictionary of all the host attributes. Host attributes can be found on your oVirt/RHV instance
at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/host."
returned: On success if host is found.
type: dict
iscsi_targets:
description: "List of host iscsi targets"
returned: On success if host is found and state is iscsidiscover.
type: list
'''
import time
import traceback
try:
import ovirtsdk4.types as otypes
from ovirtsdk4.types import HostStatus as hoststate
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
BaseModule,
check_sdk,
create_connection,
equal,
get_id_by_name,
ovirt_full_argument_spec,
wait,
)
class HostsModule(BaseModule):
def build_entity(self):
return otypes.Host(
name=self.param('name'),
cluster=otypes.Cluster(
name=self.param( |
google/brax | brax/envs/env.py | Python | apache-2.0 | 2,766 | 0.011931 | # Copyright 2022 The Brax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A brax e | nvironment for training and inference."""
import abc
from typing import Any, Dict, Optional
import brax
from brax import jumpy as jp
from flax import struct
from google.protobuf import text_format
@struct.dataclass
class State:
"""Environment state for training and inference."""
qp: brax.QP
obs: jp.ndarray
reward: jp.ndarray
done: jp.ndarray
metrics: Dict[str, jp.ndarray] = struct.field(default_factory=dict)
info: Dict[str, Any] = struct.field(defaul | t_factory=dict)
class Env(abc.ABC):
"""API for driving a brax system for training and inference."""
def __init__(self, config: Optional[str]):
if config:
config = text_format.Parse(config, brax.Config())
self.sys = brax.System(config)
@abc.abstractmethod
def reset(self, rng: jp.ndarray) -> State:
"""Resets the environment to an initial state."""
@abc.abstractmethod
def step(self, state: State, action: jp.ndarray) -> State:
"""Run one timestep of the environment's dynamics."""
@property
def observation_size(self) -> int:
"""The size of the observation vector returned in step and reset."""
rng = jp.random_prngkey(0)
reset_state = self.unwrapped.reset(rng)
return reset_state.obs.shape[-1]
@property
def action_size(self) -> int:
"""The size of the action vector expected by step."""
return self.sys.num_joint_dof + self.sys.num_forces_dof
@property
def unwrapped(self) -> 'Env':
return self
class Wrapper(Env):
"""Wraps the environment to allow modular transformations."""
def __init__(self, env: Env):
super().__init__(config=None)
self.env = env
def reset(self, rng: jp.ndarray) -> State:
return self.env.reset(rng)
def step(self, state: State, action: jp.ndarray) -> State:
return self.env.step(state, action)
@property
def observation_size(self) -> int:
return self.env.observation_size
@property
def action_size(self) -> int:
return self.env.action_size
@property
def unwrapped(self) -> Env:
return self.env.unwrapped
def __getattr__(self, name):
if name == '__setstate__':
raise AttributeError(name)
return getattr(self.env, name)
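# A hedged sketch of a concrete environment built on the API above; the
# reward logic is a placeholder, and the class assumes it is constructed with
# a valid brax Config text-proto so that self.sys exists.
class ConstantRewardEnv(Env):
    """Illustrative Env: physics steps forward, reward is always one."""
    def reset(self, rng: jp.ndarray) -> State:
        qp = self.sys.default_qp()  # default joint positions/velocities
        obs = jp.zeros(3)  # placeholder observation vector
        reward, done = jp.zeros(2)
        return State(qp=qp, obs=obs, reward=reward, done=done)
    def step(self, state: State, action: jp.ndarray) -> State:
        qp, _ = self.sys.step(state.qp, action)  # advance the physics one step
        return state.replace(qp=qp, reward=jp.ones(()))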
|
JDSchmitzMedia/pydev | pysrc/tests_python/test_debugger.py | Python | epl-1.0 | 30,357 | 0.013605 | '''
The idea is that we record the commands sent to the debugger and reproduce them from this script
(so, this works as the client, which spawns the debugger as a separate process and communicates
with it as if it were run from the outside)
Note that it's a Python script, but it'll spawn processes to run under both Jython and Python.
'''
JYTHON_JAR_LOCATION = None
JAVA_LOCATION = None
import unittest
port = 13336
def UpdatePort():
global port
port += 1
import os
def NormFile(filename):
try:
rPath = os.path.realpath #@UndefinedVariable
except:
# jython does not support os.path.realpath
# realpath is a no-op on systems without islink support
rPath = os.path.abspath
return os.path.normcase(rPath(filename))
PYDEVD_FILE = NormFile('../pydevd.py')
import sys
sys.path.append(os.path.dirname(PYDEVD_FILE))
SHOW_WRITES_AND_READS = False
SHOW_RESULT_STR = False
SHOW_OTHER_DEBUG_INFO = False
import subprocess
import socket
import threading
import time
#=======================================================================================================================
# ReaderThread
#=======================================================================================================================
class ReaderThread(threading.Thread):
def __init__(self, sock):
threading.Thread.__init__(self)
self.setDaemon(True)
self.sock = sock
self.lastReceived = None
def run(self):
try:
buf = ''
while True:
l = self.sock.recv(1024)
buf += l
if '\n' in buf:
self.lastReceived = buf
buf = ''
if SHOW_WRITES_AND_READS:
print 'Test Reader Thread Received %s' % self.lastReceived.strip()
except:
pass #ok, finished it
def DoKill(self):
self.sock.close()
#=======================================================================================================================
# AbstractWriterThread
#=======================================================================================================================
class AbstractWriterThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.setDaemon(True)
self.finishedOk = False
def DoKill(self):
if hasattr(self, 'readerThread'):
#if it's not created, it's not there...
self.readerThread.DoKill()
self.sock.close()
def Write(self, s):
last = self.readerThread.lastReceived
if SHOW_WRITES_AND_READS:
print 'Test Writer Thread Written %s' % (s,)
self.sock.send(s + '\n')
time.sleep(0.2)
i = 0
while last == self.readerThread.lastReceived and i < 10:
i += 1
time.sleep(0.1)
def StartSocket(self):
if SHOW_WRITES_AND_READS:
print 'StartSocket'
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('', port))
s.listen(1)
if SHOW_WRITES_AND_READS:
print 'Waiting in socket.accept()'
newSock, addr = s.accept()
if SHOW_WRITES_AND_READS:
print 'Test Writer Thread Socket:', newSock, addr
readerThread = self.readerThread = ReaderThread(newSock)
readerThread.start()
self.sock = newSock
self._sequence = -1
#initial command is always the version
self.WriteVersion()
def NextSeq(self):
self._sequence += 2
return self._sequence
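    # The test client hands out odd sequence ids (1, 3, 5, ...), following the
    # pydevd wire-protocol convention that keeps client-originated ids from
    # colliding with ids originated on the debugger side.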
def WaitForNewThread(self):
i = 0
#wait for hit breakpoint
while not '<xml><thread name="' in self.readerThread.lastReceived or '<xml><thread name="pydevd.' in self.readerThread.lastReceived:
i += 1
time.sleep(1)
if i >= 15:
raise AssertionError('After %s seconds, a thread was not created.' % i)
#we have something like <xml><thread name="MainThread" id="12103472" /></xml>
splitted = self.readerThread.lastReceived.split('"')
threadId = splitted[3]
return threadId
def WaitForBreakpointHit(self, reason='111', get_line=False):
'''
108 is over
109 is return
111 is breakpoint
'''
i = 0
#wait for hit breakpoint
while not ('stop_reason="%s"' % reason) in self.readerThread.lastReceived:
i += 1
time.sleep(1)
if i >= 10:
raise AssertionError('After %s seconds, a break with reason: %s was not hit. Found: %s' % \
(i, reason, self.readerThread.lastReceived))
#we have something like <xml><thread id="12152656" stop_reason="111"><frame id="12453120" ...
splitted = self.readerThread.lastRe | ceived.split('"')
threadId = splitted[1]
frameId = splitted[5]
| if get_line:
return threadId, frameId, int(splitted[11])
return threadId, frameId
def WaitForVars(self, expected):
i = 0
#wait for hit breakpoint
while not expected in self.readerThread.lastReceived:
i += 1
time.sleep(1)
if i >= 10:
raise AssertionError('After %s seconds, the vars were not found. Last found:\n%s' %
(i, self.readerThread.lastReceived))
return True
def WriteMakeInitialRun(self):
self.Write("101\t%s\t" % self.NextSeq())
def WriteVersion(self):
self.Write("501\t%s\t1.0" % self.NextSeq())
def WriteAddBreakpoint(self, line, func):
'''
@param line: starts at 1
'''
if func is not None:
self.Write("111\t%s\t%s\t%s\t**FUNC**%s\tNone" % (self.NextSeq(), self.TEST_FILE, line, func))
else:
self.Write("111\t%s\t%s\t%s\tNone" % (self.NextSeq(), self.TEST_FILE, line))
def WriteRemoveBreakpoint(self, line):
self.Write("112\t%s\t%s\t%s" % (self.NextSeq(), self.TEST_FILE, line))
def WriteGetFrame(self, threadId, frameId):
self.Write("114\t%s\t%s\t%s\tFRAME" % (self.NextSeq(), threadId, frameId))
def WriteStepOver(self, threadId):
self.Write("108\t%s\t%s" % (self.NextSeq(), threadId,))
def WriteStepIn(self, threadId):
self.Write("107\t%s\t%s" % (self.NextSeq(), threadId,))
def WriteStepReturn(self, threadId):
self.Write("109\t%s\t%s" % (self.NextSeq(), threadId,))
def WriteSuspendThread(self, threadId):
self.Write("105\t%s\t%s" % (self.NextSeq(), threadId,))
def WriteRunThread(self, threadId):
self.Write("106\t%s\t%s" % (self.NextSeq(), threadId,))
def WriteKillThread(self, threadId):
self.Write("104\t%s\t%s" % (self.NextSeq(), threadId,))
#=======================================================================================================================
# WriterThreadCase13
#======================================================================================================================
class WriterThreadCase13(AbstractWriterThread):
TEST_FILE = NormFile('_debugger_case13.py')
def run(self):
self.StartSocket()
self.WriteAddBreakpoint(35, 'main')
self.Write("124\t%s\t%s" % (self.NextSeq(), "true;false;false;true"))
self.WriteMakeInitialRun()
threadId, frameId, line = self.WaitForBreakpointHit('111', True)
self.WriteGetFrame(threadId, frameId)
self.WriteStepIn(threadId)
threadId, frameId, line = self.WaitForBreakpointHit('107', True)
# Should go inside setter method
assert line == 25, 'Expected return to be in line 25, was: %s' % line
self.WriteStepIn(threadId)
th |
JulyKikuAkita/PythonPrac | cs15211/VerifyPreorderSequenceinBinarySearchTree.py | Python | apache-2.0 | 4,042 | 0.003711 | __source__ = 'https://leetcode.com/problems/verify-preorder-sequence-in-binary-search-tree/description/'
# http | s://github.co | m/kamyu104/LeetCode/blob/master/Python/verify-preorder-sequence-in-binary-search-tree.py
# Time: O(n)
# Space: O(1)
# Stack
#
# Description: Leetcode # 255. Verify Preorder Sequence in Binary Search Tree
#
# Given an array of numbers, verify whether it is the correct preorder traversal sequence of a binary search tree.
#
# You may assume each number in the sequence is unique.
#
# Follow up:
# Could you do it using only constant space complexity?
#
# Companies
# Zenefits
# Related Topics
# Tree Stack
# Similar Questions
# Binary Tree Preorder Traversal
#
import unittest
class Solution:
# @param {integer[]} preorder
# @return {boolean}
def verifyPreorder(self, preorder):
low = float("-inf")
i = -1
for p in preorder:
            if p < low:
                return False
while i >= 0 and p > preorder[i]:
low = preorder[i]
i -= 1
i += 1
preorder[i] = p
return True
# Time: O(n)
# Space: O(h)
# 60ms 41.14%
class Solution2:
# @param {integer[]} preorder
# @return {boolean}
def verifyPreorder(self, preorder):
low = float("-inf")
path = []
for p in preorder:
if p < low:
return False
while path and p > path[-1]:
low = path[-1]
path.pop()
path.append(p)
return True
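# Worked trace of Solution2 on preorder = [5, 2, 1, 3, 6], a valid BST preorder:
#   5 -> path [5], low = -inf
#   2 -> path [5, 2]
#   1 -> path [5, 2, 1]
#   3 -> pop 1, pop 2 (low becomes 2), push -> path [5, 3]
#   6 -> pop 3, pop 5 (low becomes 5), push -> path [6]
# every value was >= low when visited, so the method returns True.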
class TestMethods(unittest.TestCase):
def test_Local(self):
self.assertEqual(1, 1)
if __name__ == '__main__':
unittest.main()
Java = '''
#Thought:
[10,7,4,8,6,40,23] should be false
# 31ms 56.09%
class Solution {
public boolean verifyPreorder(int[] preorder) {
int low = Integer.MIN_VALUE;
Stack<Integer> path = new Stack();
for (int p : preorder) {
if (p < low)
return false;
while (!path.empty() && p > path.peek())
low = path.pop();
path.push(p);
}
return true;
}
}
# Assume no duplicates (a BST does not allow them) and work in place:
# the prefix preorder[0..index] serves as the virtual stack.
# While the current value is smaller than the stack top we are still
# descending into a left subtree; once it is bigger we have crossed into a
# right subtree, so pop every stacked element smaller than the current value
# and take the last popped value as the new minimum (inside a right subtree
# we must never see a smaller number again).
# index = top of the virtual stack; i = position scanning through the array
# 2ms 100%
class Solution {
public boolean verifyPreorder(int[] preorder) {
int index = -1;
int min = Integer.MIN_VALUE;
for (int i = 0; i < preorder.length; i++) {
if (preorder[i] < min) {
return false;
}
while (index >= 0 && preorder[index] < preorder[i]) {
min = preorder[index--];
}
preorder[++index] = preorder[i];
}
return true;
}
}
# 428ms 14.72%
class Solution {
public boolean verifyPreorder(int[] preorder) {
return verifyPreorder(preorder, 0, preorder.length - 1);
}
private boolean verifyPreorder(int[] preorder, int start, int end) {
if (start >= end) {
return true;
}
int root = preorder[start];
int index = start + 1;
while (index <= end && preorder[index] < root) {
index++;
}
for (int i = index + 1; i<= end; i++) {
if (preorder[i] < root) {
return false;
}
}
return verifyPreorder(preorder, start + 1, index - 1) && verifyPreorder(preorder, index, end);
}
}
'''
|
redox-alpha/omorfi | src/python/generate-lexcs.py | Python | gpl-3.0 | 14,293 | 0.00063 | #!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
This script converts the Finnish omorfi database to lexc format, given source
files that contain at least the following information for each word:
* the word lemma or the dictionary form
* the word inflection classification in one of the known formats.
Additional data may be available in the database and can be deduced from the
lemma or classification as needed. The current database reader is based
on Python's csv module, but may change in the future.
"""
# Author: Omorfi contributors, 2014
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import argparse
import csv
from sys import argv, exit, stderr
from time import strftime
from omorfi.apertium_formatter import ApertiumFormatter
from omorfi.ftb3_formatter import Ftb3Formatter
from omorfi.giella_formatter import GiellaFormatter
from omorfi.labeled_segments_formatter import LabeledSegmentsFormatter
from omorfi.no_tags_formatter import NoTagsFormatter
from omorfi.omor_formatter import OmorFormatter
# standard UI stuff
def main():
# defaults
curr_lexicon = dict()
# initialise argument parser
ap = argparse.ArgumentParser(
description="Convert Finnish dictionary TSV data into xerox/HFST lexc format")
ap.add_argument("--quiet", "-q", action="store_false", dest="verbose",
default=False,
help="do not print output to stdout while processing")
ap.add_argument("--verbose", "-v", action="store_true", default=False,
help="print each step to stdout while processing")
ap.add_argument("--master", "-m", action="append", required=True,
dest="masterfilenames",
metavar="MFILE", help="read lexical roots from MFILEs")
ap.add_argument("--stemparts", "-p", action="append", required=True,
dest='spfilenames',
metavar="SPFILE", help="read lexical roots from SPFILEs")
ap.add_argument("--inflection", "-i", action="append", required=True,
dest='inffilenames',
metavar="INFFILE", help="read inflection from INFFILEs")
ap.add_argument("--exclude-pos", "-x", action="append",
metavar="XPOS",
help="exclude all XPOS parts of speech from generation")
ap.add_argument("--include-lemmas", "-I", action="append", type=open,
metavar="ILFILE", help="read lemmas to include from ILFILE")
ap.add_argument("--exclude-blacklisted", "-B", action="append", type=str,
metavar="BLIST", help="exclude lemmas in BLIST blacklist",
choices=["FGK", "PROPN-BLOCKING"])
ap.add_argument("--version", "-V", action="version")
ap.add_argument("--output", "-o", "--one-file", "-1",
type=argparse.FileType("w"), required=True,
metavar="OFILE", help="write output to OFILE")
ap.add_argument("--fields", "-F", action="store", default=2,
metavar="N", help="read N fields from master")
ap.add_argument("--separator", action="store", default="\t",
metavar="SEP", help="use SEP as separator")
ap.add_argument("--comment", "-C", action="append", default=["#"],
metavar="COMMENT", help="skip lines starting with COMMENT that"
"do not have SEPs")
ap.add_argument("--strip", action="store",
metavar="STRIP", help="strip STRIP from fields before using")
ap.add_argument("--format", "-f", action="store", default="omor",
help="use specific output format for lexc data",
choices=["omor", "giella", "ftb3", "ftb1", "none", "apertium",
"labelsegments"])
ap.add_argument("--omor-new-para", action="store_true", default=False,
help="include NEW_PARA= in raw analyses")
ap.add_argument("--omor-allo", action="store_true", default=False,
help="include ALLO= in raw analyses")
ap.add_argument("--omor-props", action="store_true", default=False,
help="include PROPER= in raw analyses")
ap.add_argument("--omor-sem", action="store_true", default=False,
help="include SEM= in raw analyses")
ap.add_argument | ("--none-lemmas", action="store_true", default=False,
help="include lemmas in raw analyses")
ap.add_argument("--none-segments", action="store_true", default=False,
help="include segments in raw analyses")
args = ap.parse_args()
formatter = None
if args.format == 'omor':
formatter = OmorForm | atter(args.verbose, new_para=args.omor_new_para,
allo=args.omor_allo, props=args.omor_props, sem=args.omor_sem)
elif args.format == 'ftb3':
formatter = Ftb3Formatter(args.verbose)
elif args.format == 'apertium':
formatter = ApertiumFormatter(args.verbose)
elif args.format == 'giella':
formatter = GiellaFormatter(args.verbose)
elif args.format == 'none':
formatter = NoTagsFormatter(args.verbose,
lemmatise=args.none_lemmas, segment=args.none_segments)
elif args.format == 'labelsegments':
formatter = LabeledSegmentsFormatter(args.verbose)
else:
print("DIDNT CONVERT FORMATTER YET", args.format)
exit(1)
# check args
if args.strip == '"' or args.strip == "'":
quoting = csv.QUOTE_ALL
quotechar = args.strip
else:
quoting = csv.QUOTE_NONE
quotechar = None
lemmas = []
if args.include_lemmas:
for lemma_file in args.include_lemmas:
if args.verbose:
print("including only lemmas from", lemma_file.name)
for line in lemma_file:
lemmas.append(line.rstrip('\n'))
lemma_file.close()
if not args.exclude_pos:
args.exclude_pos = []
# setup files
if args.verbose:
print("Writing everything to", args.output.name)
if args.exclude_pos:
print("Not writing closed parts-of-speech data in",
",".join(args.exclude_pos))
# print definitions to rootfile
print(formatter.copyright_lexc(), file=args.output)
if args.verbose:
print("Creating Multichar_Symbols and Root")
print(formatter.multichars_lexc(), file=args.output)
print(formatter.root_lexicon_lexc(), file=args.output)
# read from csv files
for tsv_filename in args.masterfilenames:
if args.verbose:
print("Reading from", tsv_filename)
linecount = 0
print("! Omorfi stubs generated from", tsv_filename,
"\n! date:", strftime("%Y-%m-%d %H:%M:%S+%Z"),
"\n! params: ", ' '.join(argv), file=args.output)
print(formatter.copyright_lexc(), file=args.output)
curr_lexicon = ""
# for each line
with open(tsv_filename, "r", newline='') as tsv_file:
tsv_reader = csv.DictReader(tsv_file, delimiter=args.separator,
quoting=quoting, escapechar='%', quotechar=quotechar, strict=True)
postponed_suffixes = list()
postponed_abbrs = {'ABBREVIATION': list(), 'ACRONYM': list()}
for tsv_parts in tsv_reader:
linecount += 1
if args.verbose and (linecount % 10000 == 0):
print(linecount, "...", sep='', end='\r')
if len(tsv_parts) < |
cgcgbcbc/django-xadmin | xadmin/views/base.py | Python | bsd-3-clause | 20,760 | 0.001541 | import sys
import copy
import functools
import datetime
import decimal
from functools import update_wrapper
from inspect import getargspec
from django import forms
from django.utils.encoding import force_unicode
from django.conf import settings
from django.contrib import messages
from django.core.exceptions import ValidationError
from django.core.serializers.json import DjangoJSONEncoder
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from django.template import Context, Template
from django.template.response import TemplateResponse
from django.utils.datastructures import SortedDict
from django.utils.decorators import method_decorator, classonlymethod
from django.utils.encoding import smart_unicode
from django.utils.http import urlencode
from django.utils.itercompat import is_iterable
from django.utils.safestring import mark_safe
from django.utils.text import capfirst
from django.utils.translation import ugettext as _
from django.views.decorators.csrf import csrf_protect
from django.views.generic import View
from xadmin.util import static, json, vendor, sortkeypicker
csrf_protect_m = method_decorator(csrf_protect)
class IncorrectPluginArg(Exception):
pass
def filter_chain(filters, token, func, *args, **kwargs):
if token == -1:
return func()
else:
def _inner_method():
fm = filters[token]
fargs = getargspec(fm)[0]
if len(fargs) == 1:
# Only self arg
result = func()
if result is None:
return fm()
else:
                    raise IncorrectPluginArg(u'Plugin filter method needs an arg to receive the parent method result.')
else:
return fm(func if fargs[1] == '__' else func(), *args, **kwargs)
return filter_chain(filters, token - 1, _inner_method, *args, **kwargs)
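# Explanatory note: filter_hook below lets view plugins intercept a view
# method. Every plugin exposing a callable of the same name is collected,
# sorted by its optional `priority` attribute, and composed via filter_chain
# so that higher-priority filters run closest to the original method while
# lower-priority filters wrap the combined result.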
def filter_hook(func):
tag = func.__name__
func.__doc__ = "``filter_hook``\n\n" + (func.__doc__ or "")
@functools.wraps(func)
def method(self, *args, **kwargs):
def _inner_method():
return func(self, *args, **kwargs)
if self.plugins:
filters = [(getattr(getattr(p, tag), 'priority', 10), getattr(p, tag))
for p in self.plugins if callable(getattr(p, tag, None))]
filters = [f for p, f in sorted(filters, key=lambda x:x[0])]
return filter_chain(filters, len(filters) - 1, _inner_method, *args, **kwargs)
else:
return _inner_method()
return method
def inclusion_tag(file_name, context_class=Context, takes_context=False):
def wrap(func):
@functools.wraps(func)
def method(self, context, nodes, *arg, **kwargs):
_dict = func(self, context, nodes, *arg, **kwargs)
from django.template.loader import get_template, select_template
if isinstance(file_name, Template):
t = file_name
elif not isinstance(file_name, basestring) and is_iterable(file_name):
t = select_template(file_name)
else:
t = get_template(file_name)
new_context = context_class(_dict, **{
'autoescape': context.autoescape,
'current_app': context.current_app,
'use_l10n': context.use_l10n,
'use_tz': context.use_tz,
})
new_context['admin_view'] = context['admin_view']
csrf_token = context.get('csrf_token', None)
if csrf_token is not None:
new_context['csrf_token'] = csrf_token
nodes.append(t.render(new_context))
return method
return wrap
class JSONEncoder(DjangoJSONEncoder):
def default(self, o):
if isinstance(o, datetime.date):
return o.strftime('%Y-%m-%d')
elif isinstance(o, datetime.datetime):
return o.strftime('%Y-%m-%d %H:%M:%S')
elif isinstance(o, decimal.Decimal):
return str(o)
else:
try:
return super(JSONEncoder, self).default(o)
except Exception:
return smart_unicode(o)
class BaseAdminObject(object):
def get_view(self, view_class, option_class=None, *args, **kwargs):
opts = kwargs.pop('opts', {})
return self.admin_site.get_view_class(view_class, option_class, **opts)(self.request, *args, **kwargs)
def get_model_view(self, view_class, model, *args, **kwargs):
return self.get_view(view_class, self.admin_site._registry.get(model), *args, **kwargs)
def get_admin_url(self, name, *args, **kwargs):
return reverse('%s:%s' % (self.admin_site.app_name, name), args=args, kwargs=kwargs)
def get_model_url(self, model, name, *args, **kwargs):
return reve | rse(
'%s:%s_%s_%s' % (self.admin_site.app_name, model._meta.app_label,
model._meta.module_name, name),
args=args, kwargs=kwargs, current_app=self.admin_site.name)
def get_model_perm(self, model, name):
return '%s.%s_%s' % (model._meta.app_label, name, model._meta.module_name)
def has_model_perm(self, model, name, user=None):
user = user or self.user
r | eturn user.has_perm(self.get_model_perm(model, name)) or (name == 'view' and self.has_model_perm(model, 'change', user))
def get_query_string(self, new_params=None, remove=None):
if new_params is None:
new_params = {}
if remove is None:
remove = []
p = dict(self.request.GET.items()).copy()
for r in remove:
for k in p.keys():
if k.startswith(r):
del p[k]
for k, v in new_params.items():
if v is None:
if k in p:
del p[k]
else:
p[k] = v
return '?%s' % urlencode(p)
def get_form_params(self, new_params=None, remove=None):
if new_params is None:
new_params = {}
if remove is None:
remove = []
p = dict(self.request.GET.items()).copy()
for r in remove:
for k in p.keys():
if k.startswith(r):
del p[k]
for k, v in new_params.items():
if v is None:
if k in p:
del p[k]
else:
p[k] = v
return mark_safe(''.join(
'<input type="hidden" name="%s" value="%s"/>' % (k, v) for k, v in p.items() if v))
def render_response(self, content, response_type='json'):
if response_type == 'json':
response = HttpResponse(mimetype="application/json; charset=UTF-8")
response.write(
json.dumps(content, cls=JSONEncoder, ensure_ascii=False))
return response
return HttpResponse(content)
def template_response(self, template, context):
return TemplateResponse(self.request, template, context, current_app=self.admin_site.name)
def message_user(self, message, level='info'):
"""
Send a message to the user. The default implementation
posts a message using the django.contrib.messages backend.
"""
if hasattr(messages, level) and callable(getattr(messages, level)):
getattr(messages, level)(self.request, message)
def static(self, path):
return static(path)
def vendor(self, *tags):
return vendor(*tags)
class BaseAdminPlugin(BaseAdminObject):
def __init__(self, admin_view):
self.admin_view = admin_view
self.admin_site = admin_view.admin_site
if hasattr(admin_view, 'model'):
self.model = admin_view.model
self.opts = admin_view.model._meta
def init_request(self, *args, **kwargs):
pass
class BaseAdminView(BaseAdminObject, View):
""" Base Admin view, support some comm attrs."""
base_template = 'xadmin/base.html'
need_site_permission = True
def __init__(self, request, *args, **kwargs):
self.request = request
self.reque |
cloud-io/CloudUp | tests/test_my_views.py | Python | mit | 8,048 | 0.003106 | from test_utils import testCaseSetUp
from test_utils import testCaseTearDown
from test_utils import getUnitTestUserEmail
from src.models import Host
from src.models import Link
from src.models import UserHasLink
from src.memcache_utils import MemCacheKeyGen
from src.error_message import ErrorMessages
import urllib2
import urllib
import unittest
from google.appengine.ext import ndb
from google.appengine.api import memcache
class LinkTest(unittest.TestCase):
def setUp(self):
testCaseSetUp(self)
# Adds two hosts.
self.hostName = 'heroku'
host = Host(id=self.hostName, interval=45)
host.put()
self.hostNameB = 'azure'
host = Host(id=self.hostNameB, interval=15)
host.put()
def tearDown(self):
testCaseTearDown(self)
def testAddLink(self):
linkUrl = 'http://heroku-on.appspot.com'
params = {
'url': linkUrl,
'host': self.hostName,
}
query = urllib.urlencode(params)
response = self.testapp.get('/my/link/add?' + query)
# Checks return page.
self.assertEqual(response.status_int, 302)
self.assertEqual(response.location, 'http://localhost/my')
# Checks database.
link = ndb.Key('Host', self.hostName, 'Link', linkUrl).get()
self.assertIsNotNone(link)
    def testAddLinkShouldFailWithEmptyUrl(self):
params = {
'url': '',
'host': self.hostName,
}
query = urllib.urlencode(params)
response = self.testapp.get('/my/link/add?' + query, status=404)
# Checks return page.
self.assertEqual(response.status_int, 404)
self.assertEqual(response.body, ErrorMessages.invalidLinkUrl())
# Checks database.
self.assertIsNone(Link.query().get())
def testAddLinkShouldStoreFullPathWithNukeUrl(self):
linkUrl = 'heroku-on.appspot.com'
params = {
'url': linkUrl,
'host': self.hostName,
}
query = urllib.urlencode(params)
response = self.testapp.get('/my/link/add?' + query)
# Checks return page.
self.assertEqual(response.status_int, 302)
self.assertEqual(response.location, 'http://localhost/my')
# Checks database.
link = ndb.Key('Host', self.hostName, 'Link', linkUrl).get()
self.assertIsNone(link)
link = ndb.Key('Host', self.hostName, 'Link', 'http://' + linkUrl).get()
self.assertIsNotNone(link)
def testAddLinkShouldFailWithNonExistingHost(self):
url = 'heroku-on.appspot.com'
params = {
'url': url,
'host': 'NotExistingHostName',
}
query = urllib.urlencode(params)
response = self.testapp.get('/my/link/add?' + query, status=404)
# Checks return page.
self.assertEqual(response.status_int, 404)
self.assertEqual(response.body, ErrorMessages.hostDoesNotExist())
# Checks database.
self.assertIsNone(Link.query().get())
    def testAddLinkShouldFailWithEmptyHostName(self):
url = 'heroku-on.appspot.com'
params = {
'url': url,
'host': '',
}
query = urllib.urlencode(params)
response = self.testapp.get('/my/link/add?' + query, status=404)
# Checks return page.
self.assertEqual(response.status_int, 404)
self.assertEqual(response.body, ErrorMessages.invalidHostName())
# Checks database.
self.assertIsNone(Link.query().get())
def testAddLinkShouldFailWithExistingHostUrlPair(self):
linkUrl = 'http://heroku-on.appspot.com'
# setup - add a link with the same url and host.
linkKey = Link(id=linkUrl, parent=ndb.Key('Host', self.hostName)).put()
UserHasLink(id=linkKey.urlsafe(), parent=ndb.Key('User', getUnitTestUserEmail())).put()
params = {
'url': linkUrl,
'host': self.hostName,
}
query = urllib.urlencode(params)
response = self.testapp.get('/my/link/add?' + query, status=404)
# Checks return page.
self.assertEqual(response.status_int, 404)
self.assertEqual(response.body,
ErrorMessages.duplicatingLinkUrlHostPair())
# Checks database.
link = ndb.Key('Host', self.hostName, 'Link', linkUrl).get()
self.assertIsNotNone(link)
def testAddLinkShouldSucceedWithSameUrlDifferentHost(self):
linkUrl = 'http://heroku-on.appspot.com'
# setup - add a link with the same url and host.
        Link(id=linkUrl, parent=ndb.Key('Host', self.hostName)).put()
params = {
'url': linkUrl,
'host': self.hostNameB,
}
query = urllib.urlencode(params)
response = self.testapp.get('/my/link/add?' + query)
# Checks return page.
self.assertEqual(response.status_int, 302)
self.assertEqual(response.location, 'http://localhost/my')
# Checks database.
link = ndb.Key('Host', self.hostName, 'Link', linkUrl).get()
self.assertIsNotNone(link)
        link = ndb.Key('Host', self.hostNameB, 'Link', linkUrl).get()
self.assertIsNotNone(link)
    def testAddLinkShouldInvalidateCacheForLinksOfGivenHostAndAllLinks(self):
memcache.set(MemCacheKeyGen.getLinksKey(self.hostName), [])
memcache.set(MemCacheKeyGen.getAllLinksKey(), [])
linkUrl = 'http://heroku-on.appspot.com'
params = {
'url': linkUrl,
'host': self.hostName,
}
query = urllib.urlencode(params)
response = self.testapp.get('/my/link/add?' + query)
self.assertIsNone(memcache.get(MemCacheKeyGen.getLinksKey(self.hostName)))
self.assertIsNone(memcache.get(MemCacheKeyGen.getAllLinksKey()))
def testDeleteLink(self):
linkUrl = 'http://heroku-on.appspot.com'
linkKey = Link(id=linkUrl, parent=ndb.Key('Host', self.hostName)).put()
UserHasLink(id=linkKey.urlsafe(), parent=ndb.Key('User', getUnitTestUserEmail())).put()
params = {
'url': linkUrl,
'host': self.hostName,
}
query = urllib.urlencode(params)
response = self.testapp.get('/my/link/delete?' + query)
# Checks return page.
self.assertEqual(response.status_int, 302)
self.assertEqual(response.location, 'http://localhost/my')
# Checks database.
userHasLink = ndb.Key('Host', self.hostName, 'UserHasLink', linkKey.urlsafe()).get()
self.assertIsNone(userHasLink)
    def testDeleteLinkShouldFailWithMismatchingHostAndLink(self):
linkUrl = 'http://heroku-on.appspot.com'
linkKey = Link(id=linkUrl, parent=ndb.Key('Host', self.hostName)).put()
UserHasLink(id=linkKey.urlsafe(), parent=ndb.Key('User', getUnitTestUserEmail())).put()
params = {
'url': linkUrl,
'host': self.hostNameB,
}
query = urllib.urlencode(params)
response = self.testapp.get('/my/link/delete?' + query, status=404)
# Checks return page.
self.assertEqual(response.status_int, 404)
self.assertEqual(response.body, ErrorMessages.urlHostNamePairDoesNotExist())
# Checks database.
userHasLink = ndb.Key('Host', self.hostName, 'UserHasLink', linkKey.urlsafe()).get()
self.assertIsNone(userHasLink)
    def testDeleteLinkShouldFailWithEmptyUrl(self):
linkUrl = 'http://heroku-on.appspot.com'
linkKey = Link(id=linkUrl, parent=ndb.Key('Host', self.hostName)).put()
params = {
'url': '',
'host': self.hostName,
}
query = urllib.urlencode(params)
response = self.testapp.get('/my/link/delete?' + query, status=404)
# Checks return page.
self.assertEqual(response.status_int, 404)
self.assertEqual(response.body, ErrorMessages.invalidLinkUrl())
# Checks database.
userHasLink = ndb.Key('Host', self.hostName, 'UserHasLink', linkKey.urlsafe()).get()
self.assertIsNone(userHasLink)
    def testDeleteLinkShouldFailWithEmptyHostName(self):
linkUrl = 'http://heroku-on.appspot.com'
linkKey = Link(id=linkUrl, parent=ndb.Key('Host', self.hostName)).put()
params = {
'url': linkUrl,
'host': '',
}
query = urllib.urlencode(params)
response = self.testapp.get('/my/link/delete?' + query, status=404)
# Checks return page.
self.assertEqual(response.status_int, 404)
self.assertEqual(response.body, ErrorMessages.invalidHostName())
# Checks database.
userHasLink = ndb.Key('Host', self.hostName, 'UserHasLink', linkKey.urlsafe()).get()
self.assertIsNone(userHasLink)
mabuchilab/Instrumental | instrumental/drivers/motion/_kinesis/common.py | Python | gpl-3.0 | 2,398 | 0.002085 | from enum import Enum
# Message Enums
#
class MessageType(Enum):
GenericDevice = 0
GenericPiezo = 1
GenericMotor = 2
GenericDCMotor = 3
GenericSimpleMotor = 4
RackDevice = 5
Laser = 6
TECCtlr = 7
Quad = 8
NanoTrak = 9
Specialized = 10
Solenoid = 11
class GenericDevice(Enum):
SettingsInitialized = 0
SettingsUpdated = 1
Error = 2
Close = 3
class GenericMotor(Enum):
Homed = 0
Moved = 1
Stopped = 2
    LimitUpdated = 3
class GenericDCMotor(Enum):
Error = 0
Status = 1
MessageIDs = {
MessageType.GenericDevice: GenericDevice,
MessageType.GenericMotor: GenericMotor,
MessageType.GenericDCMotor: GenericDCMotor
}
class KinesisError(Exception):
messages = {
0: 'Success',
        1: 'The FTDI functions have not been initialized',
2: 'The device could not be found. Make sure to call TLI_BuildDeviceList().',
3: 'The device must be opened before it can be accessed',
4: 'An I/O Error has occured in the FTDI chip',
5: 'There are insufficient resources to run this application',
6: 'An invalid parameter has been supplied to the device',
7: 'The device is no longer present',
8: 'The device detected does not match that expected',
32: 'The device is already open',
33: 'The device has stopped responding',
34: 'This function has not been implemented',
35: 'The device has reported a fault',
36: 'The function could not be completed because the device is disconnected',
41: 'The firmware has thrown an error',
42: 'The device has failed to initialize',
43: 'An invalid channel address was supplied',
37: 'The device cannot perform this function until it has been Homed',
38: 'The function cannot be performed as it would result in an illegal position',
39: 'An invalid velocity parameter was supplied. The velocity must be greater than zero',
44: 'This device does not support Homing. Check the Limit switch parameters are correct',
45: 'An invalid jog mode was supplied for the jog function',
}
def __init__(self, code=None, msg=''):
if code is not None and not msg:
msg = '(0x{:X}) {}'.format(code, self.messages[code])
super(KinesisError, self).__init__(msg)
self.code = code
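# Illustrative use (names here are assumptions, not part of this module): a
# thin wrapper around a Kinesis DLL call might translate non-zero return
# codes like so:
#
#     ret = lib.CC_Open(serial_no)   # hypothetical FFI call returning an int
#     if ret != 0:
#         raise KinesisError(ret)    # -> "(0x2) The device could not be found..."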
ghchinoy/tensorflow | tensorflow/contrib/learn/python/learn/datasets/base.py | Python | apache-2.0 | 8,304 | 0.006142 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Base utilities for loading datasets (deprecated).
This module and all its submodules are deprecated. See
[contrib/learn/README.md](https://www.tensorflow.org/code/tensorflow/contrib/learn/README.md)
for migration instructions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import csv
import os
from os import path
import random
import time
import numpy as np
from six.moves import urllib
from tensorflow.python.platform import gfile
from tensorflow.python.util.deprecation import deprecated
Dataset = collections.namedtuple('Dataset', ['data', 'target'])
Datasets = collections.namedtuple('Datasets', ['train', 'validation', 'test'])
@deprecated(None, 'Use tf.data instead.')
def load_csv_with_header(filename,
target_dtype,
features_dtype,
target_column=-1):
"""Load dataset from CSV file with a header row."""
with gfile.Open(filename) as csv_file:
data_file = csv.reader(csv_file)
header = next(data_file)
n_samples = int(header[0])
n_features = int(header[1])
data = np.zeros((n_samples, n_features), dtype=features_dtype)
target = np.zeros((n_samples,), dtype=target_dtype)
for i, row in enumerate(data_file):
target[i] = np.asarray(row.pop(target_column), dtype=target_dtype)
data[i] = np.asarray(row, dtype=features_dtype)
return Dataset(data=data, target=target)
@deprecated(None, 'Use tf.data instead.')
def load_csv_without_header(filename,
target_dtype,
features_dtype,
target_column=-1):
"""Load dataset from CSV file without a header row."""
with gfile.Open(filename) as csv_file:
data_file = csv.reader(csv_file)
data, target = [], []
for row in data_file:
target.append(row.pop(target_column))
data.append(np.asarray(row, dtype=features_dtype))
target = np.array(target, dtype=target_dtype)
data = np.array(data)
return Dataset(data=data, target=target)
@deprecated(None, 'Use tf.data instead.')
def shrink_csv(filename, ratio):
"""Create a smaller dataset of only 1/ratio of original data."""
filename_small = filename.replace('.', '_small.')
with gfile.Open(filename_small, 'w') as csv_file_small:
writer = csv.writer(csv_file_small)
with gfile.Open(filename) as csv_file:
reader = csv.reader(csv_file)
i = 0
for row in reader:
if i % ratio == 0:
writer.writerow(row)
i += 1
@deprecated(None, 'Use scikits.learn.datasets.')
def load_iris(data_path=None):
"""Load Iris dataset.
Args:
data_path: string, path to iris dataset (optional)
Returns:
Dataset object containing data in-memory.
"""
if data_path is None:
module_path = path.dirname(__file__)
data_path = path.join(module_path, 'data', 'iris.csv')
return load_csv_with_header(
data_path, target_dtype=np.int, features_dtype=np.float)
@deprecated(None, 'Use scikits.learn.datasets.')
def load_boston(data_path=None):
"""Load Boston housing dataset.
Args:
data_path: string, path to boston dataset (optional)
Returns:
Dataset object containing data in-memory.
"""
  if data_path is None:
module_path = path.dirname(__file__)
data_path = path.join(module_path, 'data', 'boston_house_prices.csv')
return load_csv_with_header(
data_path, target_dtype=np.float, features_dtype=np.float)
@deprecated(None, 'Use the retry module or similar alternatives.')
def retry(initial_delay,
max_delay,
factor=2.0,
          jitter=0.25,
is_retriable=None):
"""Simple decorator for wrapping retriable functions.
Args:
initial_delay: the initial delay.
max_delay: the maximum delay allowed (actual max is
      max_delay * (1 + jitter)).
factor: each subsequent retry, the delay is multiplied by this value.
(must be >= 1).
jitter: to avoid lockstep, the returned delay is multiplied by a random
number between (1-jitter) and (1+jitter). To add a 20% jitter, set
jitter = 0.2. Must be < 1.
is_retriable: (optional) a function that takes an Exception as an argument
and returns true if retry should be applied.
Returns:
A function that wraps another function to automatically retry it.
"""
return _internal_retry(
initial_delay=initial_delay,
max_delay=max_delay,
factor=factor,
jitter=jitter,
is_retriable=is_retriable)
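# Usage sketch (the decorated function is illustrative, not part of this
# module):
#
#     @retry(initial_delay=1.0, max_delay=16.0, is_retriable=_is_retriable)
#     def fetch(url):
#         return urllib.request.urlretrieve(url)
#
# Retriable failures sleep for the scheduled delay and try again; once the
# schedule is exhausted, one final attempt is made and its exception
# propagates.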
def _internal_retry(initial_delay,
max_delay,
factor=2.0,
jitter=0.25,
is_retriable=None):
"""Simple decorator for wrapping retriable functions, for internal use only.
Args:
initial_delay: the initial delay.
max_delay: the maximum delay allowed (actual max is
      max_delay * (1 + jitter)).
factor: each subsequent retry, the delay is multiplied by this value.
(must be >= 1).
jitter: to avoid lockstep, the returned delay is multiplied by a random
number between (1-jitter) and (1+jitter). To add a 20% jitter, set
jitter = 0.2. Must be < 1.
is_retriable: (optional) a function that takes an Exception as an argument
and returns true if retry should be applied.
Returns:
A function that wraps another function to automatically retry it.
"""
if factor < 1:
raise ValueError('factor must be >= 1; was %f' % (factor,))
if jitter >= 1:
raise ValueError('jitter must be < 1; was %f' % (jitter,))
# Generator to compute the individual delays
def delays():
delay = initial_delay
while delay <= max_delay:
yield delay * random.uniform(1 - jitter, 1 + jitter)
delay *= factor
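  # Worked example: with initial_delay=1.0, factor=2.0, max_delay=16.0 and
  # jitter=0.25, the base delays are 1, 2, 4, 8 and 16 seconds, each scaled
  # by a uniform factor in [0.75, 1.25]; the schedule then stops because the
  # next base delay (32) exceeds max_delay.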
def wrap(fn):
"""Wrapper function factory invoked by decorator magic."""
def wrapped_fn(*args, **kwargs):
"""The actual wrapper function that applies the retry logic."""
for delay in delays():
try:
return fn(*args, **kwargs)
except Exception as e: # pylint: disable=broad-except
if is_retriable is None:
continue
if is_retriable(e):
time.sleep(delay)
else:
raise
return fn(*args, **kwargs)
return wrapped_fn
return wrap
_RETRIABLE_ERRNOS = {
110, # Connection timed out [socket.py]
}
def _is_retriable(e):
return isinstance(e, IOError) and e.errno in _RETRIABLE_ERRNOS
@deprecated(None, 'Please use urllib or similar directly.')
@_internal_retry(initial_delay=1.0, max_delay=16.0, is_retriable=_is_retriable)
def urlretrieve_with_retry(url, filename=None):
return urllib.request.urlretrieve(url, filename)
@deprecated(None, 'Please write your own downloading logic.')
def maybe_download(filename, work_directory, source_url):
"""Download the data from source url, unless it's already here.
Args:
filename: string, name of the file in the directory.
work_directory: string, path to working directory.
source_url: url to download from if file doesn't exist.
Returns:
Path to resulting file.
"""
if not gfile.Exists(work_directory):
gfile.MakeDirs(work_directory)
filepath = os.path.join(work_directory, filename)
if not gfile.Exists(filepath):
temp_file_name, _ = urlretrieve_with_retry(source_url)
gfile.Copy(temp_file_name, filepath)
  with gfile.G
wangyifan1985/sampan | sampan/properties.py | Python | mit | 5,148 | 0.00136 | #!/usr/bin/env python
# coding: utf-8
import re
import sys
import typing
import time
from collections import OrderedDict, abc
""" A Python implementation for java.util.Properties """
__all__ = ['Properties']
# Constants ###################################################################
###############################################################################
DMT = '%a %b %d %H:%M:%S %Z %Y'
ENCODING = 'latin-1'
# Errors ######################################################################
###############################################################################
class PropertiesError(Exception):
pass
# Properties ##################################################################
###############################################################################
class Properties:
re_property = re.compile(r'(.+?)(?<!\\)(?:\s*[=|:]\s*)(.*)')
re_property_space = re.compile(r'(.+?)(?<!\\)(?:[ ]+)(.+)')
re_tail = re.compile(r'([\\]+)$')
# shamed copy from "jproperties"
@staticmethod
def unescape(value):
ret = []
backslash = False
for c in value:
if backslash:
if c == "u":
# fall through to native unicode_escape
ret.append(r"\u")
elif c == "t":
ret.append("\t")
elif c == "r":
ret.append("\r")
elif c == "n":
ret.append("\n")
elif c == "f":
ret.append("\f")
else:
ret.append(c)
backslash = False
elif c == "\\":
backslash = True
else:
ret.append(c)
ret = "".join(ret).encode("utf-8").decode("unicode_escape")
return ret
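    # Example (illustrative input): unescape(r'a\tb\u00e9\c') returns 'a\tbéc'
    # -- \t becomes a real tab, \u00e9 is decoded to 'é' by the final
    # unicode_escape pass, and the unknown escape \c collapses to 'c'.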
def __init__(self, defaults=None):
self._props = OrderedDict()
if defaults is not None:
if isinstance(defaults, abc.Mapping):
self._props.update(defaults)
elif isinstance(defaults, Properties):
self._props.update(defaults._props)
else:
                raise PropertiesError(f'Unknown default properties type: {type(defaults)}')
    def __setitem__(self, key, value):
self.setProperty(key, value)
def __getitem__(self, key):
return self.getProperty(key)
def __getattr__(self, name):
try:
return self.__dict__[name]
except KeyError:
if hasattr(self._props, name):
return getattr(self._props, name)
def __len__(self):
return len(self._props)
def __eq__(self, other):
return isinstance(other, Properties) and self._props == other._props
def __contains__(self, key):
return key in self._props
def __delitem__(self, key):
del self._props[key]
def __str__(self):
s = '{'
for key, value in self._props.items():
s = ''.join((s, key, '=', value, ', '))
s = ''.join((s[:-2], '}'))
return s
def __iter__(self):
return iter(self._props)
def setProperty(self, key: str, value: str):
self._props[key] = value
def getProperty(self, key: str, defaultValue: str=None):
if defaultValue:
return self._props.get(key, defaultValue)
return self._props.get(key)
def list(self, out=sys.stdout):
print('-- listing properties --', file=out)
for key, value in self._props.items():
print(''.join((key, '=', value)), file=out)
def propertyNames(self):
return self._props.keys()
def stringPropertyNames(self):
return set(self._props.keys())
def load(self, ins: typing.IO):
lineno = 0
lines = iter(ins.readlines())
for line in lines:
lineno += 1
line = line.strip()
if not line or line.startswith('#') or line.startswith('!'):
continue
            while line.endswith('\\'):
                if len(self.re_tail.search(line).group(1)) % 2 == 1:
                    line = line[:-1] + next(lines).strip()
                    lineno += 1
                else:
                    # an even number of trailing backslashes is an escaped
                    # backslash, not a continuation -- stop joining lines
                    break
m = self.re_property.match(line)
if m:
key = m.group(1)
value = m.group(2)
else:
m = self.re_property_space.match(line)
if m:
key = m.group(1)
value = m.group(2)
else:
raise PropertiesError(f'Illegal property at line: {lineno}')
self.setProperty(self.unescape(key), self.unescape(value))
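    # Sketch of an input load() accepts (file contents are invented):
    #
    #     # ignored comment
    #     host = example.org
    #     greeting : hello\nworld
    #     path  /tmp/data
    #
    # which yields host='example.org', greeting='hello\nworld' (with a real
    # newline) and path='/tmp/data' -- '=', ':' and bare whitespace all
    # separate key from value, and escapes are resolved through unescape().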
def store(self, out, comments: str=None):
lines = []
if comments:
lines.append(''.join(('# ', comments)))
lines.append(''.join(('# ', time.strftime(DMT, time.gmtime()))))
for k, v in self._props.items():
lines.append(f'{k}={v}')
if 'b' in out.mode:
out.write('\n'.encode(ENCODING).join([s.encode(ENCODING) for s in lines]))
else:
out.write('\n'.join(lines))
javierrodriguezcuevas/git-cola | test/models_selection_test.py | Python | gpl-2.0 | 515 | 0 | from __future__ import absolute_import, division, unicode_literals
import unittest
import mock
from cola.models import selection
class SelectionTestCase(unittest.TestCase):
def test_union(self):
t = mock.Mock()
t.staged = ['a']
t.unmerged = ['a', 'b']
t.modified = ['b', 'a', 'c']
t.untracked = ['d']
expect = ['a', 'b', 'c', 'd']
actual = selection.union(t)
self.assertEqual(expect, actual)
if __name__ == '__main__':
unittest.main()
thedod/redwind | migrations/20141017-permalinks.py | Python | bsd-2-clause | 531 | 0 | from redwind import app, db, util
from redwind.models import Post
import itertools
db.engine.execute('alter table post add column historic_path varchar(256)')
db.engine.execute('update post set historic_path = path')
for post in Post.query.all():
print(post.historic_path)
    if not post.slug:
post.slug = post.generate_slug()
post.path = '{}/{:02d}/{}'.format(post.published.year,
post.published.month,
post.slug)
db.session.commit()
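# Worked example of the new scheme (values invented): a post published on
# 2014-10-17 with slug 'hello-world' ends up with path '2014/10/hello-world',
# while its previous path survives in the new historic_path column.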
ACJTeam/enigma2 | e2reactor.py | Python | gpl-2.0 | 5,223 | 0.036378 | # enigma2 reactor: based on pollreactor, which is
# Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Maintainer: U{Felix Domke<mailto:tmbinc@elitedvb.net>}
"""
# System imports
import select, errno, sys
# Twisted imports
from twisted.python import log, failure
from twisted.internet import main, posixbase, error
#from twisted.internet.pollreactor import PollReactor, poller
from enigma import getApplication
# globals
reads = {}
writes = {}
selectables = {}
POLL_DISCONNECTED = (select.POLLHUP | select.POLLERR | select.POLLNVAL)
class E2SharedPoll:
def __init__(self):
self.dict = { }
self.eApp = getApplication()
def register(self, fd, eventmask = select.POLLIN | select.POLLERR | select.POLLOUT):
self.dict[fd] = eventmask
def unregister(self, fd):
del self.dict[fd]
def poll(self, timeout = None):
try:
r = self.eApp.poll(timeout, self.dict)
except KeyboardInterrupt:
return None
return r
poller = E2SharedPoll()
class PollReactor(posixbase.PosixReactorBase):
"""A reactor that uses poll(2)."""
def _updateRegistration(self, fd):
"""Register/unregister an fd with the poller."""
try:
poller.unregister(fd)
except KeyError:
pass
mask = 0
if fd in reads:
mask = mask | select.POLLIN
if fd in writes:
mask = mask | select.POLLOUT
if mask != 0:
poller.register(fd, mask)
else:
if fd in selectables:
del selectables[fd]
poller.eApp.interruptPoll()
def _dictRemove(self, selectable, mdict):
try:
# the easy way
fd = selectable.fileno()
# make sure the fd is actually real. In some situations we can get
# -1 here.
mdict[fd]
except:
            # the hard way: necessary because fileno() may disappear at any
# moment, thanks to python's underlying sockets impl
for fd, fdes in selectables.items():
if selectable is fdes:
break
else:
# Hmm, maybe not the right course of action? This method can't
# fail, because it happens inside error detection...
return
if fd in mdict:
del mdict[fd]
self._updateRegistration(fd)
    def addReader(self, reader):
"""Add a FileDescriptor for notification of data available to read.
"""
fd = reader.fileno()
if fd not in reads:
selectables[fd] = reader
reads[fd] = 1
self._updateRegistration(fd)
def addWriter(self, writer, writes=writes, selectables=selectables):
"""Add a FileDescriptor for notification of data available to write.
"""
fd = writer.fileno()
if fd not in writes:
selectables[fd] = writer
writes[fd] = 1
self._updateRegistration(fd)
def removeReader(self, reader, reads=reads):
"""Remove a Selectable for notification of data available to read.
"""
return self._dictRemove(reader, reads)
def removeWriter(self, writer, writes=writes):
"""Remove a Selectable for notification of data available to write.
"""
return self._dictRemove(writer, writes)
def removeAll(self, reads=reads, writes=writes, selectables=selectables):
"""Remove all selectables, and return a list of them."""
if self.waker is not None:
self.removeReader(self.waker)
result = selectables.values()
fds = selectables.keys()
reads.clear()
writes.clear()
selectables.clear()
for fd in fds:
poller.unregister(fd)
if self.waker is not None:
self.addReader(self.waker)
return result
def doPoll(self, timeout,
reads=reads,
writes=writes,
selectables=selectables,
select=select,
log=log,
POLLIN=select.POLLIN,
POLLOUT=select.POLLOUT):
"""Poll the poller for new events."""
if timeout is not None:
timeout = int(timeout * 1000) # convert seconds to milliseconds
try:
l = poller.poll(timeout)
if l is None:
if self.running:
self.stop()
l = [ ]
except select.error, e:
if e[0] == errno.EINTR:
return
else:
raise
_drdw = self._doReadOrWrite
for fd, event in l:
try:
selectable = selectables[fd]
except KeyError:
# Handles the infrequent case where one selectable's
# handler disconnects another.
continue
log.callWithLogger(selectable, _drdw, selectable, fd, event, POLLIN, POLLOUT, log)
doIteration = doPoll
def _doReadOrWrite(self, selectable, fd, event, POLLIN, POLLOUT, log,
faildict={
error.ConnectionDone: failure.Failure(error.ConnectionDone()),
error.ConnectionLost: failure.Failure(error.ConnectionLost())
}):
why = None
inRead = False
if event & POLL_DISCONNECTED and not (event & POLLIN):
why = main.CONNECTION_LOST
else:
try:
if event & POLLIN:
why = selectable.doRead()
inRead = True
if not why and event & POLLOUT:
why = selectable.doWrite()
inRead = False
if not selectable.fileno() == fd:
why = error.ConnectionFdescWentAway('Filedescriptor went away')
inRead = False
except:
log.deferr()
why = sys.exc_info()[1]
if why:
self._disconnectSelectable(selectable, why, inRead)
def callLater(self, *args, **kwargs):
poller.eApp.interruptPoll()
return posixbase.PosixReactorBase.callLater(self, *args, **kwargs)
def install():
"""Install the poll() reactor."""
p = PollReactor()
main.installReactor(p)
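# Usage sketch: install() must run before anything imports the default
# reactor, e.g.
#
#     import e2reactor
#     e2reactor.install()
#     from twisted.internet import reactor
#     reactor.run()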
__all__ = ["PollReactor", "install"]
 jamesbdunlop/tk-jbd-submit-mayaplayblast | python/lib/renderGlobals.py | Python | apache-2.0 | 197 | 0.010152 | import maya.cmds as cmds
def setRenderGlobals():
" | ""
Sets the base defaults for the renderglobals for playblasting.
"""
    print 'Set your custom renderglobals here for playblasting.'
jcsp/manila | manila/api/openstack/wsgi.py | Python | apache-2.0 | 44,832 | 0.000022 | # Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import inspect
import math
import time
from oslo_log import log
from oslo_serialization import jsonutils
from oslo_utils import strutils
import six
import webob
import webob.exc
from manila.api.openstack import api_version_request as api_version
from manila.api.openstack import versioned_method
from manila import exception
from manila.i18n import _
from manila.i18n import _LE
from manila.i18n import _LI
from manila import wsgi
LOG = log.getLogger(__name__)
SUPPORTED_CONTENT_TYPES = (
'application/json',
)
_MEDIA_TYPE_MAP = {
'application/json': 'json',
}
# name of attribute to keep version method information
VER_METHOD_ATTR = 'versioned_methods'
# Name of header used by clients to request a specific version
# of the REST API
API_VERSION_REQUEST_HEADER = 'X-OpenStack-Manila-API-Version'
EXPERIMENTAL_API_REQUEST_HEADER = 'X-OpenStack-Manila-API-Experimental'
V1_SCRIPT_NAME = '/v1'
class Request(webob.Request):
"""Add some OpenStack API-specific logic to the base webob.Request."""
def __init__(self, *args, **kwargs):
super(Request, self).__init__(*args, **kwargs)
self._resource_cache = {}
if not hasattr(self, 'api_version_request'):
self.api_version_request = api_version.APIVersionRequest()
def cache_resource(self, resource_to_cache, id_attribute='id', name=None):
"""Cache the given resource.
Allow API methods to cache objects, such as results from a DB query,
to be used by API extensions within the same API request.
The resource_to_cache can be a list or an individual resource,
but ultimately resources are cached individually using the given
id_attribute.
Different resources types might need to be cached during the same
request, they can be cached using the name parameter. For example:
Controller 1:
request.cache_resource(db_volumes, 'volumes')
request.cache_resource(db_volume_types, 'types')
Controller 2:
db_volumes = request.cached_resource('volumes')
db_type_1 = request.cached_resource_by_id('1', 'types')
If no name is given, a default name will be used for the resource.
An instance of this class only lives for the lifetime of a
single API request, so there's no need to implement full
cache management.
"""
if not isinstance(resource_to_cache, list):
resource_to_cache = [resource_to_cache]
if not name:
name = self.path
cached_resources = self._resource_cache.setdefault(name, {})
for resource in resource_to_cache:
cached_resources[resource[id_attribute]] = resource
def cached_resource(self, name=None):
"""Get the cached resources cached under the given resource name.
Allow an API extension to get previously stored objects within
the same API request.
Note that the object data will be slightly stale.
:returns: a dict of id_attribute to the resource from the cached
resources, an empty map if an empty collection was cached,
or None if nothing has been cached yet under this name
"""
if not name:
name = self.path
if name not in self._resource_cache:
# Nothing has been cached for this key yet
return None
return self._resource_cache[name]
def cached_resource_by_id(self, resource_id, name=None):
"""Get a resource by ID cached under the given resource name.
Allow an API extension to get a previously stored object
within the same API request. This is basically a convenience method
to lookup by ID on the dictionary of all cached resources.
Note that the object data will be slightly stale.
:returns: the cached resource or None if the item is not in the cache
"""
resources = self.cached_resource(name)
if not resources:
# Nothing has been cached yet for this key yet
return None
return resources.get(resource_id)
def cache_db_items(self, key, items, item_key='id'):
"""Allow API methods to store objects from a DB query to be
used by API extensions within the same API request.
An instance of this class only lives for the lifetime of a
single API request, so there's no need to implement full
cache management.
"""
self.cache_resource(items, item_key, key)
def get_db_items(self, key):
"""Allow an API extension to get previously stored objects within
the same API request.
Note that the object data will be slightly stale.
"""
return self.cached_resource(key)
def get_db_item(self, key, item_key):
"""Allow an API extension to get a previously stored object
within the same API request.
Note that the object data will be slightly stale.
"""
return self.get_db_items(key).get(item_key)
def cache_db_share_types(self, share_types):
self.cache_db_items('share_types', share_types, 'id')
def cache_db_share_type(self, share_type):
self.cache_db_items('share_types', [share_type], 'id')
def get_db_share_types(self):
return self.get_db_items('share_types')
def get_db_share_type(self, share_type_id):
return self.get_db_item('share_types', share_type_id)
def best_match_content_type(self):
"""Determine the requested response content-type."""
if 'manila.best_content_type' not in self.environ:
# Calculate the best MIME type
content_type = None
# Check URL path suffix
parts = self.path.rsplit('.', 1)
if len(parts) > 1:
possible_type = 'application/' + parts[1]
if possible_type in SUPPORTED_CONTENT_TYPES:
content_type = possible_type
if not content_type:
content_type = self.accept.best_match(SUPPORTED_CONTENT_TYPES)
self.environ['manila.best_content_type'] = (content_type or
'application/json')
return self.environ['manila.best_content_type']
def get_content_type(self):
| """Determine content type of the request body.
Does not do any body introspection, only checks header.
"""
if "Content-Type" not in self.headers:
return None
allowed_types = SUPPORTED_CONTENT_TYPES
content_type = self.content_type
        if content_type not in allowed_types:
raise exception.InvalidContentType(content_type=content_type)
return content_type
def set_api_version_request(self):
"""Set API version request based on the request header information.
Microversions starts with /v2, so if a client sends a /v1 URL, then
ignore the headers and request 1.0 APIs.
"""
if not self.script_name:
self.api_version_request = api_version.APIVersionRequest()
elif self.script_name == V1_SCRIPT_NAME:
self.api_version_request = api_version.APIVersionRequest('1.0')
else:
if API_VERSION_REQUEST_HEADER in self.headers:
hdr_string = self.headers[API_VERSION_REQUEST_HEADER]
self.api_version_request = api_version.APIVersionRequest(
vinc3nt/freepto-web | manage.py | Python | gpl-2.0 | 842 | 0.002375 | #!/usr/bin/env python
from flask.ext.script import Manager
from flask_frozen import Freezer
import discovery
import logging
out = logging.StreamHandler()
out.setFormatter(logging.Formatter())
out.setLevel(logging.DEBUG)
logging.getLogger('freepto-web').setLevel(logging.INFO)
logging.getLogger('freepto-web').addHandler(out)
from app import app
manager = Manager(app)
freezer = Freezer(app)
@freezer.register_generator
def index():
yield {}
@freezer.register_generator
def page_index():
    for lang in discovery.lang_dirs:
yield {'lang': lang}
@freezer.register_generator
def page():
for lang in discovery.lang_dirs:
for title in discovery.find_pages(lang):
yield {'lang': lang, 'title': title}
@manager.command
def freeze():
freezer.freeze()
if __name__ == "__main__":
manager.run()
andymccurdy/tested-transcoder | transcoder.py | Python | mit | 12,971 | 0.000463 | #!/usr/bin/python
import logging
import os
import re
import shlex
import shutil
import signal
import subprocess
import sys
import time
def non_zero_min(values):
"Return the min value but always prefer non-zero values if they exist"
if len(values) == 0:
raise TypeError('non_zero_min expected 1 arguments, got 0')
non_zero_values = [i for i in values if i != 0]
if non_zero_values:
return min(non_zero_values)
return 0
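# For example: non_zero_min([0, 3, 2]) == 2 (zeros are ignored while any
# non-zero value exists), non_zero_min([0, 0]) == 0, and non_zero_min([])
# raises TypeError just like the built-in min.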
class Transcoder(object):
# name of the share defined in virtualbox that will contain input/output video
VBOX_SHARE_NAME = 'transcoder'
# path to mount the virtual box share
TRANSCODER_ROOT = "/media/transcoder"
# directory containing new video to transcode
INPUT_DIRECTORY = TRANSCODER_ROOT + '/input'
# directory where handbrake will save the output to. this is a temporary
# location and the file is moved to OUTPUT_DIRECTORY after complete
WORK_DIRECTORY = TRANSCODER_ROOT + '/work'
# directory containing the original inputs after they've been transcoded
COMPLETED_DIRECTORY = TRANSCODER_ROOT + '/completed-originals'
# directory contained the compressed outputs
OUTPUT_DIRECTORY = TRANSCODER_ROOT + '/output'
# standard options for the transcode-video script
TRANSCODE_OPTIONS = '--mkv --slow --allow-dts --allow-ac3 --find-forced add --copy-all-ac3'
# number of seconds a file must remain unmodified in the INPUT_DIRECTORY
# before it is considered done copying. increase this value for more
# tolerance on bad network connections.
WRITE_THRESHOLD = 30
# path to logfile
LOGFILE = TRANSCODER_ROOT + '/transcoder.log'
def __init__(self):
self.running = False
self.logger = None
self.current_command = None
self._default_handlers = {}
def setup_signal_handlers(self):
"Setup graceful shutdown and cleanup when sent a signal"
def handler(signum, frame):
self.stop()
for sig in (signal.SIGTERM, signal.SIGHUP, signal.SIGINT):
self._default_handlers[sig] = signal.signal(sig, handler)
def restore_signal_handlers(self):
"Restore the default handlers"
for sig, handler in self._default_handlers.items():
signal.signal(sig, handler)
self._default_handlers = {}
def execute(self, command):
# TODO: use Popen and assign to current_command so we can terminate
args = shlex.split(command)
out = subprocess.check_output(args=args, stderr=subprocess.STDOUT)
return out
def mount_share(self):
"""
Mount the VBox share if it's not already mounted.
Returns True if mounted, otherwise False.
"""
out = self.execute('mount')
if '%s type vboxsf' % self.TRANSCODER_ROOT in out:
return True
# attempt to mount
uid, gid = os.getuid(), os.getgid()
command = 'sudo mount -t vboxsf -o uid=%s,gid=%s %s %s' % (
uid, gid, self.VBOX_SHARE_NAME, self.TRANSCODER_ROOT)
try:
self.execute(command)
except subprocess.CalledProcessError as ex:
msg = 'Unable to mount Virtual Box Share: %s' % ex.output
sys.stdout.write(msg)
sys.stdout.flush()
return False
return True
def setup_logging(self):
self.logger = logging.getLogger('transcoder')
self.logger.setLevel(logging.DEBUG)
handler = logging.FileHandler(self.LOGFILE)
handler.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(message)s')
handler.setFormatter(formatter)
self.logger.addHandler(handler)
self.logger.info('Transcoder started and scanning for input')
def check_filesystem(self):
"Checks that the filesystem and logger is setup properly"
dirs = (self.INPUT_DIRECTORY, self.WORK_DIRECTORY,
self.OUTPUT_DIRECTORY, self.COMPLETED_DIRECTORY)
if not all(map(os.path.exists, dirs)):
if not self.mount_share():
return False
for path in dirs:
if not os.path.exists(path):
try:
os.mkdir(path)
except OSError as ex:
msg = 'Cannot create directory "%s": %s' % (
path, ex.strerror)
sys.stdout.write(msg)
sys.stdout.flush()
return False
if not self.logger:
self.setup_logging()
return True
def stop(self):
# guard against multiple signals being sent before the first one
# finishes
if not self.running:
return
self.running = False
self.logger.info('Transcoder shutting down')
if self.current_command:
self.current_command.terminate()
# logging
logging.shutdown()
self.logger = None
# signal handlers
self.restore_signal_handlers()
def run(self):
self.running = True
self.setup_signal_handlers()
while self.running:
if self.check_filesystem():
self.check_for_input()
time.sleep(5)
def check_for_input(self):
"Look in INPUT_DIRECTORY for an input file and process it"
for filename in os.listdir(self.INPUT_DIRECTORY):
if filename.startswith('.'):
continue
path = os.path.join(self.INPUT_DIRECTORY, filename)
if (time.time() - os.stat(path).st_mtime) > self.WRITE_THRESHOLD:
# when copying a file from windows to the VM, the filesize and
# last modified times don't change as data is written.
# fortunately these files seem to be locked such that
# attempting to open the file for reading raises an IOError.
# it seems reasonable to skip any file we can't open
try:
f = open(path, 'r')
f.close()
except IOError:
continue
self.process_input(path)
# move the source to the COMPLETED_DIRECTORY
dst = os.path.join(self.COMPLETED_DIRECTORY,
os.path.basename(path))
shutil.move(path, dst)
break
def process_input(self, path):
name = os.path.basename(path)
self.logger.info('Found new input "%s"', name)
# if any of the following functions return no output, something
# bad happened and we can't continue
# parse the input meta info.
meta = self.scan_media(path)
if not meta:
return
# determine crop dimensions
crop = self.detect_crop(path)
if not crop:
return
# transcode the video
work_path = self.transcode(path, crop, meta)
if not work_path:
            return
# move the completed output to the output directory
        self.logger.info('Moving completed work output %s to output directory',
os.path.basename(work_path))
output_path = os.path.join(self.OUTPUT_DIRECTORY,
os.path.basename(work_path))
shutil.move(work_path, output_path)
shutil.move(work_path + '.log', output_path + '.log')
def scan_media(self, path):
"Use handbrake to scan the media for metadata"
name = os.path.basename(path)
self.logger.info('Scanning "%s" for metadata', name)
command = 'HandBrakeCLI --scan --input "%s"' % path
try:
out = self.execute(command)
except subprocess.CalledProcessError as ex:
if 'unrecognized file type' in ex.output:
self.logger.info('Unknown media type for input "%s"', name)
else:
self.logger.info('Unknown error for input "%s" with error: %s',
name, ex.output)
return None
# process out
        ret
dpattiso/igraph | lama/translate/translate_old.py | Python | gpl-2.0 | 31,909 | 0.007083 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import with_statement
from collections import defaultdict
from copy import deepcopy
import axiom_rules
import fact_groups
import instantiate
import pddl
import sas_tasks
import simplify
import timers
# TODO: The translator may generate trivial derived variables which are always true,
# for example if there is a derived predicate in the input that only depends on
# (non-derived) variables which are detected as always true.
# Such a situation was encountered in the PSR-STRIPS-DerivedPredicates domain.
# Such "always-true" variables should best be compiled away, but it is
# not clear what the best place to do this should be. Similar
# simplifications might be possible elsewhere, for example if a
# derived variable is synonymous with another variable (derived or
# non-derived).
ALLOW_CONFLICTING_EFFECTS = True
USE_PARTIAL_ENCODING = True
DETECT_UNREACHABLE = True
## Setting the following variable to True can cause a severe
## performance penalty due to weaker relevance analysis (see issue7).
ADD_IMPLIED_PRECONDITIONS = False
removed_implied_effect_counter = 0
simplified_effect_condition_counter = 0
added_implied_precondition_counter = 0
def strips_to_sas_dictionary(groups, assert_partial):
dictionary = {}
for var_no, group in enumerate(groups):
for val_no, atom in enumerate(group):
dictionary.setdefault(atom, []).append((var_no, val_no))
if assert_partial:
assert all(len(sas_pairs) == 1
                   for sas_pairs in dictionary.itervalues())
return [len(group) + 1 for group in groups], dictionary
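# Worked example (atom names invented): for groups = [[at-A, at-B], [free]]
# the dictionary maps at-A -> [(0, 0)], at-B -> [(0, 1)] and free -> [(1, 0)],
# and the ranges are [3, 2] -- each variable gets one extra value beyond its
# group members, meaning "none of those".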
def translate_strips_conditions_aux(conditions, dictionary, ranges):
condition = {}
for fact in conditions:
if fact.negated:
# we handle negative conditions later, because then we
# can recognize when the negative condition is already
# ensured by a positive condition
continue
for var, val in dictionary.get(fact, ()):
# The default () here is a bit of a hack. For goals (but
# only for goals!), we can get static facts here. They
# cannot be statically false (that would have been
# detected earlier), and hence they are statically true
# and don't need to be translated.
# TODO: This would not be necessary if we dealt with goals
# in the same way we deal with operator preconditions etc.,
# where static facts disappear during grounding. So change
# this when the goal code is refactored (also below). (**)
if (condition.get(var) is not None and
val not in condition.get(var)):
# Conflicting conditions on this variable: Operator invalid.
return None
condition[var] = set([val])
for fact in conditions:
if fact.negated:
## Note Here we use a different solution than in Sec. 10.6.4
## of the thesis. Compare the last sentences of the third
## paragraph of the section.
## We could do what is written there. As a test case,
## consider Airport ADL tasks with only one airport, where
## (occupied ?x) variables are encoded in a single variable,
## and conditions like (not (occupied ?x)) do occur in
## preconditions.
## However, here we avoid introducing new derived predicates
        ## by treating the negative precondition as a disjunctive precondition
## and expanding it by "multiplying out" the possibilities.
## This can lead to an exponential blow-up so it would be nice
## to choose the behaviour as an option.
done = False
new_condition = {}
atom = pddl.Atom(fact.predicate, fact.args) # force positive
for var, val in dictionary.get(atom, ()):
# see comment (**) above
poss_vals = set(range(ranges[var]))
poss_vals.remove(val)
if condition.get(var) is None:
assert new_condition.get(var) is None
new_condition[var] = poss_vals
else:
# constrain existing condition on var
prev_possible_vals = condition.get(var)
done = True
prev_possible_vals.intersection_update(poss_vals)
if len(prev_possible_vals) == 0:
# Conflicting conditions on this variable:
# Operator invalid.
return None
if not done and len(new_condition) != 0:
# we did not enforce the negative condition by constraining
# an existing condition on one of the variables representing
# this atom. So we need to introduce a new condition:
# We can select any from new_condition and currently prefer the
            # smallest one.
candidates = sorted(new_condition.items(),
lambda x,y: cmp(len(x[1]),len(y[1])))
var, vals = candidates[0]
condition[var] = vals
def multiply_out(condition): # destroys the input
sorted_conds = sorted(condition.items(),
lambda x,y: cmp(len(x[1]),len(y[1])))
flat_conds = [{}]
for var, vals in sorted_conds:
if len(vals) == 1:
for cond in flat_conds:
cond[var] = vals.pop() # destroys the input here
else:
new_conds = []
for cond in flat_conds:
for val in vals:
new_cond = deepcopy(cond)
new_cond[var] = val
new_conds.append(new_cond)
flat_conds = new_conds
return flat_conds
return multiply_out(condition)
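# Worked example of the flattening above (variable numbers invented): the
# partial condition {0: set([1]), 1: set([0, 2])} multiplies out to the two
# fully specified conditions [{0: 1, 1: 0}, {0: 1, 1: 2}], one per value
# still allowed for variable 1.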
def translate_strips_conditions(conditions, dictionary, ranges,
mutex_dict, mutex_ranges):
if not conditions:
return [{}] # Quick exit for common case.
# Check if the condition violates any mutexes.
if translate_strips_conditions_aux(
conditions, mutex_dict, mutex_ranges) is None:
return None
return translate_strips_conditions_aux(conditions, dictionary, ranges)
# 17/1/2012 -- Author- David Pattison -- This method is a copy of the normal translate_strips_conditions_aux
# but has had the check for conflicting conditions (in this case,
# goal conditions) removed.
def translate_strips_conditions_goal_aux(conditions, dictionary, ranges):
condition = {}
for fact in conditions:
if fact.negated:
# we handle negative conditions later, because then we
# can recognize when the negative condition is already
# ensured by a positive condition
continue
for var, val in dictionary.get(fact, ()):
# The default () here is a bit of a hack. For goals (but
# only for goals!), we can get static facts here. They
# cannot be statically false (that would have been
# detected earlier), and hence they are statically true
# and don't need to be translated.
# TODO: This would not be necessary if we dealt with goals
# in the same way we deal with operator preconditions etc.,
# where static facts disappear during grounding. So change
# this when the goal code is refactored (also below). (**)
# if (condition.get(var) is not None and
# val not in condition.get(var)):
# # Conflicting conditions on this variable: Operator invalid.
# return None
condition[var] = set([val])
for fact in conditions:
if fact.negated:
            ## Note Here we use a differ
f4ble/Arkon | configs/tasks_default.py | Python | apache-2.0 | 707 | 0.007072 | from ark.tasks.task_check_for_update import Task_CheckForUpdates
from ark.tasks.task_list_players import Task_ListPlayers
from ark.tasks.task_get_chat import Task_GetChat
from ark.tasks.task_daily_restart import Task_DailyRestart
from ark.tasks.task_daily_restart import Task_DailyRestartRepopulate
from ark.tasks.task_sql_keep_alive import Task_SQL_keep_alive
def init():
#Part of Core Features:
Task_ListPlayers.run_interval(8,immediately=True)
Task_GetChat.run_interval(5,immediately=True)
Task_SQL_keep_alive.run_interval(60)
    #Extras:
Task_CheckForUpdates.run_interval(1800)
Task_DailyRestart.run_daily('15:00:00')
Task_DailyRestartRepopulate.run_daily('06:00:00')
asears/bloppit | gethot.py | Python | mit | 547 | 0.02925 | import sys
import praw
import unicodedata
user_agent='bloppit_app'
if len(sys.argv) == 3:
    script, filename, subreddit = sys.argv
else:
    subreddit = "opensource"
    filename = subreddit + ".txt"
r = praw.Reddit(user_agent)
submissions = r.get_subreddit(subreddit).get_hot(limit=100)
target = open(filename, 'w')
for x in submissions:
line = (str(x.fullname) + ", " + str(x.title.encode("cp437","ignore"))[1:] + " , " + str(x.url.encode("cp437","ignore"))[1:].strip('"\''))
print(line)
target.write(line)
target.write("\n")
target.close()
JakeShulman/G-Neat | Connection.py | Python | apache-2.0 | 1,393 | 0.045226 | '''
Created on Feb 4, 2017
@author: Jake
'''
import numpy as np
import random
import Population
class Connection(object):
inNeuron = None
outNeuron = None
weight = None
enabled = None
ID = None
def __init__(self,ID,inNeuron,outNeuron,weight,enabled = True):
self.inNeuron = inNeuron
self.outNeuron = outNeuron
self.weight = weight
self.enabled = enabled
self.ID = ID
if(self.inNeuron.neuronType=='hidden'):
self.inNeuron.neuronID=self.ID
if(self.outNeuron.neuronType=='hidden'):
self.outNeuron.neuronID=self.ID
    # Hi future Luke and Jake: think about making a Connection .equals function for convergent evolution
def mutateWeight(self):
#generates a random bool efficiently bitwise
if random.getrandbits(1): self.weight = self.weight + Population.MUTATION_VALUE
        else: self.weight = self.weight - Population.MUTATION_VALUE
def mutateEnable(self):
        #switch the enabled status of the connection
#WORRY ABOUT LATER CAN CAUSE BACK PROP PROBLEMS
self.enabled = not self.enabled
def copy(self):
return Connection(self.ID, self.inNeuron, self.outNeuron, self.weight)
def __eq__(self, other):
# if self.inNeuron != other.inNeuron: return False
# if self.outNeuron != other.outNeuron: return False
# if self.enabled != other.enabled: return False
print "SAME"
        return True
sassoftware/mirrorball | updatebot/artifactory.py | Python | apache-2.0 | 11,601 | 0.000603 | #
# Copyright (c) SAS Institute, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Module for finding artifactory packages and updating them
"""
from collections import deque
import logging
import time
from conary import conarycfg
from rmake.build import buildcfg
from rmake.cmdline import helper
from . import cmdline
from . import pkgsource
from .bot import Bot as BotSuperClass
from .build import Builder
from .errors import JobFailedError
from .lib import util
from .update import Updater as UpdaterSuperClass
log = logging.getLogger('updatebot.artifactory')
class Bot(BotSuperClass):
_updateMode = 'artifactory'
def __init__(self, cfg):
self._validateMode(cfg)
self._cfg = cfg
self._clientcfg = cmdline.UpdateBotClientConfig()
self._ui = cmdline.UserInterface(self._clientcfg)
self._pkgSource = pkgsource.PackageSource(self._cfg, self._ui)
self._updater = Updater(self._cfg, self._ui, self._pkgSource)
def create(self, rebuild=False, recreate=None):
"""
Do initial imports.
:param bool rebuild: build all packages, even if source is the same
:param bool recreate: recreate all source packages
"""
start = time.time()
log.info('starting import')
# Populate rpm source object from yum metadata.
self._pkgSource.load()
# Import sources into repository.
trvMap, fail = self._updater.create(buildAll=rebuild, recreate=recreate)
if fail:
log.error('failed to create %s packages:' % len(fail))
for pkg, e in fail:
log.error('failed to import %s: %s' % (pkg, e))
return {}, fail
log.info('elapsed time %s' % (time.time() - start, ))
return trvMap, fail
class Updater(UpdaterSuperClass):
"""Class for finding and updating packages sourced from artifactory
"""
def _buildLeaf(self, leaf, cache, buildAll=False, recreate=False):
failedImports = set()
srcVersion = cache.get((
'%s:source' % leaf.name,
leaf.getConaryVersion(),
None,
))
binVersion = cache.get((
leaf.name,
leaf.getConaryVersion(),
None,
))
# determine if leaf needs to be imported, and update srcVersion
srcVersion = self._importPackage(leaf, srcVersion, recreate)
# if buildAll is true, or there is no existing binary or the
# binary was built from a different source, then build leaf
if (buildAll or not binVersion
or binVersion.getSourceVersion() != srcVersion):
return srcVersion, True
else:
log.info('not building %s', leaf)
return srcVersion, False
def _build(self, buildSet, buildReqs, cache):
"""Helper function to do some repetivite pre-build processing
:param buildSet: list of name, version, flavor tuples and packages to
build
:type buildSet: [((name, version, flavor), package), ...]
:param dict cache: conary version cache
"""
# unpack buildSet into nvf tuples
nvfs = []
buildPackages = []
resolveTroves = set()
for package, version in buildSet:
nvfs.append((package.name, version, None))
buildPackages.append(package)
# get our base rmakeCfg
rmakeCfg = Builder(self._cfg, self._ui)._getRmakeConfig()
# create resolve troves for deps not in the current chunk
resolveTroves.update(set([
(dep.name, rmakeCfg.buildLabel, dep.getConaryVersion())
for dep in buildReqs
if dep not in buildPackages
]))
if resolveTroves:
rmakeCfg.configKey(
'resolveTroves',
' '.join('%s=%s/%s' % r for r in resolveTroves),
)
# make a new buidler with rmakeCfg to do the actual build
builder = Builder(self._cfg, self._ui, rmakeCfg=rmakeCfg)
# Build all newly imported packages.
tries = 0
while True:
try:
log.debug("Building: \n%s", "\n".join(str(nvf) for nvf in nvfs))
log.debug("Resolve troves: \n%s",
"\n".join("%s=%s/%s" % r for r in resolveTroves))
trvMap = builder.build(nvfs)
except JobFailedError, e:
# Commit partial job
log.info('committing partial job %s', e.jobId)
trvMap = builder._commitJob(e.jobId)
break
if tries > 1:
raise
tries += 1
log.info('attempting to retry build: %s of %s', tries, 2)
else:
break
return trvMap
def _createVerCache(self, troveList):
verCache = {}
for k, v in self._conaryhelper.findTroves(
troveList,
allowMissing=True,
cache=False,
).iteritems():
if len(v) > 1:
# something weird happened
import epdb; epdb.st() # XXX breakpoint
verCache[k] = v[0][1] # v is a list of a tuple (name, ver, flav)
return verCache
def _importPackage(self, p, version, recreate):
"""Import source package
If the package is new, or `recreate` is True, then check if the
source needs to be updated.
:param PomPackage p: package to import
:param version: conary version of existing source
:type version: conary version object or None
:param bool recreate: re-import the package if True
:returns: the conary source version to build
:rtype: conary version object
"""
if not version or recreate:
log.info("attempting to import %s", p)
manifest = dict(
manifest_version="1",
version=p.getConaryVersion(),
build_requires=p.buildRequires,
artifacts=p.artifacts,
)
if version and recreate:
origManifest = self._conaryhelper.getJsonManifest(p.name, version)
if manifest == origManifest:
return version
self._conaryhelper.setJsonManifest(p.name, manifest)
version = self._conaryhelper.commit(
p.name, commitMessage=self._cfg.commitMessage)
else:
log.info("not importing %s", p)
return version
def create(self, buildAll=False, recreate=False):
"""Import new packages into the repository
        By default, this will only import and build completely new packages. Set
        `buildAll` to True if you want to build all packages, even ones whose
        source trove did not change. Set `recreate` True if you want to check
if existing sources changed, and import them if they have.
:param buildAll: build all binary packages, even if their source didn't
change, defaults to False
:type buildAll: bool
:param recreate: commit changed source packages when True, else only
commit new sources
:type recreate: bool
:returns: a list of buildable chunks (sets of packages that can be built
together)
:rtype: [set([((name, version, flavor), pkg), ...]), ...]
"""
# generate a list of trove specs for the packages in the queue so
# we can populate a cache of existing conary versions
troveList = []
        for p in self._pkgSource.pkgQueu
marinkaz/orange3 | Orange/widgets/utils/__init__.py | Python | bsd-2-clause | 811 | 0.004932 | from functools import reduce
def vartype(var):
| if var.is_discrete:
return 1
elif var.is_con | tinuous:
return 2
elif var.is_string:
return 3
else:
return 0
def progress_bar_milestones(count, iterations=100):
return set([int(i*count/float(iterations)) for i in range(iterations)])
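# Example of progress_bar_milestones (a quick sanity check, not part of the
# original module): with 1000 items and 4 requested milestones, progress
# updates fire at evenly spaced indices.
assert progress_bar_milestones(1000, 4) == {0, 250, 500, 750}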
def getdeepattr(obj, attr, *arg, **kwarg):
if isinstance(obj, dict):
return obj.get(attr)
try:
return reduce(getattr, attr.split("."), obj)
except AttributeError:
if arg:
return arg[0]
if kwarg:
return kwarg["default"]
raise
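# Usage sketch for getdeepattr (the _Box class is purely illustrative):
# dotted paths walk nested attributes, dicts fall back to .get(), and a
# positional default suppresses the AttributeError.
class _Box(object):
    pass

_outer, _inner = _Box(), _Box()
_inner.value = 3
_outer.inner = _inner
assert getdeepattr(_outer, "inner.value") == 3
assert getdeepattr(_outer, "inner.missing", 42) == 42
assert getdeepattr({"key": "v"}, "key") == "v"
del _Box, _outer, _inner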
def getHtmlCompatibleString(strVal):
    return strVal.replace("<=", "&#8804;").replace(">=", "&#8805;").replace("<", "&#60;").replace(">", "&#62;").replace("=\\=", "&#8800;")
|
ygol/odoo | addons/website_event_track_online/models/event_type.py | Python | agpl-3.0 | 551 | 0 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class EventType(models.Model):
_inherit = "event.type"
community_menu = fields.Boolean(
"Community Menu", compute= | "_compute_community_menu",
readonly=False, store=True,
help="Display community tab on website")
@api.depends('website_menu')
def _compute_community_menu(self):
for event_type in self:
event_type.community_menu = event_type.web | site_menu
|
StellarCN/py-stellar-base | stellar_sdk/xdr/ledger_close_meta_v0.py | Python | apache-2.0 | 5,160 | 0.000581 | # This is an automatically generated file.
# DO NOT EDIT or your changes may be overwritten
import base64
from typing import List
from xdrlib import Packer, Unpacker
from ..type_checked import type_checked
from .ledger_header_history_entry import LedgerHeaderHistoryEntry
from .scp_history_entry import SCPHistoryEntry
from .transaction_result_meta import TransactionResultMeta
from .transaction_set import TransactionSet
from .upgrade_entry_meta import UpgradeEntryMeta
__all__ = ["LedgerCloseMetaV0"]
@type_checked
class LedgerCloseMetaV0:
"""
XDR Source Code::
struct LedgerCloseMetaV0
{
| LedgerHeaderHistoryEntry ledgerHeader;
// NB: txSet is sorted in "Hash order"
TransactionSet txSet;
// NB: transactions are sorted in apply order here
// fees for all transactions are processed first
// followed by applying transactions
TransactionResultMeta txProcessing<>;
// upgrades are applied last
UpgradeEntryMeta upgradesProcessing<>;
// other misc | information attached to the ledger close
SCPHistoryEntry scpInfo<>;
};
"""
def __init__(
self,
ledger_header: LedgerHeaderHistoryEntry,
tx_set: TransactionSet,
tx_processing: List[TransactionResultMeta],
upgrades_processing: List[UpgradeEntryMeta],
scp_info: List[SCPHistoryEntry],
) -> None:
if tx_processing and len(tx_processing) > 4294967295:
raise ValueError(
f"The maximum length of `tx_processing` should be 4294967295, but got {len(tx_processing)}."
)
if upgrades_processing and len(upgrades_processing) > 4294967295:
raise ValueError(
f"The maximum length of `upgrades_processing` should be 4294967295, but got {len(upgrades_processing)}."
)
if scp_info and len(scp_info) > 4294967295:
raise ValueError(
f"The maximum length of `scp_info` should be 4294967295, but got {len(scp_info)}."
)
self.ledger_header = ledger_header
self.tx_set = tx_set
self.tx_processing = tx_processing
self.upgrades_processing = upgrades_processing
self.scp_info = scp_info
def pack(self, packer: Packer) -> None:
self.ledger_header.pack(packer)
self.tx_set.pack(packer)
packer.pack_uint(len(self.tx_processing))
for tx_processing_item in self.tx_processing:
tx_processing_item.pack(packer)
packer.pack_uint(len(self.upgrades_processing))
for upgrades_processing_item in self.upgrades_processing:
upgrades_processing_item.pack(packer)
packer.pack_uint(len(self.scp_info))
for scp_info_item in self.scp_info:
scp_info_item.pack(packer)
@classmethod
def unpack(cls, unpacker: Unpacker) -> "LedgerCloseMetaV0":
ledger_header = LedgerHeaderHistoryEntry.unpack(unpacker)
tx_set = TransactionSet.unpack(unpacker)
length = unpacker.unpack_uint()
tx_processing = []
for _ in range(length):
tx_processing.append(TransactionResultMeta.unpack(unpacker))
length = unpacker.unpack_uint()
upgrades_processing = []
for _ in range(length):
upgrades_processing.append(UpgradeEntryMeta.unpack(unpacker))
length = unpacker.unpack_uint()
scp_info = []
for _ in range(length):
scp_info.append(SCPHistoryEntry.unpack(unpacker))
return cls(
ledger_header=ledger_header,
tx_set=tx_set,
tx_processing=tx_processing,
upgrades_processing=upgrades_processing,
scp_info=scp_info,
)
def to_xdr_bytes(self) -> bytes:
packer = Packer()
self.pack(packer)
return packer.get_buffer()
@classmethod
def from_xdr_bytes(cls, xdr: bytes) -> "LedgerCloseMetaV0":
unpacker = Unpacker(xdr)
return cls.unpack(unpacker)
def to_xdr(self) -> str:
xdr_bytes = self.to_xdr_bytes()
return base64.b64encode(xdr_bytes).decode()
@classmethod
def from_xdr(cls, xdr: str) -> "LedgerCloseMetaV0":
xdr_bytes = base64.b64decode(xdr.encode())
return cls.from_xdr_bytes(xdr_bytes)
def __eq__(self, other: object):
if not isinstance(other, self.__class__):
return NotImplemented
return (
self.ledger_header == other.ledger_header
and self.tx_set == other.tx_set
and self.tx_processing == other.tx_processing
and self.upgrades_processing == other.upgrades_processing
and self.scp_info == other.scp_info
)
def __str__(self):
out = [
f"ledger_header={self.ledger_header}",
f"tx_set={self.tx_set}",
f"tx_processing={self.tx_processing}",
f"upgrades_processing={self.upgrades_processing}",
f"scp_info={self.scp_info}",
]
return f"<LedgerCloseMetaV0 {[', '.join(out)]}>"
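    # A hedged round-trip sketch: given an already constructed instance
    # `meta` (building one needs real LedgerHeaderHistoryEntry and
    # TransactionSet values, omitted here), the base64 XDR encoding is
    # lossless:
    #
    #   encoded = meta.to_xdr()                        # base64 str
    #   assert LedgerCloseMetaV0.from_xdr(encoded) == meta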
|
jirikuncar/invenio-ext | tests/test_ext_registry.py | Python | gpl-2.0 | 2,613 | 0 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTI | CULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc., |
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
Test unit for the miscutil/mailutils module.
"""
from invenio_ext.registry import DictModuleAutoDiscoverySubRegistry
from invenio.testsuite import InvenioTestCase, make_test_suite, run_test_suite
from flask_registry import ImportPathRegistry, RegistryError
class TestDictModuleAutoDiscoverySubRegistry(InvenioTestCase):
def test_registration(self):
r = self.app.extensions['registry']
r['testpkgs'] = ImportPathRegistry(
initial=['invenio.testsuite.test_apps']
)
assert len(r['testpkgs']) == 1
r['myns'] = \
DictModuleAutoDiscoverySubRegistry(
'last',
keygetter=lambda k, v, new_v: k if k else v.__name__,
app=self.app,
registry_namespace='testpkgs'
)
assert len(r['myns']) == 1
from invenio.testsuite.test_apps.last import views
assert r['myns']['invenio.testsuite.test_apps.last.views'] == \
views
self.assertRaises(
RegistryError,
DictModuleAutoDiscoverySubRegistry,
'last',
app=self.app,
registry_namespace='testpkgs'
)
# Register simple object
class TestObject(object):
pass
r['myns'].register(TestObject)
# Identical keys raises RegistryError
self.assertRaises(
RegistryError,
r['myns'].register,
TestObject
)
r['myns'].unregister('TestObject')
assert 'TestObject' not in r['myns']
r['myns']['mykey'] = TestObject
assert TestObject == r['myns']['mykey']
assert len(r['myns'].items()) == 2
TEST_SUITE = make_test_suite(TestDictModuleAutoDiscoverySubRegistry)
if __name__ == "__main__":
run_test_suite(TEST_SUITE)
|
CIGNo-project/CIGNo | cigno/mdtools/forms.py | Python | gpl-3.0 | 173 | 0.00578 | from models import Connection
from django import forms |
class ConnectionForm(forms.ModelForm):
class Meta:
model = Con | nection
exclude = ('d_object_id',)
|
e0ne/cinder | cinder/api/contrib/volume_transfer.py | Python | apache-2.0 | 8,571 | 0 | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob
from webob import exc
from cinder.api import common
from cinder.api import extensions
from cinder.api.openstack import wsgi
from cinder.api.views import transfers as transfer_view
from cinder.api import xmlutil
from cinder import exception
from cinder.i18n import _
from cinder.openstack.common import log as logging
from cinder import transfer as transferAPI
from cinder import utils
LOG = logging.getLogger(__name__)
def make_transfer(elem):
elem.set('id')
elem.set('volume_id')
elem.set('created_at')
elem.set('name')
elem.set('auth_key')
class TransferTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('transfer', selector='transfer')
make_transfer(root)
alias = Volume_transfer.alias
namespace = Volume_transfer.namespace
return xmlutil.MasterTemplate(root, 1, nsmap={alias: namespace})
class TransfersTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('transfers')
elem = xmlutil.SubTemplateElement(root, 'transfer',
selector='transfers')
make_transfer(elem)
alias = Volume_transfer.alias
namespace = Volume_transfer.namespace
return xmlutil.MasterTemplate(root, 1, nsmap={alias: namespace})
class CreateDeserializer(wsgi.MetadataXMLDeserializer):
def default(self, string):
dom = utils.safe_minidom_parse_string(string)
transfer = self._extract_transfer(dom)
return {'body': {'transfer': transfer}}
def _extract_transfer(self, node):
transfer = {}
transfer_node = self.find_first_child_named(node, 'transfer')
attributes = ['volume_id', 'name']
for attr in attributes:
if transfer_node.getAttribute(attr):
transfer[attr] = transfer_node.getAttribute(attr)
return transfer
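# A hedged example of the wire format CreateDeserializer accepts; the
# attribute values are invented. Plain minidom shows what
# _extract_transfer reads off the <transfer> element.
from xml.dom import minidom as _minidom
_sample = _minidom.parseString(
    '<transfer volume_id="vol-0001" name="weekly-handoff"/>')
_elem = _sample.getElementsByTagName('transfer')[0]
assert _elem.getAttribute('volume_id') == 'vol-0001'
assert _elem.getAttribute('name') == 'weekly-handoff'
del _minidom, _sample, _elem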
class AcceptDeserializer(wsgi.MetadataXMLDeserializer):
def default(self, string):
dom = utils.safe_minidom_parse_string(string)
transfer = self._extract_transfer(dom)
return {'body': {'accept': transfer}}
def _extract_transfer(self, node):
transfer = {}
transfer_node = self.find_first_child_named(node, 'accept')
attributes = ['auth_key']
for attr in attributes:
if transfer_node.getAttribute(attr):
transfer[attr] = transfer_node.getAttribute(attr)
return transfer
class VolumeTransferController(wsgi.Controller):
"""The Volume Transfer API controller for the OpenStack API."""
_view_builder_class = transfer_view.ViewBuilder
def __init__(self):
self.transfer_api = transferAPI.API()
super(VolumeTransferController, self).__init__()
@wsgi.serializers(xml=TransferTemplate)
def show(self, req, id):
"""Return data about active transfers."""
context = req.environ['cinder.context']
try:
transfer = self.transfer_api.get(context, transfer_id=id)
except exception.TransferNotFound as error:
raise exc.HTTPNotFound(explanation=error.msg)
return self._view_builder.detail(req, transfer)
@wsgi.serializers(xml=TransfersTemplate)
def index(self, req):
"""Returns a summary list of transfers."""
return self._get_transfers(req, is_detail=False)
@wsgi.serializers(xml=TransfersTemplate)
def detail(self, req):
"""Returns a detailed list of transfers."""
return self._get_transfers(req, is_detail=True)
def _get_transfers(self, req, is_detail):
"""Returns a list of transfers, transformed through view builder."""
context = req.environ['cinder.context']
LOG.debug('Listing volume transfers')
transfers = self.transfer_api.get_all(context)
limited_list = common.limited(transfers, req)
if is_detail:
transfers = self._view_builder.detail_list(req, limited_list)
else:
transfers = self._view_builder.summary_list(req, limited_list)
return transfers
@wsgi.response(202)
@wsgi.serializers(xml=TransferTemplate)
@wsgi.deserializers(xml=CreateDeserializer)
def create(self, req, body):
"""Create a new volume transfer."""
LOG.debug('Creating new volume transfer %s', body)
if not self.is_valid_body(body, 'transfer'):
raise exc.HTTPBadRequest()
context = req.environ['cinder.context']
try:
transfer = body['transfer']
volume_id = transfer['volume_id']
except KeyError:
msg = _("Incorrect request body format")
raise exc.HTTPBadRequest(explanation=msg)
name = transfer.get('name', None)
LOG.info(_("Creating transfer of volume %s"),
volume_id,
context=context)
try:
new_transfer = self.transfer_api.create(context, volume_id, name)
except exception.InvalidVolume as error:
raise exc.HTTPBadRequest(explanation=error.msg)
except exception.VolumeNotFound as error:
raise exc.HTTPNotFound(explanation=error.msg)
transfer = self._view_builder.create(req,
dict(new_transfer.iteritems()))
return transfer
@wsgi.response(202)
@wsgi.serializers(xml=TransferTemplate)
@wsgi.deserializers(xml=AcceptDeserializer)
def accept(self, req, id, body):
"""Accept a new volume transfer."""
transfer_id = id
LOG.debug('Accepting volume transfer %s', transfer_id)
if not self.is_valid_body(body, 'accept'):
raise exc.HTTPBadRequest()
context = req.environ['cinder.context' | ]
| try:
accept = body['accept']
auth_key = accept['auth_key']
except KeyError:
msg = _("Incorrect request body format")
raise exc.HTTPBadRequest(explanation=msg)
LOG.info(_("Accepting transfer %s"), transfer_id,
context=context)
try:
accepted_transfer = self.transfer_api.accept(context, transfer_id,
auth_key)
except exception.VolumeSizeExceedsAvailableQuota as error:
raise exc.HTTPRequestEntityTooLarge(
explanation=error.msg, headers={'Retry-After': 0})
except exception.InvalidVolume as error:
raise exc.HTTPBadRequest(explanation=error.msg)
transfer = \
self._view_builder.summary(req,
dict(accepted_transfer.iteritems()))
return transfer
def delete(self, req, id):
"""Delete a transfer."""
context = req.environ['cinder.context']
LOG.info(_("Delete transfer with id: %s"), id, context=context)
try:
self.transfer_api.delete(context, transfer_id=id)
except exception.TransferNotFound as error:
raise exc.HTTPNotFound(explanation=error.msg)
return webob.Response(status_int=202)
class Volume_transfer(extensions.ExtensionDescriptor):
"""Volume transfer management support."""
name = "VolumeTransfer"
alias = "os-volume-transfer"
namespace = "http://docs.openstack.org/volume/ext/volume-transfer/" + \
"api/v1.1"
updated = "2013-05-29T00:00:00+00:00"
def get_resources(self):
resources = []
res = extension |
Sinar/popit_ng | popit/tests/test_person_misc_api.py | Python | agpl-3.0 | 32,087 | 0.002961 | __author__ = 'sweemeng'
from rest_framework import status
from popit.signals.handlers import *
from popit.models import *
from popit.tests.base_testcase import BasePopitAPITestCase
class PersonLinkAPITestCase(BasePopitAPITestCase):
def test_view_person_link_list_unauthorized(self):
| response = self.client.get("/en/persons/ab1a5788e5bae95 | 5c048748fa6af0e97/links/")
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_view_person_link_list_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.get("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/")
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_view_person_link_details_unauthorized(self):
response = self.client.get("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/a4ffa24a9ef3cbcb8cfaa178c9329367/")
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_view_person_link_details_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.get("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/a4ffa24a9ef3cbcb8cfaa178c9329367/")
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_view_person_link_details_not_exist_unauthorized(self):
response = self.client.get("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/not_exist/")
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_view_person_link_details_not_exist_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.get("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/not_exist/")
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_create_person_links_unauthorized(self):
data = {
"url": "http://twitter.com/sweemeng",
}
response = self.client.post("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/", data)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_create_person_links_authorized(self):
data = {
"url": "http://twitter.com/sweemeng",
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.post("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/", data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
person_ = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
url = person_.links.language("en").get(url="http://twitter.com/sweemeng")
self.assertEqual(url.url, "http://twitter.com/sweemeng")
def test_update_person_links_unauthorized(self):
data = {
"note": "just a random repo"
}
response = self.client.put(
"/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/a4ffa24a9ef3cbcb8cfaa178c9329367/",
data
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_update_person_links_not_exist_unauthorized(self):
data = {
"note": "just a random repo"
}
response = self.client.put(
"/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/not_exist/",
data
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_update_person_links_authorized(self):
data = {
"note": "just a random repo"
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.put(
"/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/a4ffa24a9ef3cbcb8cfaa178c9329367/",
data
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
person = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
url = person.links.language("en").get(id="a4ffa24a9ef3cbcb8cfaa178c9329367")
self.assertEqual(url.note, "just a random repo")
def test_update_person_links_not_exist_authorized(self):
data = {
"note": "just a random repo"
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.put(
"/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/not_exist/",
data
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_delete_person_links_unauthorized(self):
response = self.client.delete("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/a4ffa24a9ef3cbcb8cfaa178c9329367/")
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_delete_person_links_not_exist_unauthorized(self):
response = self.client.delete("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/not_exist/")
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_delete_person_links_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.delete("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/a4ffa24a9ef3cbcb8cfaa178c9329367/")
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
def test_delete_person_links_not_exist_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.delete("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/not_exist/")
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
class PersonOtherNameAPITestCase(BasePopitAPITestCase):
def test_view_person_othername_list_unauthorized(self):
response = self.client.get("/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/")
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_view_person_othername_list_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.get("/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/")
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_view_person_othername_details_unauthorized(self):
response = self.client.get(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/"
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_view_person_othername_details_not_exist_unauthorized(self):
response = self.client.get(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/not_exist/"
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_view_person_othername_details_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.get(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/"
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_view_person_othername_details_not_exist_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.get(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/not_exist/"
)
self.assertEqual(response.status_code, status.HTTP |
ParrotPrediction/pyalcs | lcs/strategies/anticipatory_learning_process.py | Python | mit | 1,363 | 0 | from lcs.strategies.subsumption import does_subsume
def add_classifier(child, population, new_list, theta_exp: int) -> None:
"" | "
Looks for subsuming / similar classifiers in the population of classifiers
and those created in the current ALP | run (`new_list`).
    If a similar classifier was found, its quality is increased;
    otherwise `child` is added to `new_list`.
Parameters
----------
child:
New classifier to examine
population:
list of classifiers
new_list:
A list of newly created classifiers in this ALP run
theta_exp: int
experience threshold for subsumption
"""
# TODO: p0: write tests
old_cl = None
# Look if there is a classifier that subsumes the insertion candidate
for cl in population:
if does_subsume(cl, child, theta_exp):
if old_cl is None or cl.is_more_general(old_cl):
old_cl = cl
# Check if any similar classifier was in this ALP run
if old_cl is None:
for cl in new_list:
if cl == child:
old_cl = cl
# Check if there is similar classifier already
if old_cl is None:
for cl in population:
if cl == child:
old_cl = cl
if old_cl is None:
new_list.append(child)
else:
old_cl.increase_quality()
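# A standalone sketch of the same insertion policy with stand-in objects
# (subsumption skipped for brevity; `_Stub` is illustrative, not a real
# classifier): a duplicate gets its quality bumped instead of re-added.
class _Stub(object):
    def __init__(self, condition):
        self.condition = condition
        self.quality = 0.5

    def __eq__(self, other):
        return self.condition == other.condition

    def increase_quality(self):
        self.quality += 0.05


def _demo_add():
    population, new_list = [_Stub("1##0")], []
    child = _Stub("1##0")
    match = next((cl for cl in population if cl == child), None)
    if match is None:
        new_list.append(child)
    else:
        match.increase_quality()
    assert not new_list and population[0].quality > 0.5

_demo_add()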
|
shastah/spacewalk | spacewalk/certs-tools/rhn_bootstrap_strings.py | Python | gpl-2.0 | 16,095 | 0.001864 | #
# Copyright (c) 2008--2020 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
#
# shell script function library for rhn-bootstrap
#
import os.path
_header = """\
#!/bin/bash
echo "%s Client bootstrap script v4.0"
# This file was autogenerated. Minor manual editing of this script (and
# possibly the client-config-overrides.txt file) may be necessary to complete
# the bootstrap setup. Once customized, the bootstrap script can be triggered
# in one of two ways (the first is preferred):
#
# (1) centrally, from the %s via ssh (i.e., from the
# %s):
# cd /var/www/html/pub/bootstrap/
# cat bootstrap-<edited_name>.sh | ssh root@<client-hostname> /bin/bash
#
# ...or...
#
# (2) in a decentralized manner, executed on each client, via wget or curl:
# wget -qO- https://<hostname>/pub/bootstrap/bootstrap-<edited_name>.sh | /bin/bash
# ...or...
# curl -Sks https://<hostname>/pub/bootstrap/bootstrap-<edited_name>.sh | /bin/bash
# SECURITY NOTE:
# Use of these scripts via the two methods discussed is the most expedient
# way to register machines to your %s. Since "wget" is used
# throughout the script to download various files, a "Man-in-the-middle"
# attack is theoretically possible.
#
# The actual registration process is performed securely via SSL, so the risk
# is minimized in a sense. This message merely serves as a warning.
# Administrators need to appropriately weigh their concern against the
# relative security of their internal network.
# PROVISIONING/KICKSTART NOTE:
# If provisioning a client, ensure the proper CA SSL public | certificate is
# configured properly in the post section of your kickstart profiles (the
# Red Hat Satellite or hosted web user interface).
# UP2DATE/RHN_REGISTER VERSIONING NOTE:
# This script will not work with very old versions of up2date and |
# rhn_register.
echo
echo
echo "MINOR MANUAL EDITING OF THIS FILE MAY BE REQUIRED!"
echo
echo "If this bootstrap script was created during the initial installation"
echo "of a Red Hat Satellite, the ACTIVATION_KEYS, and ORG_GPG_KEY values will"
echo "probably *not* be set (see below). If this is the case, please do the"
echo "following:"
echo " - copy this file to a name specific to its use."
echo " (e.g., to bootstrap-SOME_NAME.sh - like bootstrap-web-servers.sh.)"
echo " - on the website create an activation key or keys for the system(s) to"
echo " be registered."
echo " - edit the values of the VARIABLES below (in this script) as"
echo " appropriate:"
echo " - ACTIVATION_KEYS needs to reflect the activation key(s) value(s)"
echo " from the website. XKEY or XKEY,YKEY"
echo " - ORG_GPG_KEY needs to be set to the name(s) of the corporate public"
echo " GPG key filename(s) (residing in /var/www/html/pub) if appropriate. XKEY or XKEY,YKEY"
echo
echo "Verify that the script variable settings are correct:"
echo " - CLIENT_OVERRIDES should be only set differently if a customized"
echo " client-config-overrides-VER.txt file was created with a different"
echo " name."
echo " - ensure the value of HOSTNAME is correct."
echo " - ensure the value of ORG_CA_CERT is correct."
echo
echo "Enable this script: comment (with #'s) this block (or, at least just"
echo "the exit below)"
echo
%s
# can be edited, but probably correct (unless created during initial install):
# NOTE: ACTIVATION_KEYS *must* be used to bootstrap a client machine.
ACTIVATION_KEYS=%s
ORG_GPG_KEY=%s
# can be edited, but probably correct:
CLIENT_OVERRIDES=%s
HOSTNAME=%s
ORG_CA_CERT=%s
ORG_CA_CERT_IS_RPM_YN=%s
USING_SSL=%s
USING_GPG=%s
REGISTER_THIS_BOX=1
ALLOW_CONFIG_ACTIONS=%s
ALLOW_REMOTE_COMMANDS=%s
FULLY_UPDATE_THIS_BOX=%s
# Set if you want to specify profilename for client systems.
# NOTE: Make sure it's set correctly if any external command is used.
#
# ex. PROFILENAME="foo.example.com" # For specific client system
# PROFILENAME=`hostname -s` # Short hostname
# PROFILENAME=`hostname -f` # FQDN
PROFILENAME="" # Empty by default to let it be set automatically.
#
# -----------------------------------------------------------------------------
# DO NOT EDIT BEYOND THIS POINT -----------------------------------------------
# -----------------------------------------------------------------------------
#
# an idea from Erich Morisse (of Red Hat).
# use either wget *or* curl
# Also check to see if the version on the
# machine supports the insecure mode and format
# command accordingly.
if [ -x /usr/bin/wget ] ; then
output=`LANG=en_US /usr/bin/wget --no-check-certificate 2>&1`
error=`echo $output | grep "unrecognized option"`
if [ -z "$error" ] ; then
FETCH="/usr/bin/wget -q -r -nd --no-check-certificate"
else
FETCH="/usr/bin/wget -q -r -nd"
fi
else
if [ -x /usr/bin/curl ] ; then
output=`LANG=en_US /usr/bin/curl -k 2>&1`
error=`echo $output | grep "is unknown"`
if [ -z "$error" ] ; then
FETCH="/usr/bin/curl -SksO"
else
FETCH="/usr/bin/curl -SsO"
fi
fi
fi
HTTP_PUB_DIRECTORY=http://${HOSTNAME}/%s
HTTPS_PUB_DIRECTORY=https://${HOSTNAME}/%s
if [ $USING_SSL -eq 0 ] ; then
HTTPS_PUB_DIRECTORY=${HTTP_PUB_DIRECTORY}
fi
INSTALLER=up2date
if [ -x /usr/bin/zypper ] ; then
INSTALLER=zypper
elif [ -x /usr/bin/yum ] ; then
INSTALLER=yum
fi
"""
def getHeader(productName, activation_keys, org_gpg_key,
overrides, hostname, orgCACert, isRpmYN,
using_ssl, using_gpg,
allow_config_actions, allow_remote_commands, up2dateYN, pubname):
# 11/22/16 options.gpg_key is now a comma-separated list of path.
# Removing paths from options.gpg_key
org_gpg_key = ",".join([os.path.basename(gpg_key) for gpg_key in org_gpg_key.split(",")])
if not activation_keys:
exit_call = "exit 1"
else:
exit_call = " "
return _header % (productName, productName, productName, productName,
exit_call, activation_keys, org_gpg_key,
overrides, hostname, orgCACert, isRpmYN,
using_ssl, using_gpg,
allow_config_actions, allow_remote_commands, up2dateYN,
pubname, pubname)
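# A hedged usage sketch -- every argument below is an example value, not a
# shipped default. Running this module directly prints the generated bash
# preamble with the placeholders filled in.
if __name__ == '__main__':
    print(getHeader(
        "Spacewalk", "1-example-key", "/var/www/html/pub/MY-GPG-KEY",
        "client-config-overrides.txt", "spacewalk.example.com",
        "RHN-ORG-TRUSTED-SSL-CERT", "1", "1", "1", "1", "0", "1", "pub"))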
def getConfigFilesSh():
return """\
echo
echo "UPDATING RHN_REGISTER/UP2DATE CONFIGURATION FILES"
echo "-------------------------------------------------"
echo "* downloading necessary files"
echo " client_config_update.py..."
rm -f client_config_update.py
$FETCH ${HTTPS_PUB_DIRECTORY}/bootstrap/client_config_update.py
echo " ${CLIENT_OVERRIDES}..."
rm -f ${CLIENT_OVERRIDES}
$FETCH ${HTTPS_PUB_DIRECTORY}/bootstrap/${CLIENT_OVERRIDES}
if [ ! -f "client_config_update.py" ] ; then
echo "ERROR: client_config_update.py was not downloaded"
exit 1
fi
if [ ! -f "${CLIENT_OVERRIDES}" ] ; then
echo "ERROR: ${CLIENT_OVERRIDES} was not downloaded"
exit 1
fi
"""
def getUp2dateScriptsSh():
return """\
for cmd in /usr/bin/python /usr/bin/python2 /usr/libexec/platform-python /usr/bin/python3
do
if [ -x $cmd ] ; then
PYTHON=$cmd
break
fi
done
echo "* running the update scripts"
if [ -f "/etc/sysconfig/rhn/rhn_register" ] ; then
echo " . rhn_register config file"
$PYTHON -u client_config_update.py /etc/sysconfig/rhn/rhn_register ${CLIENT_OVERRIDES}
fi
echo " . up2date config file"
$PYTHON -u client_config_update.py /etc/sysconfig/rhn/up2date ${CLIENT_OVERRIDES}
"""
def getGPGKeyImportSh():
return """\
if [ ! -z "$ORG_GPG_KEY" ] ; then
echo
echo "* impo |
tjcsl/cslbot | bot.py | Python | gpl-2.0 | 1,125 | 0.001778 | #!/usr/bin/env python3
# Copyright (C) 2013-2018 Samuel Damashek, Peter Foley, James Forcier, Srijay Kasturi, Reed Koser, Christopher Reffett, and Tris Wilson
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR P | URPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import sys
import warnings
from os.path import abspath, dirname
warnings.simplefilter('default')
sys.path.insert(0, dirname(abspath(__file__)))
from cslbot.helpers import co | re # noqa
if __name__ == '__main__':
core.init(dirname(abspath(__file__)))
|
horacioMartinez/dakara-client | tools/misc/traductor de indices y mapas/mapas/extra_data_generator/lluvia_extra_data_generator.py | Python | mit | 601 | 0.028286 | import json
import struct
import os
origen = open("./FK.ind", "rb")
destino = open("./extra_ | data","w")
origen.read(256 + 7) # skip the header
cantidad_mapas = struct.unpack('<H', (origen.read(2)))[0] # l1
x = 1
while (x < cantidad_mapas +1):
# number X = map number
lluvia = | struct.unpack('<B', (origen.read(1)))[0]
if (lluvia > 0):
lluvia = 1
destino.write(str(x)+"="+'"outdoor"'+":"+str(lluvia)+"\n")
x = x +1
#hay menos mapas aca que en la otra carpeta..
extra = 100
while (x < cantidad_mapas + 1 +extra):
destino.write(str(x)+"="+'"outdoor"'+":"+"0"+"\n")
x = x +1
destino.close() |
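# For reference, a mini-example (not in the original script) of the '<H'
# format used above: two bytes decoded as a little-endian unsigned short,
# low byte first.
import struct as _struct
assert _struct.unpack('<H', b'\x02\x01')[0] == 258  # 0x0102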
dimV36/webtests | run.py | Python | gpl-2.0 | 141 | 0.007092 | #!/u | sr/bin/env venv/bin/python
__author__ = 'dimv36'
from webtests | import app
if __name__ == "__main__":
app.debug = True
app.run() |
JensTimmerman/radical.pilot | src/radical/pilot/scheduler/interface.py | Python | mit | 2,025 | 0.014321 | #pylint: disable=C0301, C0103, W0212
"""
.. module:: radical.pilot.scheduler.Interface
:platform: Unix
:synopsis: The abstract interface class for all schedulers.
.. moduleauthor:: Ole Weidner <ole.weidner@rutgers.edu>
"""
__copyright__ = "Copyright 2013-2014, http://radical.rutgers.edu"
__license__ = "MIT"
from radical.pilot.utils.logger import logger
# -----------------------------------------------------------------------------
#
class Scheduler(object):
"""Scheduler provides an abstsract interface for all schedulers.
"""
# -------------------------------------------------------------------------
#
def __init__(self, manager, session):
"""Le constructeur.
"""
raise RuntimeError ('Not Implemented!')
# -------------------------------------------------------------------------
#
def pilot_added (self, pilot) :
"""Inform the scheduler about a new pilot"""
logger.warn ("scheduler %s does not implement 'pilot_added()'" % self.name)
# -------------------------------------------------------------------------
#
def pilot_removed (self, pilot) :
"""Inform the scheduler about a pilot removal"""
logger.warn ("scheduler %s does not implement 'pilot_removed()'" % self.name)
# | -------------------------------------------------------------------------
#
def schedule (self, units) :
"""Schedules one or more ComputeUnits"""
raise RuntimeError ("scheduler %s does not implement 'pilot_removed()'" % self.name)
# -------------------------------------------------------------------------
#
def unschedule (self, units) :
"""Unschedule one or more ComputeUnits"""
logger.warn | ("scheduler %s does not implement 'unschedule()'" % self.name)
# -------------------------------------------------------------------------
#
@property
def name(self):
"""The name of the scheduler"""
return self.__class__.__name__
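# -------------------------------------------------------------------------
#
# A hedged sketch of a concrete scheduler (illustrative only, not part of
# radical.pilot): only the methods the interface requires overriding get
# real bodies, and the pilot bookkeeping is invented for the example.
class RoundRobinScheduler(Scheduler):

    def __init__(self, manager, session):
        # deliberately do not call Scheduler.__init__, which always raises
        self._pilots = []
        self._next = 0

    def pilot_added(self, pilot):
        self._pilots.append(pilot)

    def schedule(self, units):
        # hand each unit to the next pilot in round-robin order
        assignment = []
        for unit in units:
            assignment.append(
                (unit, self._pilots[self._next % len(self._pilots)]))
            self._next += 1
        return assignment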
|
mrquim/repository.mrquim | script.module.pycryptodome/lib/Crypto/Hash/SHA1.py | Python | gpl-2.0 | 3,063 | 0.003265 | # -*- coding: utf-8 -*-
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""SHA-1 c | ryptographic hash algorithm.
SHA-1_ produces the 160 bit digest of a message.
>>> from Crypto.Hash import SHA1
>>>
>>> h = SHA1.new()
>>> h.update(b'Hello')
>>> print h.hexdigest()
*SHA* stands for Se | cure Hash Algorithm.
This algorithm is not considered secure. Do not use it for new designs.
.. _SHA-1: http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf
"""
__all__ = ['new', 'block_size', 'digest_size']
from Crypto.Util.py3compat import *
def __make_constructor():
try:
# The sha module is deprecated in Python 2.6, so use hashlib when possible.
from hashlib import sha1 as _hash_new
except ImportError:
from sha import new as _hash_new
h = _hash_new()
if hasattr(h, 'new') and hasattr(h, 'name') and hasattr(h, 'digest_size') and hasattr(h, 'block_size'):
# The module from stdlib has the API that we need. Just use it.
return _hash_new
else:
# Wrap the hash object in something that gives us the expected API.
_copy_sentinel = object()
class _SHA1(object):
digest_size = 20
block_size = 64
oid = "1.3.14.3.2.26"
def __init__(self, *args):
if args and args[0] is _copy_sentinel:
self._h = args[1]
else:
self._h = _hash_new(*args)
def copy(self):
return _SHA1(_copy_sentinel, self._h.copy())
def update(self, *args):
f = self.update = self._h.update
f(*args)
def digest(self):
f = self.digest = self._h.digest
return f()
def hexdigest(self):
f = self.hexdigest = self._h.hexdigest
return f()
_SHA1.new = _SHA1
return _SHA1
new = __make_constructor()
del __make_constructor
#: The size of the resulting hash in bytes.
digest_size = new().digest_size
#: The internal block size of the hash algorithm in bytes.
block_size = new().block_size
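# A small usage check of the wrapper above (illustrative, not part of the
# original module): update() may be called repeatedly, and the digest sizes
# always match the constants.
_h = new()
_h.update(b'Hello, ')
_h.update(b'world')
assert len(_h.digest()) == digest_size
assert len(_h.hexdigest()) == 2 * digest_size
del _h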
|
mbayon/TFG-MachineLearning | venv/lib/python3.6/site-packages/sklearn/utils/_scipy_sparse_lsqr_backport.py | Python | mit | 18,021 | 0.000388 | """Sparse Equations and Least Squares.
The original Fortran code was written by C. C. Paige and M. A. Saunders as
described in
C. C. Paige and M. A. Saunders, LSQR: An algorithm for sparse linear
equations and sparse least squares, TOMS 8(1), 43--71 (1982).
C. C. Paige and M. A. Saunders, Algorithm 583; LSQR: Sparse linear
equations and least-squares problems, TOMS 8(2), 195--209 (1982).
It is licensed under the following BSD license:
Copyright (c) 2006, Systems Optimization Laboratory
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Stanford University nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The Fortran code was translated to Python for use in CVXOPT by Jeffery
Kline with contributions by Mridul Aanjaneya and Bob Myhill.
Adapted for SciPy by Stefan van der Walt.
"""
from __future__ import division, print_function, absolute_import
__all__ = ['lsqr']
import numpy as np
from math import sqrt
from scipy.sparse.linalg.interface import aslinearoperator
eps = np.finfo(np.float64).eps
def _sym_ortho(a, b):
"""
Stable implementation of Givens rotation.
Notes
-----
The routine 'SymOrtho' was added for numerical stability. This is
recommended by S.-C. Choi in [1]_. It removes the unpleasant potential of
``1/eps`` in some important places (see, for example text following
"Compute the next plane rotation Qk" in minres.py).
References
----------
.. [1] S.-C. Choi, "Iterative Methods for Singular Linear Equations
and Least-Squares Problems", Dissertation,
http://www.stanford.edu/group/SOL/dissertations/sou-cheng-choi-thesis.pdf
"""
if b == 0:
return np.sign(a), 0, abs(a)
elif a == 0:
return 0, np.sign(b), abs(b)
elif abs(b) > abs(a):
tau = a / b
s = np.sign(b) / sqrt(1 + tau * tau)
c = s * tau
r = b / s
else:
tau = b / a
c = np.sign(a) / sqrt(1+tau*tau)
s = c * tau
r = a / c
return c, s, r
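# Quick numeric check of the rotation above (illustrative): the returned
# (c, s, r) maps the pair (a, b) onto (r, 0), i.e. c*a + s*b == r and
# s*a - c*b == 0, verified here at one sample point.
_c, _s, _r = _sym_ortho(3.0, 4.0)
assert abs(_c * 3.0 + _s * 4.0 - _r) < 1e-12
assert abs(_s * 3.0 - _c * 4.0) < 1e-12
del _c, _s, _r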
def lsqr(A, b, damp=0.0, atol=1e-8, btol=1e-8, conlim=1e8,
iter_lim=None, show=False, calc_var=False):
"""Find the least-squares solution to a large, sparse, linear system
of equations.
The function solves ``Ax = b`` or ``min ||b - Ax||^2`` or
``min ||Ax - b||^2 + d^2 ||x||^2``.
The matrix A may be square or rectangular (over-determined or
under-determined), and may have any rank.
::
1. Unsymmetric equations -- solve A*x = b
2. Linear least squares -- solve A*x = b
in the least-squares sense
3. Damped least squares -- solve ( A )*x = ( b )
( damp*I ) ( 0 )
in the least-squares sense
Parameters
----------
A : {sparse matrix, ndarray, LinearOperatorLinear}
Representation of an m-by-n matrix. It is required that
the linear operator can produce ``Ax`` and ``A^T x``.
b : (m,) ndarray
Right-hand side vector ``b``.
damp : float
Damping coefficient.
atol, btol : float, default 1.0e-8
Stopping tolerances. If both are 1.0e-9 (say), the final
residual norm should be accurate to about 9 digits. (The
final x will usually have fewer correct digits, depending on
cond(A) and the size of damp.)
conlim : float
Another stopping tolerance. lsqr terminates if an estimate of
``cond(A)`` exceeds `conlim`. For compatible systems ``Ax =
b``, `conlim` could be as large as 1.0e+12 (say). For
least-squares problems, conlim should be less than 1.0e+8.
Maximum precision can be obtained by setting ``atol = btol =
conlim = zero``, but the number of iterations may then be
excessive.
iter_lim : int
Explicit limitation on number of iterations (for safety).
show : bool
Display an iteration log.
calc_var : bool
Whether to estimate diagonals of ``(A'A + damp^2*I)^{-1}``.
Returns
-------
x : ndarray of float
The final solution.
istop : int
Gives the reason for termination.
1 means x is an approximate solution to Ax = b.
2 means x approximately solves the least-squares problem.
itn : int
Iteration number upon termination.
r1norm : float
``norm(r)``, where ``r = b - Ax``.
r2norm : float
``sqrt( norm(r)^2 + damp^2 * norm(x)^2 )``. Equal to `r1norm` if
``damp == 0``.
anorm : float
Estimate of Frobenius norm of ``Abar = [[A]; [dam | p*I]]``.
acond : float
Estimate of ``cond(Abar | )``.
arnorm : float
Estimate of ``norm(A'*r - damp^2*x)``.
xnorm : float
``norm(x)``
var : ndarray of float
If ``calc_var`` is True, estimates all diagonals of
``(A'A)^{-1}`` (if ``damp == 0``) or more generally ``(A'A +
damp^2*I)^{-1}``. This is well defined if A has full column
rank or ``damp > 0``. (Not sure what var means if ``rank(A)
< n`` and ``damp = 0.``)
Notes
-----
LSQR uses an iterative method to approximate the solution. The
number of iterations required to reach a certain accuracy depends
strongly on the scaling of the problem. Poor scaling of the rows
or columns of A should therefore be avoided where possible.
For example, in problem 1 the solution is unaltered by
row-scaling. If a row of A is very small or large compared to
the other rows of A, the corresponding row of ( A b ) should be
scaled up or down.
In problems 1 and 2, the solution x is easily recovered
following column-scaling. Unless better information is known,
the nonzero columns of A should be scaled so that they all have
the same Euclidean norm (e.g., 1.0).
In problem 3, there is no freedom to re-scale if damp is
nonzero. However, the value of damp should be assigned only
after attention has been paid to the scaling of A.
The parameter damp is intended to help regularize
ill-conditioned systems, by preventing the true solution from
being very large. Another aid to regularization is provided by
the parameter acond, which may be used to terminate iterations
before the computed solution becomes very large.
If some initial estimate ``x0`` is known and if ``damp == 0``,
one could proceed as follows:
1. Compute a residual vector ``r0 = b - A*x0``.
2. Use LSQR to solve the system ``A*dx = r0``.
3. Add the correction dx to obtain a final solution ``x = x0 + dx``.
This requires that ``x0`` be available before and after the call
to LSQR. To judge the benefits, suppose LSQR takes k1 iterations
to solve |
kingvuplus/ops | lib/python/Plugins/Extensions/PicturePlayer/ui.py | Python | gpl-2.0 | 22,711 | 0.027211 | from enigma import ePicLoad, eTimer, getDesktop, gMainDC, eSize
from Screens.Screen import Screen
from Tools.Directories import resolveFilename, pathExists, SCOPE_MEDIA, SCOPE_CURRENT_SKIN
from Components.Pixmap import Pixmap, MovingPixmap
from Components.ActionMap import ActionMap, NumberActionMap
from Components.Sources.StaticText import StaticText
from Components.FileList import FileList
from Components.AVSwitch import AVSwitch
from Components.Sources.List import List
from Components.ConfigList import ConfigList, ConfigListScreen
from Components.config import config, ConfigSubsection, ConfigInteger, ConfigSelection, ConfigText, ConfigYesNo, KEY_LEFT, KEY_RIGHT, KEY_0, getConfigListEntry
import skin
def getScale():
return AVSwitch().getFramebufferScale()
config.pic = ConfigSubsection()
config.pic.framesize = ConfigInteger(default=30, limits=(5, 99))
config.pic.slidetime = ConfigInteger(default=10, limits=(1, 60))
config.pic.resize = ConfigSelection(default="1", choices = [("0", _("simple")), ("1", _("better"))])
config.pic.cache = ConfigYesNo(default=True)
config.pic.lastDir = ConfigText(default=resolveFilename(SCOPE_MEDIA))
config.pic.infoline = ConfigYesNo(default=True)
config.pic.loop = ConfigYesNo(default=True)
config.pic.bgcolor = ConfigSelection(default="#00000000", choices = [("#00000000", _("black")),("#009eb9ff", _("blue")),("#00ff5a51", _("red")), ("#00ffe875", _("yellow")), ("#0038FF48", _("green"))])
config.pic.autoOrientation = ConfigYesNo(default=False)
config.pic.textcolor = ConfigSelection(default="#0038FF48", choices = [("#00000000", _("black")),("#009eb9ff", _("blue")),("#00ff5a51", _("red")), ("#00ffe875", _("yellow")), ("#0038FF48", _("green"))])
class picshow(Screen):
skin = """
<screen name="picshow" position="center,center" size="560,440" title="Picture player" >
<ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/green.png" position="140,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/yellow.png" position="280,0" size="140,40" alphatest="on" />
<widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
<widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
<widget source="key_yellow" render="Label" position="280,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#a08500" transparent="1" />
<widget source="label" render="Label" position="5,55" size="350,140" font="Regular;19" backgroundColor="#25062748" transparent="1" />
<widget name="thn" position="360,40" size="180,160" alphatest="on" />
<widget name="filelist" position="5,205" zPosition="2" size="550,230" scrollbarMode="showOnDemand" />
</screen>"""
def __init__(self, session):
Screen.__init__(self, session)
self["actions"] = ActionMap(["OkCancelActions", "ColorActions", "DirectionActions", "MenuActions"],
{
"cancel": self | .KeyExit,
"red": self.KeyExit,
"green": self.KeyGreen,
"yellow": self.KeyYellow,
"menu": self.KeyMenu,
"ok": sel | f.KeyOk
}, -1)
self["key_red"] = StaticText(_("Close"))
self["key_green"] = StaticText(_("Thumbnails"))
self["key_yellow"] = StaticText("")
self["label"] = StaticText("")
self["thn"] = Pixmap()
currDir = config.pic.lastDir.value
if not pathExists(currDir):
currDir = "/"
self.oldService = self.session.nav.getCurrentlyPlayingServiceReference()
self.session.nav.stopService()
# Show Background MVI
import os
try:
os.system("/usr/bin/showiframe /usr/share/enigma2/black.mvi &")
except:
pass
self.filelist = FileList(currDir, matchingPattern = "(?i)^.*\.(jpeg|jpg|jpe|png|bmp|gif)")
self["filelist"] = self.filelist
self["filelist"].onSelectionChanged.append(self.selectionChanged)
self.ThumbTimer = eTimer()
self.ThumbTimer.callback.append(self.showThumb)
self.picload = ePicLoad()
self.picload.PictureData.get().append(self.showPic)
self.onLayoutFinish.append(self.setConf)
def showPic(self, picInfo=""):
ptr = self.picload.getData()
if ptr != None:
self["thn"].instance.setPixmap(ptr.__deref__())
self["thn"].show()
text = picInfo.split('\n',1)
self["label"].setText(text[1])
self["key_yellow"].setText(_("Exif"))
def showThumb(self):
if not self.filelist.canDescent():
if self.filelist.getCurrentDirectory() and self.filelist.getFilename():
if self.picload.getThumbnail(self.filelist.getCurrentDirectory() + self.filelist.getFilename()) == 1:
self.ThumbTimer.start(500, True)
def selectionChanged(self):
if not self.filelist.canDescent():
self.ThumbTimer.start(500, True)
else:
self["label"].setText("")
self["thn"].hide()
self["key_yellow"].setText("")
def KeyGreen(self):
#if not self.filelist.canDescent():
self.session.openWithCallback(self.callbackView, Pic_Thumb, self.filelist.getFileList(), self.filelist.getSelectionIndex(), self.filelist.getCurrentDirectory())
def KeyYellow(self):
if not self.filelist.canDescent():
self.session.open(Pic_Exif, self.picload.getInfo(self.filelist.getCurrentDirectory() + self.filelist.getFilename()))
def KeyMenu(self):
self.session.openWithCallback(self.setConf, Pic_Setup)
def KeyOk(self):
if self.filelist.canDescent():
self.filelist.descent()
else:
self.session.openWithCallback(self.callbackView, Pic_Full_View, self.filelist.getFileList(), self.filelist.getSelectionIndex(), self.filelist.getCurrentDirectory())
def setConf(self, retval=None):
self.setTitle(_("Picture player"))
sc = getScale()
#0=Width 1=Height 2=Aspect 3=use_cache 4=resize_type 5=Background(#AARRGGBB)
self.picload.setPara((self["thn"].instance.size().width(), self["thn"].instance.size().height(), sc[0], sc[1], config.pic.cache.value, int(config.pic.resize.value), "#00000000", config.pic.autoOrientation.value))
def callbackView(self, val=0):
if val > 0:
self.filelist.moveToIndex(val)
def KeyExit(self):
del self.picload
if self.filelist.getCurrentDirectory() is None:
config.pic.lastDir.value = "/"
else:
config.pic.lastDir.value = self.filelist.getCurrentDirectory()
config.pic.save()
self.session.nav.playService(self.oldService)
self.close()
#------------------------------------------------------------------------------------------
class Pic_Setup(Screen, ConfigListScreen):
def __init__(self, session):
Screen.__init__(self, session)
# for the skin: first try MediaPlayerSettings, then Setup, this allows individual skinning
self.skinName = ["PicturePlayerSetup", "Setup"]
self.setup_title = _("Settings")
self.onChangedEntry = []
self.session = session
ConfigListScreen.__init__(self, [], session = session, on_change = self.changedEntry)
self["actions"] = ActionMap(["SetupActions", "MenuActions"],
{
"cancel": self.keyCancel,
"save": self.keySave,
"ok": self.keySave,
"menu": self.closeRecursive,
}, -2)
self["key_red"] = StaticText(_("Cancel"))
self["key_green"] = StaticText(_("OK"))
self.createSetup()
self.onLayoutFinish.append(self.layoutFinished)
def layoutFinished(self):
self.setTitle(self.setup_title)
def createSetup(self):
setup_list = [
getConfigListEntry(_("Slide show interval (sec.)"), config.pic.slidetime),
getConfigListEntry(_("Scaling mode"), config.pic.resize),
getConfigListEntry(_("Cache thumbnails"), config.pic.cache),
getConfigListEntry(_("Show info line"), config.pic.infoline),
getConfigListEntry(_("Frame size in full view"), config.pic.framesize),
getConfigListEntry(_("Slide picture in loop"), config.pic.loop),
getConfigListEntry(_("Background color"), config.pic.bgcolor),
getConfigListEntry(_("Text color"), config.pic.textcolor),
getConfigListEntry(_("Fulview resulution"), config.usage.pic_resolution),
getConfigListEntry(_("Auto EXIF Orientation rotation/flipping"), config.pic.autoOrientation),
]
self["config |
neithere/pyrant | pyrant/exceptions.py | Python | apache-2.0 | 1,671 | 0.007181 | # -*- coding: utf-8 -*-
"""
If you know error code, use `get_for_code(code)` to retrieve exception instance.
"""
__all__ = ['Success', 'InvalidOperation', 'HostNotFound', 'ConnectionRefused',
'SendError', 'ReceiveError', 'RecordExists', 'RecordNotFound',
'MiscellaneousError', 'get_for_code']
class TyrantError(Exception):
"""
Tyrant error, socket and communication errors are not included here.
"""
pass
class Success(TyrantError):
"""
Don't laugh at me. I represent a constant from the protocol. Honestly!
"""
pass
class InvalidOperation(TyrantError):
pass
class HostNotFound(TyrantError):
pass
class ConnectionRefused(TyrantError):
pass
class SendError(TyrantError):
pass
class ReceiveError(TyrantError):
pass
class RecordExists(TyrantError):
message = 'Record already exists'
class RecordNotFound(TyrantError):
pass
class MiscellaneousError(TyrantError):
pass
ERROR_CODE_TO_CLASS = {
0: Success,
1: InvalidOp | eration,
2: HostNotFound,
3: ConnectionRefused,
4: SendError,
5: ReceiveError,
6: RecordExists,
7: RecordNotFound,
9999: MiscellaneousError,
}
def get_for_code(error_code, message=None):
try:
int(error_code)
except ValueError:
raise TypeError(u'Could not map error code to e | xception class: expected '
'a number, got "%s"' % error_code)
else:
try:
cls = ERROR_CODE_TO_CLASS[error_code]
except KeyError:
raise ValueError('Unknown error code "%d"' % error_code)
else:
return cls(message) if message else cls()
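# Usage examples for the mapping above (messages are invented): known codes
# come back as exception instances, unknown codes raise ValueError.
assert isinstance(get_for_code(7), RecordNotFound)
assert str(get_for_code(9999, 'backend exploded')) == 'backend exploded'
try:
    get_for_code(42)
except ValueError:
    pass  # unknown error code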
|
vanceeasleaf/aces | aces/materials/MoN2_alpha_rect.py | Python | gpl-2.0 | 1,320 | 0.036364 | from aces.materials.POSCAR import structure as Material
class structure(Material):
def getPOSCAR(self):
return self.getMinimized()
def csetup(self):
from ase.dft.kp | oints import ibz_points
#self.bandpoints=ibz_points['hexagonal']
import numpy as np
x=0.5*np.cos(np.arange(8)/8.0*2.0*np.pi)
y=0.5*np.sin(np.arange(8)/8.0*2.0*np.pi)
self.bandpath=['Gamma']
for i in range(8):
if(np.abs(x[i])>0.2):x[i]/=np.abs(x[i])*2.0
if(np.abs(y[i])>0.2):y[i]/=np.abs(y[i])*2.0
self.bandpoints['X'+str(i)]=[x[i],y[i],0.0]
self.bandpath.app | end('X'+str(i))
self.bandpath.append('Gamma')
#self.bandpath=['Gamma',"X2"]
def getMinimized(self):
return """Mo N
1.0000000000000000
2.9916000366000000 0.0000000000000000 0.0000000000000000
0.0000000000000000 5.1814560994168932 0.0000000000000000
0.0000000000000000 0.0000000000000000 25.0000000000000000
Mo N
2 4
Direct
0.5000000000000000 0.5000000000000000 0.5000000000000000
0.0000000000000000 0.0000000000000000 0.5000000000000000
0.5000000000000000 0.8333333333333335 0.4555099610000000
0.5000000000000000 0.8333333333333335 0.5444900390000000
0.0000000000000000 0.3333333333333333 0.4555099610000000
0.0000000000000000 0.3333333333333333 0.5444900390000000
""" |
almarklein/bokeh | bokeh/server/views/statics.py | Python | bsd-3-clause | 844 | 0.004739 |
import flask
| from ..app import bokeh_app
## This URL hierarchy is important, because of the way we build bokehjs
## the source mappings list the source file as being inside ../../src
| @bokeh_app.route('/bokehjs/static/<path:filename>')
def bokehjs_file(filename):
""" Return a specific BokehJS deployment file
:param filename: name of the file to retrieve
:status 200: file is found
:status 404: file is not found
"""
return flask.send_from_directory(bokeh_app.bokehjsdir, filename)
@bokeh_app.route('/bokehjs/src/<path:filename>')
def bokehjssrc_file(filename):
""" Return a specific BokehJS source code file
:param filename: name of the file to retrieve
:status 200: file is found
:status 404: file is not found
"""
return flask.send_from_directory(bokeh_app.bokehjssrcdir, filename)
KenKundert/avendesora | avendesora/command.py | Python | gpl-3.0 | 59,621 | 0.001275 | # Commands
# License {{{1
# Copyright (C) 2016-2022 Kenneth S. Kundert
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
# Imports {{{1
from .collection import Collection
from .config import get_setting, override_setting
from .editors import GenericEditor
from .files import AccountFiles
from .error import PasswordError
from .generator import PasswordGenerator
from .gpg import GnuPG, PythonFile
from .obscure import ObscuredSecret
from .shlib import chmod, cp, rm, to_path
from .utilities import query_user, two_columns, name_completion
from .writer import get_writer
from inform import (
codicil, columns, conjoin, cull, display, error, Error, indent,
is_collection, join, narrate, os_error, output, render, title_case, warn,
)
from docopt import docopt
from textwrap import dedent
import re
import sys
# Utilities {{{1
def title(text):
return title_case(text)
# Command base class {{{1
class Command(object):
@classmethod
def commands(cls):
for cmd in cls.__subclasses__():
assert is_collection(cmd.NAMES)
yield cmd
# currently only one level of subclassing is supported
@classmethod
def commands_sorted(cls):
for cmd in sorted(cls.commands(), key=lambda c: c.get_name()):
yield cmd
@classmethod
def find(cls, name):
for command in cls.commands():
if name in command.NAMES:
return command
@classmethod
def execute(cls, name, args):
if args is None:
args = []
command = cls.find(name)
if not command:
# consider an alias
aliases = get_setting('command_aliases')
if aliases and name in aliases:
aliases = Collection(aliases[name])
new_name = aliases[0]
new_args = aliases[1:]
if new_args:
narrate("Replacing '{}' in command with '{} {}'".format(
name, new_name, ' '.join(new_args)
))
args = new_args + args
else:
narrate("Replacing '{}' in command with '{}'".format(
name, new_name,
))
name = new_name
command = cls.find(new_name)
if not command:
# no recognizable command was specified
# in this case, run 'credentials' if one argument is given
# and 'value' otherwise
args = [name] + args
name = 'value' if len(args) > 1 else 'credentials'
command = cls.find(name)
if not command:
error('unknown command.', culprit=name)
            codicil("Use 'avendesora help' for list of available commands.")
return
command.run(name, args)
@classmethod
def summarize(cls, width=16):
summaries = []
for cmd in Command.commands_sorted():
summaries.append(two_columns(', '.join(cmd.NAMES), cmd.DESCRIPTION))
return '\n'.join(summaries)
@classmethod
def get_name(cls):
return cls.NAMES[0]
@classmethod
def help(cls):
text = dedent("""
{title}
{usage}
""").strip()
return text.format(
title=title(cls.DESCRIPTION), usage=cls.USAGE,
)
@classmethod
def get_help_url(cls):
try:
anchor = '-'.join(e.lower() for e in [cls.NAMES[0]] + cls.DESCRIPTION.split())
            return '/commands.html#' + anchor
except (AttributeError, TypeError):
pass
# Add {{{1
class Add(Command):
    NAMES = 'add',
DESCRIPTION = 'add a new account'
USAGE = dedent("""
Usage:
avendesora add [options] [<template>]
Options:
-f <file>, --file <file>
Add account to specified accounts file.
Creates a new account starting from a template. The template consists of
    boilerplate code and fields. The fields take the form _NAME_. They
should be replaced by appropriate values or deleted if not needed. If
you are using the Vim editor, it is preconfigured to jump to the next
field when you press 'n'. If the field is surrounded by '<<' and '>>',
as in '<<_ACCOUNT_NUMBER_>>', the value you enter will be concealed.
You can create your own templates by adding them to 'account_templates'
in the ~/.config/avendesora/config file.
You can change the editor used when adding account by changing the
'edit_template', also found in the ~/.config/avendesora/config file.
""").strip()
@classmethod
def help(cls):
text = dedent("""
{title}
{usage}
The default template is {default}. The available templates are:
{templates}
The available accounts files are (the default is given first):
{files}
""").strip()
def indented_list(l):
indent = get_setting('indent')
return indent + ('\n' + indent).join(sorted(l))
return text.format(
title=title(cls.DESCRIPTION), usage=cls.USAGE,
default=get_setting('default_account_template'),
templates=indented_list(get_setting('account_templates').keys()),
files=indented_list(get_setting('accounts_files', [])),
)
@classmethod
def run(cls, command, args):
# read command line
cmdline = docopt(cls.USAGE, argv=[command] + args)
# determine the accounts file
prefix = cmdline['--file']
if prefix:
candidates = [
p
for p in get_setting('accounts_files')
if p.startswith(prefix)
]
if not candidates:
                raise PasswordError('not found.', culprit=cmdline['--file'])
if len(candidates) > 1:
raise PasswordError(
'ambiguous, matches %s.' % conjoin(candidates),
                    culprit=prefix
)
filename = candidates[0]
else:
filename = get_setting('accounts_files')[0]
try:
# get the specified template
templates = get_setting('account_templates')
if cmdline['<template>']:
template_name = cmdline['<template>']
else:
template_name = get_setting('default_account_template')
template = dedent(templates[template_name]).strip() + '\n'
except KeyError:
raise PasswordError(
'unknown account template, choose from %s.' % conjoin(
sorted(templates.keys())
), culprit=template_name
)
try:
# get original contents of accounts file
path = to_path(get_setting('settings_dir'), filename)
orig_accounts_file = PythonFile(path)
orig_accounts = orig_accounts_file.run()
gpg_ids = orig_accounts.get('gpg_ids')
orig_accounts_code = orig_accounts_file.code.strip('\n')
# backup the original file
orig_accounts_file.backup('.saved')
# save the template into temp file
from tempfile import mktemp
tmpfilename = mktemp(suffix='_avendesora.gpg')
tmpfile = GnuPG(tmpfilename)
tmpfile.save(template, get_setting('gpg_ids'))
except OSError as e:
edoburu/django-oscar-docdata | oscar_docdata/urls.py | Python | apache-2.0 | 281 | 0.003559 | from django.conf.urls import url
from .views import OrderReturnView, StatusChangedNotificationView
urlpatterns = [
url(r'^return/$', OrderReturnView.as_view(), name='return_url'),
    url(r'^update_order/$', StatusChangedNotificationView.as_view(), name='status_changed'),
]
ashutosh-mishra/youtube-dl | youtube_dl/extractor/traileraddict.py | Python | unlicense | 2,278 | 0.00878 | import re
from .common import InfoExtractor
class TrailerAddictIE(InfoExtractor):
_VALID_URL = r'(?:http://)?(?:www\.)?traileraddict\.com/(?:trailer|clip)/(?P<movie>.+?)/(?P<trailer_name>.+)'
_TEST = {
        u'url': u'http://www.traileraddict.com/trailer/prince-avalanche/trailer',
u'file': u'76184.mp4',
u'md5': u'57e39dbcf4142ceb8e1f242ff423fd71',
u'info_dict': {
u"title": u"Prince Avalanche Trailer",
u"description": u"Trailer for Prince Avalanche.Two highway road workers spend the summe | r of 1988 away from their city lives. The isolated landscape becomes a place of misadventure as the men find themselves at odds with each other and the women they left behind."
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
name = mobj.group('movie') + '/' + mobj.group('trailer_name')
webpage = self._download_webpage(url, name)
title = self._search_regex(r'<title>(.+?)</title>',
webpage, 'video title').replace(' - Trailer Addict','')
view_count = self._search_regex(r'Views: (.+?)<br />',
webpage, 'Views Count')
video_id = self._og_search_property('video', webpage, 'Video id').split('=')[1]
# Presence of (no)watchplus function indicates HD quality is available
if re.search(r'function (no)?watchplus()', webpage):
fvar = "fvarhd"
else:
fvar = "fvar"
info_url = "http://www.traileraddict.com/%s.php?tid=%s" % (fvar, str(video_id))
info_webpage = self._download_webpage(info_url, video_id , "Downloading the info webpage")
final_url = self._search_regex(r'&fileurl=(.+)',
info_webpage, 'Download url').replace('%3F','?')
thumbnail_url = self._search_regex(r'&image=(.+?)&',
info_webpage, 'thumbnail url')
ext = final_url.split('.')[-1].split('?')[0]
return [{
'id' : video_id,
'url' : final_url,
'ext' : ext,
'title' : title,
'thumbnail' : thumbnail_url,
'description' : self._og_search_description(webpage),
'view_count' : view_count,
}]
sysadminmatmoz/pmis | analytic_plan_analysis/__manifest__.py | Python | agpl-3.0 | 633 | 0 | # -*- coding: utf-8 -*-
# Copyright 2017 Eficent Business and IT Consulting Services S.L.
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
{
'name': 'Analytic Plan-Actual Analysis',
'version': '10.0.1.1.0',
'author': 'Eficent, Odoo Community Association (OCA),'
'Project Expert Team',
'website': 'http://project.expert',
'category': 'Project Management',
'license': 'AGPL-3',
'depends': [
'analytic_plan'
],
    'data': [
        'security/ir.model.access.csv',
'report/account_analytic_plan_actual_view.xml',
],
'installable': False,
}
mreider/multi-user-encryption | app.py | Python | mit | 1,685 | 0.020178 | # -*- coding: utf-8 -*-
from flask import Flask, request, jsonify
from dbmanager import Database
db = Database()
db.start_engine()
app = Flask(__name__)
@app.route('/api/v1.0/data',methods=['GET','POST'])
def index():
if request.method == 'GET':
        #Fetch data
username = request.args.get('user','')
password = request.args.get('password','')
print 'Username = %s, password = %s'%(username,password)
content = db.get_data(username,password)
return jsonify(data=content)
elif request.method == 'POST':
#Update data
username = request.form.get('user','')
password = request.form.get('password','')
data = request.form.get('content','')
print 'Username = %s, password = %s'%(username,password)
print 'Data %s'%data
db.save_data(data,username,password)
else :
print 'Not handling it'
return jsonify(data=data)
@app.route('/api/v1.0/rotate',methods=['GET'])
def rotate():
if request.method == 'GET':
#Fetch data
username = request.args.get('user','')
password = request.args.get('password','')
print 'Username = %s, password = %s'%(username,password)
if username != 'Root':
return jsonify(data='Error : Only root user can perform this.')
print 'Username = %s, password = %s'%(username,password)
data = db.rotate_data(username,password)
return jsonify(data=data)
@app.teardown_appcontext
def teardown_db(exception):
if db:
db.connection_close()
if __name__ == '__main__':
    app.run(debug=True, host='0.0.0.0', port=8000)
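# Request sketch (credentials invented):
# GET /api/v1.0/data?user=alice&password=secret fetches data via db.get_data
# POST /api/v1.0/data with form fields user, password, content calls db.save_data
# GET /api/v1.0/rotate?user=Root&password=... calls db.rotate_data (Root only)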
irvingprog/gmusic | manage.py | Python | lgpl-3.0 | 256 | 0 | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE | ", "downloadmusic.settings")
f | rom django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
rohitranjan1991/home-assistant | homeassistant/components/sonarr/const.py | Python | mit | 436 | 0 | """Constants for Sonarr."""
DOMAIN = "sonarr"
# Config Keys
CONF_BASE_PATH = "base_path"
CONF_DAYS = "days"
CONF_INCLUDED = "include_paths"
CONF_UNIT = "unit"
CONF_UPCOMING_DAYS = "upcoming_days"
CONF_WANTED_MAX_ITEMS = "wanted_max_items"
# Data
DATA_HOST_CONFIG = "host_config"
DATA_SONARR = "sonarr"
DATA_SYSTEM_STATUS = "system_status"
# Defaults
DEFAULT_UPCOMING_DAYS = 1
DEFAULT_VERIFY_SSL = False
| DEFAULT_WANTED_MAX_ITEMS = 50
|
spulec/moto | tests/test_timestreamwrite/test_server.py | Python | apache-2.0 | 514 | 0 | import json
import sure # noqa # pylint: disable=unused-import
import moto.server as server
from moto import mock_timestreamwrite
@mock_timestreamwrite
def test_timestreamwrite_list():
backend = server.create_backend_app("timestream-write")
test_client = backend.test_client()
headers = {"X-Amz-Target": "Timestream_20181101.ListDatabases"}
resp = test_client.post("/", headers=headers, json={})
resp.status_code.should.equal(200)
json.loads(resp.data).should.equal({"Databases": []})
mutarock/python-utils | compress/__init__.py | Python | mit | 53 | 0 | from | core import ungzip
from core import ungzip_ | html
|
dmsimard/ara | ara/clients/offline.py | Python | gpl-3.0 | 3,306 | 0.00121 | # Copyright (c) 2018 Red Hat, Inc.
#
# This file is part of ARA: Ansible Run Analysis.
#
# ARA is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ARA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
# This is an "offline" API client that does not require standing up
# an API server and does not execute actual HTTP calls.
import logging
import os
import threading
from ara.clients.http import AraHttpClient
from ara.setup.exceptions import MissingDjangoException
try:
from django.core.handlers.wsgi import WSGIHandler
from django.core.servers.basehttp import ThreadedWSGIServer, WSGIRequestHandler
except ImportError as e:
raise MissingDjangoException from e
class AraOfflineClient(AraHttpClient):
def __init__(self, auth=None, run_sql_migrations=True):
self.log = logging.getLogger(__name__)
from django import setup as django_setup
from django.core.management import execute_from_command_line
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ara.server.settings")
if run_sql_migrations:
# Automatically create the database and run migrations (is there a better way?)
execute_from_command_line(["django", "migrate"])
# Set up the things Django needs
django_setup()
self._start_server()
super().__init__(endpoint="http://localhost:%d" % self.server_thread.port, auth=auth)
def _start_server(self):
self.server_thread = ServerThread("localhost")
self.server_thread.start()
# Wait for the live server to be ready
        self.server_thread.is_ready.wait()
if self.server_thread.error:
raise self.server_thread.error
class ServerThread(threading.Thread):
def __init__(self, host, port=0):
self.host = host
self.port = port
self.is_ready = threading.Event()
self.error = None
super().__init__(daemon=True)
def run(self):
"""
Set up the live server and databases, and then loop over handling
HTTP requests.
"""
try:
# Create the handler for serving static and media files
self.httpd = self._create_server()
# If binding to port zero, assign the port allocated by the OS.
if self.port == 0:
self.port = self.httpd.server_address[1]
self.httpd.set_app(WSGIHandler())
self.is_ready.set()
self.httpd.serve_forever()
except Exception as e:
self.error = e
self.is_ready.set()
def _create_server(self):
return ThreadedWSGIServer((self.host, self.port), QuietWSGIRequestHandler, allow_reuse_address=False)
class QuietWSGIRequestHandler(WSGIRequestHandler):
def log_message(*args):
pass
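# Usage sketch (assumes ara's Django dependencies are importable; settings
# and database defaults come from ara.server.settings):
# client = AraOfflineClient(run_sql_migrations=True)
# ...after which `client` behaves like AraHttpClient, but against the
# in-process WSGI server started above.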
yasharmaster/scancode-toolkit | src/packagedcode/pyrpm/rpm.py | Python | apache-2.0 | 10,566 | 0.001136 | # -*- coding: iso-8859-15 -*-
# -*- Mode: Python; py-ident-offset: 4 -*-
# vim:ts=4:sw=4:et
# Copyright (c) Mário Morgado
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
PyRPM
=====
PyRPM is a pure python, simple to use, module to read information from a RPM
file.
'''
from __future__ import absolute_import
from StringIO import StringIO
import struct
import re
from . import rpmdefs
HEADER_MAGIC_NUMBER = re.compile('(\x8e\xad\xe8)')
def find_magic_number(regexp, data):
''' find a magic number in a buffer
'''
string = data.read(1)
    while 1:
match = regexp.search(string)
if match:
return data.tell() - 3
byte = data.read(1)
if not byte:
return None
else:
string += byte
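# Usage sketch (synthetic buffer): find_magic_number returns the offset at
# which the 3-byte header magic starts, or None if the buffer is exhausted.
# >>> find_magic_number(HEADER_MAGIC_NUMBER, StringIO('xx\x8e\xad\xe8yy'))
# 2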
class Entry(object):
''' RPM Header Entry
'''
def __init__(self, entry, store):
self.entry = entry
self.store = store
self.switch = {
            rpmdefs.RPM_DATA_TYPE_CHAR: self.__readchar,
rpmdefs.RPM_DATA_TYPE_INT8: self.__readint8,
rpmdefs.RPM_DATA_TYPE_INT16: self.__readint16,
rpmdefs.RPM_DATA_TYPE_INT32: self.__readint32,
rpmdefs.RPM_DATA_TYPE_INT64: self.__readint64,
rpmdefs.RPM_DATA_TYPE_STRING: self.__readstring,
rpmdefs.RPM_DATA_TYPE_BIN: self.__readbin,
rpmdefs.RPM_DATA_TYPE_STRING_ARRAY: self.__readstring,
rpmdefs.RPM_DATA_TYPE_ASN1: self.__readbin,
rpmdefs.RPM_DATA_TYPE_OPENPGP: self.__readbin,
rpmdefs.RPM_DATA_TYPE_I18NSTRING_TYPE: self.__readstring
}
self.store.seek(entry[2])
self.value = self.switch[entry[1]]()
self.tag = entry[0]
def __str__(self):
return "(%s, %s)" % (self.tag, self.value,)
def __repr__(self):
return "(%s, %s)" % (self.tag, self.value,)
def __readchar(self, offset=1):
''' store is a pointer to the store offset
where the char should be read
'''
data = self.store.read(offset)
fmt = '!' + str(offset) + 'c'
value = struct.unpack(fmt, data)
return value
def __readint8(self, offset=1):
''' int8 = 1byte
'''
return self.__readchar(offset)
def __readint16(self, offset=1):
''' int16 = 2bytes
'''
data = self.store.read(offset * 2)
        fmt = '!' + str(offset) + 'h'  # int16 is 2 bytes per item ('h' in struct), matching the read above
value = struct.unpack(fmt, data)
return value
def __readint32(self, offset=1):
''' int32 = 4bytes
'''
data = self.store.read(offset * 4)
fmt = '!' + str(offset) + 'i'
value = struct.unpack(fmt, data)
return value
def __readint64(self, offset=1):
''' int64 = 8bytes
'''
        data = self.store.read(offset * 8)
        fmt = '!' + str(offset) + 'q'  # int64 is 8 bytes per item ('q' in struct)
value = struct.unpack(fmt, data)
return value
def __readstring(self):
''' read a string entry
'''
string = ''
while 1:
char = self.__readchar()
if char[0] == '\x00': # read until '\0'
break
string += char[0]
return string
def __readbin(self):
''' read a binary entry
'''
if self.entry[0] == rpmdefs.RPMSIGTAG_MD5:
data = self.store.read(rpmdefs.MD5_SIZE)
value = struct.unpack('!' + rpmdefs.MD5_SIZE + 's', data)
return value
elif self.entry[0] == rpmdefs.RPMSIGTAG_PGP:
data = self.store.read(rpmdefs.PGP_SIZE)
value = struct.unpack('!' + rpmdefs.PGP_SIZE + 's', data)
return value
class Header(object):
''' RPM Header Structure
'''
def __init__(self, header, entries, store):
self.header = header
self.entries = entries
self.store = store
self.pentries = []
self.rentries = []
self.__readentries()
def __readentry(self, entry):
''' [4bytes][4bytes][4bytes][4bytes]
TAG TYPE OFFSET COUNT
'''
entryfmt = '!llll'
entry = struct.unpack(entryfmt, entry)
if entry[0] < rpmdefs.RPMTAG_MIN_NUMBER or\
entry[0] > rpmdefs.RPMTAG_MAX_NUMBER:
return None
return entry
def __readentries(self):
''' read a rpm entry
'''
for entry in self.entries:
entry = self.__readentry(entry)
if entry:
if entry[0] in rpmdefs.RPMTAGS:
self.pentries.append(entry)
for pentry in self.pentries:
entry = Entry(pentry, self.store)
if entry:
self.rentries.append(entry)
class RPMError(BaseException):
pass
class RPM(object):
def __init__(self, rpm):
''' rpm - StringIO.StringIO | file
'''
        if hasattr(rpm, 'read'):  # if it walks like a duck..
self.rpmfile = rpm
else:
raise ValueError('invalid initialization: '
'StringIO or file expected received %s'
% (type(rpm),))
self.binary = None
self.source = None
self.__entries = []
self.__headers = []
self.__readlead()
offset = self.__read_sigheader()
self.__readheaders(offset)
def __readlead(self):
''' reads the rpm lead section
struct rpmlead {
unsigned char magic[4];
unsigned char major, minor;
short type;
short archnum;
char name[66];
short osnum;
short signature_type;
char reserved[16];
} ;
'''
lead_fmt = '!4sBBhh66shh16s'
data = self.rpmfile.read(96)
value = struct.unpack(lead_fmt, data)
magic_num = value[0]
ptype = value[3]
if magic_num != rpmdefs.RPM_LEAD_MAGIC_NUMBER:
raise RPMError('wrong magic number this is not a RPM file')
if ptype == 1:
self.binary = False
self.source = True
elif ptype == 0:
self.binary = True
self.source = False
else:
raise RPMError('wrong package type this is not a RPM file')
def __read_sigheader(self):
''' read signature header
ATN: this will not return any usefull information
besides the file offset
'''
start = find_magic_number(HEADER_MAGIC_NUMBER, self.rpmfile)
if not start:
raise RPMError('invalid RPM file, signature header not found')
# return the offsite after the magic number
return start + 3
def __readheader(self, header):
''' reads the header-header section
        [3bytes][1byte][4b
thedrow/cyrapidjson | tests/test_benchmarks.py | Python | bsd-3-clause | 7,481 | 0.000544 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import random
import sys
import time
import pytest
try:
import yajl
except ImportError:
yajl = None
try:
import simplejson
except ImportError:
simplejson = None
try:
import json
except ImportError:
json = None
try:
import rapidjson
except ImportError:
rapidjson = None
try:
import ujson
except ImportError:
ujson = None
default_data = {
'words': """
Lorem ipsum dolor sit amet, consectetur adipiscing
elit. Mauris adipiscing adipiscing placerat.
Vestibulum augue augue,
pellentesque quis sollicitudin id, adipiscing.
""",
'list': list(range(200)),
'dict': dict((str(i), 'a') for i in list(range(200))),
'int': 100100100,
'float': 100999.123456
}
user = {
"userId": 3381293,
"age": 213,
"username": "johndoe",
"fullname": u"John Doe the Second",
"isAuthorized": True,
"liked": 31231.31231202,
"approval": 31.1471,
"jobs": [1, 2],
"currJob": None
}
friends = [user, user, user, user, user, user, user, user]
def time_func(func, data, iterations):
start = time.time()
while iterations:
iterations -= 1
func(data)
return time.time() - start
def run_client_test(
name, serialize, deserialize, iterations=100 * 1000, data=default_data
):
squashed_data = serialize(data)
serialize_profile = time_func(serialize, data, iterations)
deserialize_profile = time_func(deserialize, squashed_data, iterations)
return serialize_profile, deserialize_profile
contenders = []
if yajl:
contenders.append(('yajl', yajl.Encoder().encode, yajl.Decoder().decode))
if simplejson:
contenders.append(('simplejson', simplejson.dumps, simplejson.loads))
if json:
contenders.append(('stdlib json', json.dumps, json.loads))
if rapidjson:
contenders.append(
('rapidjson', rapidjson.dumps, rapidjson.loads)
)
if ujson:
contenders.append(
('ujson', ujson.dumps, ujson.loads)
)
doubles = []
unicode_strings = []
strings = []
booleans = []
list_dicts = []
dict_lists = {}
medium_complex = [
[user, friends], [user, friends], [user, friends],
[user, friends], [user, friends], [user, friends]
]
for x in range(256):
doubles.append(sys.maxsize * random.random())
unicode_strings.append(
"نظام الحكم سلطاني وراثي في الذكور من ذرية السيد تركي بن سعيد بن سلطان ويشترط فيمن يختار لولاية الحكم من بينهم ان يكون مسلما رشيدا عاقلا ًوابنا شرعيا لابوين عمانيين ")
strings.append("A pretty long string which is in a list")
booleans.append(True)
for y in range(100):
arrays = []
list_dicts.append({str(random.random() * 20): int(random.random() * 1000000)})
for x in range(100):
arrays.append({str(random.random() * 20): int(random.random() * 1000000)})
dict_lists[str(random.random() * 20)] = arrays
@pytest.mark.parametrize('name,serialize,deserialize', contenders)
def test_json_serialization(name, serialize, deserialize, benchmark):
ser_data, des_data = benchmark(run_client_test, name, serialize, deserialize)
msg = "\n%-11s serialize: %0.3f deserialize: %0.3f total: %0.3f" % (
name, ser_data, des_data, ser_data + des_data
)
print(msg)
@pytest.mark.parametrize('name,serialize,deserialize', contenders)
def test_json_unicode_strings(name, serialize, deserialize, benchmark):
print("\nArray with 256 unicode strings:")
ser_data, des_data = benchmark(run_client_test,
name, serialize, deserialize,
data=unicode_strings,
iterations=5000,
)
msg = "%-11s serialize: %0.3f deserialize: %0.3f total: %0.3f" % (
name, ser_data, des_data, ser_data + des_data
)
print(msg)
@pytest.mark.parametrize('name,serialize,deserialize', contenders)
def test_json_ascii_strings(name, serialize, deserialize, benchmark):
print("\nArray with 256 ascii strings:")
ser_data, des_data = benchmark(run_client_test,
name, serialize, deserialize,
data=strings,
)
msg = "%-11s serialize: %0.3f deserialize: %0.3f total: %0.3f" % (
name, ser_data, des_data, ser_data + des_data
)
print(msg)
@pytest.mark.parametrize('name,serialize,deserialize', contenders)
def test_json_booleans(name, serialize, deserialize, benchmark):
print("\nArray with 256 True's:")
ser_data, des_data = benchmark(run_client_test,
name, serialize, deserialize,
data=booleans,
)
msg = "%-11s serialize: %0.3f deserialize: %0.3f total: %0.3f" % (
name, ser_data, des_data, ser_data + des_data
)
print(msg)
@pytest.mark.parametrize('name,serialize,deserialize', contenders)
def test_json_list_of_dictionaries(name, serialize, deserialize, benchmark):
print("\nArray of 100 dictionaries:")
ser_data, des_data = benchmark(run_client_test,
name, serialize, deserialize,
data=list_dicts,
iterations=5,
)
msg = "%-11s serialize: %0.3f deserialize: %0.3f total: %0.3f" % (
name, ser_data, des_data, ser_data + des_data
)
print(msg)
@pytest.mark.parametrize('name,serialize,deserialize', contenders)
def test_json_dictionary_of_lists(name, serialize, deserialize, benchmark):
print("\nDictionary of 100 Arrays:")
ser_data, des_data = benchmark(run_client_test,
name, serialize, deserialize,
data=dict_lists,
iterations=5,
)
msg = "%-11s serialize: %0.3f deserialize: %0.3f total: %0.3f" % (
name, ser_data, des_data, ser_data + des_data
)
print(msg)
@pytest.mark.parametrize('name,serialize,deserialize', contenders)
def test_json_medium_complex_objects(name, serialize, deserialize, benchmark):
print("\n256 Medium Complex objects:")
ser_data, des_data = benchmark(run_client_test,
name, serialize, deserialize,
data=medium_complex,
iterations=50000,
)
msg = "%-11s serialize: %0.3f deserialize: %0.3f total: %0.3f" % (
name, ser_data, des_data, ser_data + des_data
)
print(msg)
def test_double_performance_float_precision(benchmark):
print("\n | Array with 256 doubles:")
name = 'rapidjson (precise)'
serialize = rapidjson.dumps
deserialize = rapidjson.loads
    ser_data, des_data = benchmark(run_client_test,
name, serialize, deserialize,
data=doubles,
iterations=50000,
)
msg = "%-11s serialize: %0.3f deserialize: %0.3f total: %0.3f" % (
name, ser_data, des_data, ser_data + des_data
)
print(msg)
vhazali/cs5331 | assignment3/crawler/items.py | Python | mit | 747 | 0.002677 | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class URLItem(scrapy.Item):
url = scrapy.Field()
protocol = scrapy.Field()
domain = scrapy.Field()
    path = scrapy.Field()
page = scrapy.Field()
get_params = scrapy.Field()
class FormItem(scrapy.Item):
url = scrapy.Field()
id_attr = scrapy.Field()
# complete = scrapy.Field()
# name = scrapy.Field()
class InputItem(scrapy.Item):
url = scrapy.Field()
form_id = scrapy.Field()
complete = scrapy.Field()
type_attr = scrapy.Field()
# id_attr = scrapy.Field()
# name = scrapy.Field()
# placeholder = scrapy.Field()
majetideepak/arrow | dev/archery/archery/utils/command.py | Python | apache-2.0 | 2,217 | 0 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import shutil
import subprocess
from .logger import logger, ctx
def find_exec(executable):
exec_exists = os.path.exists(executable)
return executable if exec_exists else shutil.which(executable)
# Decorator running a command and returning stdout
class capture_stdout:
def __init__(self, strip=False):
self.strip = strip
def __call__(self, f):
def strip_it(x):
return x.strip() if self.strip else x
def wrapper(*argv, **kwargs):
# Ensure stdout is captured
kwargs["stdout"] = subprocess.PIPE
return strip_it(f(*argv, **kwargs).stdout)
return wrapper
class Command:
""" A runnable command.
Class inheriting from the Command class must provide the bin
property/attribute.
"""
def run(self, *argv, **kwargs):
assert(hasattr(self, "bin"))
invocation = [find_exec(self.bin)]
invocation.extend(argv)
for key in ["stdout", "stderr"]:
# Preserve caller intention, otherwise silence
            if key not in kwargs and ctx.quiet:
kwargs[key] = subprocess.PIPE
# Prefer safe by default
if "check" not in kwargs:
kwargs["check"] = True
logger.debug(f"Executing `{invocation}`")
return subprocess.run(invocation, **kwargs)
def __call__(self, *argv, **kwargs):
        self.run(*argv, **kwargs)
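# Minimal sketch (assumes `echo` is on PATH; the Echo class is invented):
# any subclass that defines `bin` becomes runnable, and capture_stdout can
# wrap a helper so callers get the captured output instead of a
# CompletedProcess.
if __name__ == "__main__":
    class Echo(Command):
        bin = "echo"
    @capture_stdout(strip=True)
    def greet(cmd, *words):
        return cmd.run(*words)
    assert greet(Echo(), "hello") == b"hello"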
verdverm/pypge | pypge/benchmarks/yeast.py | Python | mit | 719 | 0.037552 | lol = []
vvs = []
with open("yeast2000.txt") as the_file:
first = True
idx = 0
cnt = 0
lcnt = 0
var = []
for line in the_file:
ll = [item.strip() for item in line.split()]
lcnt += 1
if first:
            lol.append(ll[:len(ll)-2])
first = False
continue
var.append(ll)
cnt += 1
if cnt == 200:
cnt = 0
vv = [item for sublist in var for item in sublist]
            vvs.append(vv)
var = []
print("flattening", lol[0][idx], idx, len(vv), lcnt)
if len(vvs) == 8:
break
idx += 1
for i in range(len(vvs[0])):
ll = [float(vvs[j][i]) for j in range(len(vvs))]
lol.append(ll)
import json
str_data = json.dumps(lol, indent=2)
with open('yeast.json', 'w') as the_file:
the_file.write(str_data)
chrisrossx/DotStar_Emulator | DotStar_Emulator/emulator/init/manage.py | Python | mit | 245 | 0 | """
DotStar_Emulator
config.py in current working directory will be automatically read and loaded.
Author: Christopher Ross
License: MIT Something Rather
"""
from DotStar_Emulator.manage import manage
if __name__ == "__main__":
    manage()
butterworth1492/Visualizing-Cavity-Viruses | scurve/test/test_progress.py | Python | bsd-2-clause | 748 | 0.005348 | import scurve.progress as progress
import StringIO
class TestInplace:
def test_basic(self):
s = StringIO.StringIO()
c = progress.Inplace(stream=s)
assert s.getvalue() == ''
c.tick(10)
assert s.getvalue() == '\r10'
c.tick(10000)
assert s.getvalue() == '\r10\r10000'
c.inject("foo")
c.clear()
def test_nostream(self):
c = progress.Inplace(stream=None)
c.tick(10)
c.clear()
class TestProgress:
def test_basic(self):
s = StringIO.StringIO()
        p = progress.Progress(100, stream=s)
p.tick(25)
        assert p.prev == 0.25
p.tick(50)
assert p.prev == 0.5
p.full()
assert p.prev == 1.0
openstack/networking-plumgrid | networking_plumgrid/neutron/plugins/drivers/fake_plumlib.py | Python | apache-2.0 | 5,565 | 0 | # Copyright 2015 PLUMgrid, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from networking_plumgrid._i18n import _LI
from neutron.extensions import providernet as provider
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
class Plumlib(object):
"""Class PLUMgrid Fake Library.
This library is a by-pass implementation for the PLUMgrid Library.
This class is being used by the unit test integration in Neutron.
"""
def __init__(self):
LOG.info(_LI('Python PLUMgrid Fake Library Started '))
pass
def director_conn(self, director_plumgrid, director_port, timeout,
                      director_admin, director_password):
LOG.info(_LI('Fake Director: %s'),
director_plumgrid + ':' + str(director_port))
pass
def create_network(self, tenant_id, net_db, network, **kwargs):
net_db["network"] = {}
for key in (provider.NETWORK_TYPE,
provider.PHYSICAL_NETWORK,
provider.SEGMENTATION_ID):
            net_db["network"][key] = network["network"][key]
return net_db
def update_network(self, tenant_id, net_id, network, orig_net_db):
pass
def delete_network(self, net_db, net_id):
pass
def create_subnet(self, sub_db, net_db, ipnet):
pass
def update_subnet(self, orig_sub_db, new_sub_db, ipnet, net_db):
pass
def delete_subnet(self, tenant_id, net_db, net_id, sub_db):
pass
def create_port(self, port_db, router_db, subnet_db):
pass
def update_port(self, port_db, router_db, subnet_db):
pass
def delete_port(self, port_db, router_db):
pass
def create_router(self, tenant_id, router_db):
pass
def update_router(self, router_db, router_id):
pass
def delete_router(self, tenant_id, router_id):
pass
def add_router_interface(self, tenant_id, router_id, port_db, ipnet,
ip_version):
pass
def remove_router_interface(self, tenant_id, net_id, router_id):
pass
def create_floatingip(self, floating_ip):
pass
def update_floatingip(self, floating_ip_orig, floating_ip, id):
pass
def delete_floatingip(self, floating_ip_orig, id):
pass
def disassociate_floatingips(self, fip, port_id):
return dict((key, fip[key]) for key in ("id", "floating_network_id",
"floating_ip_address"))
def create_security_group(self, sg_db):
pass
def update_security_group(self, sg_db):
pass
def delete_security_group(self, sg_db):
pass
def create_security_group_rule(self, sg_rule_db):
pass
def create_security_group_rule_bulk(self, sg_rule_db):
pass
def delete_security_group_rule(self, sg_rule_db):
pass
def create_l2_gateway(self, director_plumgrid,
director_admin,
director_password,
gateway_info,
vendor_type,
sw_username,
sw_password):
pass
def delete_l2_gateway(self, gw_info):
pass
def add_l2_gateway_connection(self, gw_conn_info):
pass
def delete_l2_gateway_connection(self, gw_conn_info):
pass
def create_physical_attachment_point(self, physical_attachment_point):
pass
def update_physical_attachment_point(self, physical_attachment_point):
pass
def delete_physical_attachment_point(self, pap_id):
pass
def create_transit_domain(self, transit_domain, db):
pass
def update_transit_domain(self, transit_domain, db):
pass
def delete_transit_domain(self, tvd_id):
pass
def get_available_interface(self):
return "host1", "ifc1"
def create_policy_tag(self, tenant_id, policy_tag_db):
pass
def delete_policy_tag(self, tenant_id, ptag_id):
pass
def create_endpoint_group(self, tenant_id, ep_grp, ptag_db):
pass
def delete_endpoint_group(self, tenant_id, epg_id, ptag_db):
pass
def update_endpoint_group(self, tenant_id, epg_id, epg_db, ptag_db):
pass
def create_policy_service(self, tenant_id, ps_db, ps_mac_list):
pass
def delete_policy_service(self, tenant_id, ps_id):
pass
def update_policy_service(self, tenant_id, ps_id, ps_db, ps_mac_list):
pass
def create_policy_rule(self, tenant_id, pr_db):
pass
def delete_policy_rule(self, tenant_id, pr_id, remote_target=None):
pass
def create_endpoint(self, tenant_id, ep_db, port_mac=None):
pass
def delete_endpoint(self, tenant_id, ep_id, ep_db, port_mac=None):
pass
def update_endpoint(self, tenant_id, ep_id, ep_db, port_mac=None):
pass
def get_ext_links(self, tenant_id):
pass
userzimmermann/zetup.py | setup.py | Python | lgpl-3.0 | 2,147 | 0.000466 | from __future__ import print_function
import sys
import os
# from setuptools import Distribution
from pkg_resources import get_distribution, working_set, VersionConflict
def samefile(path, other):
"""
Workaround for missing ``os.path.samefile`` in Windows Python 2.7.
"""
return os.path.normcase(os.path.normpath(os.path.realpath(path))) \
== os.path.normcase(os.path.normpath(os.path.realpath(other)))
sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
try:
import zetup
except VersionConflict:
egg_info = 'zetup.egg-info'
dist = get_distribution('zetup')
if samefile(
dist.location, os.path.dirname(os.path.realpath(__file__))
) and os.path.exists(egg_info):
print("zetup: Removing possibly outdated %s/" % egg_info,
# don't pollute stdout
file=sys.stderr)
for fname in os.listdir(egg_info):
os.remove(os.path.join(egg_info, fname))
os.rmdir(egg_info)
# when run via pip, the egg-info is still referenced by setuptools,
# which would try to read the contents
for keys in working_set.entry_keys.values():
if 'zetup' in keys:
keys.remove('zetup')
del working_set.by_key['zetup']
from zetup import Zetup, DistributionNotFound, VersionConflict
try:
from zetup.commands import make, pytest, tox, conda
except (ImportError, DistributionNotFound, VersionConflict):
# ==> no zetup commands available
# standard setup commands work anyway
pass
# setup_req = 'setuptools >= 15.0'
# try:
# get_distribution(setup_req)
# except VersionConflict:
# for mod in ['setuptools', 'pkg_resources']:
# for name, _ in list(sys.modules.items()):
# if name == mod or name.startswith(mod + '.'):
#                 del sys.modules[name]
# sys.path.insert(0, Distribution().fetch_build_egg(setup_req))
zfg = Zetup()
zetup.requires.Requirements('setuptools >= 36.2', zfg=zfg).check()
setup = zfg.setup
setup['package_data']['zetup.commands.make'] = [
'templates/*.jinja',
'templates/package/*.jinja',
]
setup()
starbops/OpenADM | core/src/floodlight_modules/uipusher.py | Python | gpl-2.0 | 6,838 | 0.041533 | import logging
from pymongo import MongoClient
import json
from bson import json_util
import time
import datetime
logger = logging.getLogger(__name__)
class UIPusher:
def __init__(self,core,parm):
# register event handler
core.registerEventHandler("controlleradapter", self.controllerHandler)
        # register websocket api
core.registerURLApi("info/topology", self.topologyHandler)
core.registerURLApi("stat", self.statisticHandler)
# save core for ipc use
self.core = core
self.intervalList=['hourly','daily','weekly','monthly','annually']
self.intervalList[0] = 'hourly'+str(datetime.datetime.today().strftime("%Y_%m_%d"))
self.enable = True if parm['enable'] == "true" or parm['enable'] == "True" else False
self.limit = int(parm['queryinterval'])
self.count = 0
self.prevTime = time.time()
self.cache = {}
self.diff = {}
self.tmpcache = {}
if self.enable:
try:
self.client = MongoClient(parm['dbip'],int(parm['dbport']))
self.db = self.client[parm['db']]
self.db.authenticate(parm['user'],parm['password'])
except:
print "database connection failed"
def topologyHandler(self,request):
# return JSONP format
result = self.core.invokeIPC("periodicInquiry")
return "omniui(%s);" % result
def controllerHandler(self,event):
if self.enable:
#compute timestamp
now = time.time()
#12:35:39 -> 12:30:00
reduntTime = int(datetime.datetime.fromtimestamp(now).strftime('%M'))%10*60 + int(datetime.datetime.fromtimestamp(now).strftime('%S'))
data = json.loads(event)
self.count = self.count + 1
if int(now-reduntTime) != self.prevTime:
self.writeToDB()
for node in data['nodes']:
for flow in node['flows']:
key=flow.copy()
key.pop("counterByte",None)
key.pop("counterPacket",None)
key.pop("duration",None)
for dic in key['actions']:
if dic['type'] == "STRIP_VLAN":
key['actions'] = "".join(["{0}".format(dic['type'])])
else:
key['actions'] = "".join(["{0}:{1}".format(dic['type'],dic['value'])])
key['dpid'] = str(node['dpid'])
key['date'] = int(now - reduntTime)
if isinstance(key['actions'],list):
del key['actions']
hashkey = frozenset(key.items())
if hashkey in self.cache:
if self.diff[hashkey][2] > flow['duration']:
tmpCB = flow['counterByte']
tmpCP = flow['counterPacket']
else:
tmpCB = flow['counterByte'] - self.diff[hashkey][0]
tmpCP = flow['counterPacket'] - self.diff[hashkey][1]
self.cache[hashkey][0] += tmpCB
self.cache[hashkey][1] += tmpCP
self.cache[hashkey][2] = key
self.cache[hashkey][3] = flow['duration']
self.diff[hashkey][0] = flow['counterByte']
self.diff[hashkey][1] = flow['counterPacket']
self.diff[hashkey][2] = flow['duration']
else:
self.cache[hashkey] = [0,0,key,flow['duration']]
self.diff[hashkey] = [flow['counterByte'],flow['counterPacket'],flow['duration']]
self.prevTime = int(now-reduntTime)
if self.count >= self.limit and len(self.cache) > 0:
self.writeToDB()
self.event = event
def writeToDB(self):
self.count = 0
#access database
self.tmpcache = self.cache
self.cache={}
key={}
if len(self.tmpcache)==0:
return
##update db name
prevTime = datetime.datetime.fromtimestamp(self.prevTime).strftime("%Y_%m_%d")
self.intervalList[0] = 'hourly'+str(prevTime)
print self.intervalList[0]
for hashkey in self.tmpcache:
key = self.tmpcache[hashkey][2]
exist = self.db[self.intervalList[0]].find_one(key)
if exist is not None:
key['_id'] = exist['_id']
key['counterByte'] = self.tmpcache[hashkey][0] + exist['counterByte']
key['counterPacket'] = self.tmpcache[hashkey][1] + exist['counterPacket']
else:
key['counterByte'] = self.tmpcache[hashkey][0]
key['counterPacket'] = self.tmpcache[hashkey][1]
key['duration'] = self.tmpcache[hashkey][3]
self.db[self.intervalList[0]].save(key)
    def statisticHandler(self, data):  # the body below reads this argument as `data`; `date` is a loop variable
if self.enable == False:
return "Time\t1\n"
#declare variable
multiGroup = {}
output = "Time"
count = 1
# for hourly query
if int(data['interval']) ==0:
fromTime = datetime.datetime.strptime(data['from'],"%Y-%m-%d")
toTime = datetime.datetime.strptime(data['to'],"%Y-%m-%d")
oneday = datetime.timedelta(days=1)
#1/26~1/27 means 1/26 00:00 to 1/27 23:59, so plus one day to toTime
toTime = toTime + oneday
keys=[]
for pattern in data['pattern']:
output+="\t"+str(count)
count = count +1
key={}
for field in pattern:
if pattern[field] !='':
key[field] = pattern[field]
currentTime = fromTime
group= {}
while currentTime != toTime:
tableName = "hourly"+currentTime.strftime("%Y_%m_%d")
currentTime = currentTime + oneday
for entry in self.db[tableName].find(key):
if entry['date'] in group:
group[entry['date']] = group[entry['date']] + entry["counterByte"]
else:
group[entry['date']] = entry["counterByte"]
for date in group:
if date in multiGroup:
multiGroup[date].append([group[date],count-1])
else:
multiGroup[date]=[[group[date],count-1]]
# for weekly,monthly...
else:
#translate datetime to timestamp
fromTime = int(time.mktime(time.strptime(data['from'],'%Y-%m-%d')))
#1/26~1/27 means 1/26 00:00 to 1/27 23:59, so plus one day to toTime
toTime = int(time.mktime(time.strptime(data['to'],'%Y-%m-%d')))+86400
#use the interval code to obtain collection name
interval = self.intervalList[ int(data['interval'])]
#flow pattern,only match non-empty field
for pattern in data['pattern']:
output+="\t"+str(count)
count = count +1
group= {}
key = {}
for field in pattern:
if pattern[field] !='':
key[field] = pattern[field]
key['date'] = {'$gte':fromTime,'$lt':toTime}
#use date to group data
for entry in self.db[interval].find(key):
if entry['date'] in group:
group[entry['date']] = group[entry['date']] + entry["counterByte"]
else:
group[entry['date']] = entry["counterByte"]
#add group to multiGroup
for date in group:
if date in multiGroup:
multiGroup[date].append([group[date],count-1])
else:
multiGroup[date]=[[group[date],count-1]]
#tsv format
output+="\n"
tmp=""
for date in sorted(multiGroup.iterkeys()):
tmp = datetime.datetime.fromtimestamp(date).strftime('%Y-%m-%d %H:%M')
#insert zero for no-traffic flow
size = count
tmpIndex = 0
for index in range(1,size):
if multiGroup[date][tmpIndex][1] == index:
tmp+=("\t"+str(multiGroup[date][tmpIndex][0]))
tmpIndex+=1
else:
pass
tmp+=("\t0")
if tmpIndex >= len(multiGroup[date]):
tmpIndex = 0
output+=tmp+"\n"
return output
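# Bucketing sketch (illustrative timestamp): controllerHandler rounds each
# sample down to its 10-minute slot, e.g. 12:35:39 is 5 minutes and 39
# seconds past the slot start, so reduntTime = 5*60 + 39 = 339 and the
# bucket key becomes 12:30:00.
if __name__ == '__main__':
    ts = time.mktime(time.strptime('2015-10-21 12:35:39', '%Y-%m-%d %H:%M:%S'))
    redunt = int(datetime.datetime.fromtimestamp(ts).strftime('%M')) % 10 * 60 \
        + int(datetime.datetime.fromtimestamp(ts).strftime('%S'))
    print datetime.datetime.fromtimestamp(int(ts - redunt)) # 2015-10-21 12:30:00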
Alberto-Beralix/Beralix | i386-squashfs-root/usr/share/pyshared/oneconf/packagesethandler.py | Python | gpl-3.0 | 6,034 | 0.008452 | # Copyright (C) 2010 Canonical
#
# Authors:
# Didier Roche <didrocks@ubuntu.com>
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; version 3.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import hashlib
import json
import logging
import os
LOG = logging.getLogger(__name__)
from oneconf.hosts import Hosts, HostError
from oneconf.distributor import get_distro
from oneconf.paths import ONECONF_CACHE_DIR, PACKAGE_LIST_PREFIX
class PackageSetHandler(object):
"""
Direct access to database for getting and updating the list
"""
def __init__(self, hosts=None):
self.hosts = hosts
if not hosts:
self.hosts = Hosts()
self.distro = get_distro()
self.last_storage_sync = None
# create cache for storage package list, indexed by hostid
self.package_list = {}
def update(self):
'''update the store with package list'''
hostid = self.hosts.current_host['hostid']
LOG.debug("Updating package list")
newpkg_list = self.distro.compute_local_packagelist()
LOG.debug("Creating the checksum")
checksum = hashlib.sha224(str(newpkg_list)).hexdigest()
LOG.debug("Package list need refresh")
self.package_list[hostid] = {'valid': True, 'package_list': newpkg_list}
with open(os.path.join(self.hosts.get_currenthost_dir(), '%s_%s' % (PACKAGE_LIST_PREFIX, hostid)), 'w') as f:
json.dump(self.package_list[hostid]['package_list'], f)
if self.hosts.current_host['packages_checksum'] != checksum:
self.hosts.current_host['packages_checksum'] = checksum
self.hosts.save_current_host()
LOG.debug("Update done")
    def get_packages(self, hostid=None, hostname=None, only_manual=False):
'''get all installed packages from the storage'''
hostid = self.hosts.get_hostid_from_context(hostid, hostname)
LOG.debug ("Request for package list for %s with only manual packages reduced scope to: %s", hostid, only_manual)
package_list = self._get_installed_packages(hostid)
        if only_manual:
package_list = [package_elem for package_elem in package_list if package_list[package_elem]["auto"] == False]
return package_list
def _get_installed_packages(self, hostid):
'''get installed packages from the storage or cache
Return: uptodate package_list'''
need_reload = False
try:
if self.package_list[hostid]['valid']:
LOG.debug("Hit cache for package list")
package_list = self.package_list[hostid]['package_list']
else:
need_reload = True
except KeyError:
need_reload = True
if need_reload:
self.package_list[hostid] = {'valid': True, 'package_list': self._get_packagelist_from_store(hostid)}
return self.package_list[hostid]['package_list']
def diff(self, distant_hostid=None, distant_hostname=None):
'''get a diff from current package state from another host
This function can be use to make a diff between all packages installed on both computer
, use_cache
Return: (packages_to_install (packages in distant_hostid not in local_hostid),
packages_to_remove (packages in local hostid not in distant_hostid))
'''
distant_hostid = self.hosts.get_hostid_from_context(distant_hostid, distant_hostname)
LOG.debug("Collecting all installed packages on this system")
local_package_list = set(self.get_packages(self.hosts.current_host['hostid'], False))
LOG.debug("Collecting all installed packages on the other system")
distant_package_list = set(self.get_packages(distant_hostid, False))
LOG.debug("Comparing")
packages_to_install = [x for x in distant_package_list if x not in local_package_list]
packages_to_remove = [x for x in local_package_list if x not in distant_package_list]
# for Dbus which doesn't like empty list
if not packages_to_install:
packages_to_install = ''
if not packages_to_remove:
packages_to_remove = ''
return(packages_to_install, packages_to_remove)
def _get_packagelist_from_store(self, hostid):
'''load package list for every computer in cache'''
LOG.debug('get package list from store for hostid: %s' % hostid)
# load current content in cache
try:
with open(os.path.join(self.hosts.get_currenthost_dir(), '%s_%s' % (PACKAGE_LIST_PREFIX, hostid)), 'r') as f:
# can be none in corrupted null file
pkg_list = json.load(f)
except (IOError, ValueError):
LOG.warning ("no valid package list stored for hostid: %s" % hostid)
pkg_list = None
if pkg_list is None:
pkg_list = {}
# there is no way that no package is installed in current host
# At least, there is oneconf ;) Ask for refresh
if hostid == self.hosts.current_host['hostid']:
LOG.debug ("Processing first update for current host")
self.update()
pkg_list = self.package_list[hostid]['package_list']
return pkg_list
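# Usage sketch (hostname invented; assumes an initialized OneConf store):
# handler = PackageSetHandler()
# handler.update()  # record this host's package list
# to_install, to_remove = handler.diff(distant_hostname='my-laptop')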
network-box/uptrack | uptrack/resources.py | Python | agpl-3.0 | 1,492 | 0 | from pyramid.security import ALL_PERMISSIONS, Allow, Authenticated
from .models import DBSession, Distro, Package, Upstream, User
from uptrack.schemas import DistroSchema, UpstreamSchema, UserSchema
resources = {}
class RootFactory(object):
__name__ = 'RootFactory'
__parent__ = None
__acl__ = [(Allow, Authenticated, ALL_PERMISSIONS)]
def __init__(self, request):
pass
def __getitem__(self, name):
r = resources[name]()
r.__parent__ = self
r.__name__ = name
return r
class BaseResource(object):
__name__ = None
__parent__ = None
def __getitem__(self, id):
o = DBSession.query(self.__model__).get(id)
if o:
o.__parent__ = self
o.__name__ = id
return o
else:
raise KeyError(id)
class DistroResource(BaseResource):
__model__ = Distro
__schema__ = DistroSchema
class PackageResource(BaseResource):
__model__ = Package
__schema__ = None
class UpstreamResource(BaseResource):
__model__ = Upstream
__schema__ = UpstreamSchema
class UserResource(BaseResource):
__model__ = User
__schema__ = UserSchema
def get_root(request):
global resources
resources.update({"distros": DistroResource,
"packages": PackageResource,
"upstreams": UpstreamResource,
"users": UserResource,
})
return RootFactory(request)
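# Traversal sketch (Pyramid-style; the id is invented): the root maps
# collection names to resources, and each resource looks records up by
# primary key, raising KeyError when nothing matches.
# root = get_root(request)
# distro = root['distros']['42']  # Distro instance with __name__ == '42'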
cloudzfy/euler | src/11.py | Python | mit | 3,512 | 0.004841 | # In the 20x20 grid below, four numbers along a diagonal line have been marked in red.
# 08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08
# 49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00
# 81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65
# 52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91
# 22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80
# 24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50
# 32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70
# 67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21
# 24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72
# 21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95
# 78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92
# 16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57
# 86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58
# 19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40
# 04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66
# 88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69
# 04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36
# 20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16
# 20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54
# 01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48
# The product of these numbers is 26 x 63 x 78 x 14 = 1788696.
# What is the greatest product of four adjacent numbers in the same direction (up, down,
# left, right, or diagonally) in the 20x20 grid?
text = '08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08 \
49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00 \
81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65 \
52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91 \
22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80 \
24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50 \
32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70 \
67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21 \
24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72 \
21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95 \
78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92 \
16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57 \
86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58 \
19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40 \
04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66 \
88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69 \
04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36 \
20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16 \
20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54 \
01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48'
nums = [int(x) for x in text.split(' ')]
ans = 0
size = 20
for i in range(0, size):
for j in range(3, size):
tmp = nums[j - 3 + i * size] * nums[j - 2 + i * size] \
* nums[j - 1 + i * size] * nums[j + i * size]
ans = max(ans, tmp)
tmp = nums[i + (j - 3) * size] * nums[i + (j - 2) * size] \
* nums[i + (j - 1) * size] * nums[i + j * size]
ans = max(ans, tmp)
for i in range(3, size):
for j in range(3, size):
tmp = nums[j - 3 + (i - 3) * size] * nums[j - 2 + (i - 2) * size] \
* nums[j - 1 + (i - 1) * size] * nums[j + i * size]
ans = max(ans, tmp)
tmp = nums[j + (i - 3) * size] * nums[j - 1 + (i - 2) * size] \
* nums[j - 2 + (i - 1) * size] * nums[j - 3 + i * size]
ans = max(ans, tmp)
print ans
team-ferret/pip-in-toto | pip/toto/ssl_crypto/formats.py | Python | mit | 25,848 | 0.013966 | #!/usr/bin/env python
"""
<Program Name>
formats.py
<Author>
Geremy Condra
Vladimir Diaz <vladimir.v.diaz@gmail.com>
<Started>
Refactored April 30, 2012. -vladimir.v.diaz
<Copyright>
See LICENSE for licensing information.
<Purpose>
A central location for all format-related checking of TUF objects.
Note: 'formats.py' depends heavily on 'schema.py', so the 'schema.py'
module should be read and understood before tackling this module.
'formats.py' can be broken down into three sections. (1) Schemas and object
matching. (2) Classes that represent Role Metadata and help produce correctly
formatted files. (3) Functions that help produce or verify TUF objects.
The first section deals with schemas and object matching based on format.
There are two ways of checking the format of objects. The first method
raises a 'ssl_commons__exceptions.FormatError' exception if the match fails and the other
returns a Boolean result.
ssl_crypto.formats.<SCHEMA>.check_match(object)
ssl_crypto.formats.<SCHEMA>.matches(object)
Example:
rsa_key = {'keytype': 'rsa'
'keyid': 34892fc465ac76bc3232fab
'keyval': {'public': 'public_key',
'private': 'private_key'}
    ssl_crypto.formats.RSAKEY_SCHEMA.check_match(rsa_key)
    ssl_crypto.formats.RSAKEY_SCHEMA.matches(rsa_key)
In this example, if a dict key or dict value is missing or incorrect,
the match fails. There are numerous variations of object checking
provided by 'formats.py' and 'schema.py'.
The second section deals with the role metadata classes. There are
multiple top-level roles, each with differing metadata formats.
Example:
root_object = ssl_crypto.formats.RootFile.from_metadata(root_metadata_file)
targets_metadata = ssl_crypto.formats.TargetsFile.make_metadata(...)
The input and output of these classes are checked against their respective
schema to ensure correctly formatted metadata.
The last section contains miscellaneous functions related to the format of
TUF objects.
Example:
signable_object = make_signable(unsigned_object)
"""
# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import binascii
import calendar
import re
import string
import datetime
import time
import six
from ..ssl_commons import schema as ssl_commons__schema
from ..ssl_commons import exceptions as ssl_commons__exceptions
# Note that in the schema definitions below, the 'ssl_commons__schema.Object' types allow
# additional keys which are not defined. Thus, any additions to them will be
# easily backwards compatible with clients that are already deployed.
# A datetime in 'YYYY-MM-DDTHH:MM:SSZ' ISO 8601 format. The "Z" zone designator
# for the zero UTC offset is always used (i.e., a numerical offset is not
# supported.) Example: '2015-10-21T13:20:00Z'. Note: This is a simple format
# check, and an ISO8601 string should be fully verified when it is parsed.
ISO8601_DATETIME_SCHEMA = ssl_commons__schema.RegularExpression(r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z')
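# Illustrative usage (a sketch, not in the original source): per the module
# docstring, schemas expose matches() for Boolean tests and check_match(),
# which raises ssl_commons__exceptions.FormatError on mismatch.
#   ISO8601_DATETIME_SCHEMA.matches('2015-10-21T13:20:00Z')  # -> True
#   ISO8601_DATETIME_SCHEMA.matches('2015-10-21 13:20:00')   # -> False (no 'T'/'Z')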
# A Unix/POSIX time format. An integer representing the number of seconds
# since the epoch (January 1, 1970.) Metadata uses this format for the
# 'expires' field. Set 'hi' to the upper timestamp limit (year 2038), the max
# value of an int.
UNIX_TIMESTAMP_SCHEMA = ssl_commons__schema.Integer(lo=0, hi=2147483647)
# A hexadecimal value in '23432df87ab..' format.
HASH_SCHEMA = ssl_commons__schema.RegularExpression(r'[a-fA-F0-9]+')
# A dict in {'sha256': '23432df87ab..', 'sha512': '34324abc34df..', ...} format.
HASHDICT_SCHEMA = ssl_commons__schema.DictOf(
key_schema = ssl_commons__schema.AnyString(),
value_schema = HASH_SCHEMA)
# A hexadecimal value in '23432df87ab..' format.
HEX_SCHEMA = ssl_commons__schema.RegularExpression(r'[a-fA-F0-9]+')
# A key identifier (e.g., a | hexadecimal value identifying an RSA key).
KEYID_SCHEMA = HASH_SCHEM | A
# A list of KEYID_ssl_commons__schema.
KEYIDS_SCHEMA = ssl_commons__schema.ListOf(KEYID_SCHEMA)
# The method used for a generated signature (e.g., 'RSASSA-PSS').
SIG_METHOD_SCHEMA = ssl_commons__schema.AnyString()
# A relative file path (e.g., 'metadata/root/').
RELPATH_SCHEMA = ssl_commons__schema.AnyString()
RELPATHS_SCHEMA = ssl_commons__schema.ListOf(RELPATH_SCHEMA)
# An absolute path.
PATH_SCHEMA = ssl_commons__schema.AnyString()
PATHS_SCHEMA = ssl_commons__schema.ListOf(PATH_SCHEMA)
# Uniform Resource Locator identifier (e.g., 'https://www.updateframework.com/').
URL_SCHEMA = ssl_commons__schema.AnyString()
# A dictionary holding version information.
VERSION_SCHEMA = ssl_commons__schema.Object(
object_name = 'VERSION_SCHEMA',
major = ssl_commons__schema.Integer(lo=0),
minor = ssl_commons__schema.Integer(lo=0),
fix = ssl_commons__schema.Integer(lo=0))
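# Example instance (illustrative only): {'major': 1, 'minor': 2, 'fix': 0}
# matches VERSION_SCHEMA; a negative component would fail check_match().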
# An integer representing the numbered version of a metadata file.
# Must be 1, or greater.
METADATAVERSION_SCHEMA = ssl_commons__schema.Integer(lo=0)
# An integer representing length. Must be 0, or greater.
LENGTH_SCHEMA = ssl_commons__schema.Integer(lo=0)
# An integer representing logger levels, such as logging.CRITICAL (=50).
# Must be between 0 and 50.
LOGLEVEL_SCHEMA = ssl_commons__schema.Integer(lo=0, hi=50)
# A string representing a named object.
NAME_SCHEMA = ssl_commons__schema.AnyString()
NAMES_SCHEMA = ssl_commons__schema.ListOf(NAME_SCHEMA)
# A byte string representing data.
DATA_SCHEMA = ssl_commons__schema.AnyBytes()
# Supported hash algorithms.
HASHALGORITHMS_SCHEMA = ssl_commons__schema.ListOf(ssl_commons__schema.OneOf(
[ssl_commons__schema.String('md5'), ssl_commons__schema.String('sha1'),
ssl_commons__schema.String('sha224'), ssl_commons__schema.String('sha256'),
ssl_commons__schema.String('sha384'), ssl_commons__schema.String('sha512')]))
# The contents of an encrypted TUF key. Encrypted TUF keys are saved to files
# in this format.
ENCRYPTEDKEY_SCHEMA = ssl_commons__schema.AnyBytes()
# A value that is either True or False, on or off, etc.
BOOLEAN_SCHEMA = ssl_commons__schema.Boolean()
# A role's threshold value (i.e., the minimum number
# of signatures required to sign a metadata file).
# Must be 1 and greater.
THRESHOLD_SCHEMA = ssl_commons__schema.Integer(lo=1)
# A string representing a role's name.
ROLENAME_SCHEMA = ssl_commons__schema.AnyString()
# The minimum number of bits for an RSA key. Must be 2048 bits, or greater
# (recommended by TUF). Crypto modules like 'pycrypto_keys.py' may set further
# restrictions on keys (e.g., the number of bits must be a multiple of 256).
# Recommended RSA key sizes:
# http://www.emc.com/emc-plus/rsa-labs/historical/twirl-and-rsa-key-size.htm#table1
RSAKEYBITS_SCHEMA = ssl_commons__schema.Integer(lo=2048)
# The number of hashed bins, or the number of delegated roles. See
# delegate_hashed_bins() in 'repository_tool.py' for an example. Note:
# Tools may require further restrictions on the number of bins, such
# as requiring them to be a power of 2.
NUMBINS_SCHEMA = ssl_commons__schema.Integer(lo=1)
# A PyCrypto signature.
PYCRYPTOSIGNATURE_SCHEMA = ssl_commons__schema.AnyBytes()
# A pyca-cryptography signature.
PYCACRYPTOSIGNATURE_SCHEMA = ssl_commons__schema.AnyBytes()
# An RSA key in PEM format.
PEMRSA_SCHEMA = ssl_commons__schema.AnyString()
# A string representing a password.
PASSWORD_SCHEMA = ssl_commons__schema.AnyString()
# A list of passwords.
PASSWORDS_SCHEMA = ssl_commons__schema.ListOf(PASSWORD_SCHEMA)
# The actual values of a key, as opposed to meta data such as a key type and
# key identifier ('rsa', 233df889cb). For RSA keys, the key value is a pair of
# public and private keys in PEM Format stored as strings.
KEYVAL_SCHEMA = ssl_commons__schema.Object(
object_name = 'KEYVAL_SCHEMA',
public = ssl_commons__schema.AnyString(),
private = ssl_commons__schema.Opti |
ad-lebedev/django-todo-rest | todo-project/todo_api/serializers.py | Python | mit | 359 | 0 | # coding=utf-8
from | __future__ import unicode_literals
from django.contrib.auth.models import User
from rest_framework import serializers
__author__ = 'ad'
__date__ = '20/08/16'
class SignInSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('username', 'password')
read_only = ('user | name', 'password')
|
elpaso/django-simplemenu | simplemenu/pages.py | Python | bsd-3-clause | 2,730 | 0.003663 | import copy
import types
from django.core.urlresolvers import reverse
from django.db.models.query import QuerySet
registry = []
def register(*args):
"""
Register urls, views, model instances and QuerySets to be potential
pages for menu items.
Example::
import simplemenu
simplemenu.register(
'package.module.view',
('package.module.view','name'),
FlatPage.objects.all(),
(FlatPage.objects.all(),'attr_containing_name'),
Products.objects.get(pk=1),
)
"""
registry.extend(args)
class PageWrapper(object):
"""
A helper-object to wrap the pages, which might be django models or
strings.
"""
def __init__(self, urlobj_or_str, name=None):
if isinstance(urlobj_or_str, types.StringTypes):
self.urlobj = None
self.urlstr = urlobj_or_str
else:
self.urlobj = urlobj_or_str
self.urlstr = str()
self._name = name
def name(self):
if self._name:
name = self._name
elif self.urlobj:
name = unicode(self.urlobj)
elif "/" in self.urlstr:
name = self.urlstr
else:
name = self.urlstr.rsplit('.', 1)[-1]
name = | name.replace("_", " ").capitalize()
return name
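    # Name derivation sketch (illustrative): with no explicit name, a dotted
    # view path such as 'pages.views.contact_us' yields 'Contact us'; a raw
    # URL like '/about/' and a wrapped object's unicode() are used as-is.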
def url(self):
if self.urlobj:
url = self.urlobj.get_absolute_url()
elif "/" in self.urlstr:
url = self.urlstr
else:
| url = reverse(self.urlstr)
return url
def strkey(self):
"""
Generates somewhat unique string id of the wrappee.
"""
if self.urlobj:
return "%s.%s.pk%s" % (self.urlobj.__module__,
self.urlobj.__class__.__name__,
self.urlobj.pk)
else:
return self.urlstr
def get_registered_pages():
"""
Returns all registered pages wrapped in PageWrapper helper-object
evaluating all QuerySets along the way.
"""
pages = []
for reg in map(copy.deepcopy, registry):
name = None
if isinstance(reg, types.TupleType):
reg, name = reg
if isinstance(reg, QuerySet):
# Name is the given attr if possible elsewise just use unicode(obj)
if not name:
f = lambda obj: PageWrapper(obj, unicode(obj))
else:
f = lambda obj: PageWrapper(obj, getattr(obj, name, unicode(obj)))
# evaluating QuerySet objects by iteration
pages.extend(map(f, reg))
else:
pages.append(PageWrapper(reg, name))
return pages
|
mtils/ems | ems/qt4/location/landmarks/landmarkfetchrequest.py | Python | mit | 4,739 | 0.009074 | '''
Created on 24.10.2011
@author: michi
'''
from PyQt4.QtCore import QObject, QMutexLocker
from landmarkabstractrequest import LandmarkAbstractRequest #@UnresolvedImport
from landmarkfilter import LandmarkFilter #@UnresolvedImport
class LandmarkFetchRequest(LandmarkAbstractRequest):
'''
The QLandmarkFetchRequest class allows a client to asynchronously
request a list of landmarks from a landmark manager.
For a QLandmarkFetchRequest, the resultsAvailable() signal will be emitted when the resultant
landmarks (which may be retrieved by calling landmarks()) are updated, as well as if
the overall operation error (which may be retrieved by calling error()) is updated.
Please see the class documentation for QLandmarkAbstractRequest for more information about
the usage of request classes and ownership semantics.
'''
def __init__(self, manager, parent=None):
'''
Creates a new landmark fetch request object with the given \a manager \a parent.
@param manager: The given manager
@type manager: LandmarkManager
@param parent: A parent Object
@type parent: QObject
'''
try:
super(LandmarkFetchRequest, self).__init__(manager, parent)
except NotImplementedError:
pass
self._filter = LandmarkFilter()
self._sorting = []
self._limit = -1
self._offset = 0
self._landmarks = []
def filter_(self):
'''
Returns the filter which will be used to select the landmarks.
By default, the filter's type will be a QLandmarkFilter::DefaultFilter
and thus match all landmarks.
@rtype: LandmarkFilter
'''
ml = QMutexLocker(self._mutex)
return self._filter
def setFilter(self, filter_):
'''
Sets the \a filter which will be used to select landmarks.
@param filter: The filter
@type filter: LandmarkFilter
'''
ml = QMutexLocker(self._mutex)
self._filter = filter_
def sorting(self):
'''
Returns the sort ordering which is used to sort the result. By default
the sort order list is empty, thus no sorting will take place.
@rtype: list
'''
ml = QMutexLocker(self._mutex)
return self._sorting
d | ef setSort | ing(self, sorting):
'''
Sets the sort ordering of the request to \a sorting. This
function will only have an effect on the results if invoked
prior to calling \l QLandmarkAbstractRequest::start().
@param sorting: The sorting as a list
@type sorting: list
'''
ml = QMutexLocker(self._mutex)
if isinstance(sorting, list):
self._sorting = sorting
else:
self._sorting = [sorting,]
def limit(self):
'''
Returns the maximum number of landmarks to be returned. By default the limit
        is -1 indicating that all landmarks matching the filter should be retrieved.
@rtype: int
'''
ml = QMutexLocker(self._mutex)
return self._limit
def setLimit(self, limit):
'''
Sets the maximum number of landmarks to be returned to \a limit.
A limit of -1 will retrieve all landmarks that match the filter.
(A limit of 0 will retrieve no landmarks.)
@param limit: The limit
@type limit: int
'''
ml = QMutexLocker(self._mutex)
self._limit = limit
def offset(self):
'''
Returns the index offset for the request. By default the offset is set to 0.
The offset determines the first index which is retrieved, it is generally
used in conjunction with limit() to facilitate paging.
For example, if there are 10 landmarks in the landmark store, setting the offset
to 2 and limit to 5 will retrieve the 3rd to 7th landmarks inclusively. (The order
of the landmarks is specified by the sorting field).
@rtype: int
'''
ml = QMutexLocker(self._mutex)
return self._offset
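    # Paging sketch (illustrative; `mgr` is a hypothetical LandmarkManager):
    #   request = LandmarkFetchRequest(mgr)
    #   request.setOffset(2)  # skip the first two landmarks
    #   request.setLimit(5)   # fetch the 3rd..7th, as described above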
def setOffset(self, offset):
'''
Sets the index \a offset for the request.
@param offset: Offset
@type offset: int
'''
ml = QMutexLocker(self._mutex)
self._offset = offset
def landmarks(self):
'''
Returns the list of landmarks which matched the
filter.
@rtype: list
'''
ml = QMutexLocker(self._mutex)
return self._landmarks |
itucsdb1509/itucsdb1509 | server.py | Python | gpl-3.0 | 27,509 | 0.00927 | import datetime
import time
import os
import json
import re
import psycopg2 as dbapi2
from flask import Flask
from flask import redirect
from flask import request
from flask import render_template
from flask.helpers import url_for
from store import Store
from fixture import *
from sponsors import *
from championship import *
from clubs import *
from curlers import *
from countries import *
from stadiums import *
from coach import *
from federations import *
from news import *
from money_balance import *
from penalty import *
from equipments import *
from points import *
app = Flask(__name__)
def get_elephantsql_dsn(vcap_services):
"""Returns the data source name for ElephantSQL."""
parsed = json.loads(vcap_services)
uri = parsed["elephantsql"][0]["credentials"]["uri"]
match = re.match('postgres://(.*?):(.*?)@(.*?)(:(\d+))?/(.*)', uri)
user, password, host, _, port, dbname = match.groups()
dsn = """user='{}' password='{}' host='{}' port={}
dbname='{}'""".format(user, password, host, port, dbname)
return dsn
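# Illustrative input/output (hypothetical credentials): a VCAP_SERVICES value
# of '{"elephantsql": [{"credentials": {"uri": "postgres://u:pw@db.host:5432/mydb"}}]}'
# would yield "user='u' password='pw' host='db.host' port=5432 dbname='mydb'".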
@app.route('/')
def home_page():
now = datetime.datetime.now()
return render_template('home.html', current_time=now.ctime())
@app.route('/initdb')
def initialize_database():
connection = dbapi2.connect(app.config['dsn'])
try:
cursor =connection.cursor()
try:
cursor.execute('''
DROP TABLE IF EXISTS CLUBS CASCADE;
DROP TABLE IF EXISTS FIXTURE CASCADE;
DROP TABLE IF EXISTS SPONSORS CASCADE;
DROP TABLE IF EXISTS CHAMPIONSHIP CASCADE;
DROP TABLE IF EXISTS CURLERS CASCADE;
DROP TABLE IF EXISTS COUNTRIES CASCADE;
DROP TABLE IF EXISTS STADIUMS CASCADE;
DROP TABLE IF EXISTS COACHES CASCADE;
DROP TABLE IF EXISTS FEDERATIONS CASCADE;
DROP TABLE IF EXISTS NEWS CASCADE;
DROP TABLE IF EXISTS PENALTY CASCADE;
DROP TABLE IF EXISTS EQUIPMENTS CASCADE;
DROP TABLE IF EXISTS POINTS CASCADE;
DROP TABLE IF EXISTS MONEY_BALANCE CASCADE;
''')
init_countries_db(cursor)
init_stadiums_db(cursor)
init_clubs_db(cursor)
init_fixture_db(cursor)
init_sponsors_db(cursor)
init_championships_db(cursor)
init_curlers_db(cursor)
init_coach_db(cursor)
init_federations_db(cursor)
init_news_db(cursor)
init_money_balances_db(cursor)
init_penalty_db(cursor)
init_equipments_db(cursor)
init_points_db(cursor)
except dbapi2.Error as e:
| print(e.pgerror)
finally:
| cursor.close()
###########
except dbapi2.Error as e:
print(e.pgerror)
connection.rollback()
finally:
connection.commit()
connection.close()
return redirect(url_for('home_page'))
@app.route('/championships', methods=['GET', 'POST'])
def championships_page():
connection = dbapi2.connect(app.config['dsn'])
now = datetime.datetime.now()
try:
cursor = connection.cursor()
try:
cursor = connection.cursor()
if request.method == 'GET':
query = "SELECT CH.ID,CH.NAME,C.COUNTRY_NAME,CH.DATE,CH.TYPE,CH.NUMBER_OF_TEAMS,CH.REWARD FROM CHAMPIONSHIP AS CH,COUNTRIES AS C WHERE(CH.PLACE=C.COUNTRY_ID)"
cursor.execute(query)
championship=cursor.fetchall()
cursor.close()
cursor = connection.cursor()
cursor.execute("SELECT COUNTRY_ID,COUNTRY_NAME FROM COUNTRIES")
countries=cursor.fetchall()
return render_template('championships.html', championship = championship,countries=countries, current_time = now.ctime())
elif "add" in request.form:
championship1 = Championships(request.form['name'],
request.form['place'],
request.form['date'],
request.form['type'],
request.form['number_of_teams'],
request.form['reward'])
add_championship(cursor, request, championship1)
connection.commit()
return redirect(url_for('championships_page'))
elif "delete" in request.form:
for line in request.form:
if "checkbox" in line:
delete_championship(cursor, int(line[9:]))
connection.commit()
return redirect(url_for('championships_page'))
elif "search" in request.form:
result=search_championship(cursor, request.form['search_name'])
return render_template('championship_search.html', championship = result, current_time=now.ctime())
except dbapi2.Error as e:
print(e.pgerror)
finally:
cursor.close()
except dbapi2.Error as e:
print(e.pgerror)
## cursor.rollback()
connection.rollback()
## connection.close()
finally:
connection.commit()
connection.close()
def search_championship(cursor,championship1):
res = ()
connection = dbapi2.connect(app.config['dsn'])
try:
cursor = connection.cursor()
try:
query = """SELECT CH.ID,CH.NAME,C.COUNTRY_NAME,CH.DATE,CH.TYPE,CH.NUMBER_OF_TEAMS,CH.REWARD
FROM CHAMPIONSHIP AS CH,COUNTRIES AS C
WHERE(
(CH.PLACE=C.COUNTRY_ID) AND ((CH.NAME LIKE %s)OR(C.COUNTRY_NAME LIKE %s)))
"""
cursor.execute(query,('%'+championship1+'%','%'+championship1+'%'))
res = cursor.fetchall()
except dbapi2.Error as e:
print(e.pgerror)
finally:
cursor.close()
except dbapi2.Error as e:
print(e.pgerror)
connection.rollback()
finally:
connection.close()
return res
@app.route('/championships/<championship_id>', methods=['GET', 'POST'])
def championship_update_page(championship_id):
connection = dbapi2.connect(app.config['dsn'])
cursor = connection.cursor()
if request.method == 'GET':
cursor.close()
cursor = connection.cursor()
cursor.execute("SELECT COUNTRY_ID,COUNTRY_NAME FROM COUNTRIES")
countries=cursor.fetchall()
query = """SELECT * FROM CHAMPIONSHIP WHERE (ID = %s)"""
        cursor.execute(query, (championship_id,))
now = datetime.datetime.now()
return render_template('championship_update.html', championship = cursor,countries=countries, current_time=now.ctime())
elif request.method == 'POST':
if "update" in request.form:
championship1 = Championships(request.form['name'],
request.form['place'],
request.form['date'],
request.form['type'],
request.form['number_of_teams'],
request.form['reward'])
update_championship(cursor, request.form['championship_id'], championship1)
connection.commit()
return redirect(url_for('championships_page'))
@app.route('/countries',methods=['GET', 'POST'])
def countries_page():
connection = dbapi2.connect(app.config['dsn'])
cursor = connection.cursor()
now = datetime.datetime.now()
if request.method == 'GET':
query = """SELECT COUNTRY_ID,COUNTRY_NAME,COUNTRY_CONTINENT,COUNTRY_CAPITAL,COUNTRY_INDEPEN_YEAR
FROM COUNTRIES GROUP BY COUNTRY_ID
ORDER BY COUNTRY_NAME """
cursor.execute(query)
return render_template('countries.html', countries = cursor.fetchall(), current_time=now.ctime())
elif "add" in request.form:
country1 = Countries(request.form['country'],
request.form['continent'],
request.form['capital'],
request.for |
mmpi/SvgPresenter | qt/movie/MovieData.py | Python | gpl-3.0 | 1,178 | 0.011036 | import os.path
from PyQt4 import QtCore, QtGui
import vlc.vlc as vlc
class MovieData:
libvlc = vlc.Instance(["--no-audio","--no-xlib"])
def __init__(self, basePath, dict):
self.basePath = basePath
self.data = dict
# load poster pixmap
PngBase64 = "data:image/png;base64,"
imageData = self.data["image"]
if imageData.startswith(PngBase64):
byteArray = QtCore.QByteArray.fromBase6 | 4(imageData[len(PngBase64):])
self.pixmap = QtGui.QPixmap()
self.pixmap.loadFromData(byteArray)
# load medium
path = self.data["path"]
head, tail = os.path.split(path)
if head=="":
path = os.path.join(self.basePath, tail)
self.media = self.libvlc.medi | a_new(unicode(path))
if self.data["loop"]:
self.media.add_option("input-repeat=-1") # repeat
def scaledRectangle(self, factor):
return QtCore.QRect(factor*self.data["x"], factor*self.data["y"], factor*self.data["width"], factor*self.data["height"])
def aspectRatio(self):
return "%d:%d"%(self.data["width"],self.data["height"]) |
nicole-a-tesla/meetup.pizza | pizzaplace/tests/test_pizza_place.py | Python | mit | 1,885 | 0.008488 | from django.test import TestCase
from django.db import DataError
from django.db import IntegrityError
from pizzaplace.models import PizzaPlace
class TestPizzaPlace(TestCase):
def setUp(self):
self.prince_street_pizza_url = 'https://www.yelp.com/biz/prince-st-pizza-new-york'
self.pizza_name1 = 'Such Pizza'
def test_pizza_is_real(self):
pizza_place = PizzaPlace()
self.assertIsInstance(pizza_place, PizzaPlace)
def test_creation_of_pizza_place_with_name(self):
pizza_place = PizzaPlace(name=self.pizza_name1, yelp_url=self.prince_street_pizza_url)
self.assertEquals(pizza_place.name, self.pizza_name1)
def test_name_must_be_unique(self):
PizzaPlace.objects.create(name=self.pizza_name1, yelp_url=self.prince_street_pizza_url)
place2 = PizzaPlace(name=self.pizza_name1, yelp_url='https://www.yelp.com/biz/lombardis-pizza-new-york')
self.assertRaises(IntegrityError, place2.save)
def test_url_must_be_unique(self):
PizzaPlace.objects.create(name="Much Pizza", yelp_url=self.prince_street_pizza_url)
place2 = PizzaPlace(name=self.pizza_name1, yelp_url=self.prince_street_pizza_url)
self.assertRaises(IntegrityError, place2.save)
def test_string_representation_of_pizza_place(self):
place = PizzaPlace(name=self.pizza_name1, yelp_url=self.prince_street_pizza_url)
self.assertEquals(self.pizza_name1, str(place))
def test_name_length_inv | alid_if_over_500_char(self):
name = "x" * 501
place = PizzaPlace(name=name, yelp_url=self.prince_street_pizza_url)
self.assertRaises(DataError, place.save)
def test_raises_error_if_name_is_blank(self):
place = PizzaPlace(yelp_url=self.prince_street_pizza_url)
self.assertRaises(IntegrityError, place.save)
def test_raises_error_if_url_is_blank(self):
place | = PizzaPlace(name=self.pizza_name1)
self.assertRaises(IntegrityError, place.save)
|
adrienpacifico/openfisca-france-data | openfisca_france_data/tests/test_yaml.py | Python | agpl-3.0 | 14,154 | 0.01194 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Test YAML files."""
from __future__ import division
import collections
import copy
import logging
import os
import numpy as np
from openfisca_core import conv, periods, scenarios
from openfisca_core.tools import assert_near
from openfisca_france_data.tests.base import france_data_tax_benefit_system as tax_benefit_system
import yaml
log = logging.getLogger(__name__)
options_by_dir = collections.OrderedDict((
(
os.path.abspath(os.path.join(os.path.dirname(__file__), 'formulas')),
dict(
accept_other_period = False,
default_absolute_error_margin = 0.005,
),
),
))
tax_benefit_system_by_reform_name = {
None: tax_benefit_system,
}
# YAML configuration
class folded_unicode(unicode):
pass
class literal_unicode(unicode):
pass
def dict_constructor(loader, node):
return collections.OrderedDict(loader.construct_pairs(node))
yaml.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, dict_constructor)
yaml.add_representer(collections.OrderedDict, lambda dumper, data: dumper.represent_dict(
(copy.deepcopy(key), value)
for key, value in data.iteritems()
))
yaml.add_representer(dict, lambda dumper, data: dumper.represent_dict(
(copy.deepcopy(key), value)
for key, value in data.iteritems()
))
yaml.add_representer(folded_unicode, lambda dumper, data: dumper.represent_scalar(u'tag:yaml.org,2002:str',
data, style='>'))
yaml.add_representer(literal_unicode, lambda dumper, data: dumper.represent_scalar(u'tag:yaml.org,2002:str',
data, style='|'))
yaml.add_representer(np.ndarray, lambda dumper, data: dumper.represent_list(data.tolist()))
yaml.add_representer(periods.Instant, lambda dumper, data: dumper.represent_scalar(u'tag:yaml.org,2002:str', str(data)))
yaml.add_representer(periods.Period, lambda dumper, data: dumper.represent_scalar(u'tag:yaml.org,2002:str', str(data)))
yaml.add_representer(tuple, lambda dumper, data: dumper.represent_list(data))
yaml.add_representer(unicode, lambda dumper, data: dumper.represent_scalar(u'tag:yaml.org,2002:str', data))
# Functions
def assert_near_any_period(value, target_value, absolute_error_margin = 0, message = '', relative_error_margin = None):
# Redefinition of assert_near that accepts to compare monthy values with yearly values.
assert absolute_error_margin is not None or relative_error_margin is not None
if isinstance(value, (list, tuple)):
value = np.array(value)
if isinstance(target_value, (list, tuple)):
target_value = np.array(target_value)
if isinstance(message, unicode):
message = message.encode('utf-8')
if isinstance(value, np.ndarray):
if absolute_error_margin is not None:
assert (abs(target_value - value) <= absolute_error_margin).all() \
or (abs(target_value - value * 12) <= absolute_error_margin).all() \
or (abs(target_value - value / 12) <= absolute_error_margin).all(), \
'{}{} differs from {} with an absolute margin {} > {}'.format(message, value, target_value,
abs(target_value - value), absolute_error_margin)
if relative_error_margin is not None:
assert (abs(target_value - value) <= abs(relative_error_margin * target_value)).all() \
or (abs(target_value - value * 12) <= abs(relative_error_margin * target_value)).all() \
| or (abs(target_value - value / 12) <= abs(relative_error_margin * target_value)).all(), \
'{}{} differs from {} with a relative margin {} > {}'.format(message, value, target_value,
abs(target_value - value), abs(relative_error_margin * target_value))
else:
if absolute_error_margin is not None:
assert abs(target_value - value) <= absolute_error_margin \
or abs(target_value - value * 12) <= absolute_error_margin \
| or abs(target_value - value / 12) <= absolute_error_margin, \
'{}{} differs from {} with an absolute margin {} > {}'.format(message, value, target_value,
abs(target_value - value), absolute_error_margin)
if relative_error_margin is not None:
assert abs(target_value - value) <= abs(relative_error_margin * target_value) \
or abs(target_value - value * 12) <= abs(relative_error_margin * target_value) \
or abs(target_value - value / 12) <= abs(relative_error_margin * target_value), \
'{}{} differs from {} with a relative margin {} > {}'.format(message, value, target_value,
abs(target_value - value), abs(relative_error_margin * target_value))
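# Worked instance of the relaxed comparison (illustrative): a monthly value of
# 100 checked against a yearly target of 1200 passes, since
# abs(1200 - 100 * 12) == 0 <= absolute_error_margin.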
def check(name, period_str, test, force):
scenario = test['scenario']
scenario.suggest()
simulation = scenario.new_simulation(debug = True)
output_variables = test.get(u'output_variables')
if output_variables is not None:
output_variables_name_to_ignore = test.get(u'output_variables_name_to_ignore') or set()
for variable_name, expected_value in output_variables.iteritems():
if not force and variable_name in output_variables_name_to_ignore:
continue
if isinstance(expected_value, dict):
for requested_period, expected_value_at_period in expected_value.iteritems():
assert_near(
simulation.calculate(variable_name, requested_period),
expected_value_at_period,
absolute_error_margin = test.get('absolute_error_margin'),
message = u'{}@{}: '.format(variable_name, requested_period),
relative_error_margin = test.get('relative_error_margin'),
)
else:
assert_near(
simulation.calculate(variable_name),
expected_value,
absolute_error_margin = test.get('absolute_error_margin'),
message = u'{}@{}: '.format(variable_name, period_str),
relative_error_margin = test.get('relative_error_margin'),
)
def check_any_period(name, period_str, test, force):
scenario = test['scenario']
scenario.suggest()
simulation = scenario.new_simulation(debug = True)
output_variables = test.get(u'output_variables')
if output_variables is not None:
output_variables_name_to_ignore = test.get(u'output_variables_name_to_ignore') or set()
for variable_name, expected_value in output_variables.iteritems():
if not force and variable_name in output_variables_name_to_ignore:
continue
if isinstance(expected_value, dict):
for requested_period, expected_value_at_period in expected_value.iteritems():
assert_near_any_period(
simulation.calculate(variable_name, requested_period, accept_other_period = True),
expected_value_at_period,
absolute_error_margin = test.get('absolute_error_margin'),
message = u'{}@{}: '.format(variable_name, |
swails/mdtraj | mdtraj/tests/test_xyz.py | Python | lgpl-2.1 | 3,492 | 0.002577 | ##############################################################################
# MDTraj: A Python Library for Loading, Saving, and Manipulating
# Molecular Dynamics Trajectories.
# Copyright 2012-2013 Stanford University and the Authors
# Authors: Christoph Klein
# Contributors:
#
# MDTraj is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public Lic | ense for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with | MDTraj. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
import tempfile, os
import numpy as np
import mdtraj as md
from mdtraj.formats import XYZTrajectoryFile
from mdtraj.testing import get_fn, eq
fd, temp = tempfile.mkstemp(suffix='.xyz')
fd_gz, temp_gz = tempfile.mkstemp(suffix='.xyz.gz')
def teardown_module(module):
"""Remove the temporary file created by tests in this file
this gets automatically called by nose. """
    os.close(fd)
    os.unlink(temp)
    os.close(fd_gz)
    os.unlink(temp_gz)
def test_read_0():
with XYZTrajectoryFile(get_fn('frame0.xyz')) as f:
xyz = f.read()
with XYZTrajectoryFile(get_fn('frame0.xyz')) as f:
xyz3 = f.read(stride=3)
eq(xyz[::3], xyz3)
def test_read_1():
reference = md.load(get_fn('frame0.dcd'), top=get_fn('native.pdb'))
traj = md.load(get_fn('frame0.xyz'), top=get_fn('native.pdb'))
eq(reference.xyz[0], traj.xyz[0], decimal=3)
def test_read_gz():
reference = md.load(get_fn('frame0.dcd'), top=get_fn('native.pdb'))
traj = md.load(get_fn('frame0.xyz.gz'), top=get_fn('native.pdb'))
eq(reference.xyz[0], traj.xyz[0], decimal=3)
def test_read_write():
xyz = np.around(10 * np.random.randn(100, 11, 3), decimals=3)
with XYZTrajectoryFile(temp, mode='w') as f:
f.write(xyz)
with XYZTrajectoryFile(temp) as f:
xyz2 = f.read()
eq(xyz, xyz2)
def test_mdwrite():
t = md.load(get_fn('frame0.xyz'), top=get_fn('native.pdb'))
t.save(temp)
t.save(temp_gz)
def test_multiread():
reference = md.load(get_fn('frame0.xyz'), top=get_fn('native.pdb'))
with XYZTrajectoryFile(get_fn('frame0.xyz')) as f:
xyz0 = f.read(n_frames=1)
xyz1 = f.read(n_frames=1)
eq(reference.xyz[0], xyz0[0]/10)
eq(reference.xyz[1], xyz1[0]/10)
def test_seek():
reference = md.load(get_fn('frame0.xyz'), top=get_fn('native.pdb'))
with XYZTrajectoryFile(get_fn('frame0.xyz')) as f:
f.seek(1)
eq(1, f.tell())
xyz1 = f.read(n_frames=1)
eq(reference.xyz[1], xyz1[0]/10)
f.seek(10)
eq(10, f.tell())
xyz10 = f.read(n_frames=1)
eq(reference.xyz[10], xyz10[0]/10)
eq(11, f.tell())
f.seek(-8, 1)
xyz3 = f.read(n_frames=1)
eq(reference.xyz[3], xyz3[0]/10)
f.seek(4, 1)
xyz8 = f.read(n_frames=1)
eq(reference.xyz[8], xyz8[0]/10)
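    # Relative-seek arithmetic above (a note, not extra assertions): reading
    # frame 10 leaves the cursor at 11, so seek(-8, 1) lands on frame 3, and
    # seek(4, 1) after reading frame 3 lands on frame 8 (whence=1 is relative).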
def test_len():
with md.open(get_fn('frame0.xyz')) as fh:
assert len(fh) == 501
assert fh._frame_index == 0
assert len(fh.read()) == 501
|
Architektor/PySnip | contrib/scripts/timedmute.py | Python | gpl-3.0 | 1,733 | 0.021927 | # Timed mute: !tm <player> <seconds> <reason>
# default time 5 minutes, default reason None
# by topologist June 30th 2012
from scheduler import Scheduler
from commands import add, admin, get_player, join_arguments, name
@name('tm')
@admin
def timed_mute(connection, *args):
protocol = connection.protocol
nick = args[0]
    time = int(args[1]) if len(args) > 1 else 300  # default: 5 minutes
    reason = join_arguments(args[2:]) or 'None'    # default reason per docstring
player = get_player(protocol, nick)
if time < 0:
raise ValueError()
if not player.mute:
TimedMute(player, time, reason)
else:
return '%s is already muted!' % nick
add(timed_mute)
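# Usage sketch (illustrative): "/tm spammer 60 flooding" mutes spammer for 60
# seconds; "/tm spammer" falls back to the 5-minute default, and a time of 0
# mutes indefinitely (handled in TimedMute below).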
class TimedMute(object):
player = None
time = None
def __init__(self, player, time = 300, reason = 'None'):
if time == 0:
player.mute = True
| player.protocol.send_chat('%s was muted indefinitely (Reason: %s)' % (
player.name, reason), irc = True)
return
schedule = Scheduler(player.protocol)
schedule.call_later(time, self.end)
player.mute_schedule = schedule
player.protocol.send_chat('%s was muted for %s seconds (Reason: %s)' % (
player.name, time, reason), irc = True)
player.mute = True
self.player = player
self.time = time
def end(self):
self.player.mute = | False
message = '%s was unmuted after %s seconds' % (self.player.name, self.time)
self.player.protocol.send_chat(message, irc = True)
def apply_script(protocol, connection, config):
class TimedMuteConnection(connection):
mute_schedule = None
def on_disconnect(self):
if self.mute_schedule:
del self.mute_schedule
connection.on_disconnect(self)
return protocol, TimedMuteConnection
|
michaelneuder/image_quality_analysis | bin/nets/old/pixel_diff_conv_net_automated.py | Python | mit | 5,287 | 0.015131 | #!/usr/bin/env python3
import os
os.environ['TF_CPP_MIN_LOG_LEVEL']='2'
import numpy as np
np.set_printoptions(threshold=np.nan)
import tensorflow as tf
import time
# seeding for debug purposes --- don't forget to remove
SEED = 12345
np.random.seed(SEED)
tf.set_random_seed(SEED)
def convolve_inner_layers(x, W, b):
y = tf.nn.conv2d(x, W, strides = [1,1,1,1], padding='SAME')
y = tf.nn.bias_add(y, b)
return tf.nn.relu(y)
def convolve_ouput_layer(x, W, b):
y = tf.nn.conv2d(x, W, strides = [1,1,1,1], padding='SAME')
y = tf.nn.bias_add(y, b)
return y
def conv_net(x, W, b):
conv1 = convolve_inner_layers(x, W['weights1'], b['bias1'])
conv2 = convolve_inner_layers(conv1, W['weights2'], b['bias2'])
conv3 = convolve_inner_layers(conv2, W['weights3'], b['bias3'])
output = convolve_ouput_layer(conv3, W['weights_out'], b['bias_out'])
return output
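# Shape sketch (descriptive note): input [N, H, W, 2] flows through 50-, 25-
# and 10-channel hidden layers to a [N, H, W, 1] output; 'SAME' padding with
# stride 1 preserves the spatial dimensions throughout.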
def run_training(image_dim_, initializer_scale_, learning_rate_):
# parameters
filter_dim = 11
number_images = 100
image_dim = image_dim_
input_layer = 2
first_layer = 50
second_layer = 25
third_layer = 10
output_layer = 1
initializer_scale = initializer_scale_
# train images
rand_img_train_1 = np.random.random_sample((number_images,image_dim**2))
rand_img_train_2 = np.random.random_sample((number_images,image_dim**2))
difference_train = abs(rand_img_train_1 - rand_img_train_2)
# test image
rand_img_test_1 = np.random.random_sample((number_images,image_dim**2))
rand_img_test_2 = np.random.random_sample((number_images,image_dim**2))
difference_test = abs(rand_img_test_1 - rand_img_test_2)
# stacking & reshaping images
train_data = np.reshape(np.dstack((rand_img_train_1, rand_img_train_2)), [number_images,image_dim,image_dim,2])
test_data = np.reshape(np.dstack((rand_img_test_1, rand_img_test_2)), [number_images,image_dim,image_dim,2])
target_data_train = np.reshape(difference_train, [number_images,image_dim,image_dim,1])
target_data_test = np.reshape(difference_test, [number_images,image_dim,image_dim,1])
# initializing variables --- fan in
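    # Fan-in scaling (note on the stddev terms below): each tensor is drawn
    # from N(0, sigma^2) with sigma = 1 / (initializer_scale * fan_in), where
    # fan_in = filter_dim**2 * in_channels (e.g. 11*11*2 = 242 for weights1).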
weights = {
'weights1': tf.Variable(tf.random_normal([filter_dim,filter_dim,input_layer,first_layer],stddev=(1.0/(initializer_scale*filter_dim*filter_dim*input_layer)))),
'weights2': tf.Variable(tf.random_normal([filter_dim,filter_dim,first_layer,second_layer],stddev=(1.0/(initializer_scale*filter_dim*filter_dim*first_layer)))),
'weights3': tf.Variable(tf.random_normal([filter_dim,filter_dim,second_layer,third_layer],stddev=(1.0/(initializer_scale*filter_dim*filter_dim*second_layer)))),
'weights_out': tf.Variable(tf.random_normal([filter_dim,filter_dim,third_layer,output_layer],stddev=(1.0/(initializer_scale*filter_dim*filter_dim*third_layer))))
}
biases = {
'bias1': tf.Variable(tf.random_normal([first_layer],stddev=(1.0/(initializer_scale*filter_dim*filter_dim*input_layer)))),
'bias2': tf.Variable(tf.random_normal([second_layer],stddev=(1.0/(initializer_scale*filter_dim*filter_dim*first_layer)))),
'bias3': tf.Variable(tf.random_normal([third_layer],stddev=(1.0/(initializer_scale*filter_dim*filter_dim*second_layer)))),
'bias_out': tf.Variable(tf.random_normal([output_layer],stddev=(1.0/(initializer_scale*filter_dim*filter_dim*third_layer))))
}
# tf Graph input
x = tf.placeholder(tf.float32, [None, image_dim, image_dim, 2])
y = tf.placeholder(tf.float32, [None, image_dim, image_dim, 1])
# paramaters
learning_rate = learning_rate_
epochs = 1000
# model
prediction = conv_net(x, weights, biases)
# loss and optimization
cost = tf.reduce_mean(tf.square(tf.subtract(prediction, y)))
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)
# session
init = tf.global_variables_initializer()
with tf.Session() as sess:
sess.run(init)
epoch_count = 0
start_time = time.time()
print("starting training with paramaers: (im_dim={}, init_scale={}, lr={})".format(image_dim, initializer_scale, learning_rate))
while epoch_count < epochs:
x_data_train, y_data_train = train_data, target_data_train
sess.ru | n(optimizer, feed_dict={x : x_data_train, y : y_data_train})
loss = sess.run(cost, feed_dict={x : x_data_train, y : y_data_train})
| epoch_count+=1
print(' optimization finished!')
score = sess.run(cost, feed_dict={x: test_data, y: target_data_test})
print(' score : {} '.format(score))
return (image_dim, initializer_scale, learning_rate), (loss, score)
def main():
results = {}
image_dims = [1,2,3,4,5]
init_scales = [.01, .1, 1.0, 10.0]
learning_rates = [.1, .01, .001]
for dim in image_dims:
for scale in init_scales:
for learning_rate in learning_rates:
setting, result = run_training(dim, scale, learning_rate)
results[setting] = result
with open('results.txt', mode='w') as write_file:
for setting in results:
write_file.write(str(setting)+','+str(results[setting])+'\n')
write_file.close()
if __name__ == '__main__':
main()
|
tensor-tang/Paddle | python/paddle/fluid/tests/unittests/test_parallel_executor_seresnext_base_cpu.py | Python | apache-2.0 | 1,446 | 0 | # Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing | permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import seresnext_net
from seresnext_test_base import TestResnetBase
from functools import partial
class TestResnetCPU(TestResnetBase):
def test_seresnext_with_learning_rate_decay(self):
        # NOTE(zcd): This test compares the results of parallel_executor
        # and executor; the drop_out op and batch_norm op differ between
        # these two executors, s | o both ops should be removed
# from the model.
check_func = partial(
self.check_network_convergence,
optimizer=seresnext_net.optimizer,
use_parallel_executor=False)
self._compare_result_with_origin_model(
check_func, use_cuda=False, compare_seperately=False, delta2=1e-3)
if __name__ == '__main__':
unittest.main()
|
satterly/alerta5 | alerta/app/views/__init__.py | Python | apache-2.0 | 939 | 0.007455 |
from flask import Blueprint, request, jsonify, current_app
from alerta.app.utils.api import absolute_url
from alerta.app.exceptions import ApiError
api = Blueprint('api', __name__)
from . import alerts, blackouts, customers, heartbeats, keys, permissions, users, oembed
@api. | before_request
def only_json():
if request.method in ['POST', 'PUT'] and not request.is_json:
raise ApiError("POST and PUT requests must set 'Content-t | ype' to 'application/json'", 415)
@api.route('/', methods=['OPTIONS', 'GET'])
def index():
links = []
for rule in current_app.url_map.iter_rules():
links.append({
"rel": rule.endpoint,
"href": absolute_url(rule.rule) ,
"method": ','.join([m for m in rule.methods if m not in ['HEAD', 'OPTIONS']])
})
return jsonify(status="ok", uri=absolute_url(), data={'description':'Alerta API'}, links=sorted(links, key=lambda k: k["href"]))
|
eduNEXT/edx-platform | openedx/core/djangoapps/discussions/urls.py | Python | agpl-3.0 | 752 | 0.00133 | """
Configure URL endpoints for the djangoapp
"""
from django.urls import re_path
from django.conf import settings
from .views import CombinedDiscussionsConfigurationView, DiscussionsConfigurationSettingsView, DiscussionsProvidersView
urlpatterns = [
re_path(
fr'^v0/{settings.COURSE_KEY_PATTERN}$',
CombinedDiscussionsConfigurationView.as_view(),
name='discussions',
),
re_path(
fr'^v0/course/{settings.COURSE_KEY_PATTERN}/settings$',
DiscussionsConfigurationSettingsView.as_view(),
name='discussions-settings',
),
re_path(
fr'^v0/course/{settings.COURSE_KEY_PATTERN}/providers$', |
DiscussionsProvidersView.as_view(),
name='di | scussions-providers',
),
]
|
VoIP-co-uk/sftf | UserAgentBasicTestSuite/case202.py | Python | gpl-2.0 | 4,465 | 0.018365 | #
# Copyright (C) 2004 SIPfoundry Inc.
# Licensed by SIPfoundry under the GPL license.
#
# Copyright (C) 2004 SIP Forum
# Licensed to SIPfoundry under a Contributor Agreement.
#
#
# This file is part of SIP Forum User Agent Basic Test Suite which
# belongs to the SIP Forum Test Framework.
#
# SIP Forum User Agent Basic Test Su | ite is free software; you can
# redistribute it and/or modify it under the terms of the GNU General
# Public License as published by the Free Software Foun | dation; either
# version 2 of the License, or (at your option) any later version.
#
# SIP Forum User Agent Basic Test Suite is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SIP Forum User Agent Basic Test Suite; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
#
# $Id: case202.py,v 1.2 2004/05/02 18:57:35 lando Exp $
#
from TestCase import TestCase
import NetworkEventHandler as NEH
import Log
class case202 (TestCase):
def config(self):
self.name = "Case 202"
self.description = "Wide range of valid charaters"
self.isClient = True
self.transport = "UDP"
def run(self):
self.neh = NEH.NetworkEventHandler(self.transport)
inv = self.createRequest("INVITE")
inv.rUri.username = "1_unusual.URI~(to-be!sure)&isn't+it$/crazy?,/;;*:&it+has=1,weird!*pass$wo~d_too.(doesn't-it)"
via = inv.getParsedHeaderValue("Via")
via.branch = "z9hG4bK-.!%66*_+`'~"
inv.setHeaderValue("Via", via.create())
to = inv.getParsedHeaderValue("To")
to.displayname = "BEL:\\\x07 NUL:\\\x00 DEL:\\\x7F"
to.uri.username = "1_unusual.URI~(to-be!sure)&isn't+it$/crazy?,/;;*:&it+has=1,weird!*pass$wo~d_too.(doesn't-it)"
inv.setHeaderValue("To", to.create())
inv.transaction.dialog.remoteUri = to
self.writeMessageToNetwork(self.neh, inv)
self.code = 0
while (self.code <= 200):
repl = self.readReplyFromNetwork(self.neh)
if (repl is not None) and (repl.code > self.code):
self.code = repl.code
elif repl is None:
self.code = 999
if repl is None:
self.addResult(TestCase.TC_FAILED, "missing reply on request")
self.neh.closeSock()
def onDefaultCode(self, message):
if message.code > self.code:
self.code = message.code
if message.code >= 200:
if (message.hasParsedHeaderField("CSeq") and (message.getParsedHeaderValue("CSeq").method == "INVITE")):
Log.logDebug("case202: sending ACK for >= 200 reply", 3)
ack = self.createRequest("ACK", trans=message.transaction)
self.writeMessageToNetwork(self.neh, ack)
if message.code != 487:
self.addResult(TestCase.TC_WARN, "INVITE with wide range of characters rejected with '" + str(message.code) + "'")
elif message.code == 200:
if len(self.results):
self.addResult(TestCase.TC_PASSED, "INVITE with wide range of characters accepted")
Log.logDebug("case202: sending BYE for accepted INVITE", 3)
bye = self.createRequest("BYE", dia=message.transaction.dialog)
self.writeMessageToNetwork(self.neh, bye)
rep = self.readReplyFromNetwork(self.neh)
if rep is None:
self.addResult(TestCase.TC_ERROR, "missing response on BYE")
elif (message.hasParsedHeaderField("CSeq")) and (message.getParsedHeaderValue("CSeq").method == "CANCEL") and (message.code != 200):
self.addResult(TestCase.TC_WARN, "received \'" + str(message.code) + "\' for CANCEL")
elif (not message.transaction.canceled) and (message.hasParsedHeaderField("CSeq")) and (message.getParsedHeaderValue("CSeq").method == "INVITE"):
Log.logDebug("case202: sending ACK for >= 200 reply", 3)
ack = self.createRequest("ACK", trans=message.transaction)
self.writeMessageToNetwork(self.neh, ack)
if message.code != 487:
self.addResult(TestCase.TC_WARN, "INVITE with wide range of characters rejected with '" + str(message.code) + "'")
else:
self.addResult(TestCase.TC_PASSED, "INVITE with wide range of characters accepted")
can = self.createRequest("CANCEL", trans=message.transaction)
message.transaction.canceled = True
self.writeMessageToNetwork(self.neh, can)
canrepl = self.readReplyFromNetwork(self.neh)
if canrepl is None:
self.addResult(TestCase.TC_ERROR, "missing 200 on CANCEL")
|
commonsmachinery/rdf_metadata | src/RDFMetadata/test/__init__.py | Python | gpl-2.0 | 14 | 0 |
__al | l__ = [] | |
sentriz/steely | steely/plugins/train.py | Python | gpl-3.0 | 2,407 | 0.001248 | #!/usr/bin/env python3
'''
.train <train station>
get irish rail train state times
'''
import requests
import re
from operator import itemgetter
from xml.etree import ElementTree
from formatting import *
__author__ = 'izaakf'
COMMAND = 'train'
NAMESPACES = {'realtime': 'http://api.irishrail.ie/realtime/'}
REALTIME_URL = f"{NAMESPACES['realtime']}realtime.asmx/getStationDataByNameXML"
def get_train_times(station):
params = {'StationDesc': station}
response = requests.get(REALTIME_URL, params=params).text
response_tree = ElementTree.fromstring(response)
for station in response_tree.findall('realtime:objStationData', NAMESPACES):
yield parse_direction(station.find('./realtime:Direction', NAMESPACES).text), \
station.find('./realtime:Origin', NAMESPACES).text, \
station.find('./realtime:Destination', NAMESPACES).text, \
station.find('./realtime:Duein', NAMESPACES).text
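# Illustrative yield (hypothetical values): ('↑', 'Bray', 'Howth', '5') --
# a direction glyph, origin, destination, and minutes-due string per train.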
def parse_direction(direction):
aliases = {'Northbound': '↑',
'Southbound': '↓'}
return aliases.get(direction, '-')
def len_longest_string_of(column):
return max(len(str(row)) for row in column)
def gen_c | olumn_widths(times):
for column in zip(* | times):
yield len_longest_string_of(column)
def gen_reply_string(times, widths):
_, max_origin, max_destin, max_time = widths
yield f" {'from':<{max_origin}} to"
for direction, origin, destin, time in times:
yield f"{direction} {origin:<{max_origin}} {destin:<{max_destin}} {time:>{max_time}}min"
def main(bot, author_id, message, thread_id, thread_type, **kwargs):
def send_message(message):
bot.sendMessage(message, thread_id=thread_id, thread_type=thread_type)
if not message:
send_message("invalid train station",
thread_id=thread_id, thread_type=thread_type)
return
try:
times = list(get_train_times(message))
widths = gen_column_widths(times)
except requests.exceptions.RequestException:
send_message("error retrieving results")
if times:
send_message(code_block("\n".join(gen_reply_string(times, widths))))
else:
send_message("no results")
if __name__ == "__main__":
times = list(get_train_times("bayside"))
widths = gen_column_widths(times)
print("\n".join(gen_reply_string(times, widths)))
|
MikhailMS/Final_Project | download_music/__init__.py | Python | bsd-2-clause | 79 | 0 | # W | hat should be exported from module
from download_music import run_midi | _load
|