| max_stars_repo_path (string, 3-269 chars) | max_stars_repo_name (string, 4-119 chars) | max_stars_count (int64, 0-191k) | id (string, 1-7 chars) | content (string, 6-1.05M chars) | score (float64, 0.23-5.13) | int_score (int64, 0-5) |
|---|---|---|---|---|---|---|
startup/users/30-user-Katz.py | NSLS-II-SMI/profile_collection | 0 | 12778051 |
<gh_stars>0
def alignement_katz_2021_1():
global names, x_piezo, y_piezo, z_piezo, incident_angles, y_piezo_aligned
names = ['sample1', 'sample2', 'sample3', 'sample4', 'sample5', 'sample6', 'sample7']
x_piezo = [ 55000, 42000, 19000, 2000, -16000, -31000, -49000]
y_piezo = [ 4800, 2900, 2900, 2900, 2900, 2900, 3300]
x_hexa = [ 7, 0, 0, 0, 0, 0, 0]
incident_angles = [ 0, 0, 0, 0, 0, 0, 0]
y_piezo_aligned = [4757.703, 3054.9, 3133.065, 3031.989, 3414.158, 3546.666, 3715.74]
#sample2: y = 5332.784, th = 0.973826
#sample 4:: th [2, 0.9738, 2, 0.97, 0.582, 0.297, 0.0655], y: [7100, 5332.784, 5142.4, 4975.875, 5447.996, 5487.398, 5792.193]
# incident_angles = [2, 0.9738, 2, 0.97, 0.582, 0.297, 0.0655]
# y_piezo_aligned = [7100, 5332.784, 5142.4, 4975.875, 5447.996, 5487.398, 5792.193]
smi = SMI_Beamline()
yield from smi.modeAlignment(technique='gisaxs')
for name, xs_piezo, ys_piezo, xs_hexa in zip(names, x_piezo, y_piezo, x_hexa):
yield from bps.mv(stage.x, xs_hexa)
yield from bps.mv(piezo.x, xs_piezo)
yield from bps.mv(piezo.y, ys_piezo)
# yield from alignement_gisaxs(0.3)
yield from alignement_gisaxs_multisample_special(angle = 0.25)
y_piezo_aligned = y_piezo_aligned + [piezo.y.position]
yield from smi.modeMeasurement()
print(incident_angles)
def nexafs_Sedge_Katz(t=1):
dets = [pil300KW, pil900KW]
names = ['sample1', 'sample2', 'sample3', 'sample4', 'sample5', 'sample6', 'sample7']
x_piezo = [ 55000, 42000, 19000, 2000, -16000, -31000, -49000]
y_piezo = [ 4800, 2900, 2900, 2900, 2900, 2900, 3300]
x_hexa = [ 7, 0, 0, 0, 0, 0, 0]
incident_angles = [ 0, 0, 0, 0, 0, 0, 0]
y_piezo_aligned = [4757.703, 3054.9, 3133.065, 3031.989, 3414.158, 3546.666, 3715.74]
energies = 7 + np.asarray(np.arange(2445, 2470, 5).tolist() + np.arange(2470, 2480, 0.25).tolist() + np.arange(2480, 2490, 1).tolist()+ np.arange(2490, 2501, 5).tolist())
waxs_arc = [52.5]
# Use the aligned y positions inside the loop; the raw y_piezo values are not needed here.
for name, xs, _, zs, aiss, ys in zip(names, x_piezo, y_piezo, z_piezo, incident_angles, y_piezo_aligned):
yield from bps.mv(piezo.x, xs)
yield from bps.mv(piezo.y, ys)
yield from bps.mv(piezo.z, zs)
yield from bps.mv(piezo.th, aiss + 0.7)
ai0 = piezo.th.position
yield from bps.mv(waxs, waxs_arc[0])
det_exposure_time(t,t)
name_fmt = 'nexafs_{sample}_{energy}eV_wa60.0_bpm{xbpm}'
for e in energies:
yield from bps.mv(energy, e)
yield from bps.sleep(1)
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, xbpm = '%4.3f'%bpm)
sample_id(user_name='GF', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
yield from bps.mv(energy, 2490)
yield from bps.mv(energy, 2470)
yield from bps.mv(energy, 2450)
def nexafs_Caedge_Katz(t=1):
dets = [pil300KW]
names = ['sample7_1']
energies = np.linspace(4030, 4110, 81)
waxs_arc = [52.5]
for name in names:
ai0 = piezo.th.position
yield from bps.mv(waxs, waxs_arc[0])
det_exposure_time(t,t)
name_fmt = 'nexafs_{sample}_{energy}eV_wa52.5_ai0.7deg_bpm{xbpm}'
for e in energies:
yield from bps.mv(energy, e)
yield from bps.sleep(1)
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, xbpm = '%4.3f'%bpm)
sample_id(user_name='GF', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
yield from bps.mv(energy, 4100)
yield from bps.mv(energy, 4080)
yield from bps.mv(energy, 4050)
def saxs_14keV_Matt_2021_2(t=1):
xlocs = [44000, 35000, 21500, 11000, -1000, -12000, -23000, -36000, 44000, 32500, 21000, 10000, -2000, -13500]
ylocs = [-5000, -4500, -5000, -5000, -5000, -5000, -5000, -5000, 8000, 8000, 8000, 8000, 8000, 8000]
zlocs = [ 2700, 2700, 2700, 2700, 2700, 2700, 2700, 2700, 2700, 2700, 2700, 2700, 2700, 2700]
names = ['MWET_01', 'MWET_02', 'MWET_03', 'MWET_04', 'MWET_05', 'MWET_06', 'MWET_07a', 'MWET_07b', 'MWET_08', 'MWET_09', 'MWET_10', 'MWET_11',
'MWET_12', 'MWET_13']
user = 'ML'
det_exposure_time(t,t)
assert len(xlocs) == len(names), f'Number of X coordinates ({len(xlocs)}) is different from number of samples ({len(names)})'
# Detectors, motors:
dets = [pil300KW, pil900KW, pil1M]
waxs_range = [0, 2, 19.5, 21.5, 39, 41]
ypos = [-500, 500, 3]
for wa in waxs_range[::-1]:
yield from bps.mv(waxs, wa)
for sam, x, y, z in zip(names, xlocs, ylocs, zlocs):
yield from bps.mv(piezo.x, x)
yield from bps.mv(piezo.y, y)
yield from bps.mv(piezo.z, z)
name_fmt = '{sam}_stats1_14.0keV_sdd8.3m_wa{waxs}'
sample_name = name_fmt.format(sam=sam, waxs='%2.1f'%wa)
sample_id(user_name=user, sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.rel_scan(dets, piezo.y, *ypos)
yield from bps.sleep(2)
sample_id(user_name='test', sample_name='test')
det_exposure_time(0.3, 0.3)
def saxs_2p4keV_Matt_2021_2(t=1):
xlocs = [44000, 35000, 21500, 11000, -1000, -12000, -23000, -36000, 44000, 32500, 21000, 10000, -2000, -13500]
ylocs = [-5000, -4500, -5000, -5000, -5000, -5000, -5000, -5000, 8000, 8000, 8000, 8000, 8000, 8000]
zlocs = [ 2700, 2700, 2700, 2700, 2700, 2700, 2700, 2700, 2700, 2700, 2700, 2700, 2700, 2700]
names = ['MWET_01', 'MWET_02', 'MWET_03', 'MWET_04', 'MWET_05', 'MWET_06', 'MWET_07a', 'MWET_07b', 'MWET_08', 'MWET_09', 'MWET_10', 'MWET_11',
'MWET_12', 'MWET_13']
user = 'ML'
det_exposure_time(t,t)
assert len(xlocs) == len(names), f'Number of X coordinates ({len(xlocs)}) is different from number of samples ({len(names)})'
# Detectors, motors:
dets = [pil300KW, pil900KW, pil1M]
waxs_range = [0, 2, 19.5, 21.5, 39, 41]
ypos = [-500, 500, 3]
for wa in waxs_range[::-1]:
yield from bps.mv(waxs, wa)
for sam, x, y, z in zip(names, xlocs, ylocs, zlocs):
yield from bps.mv(piezo.x, x)
yield from bps.mv(piezo.y, y)
yield from bps.mv(piezo.z, z)
name_fmt = '{sam}_stats1_2.45keV_sdd3.0m_wa{waxs}'
sample_name = name_fmt.format(sam=sam, waxs='%2.1f'%wa)
sample_id(user_name=user, sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.rel_scan(dets, piezo.y, *ypos)
yield from bps.sleep(2)
sample_id(user_name='test', sample_name='test')
det_exposure_time(0.3, 0.3)
def nexafs_Sedge_Katz_2021_2(t=1):
dets = [pil300KW, pil900KW]
x_piezo = [ 32500]
y_piezo = [ 8000]
z_piezo = [ 2700]
names = ['MWET_09']
energies = np.linspace(2450, 2530, 81)
waxs_arc = [59]
for name, xs, ys, zs in zip(names, x_piezo, y_piezo, z_piezo):
yield from bps.mv(piezo.x, xs)
yield from bps.mv(piezo.y, ys)
yield from bps.mv(piezo.z, zs)
yield from bps.mv(waxs, waxs_arc[0])
det_exposure_time(t,t)
name_fmt = 'nexafs_{sample}_{energy}eV_wa59_bpm{xbpm}'
for e in energies:
yield from bps.mv(energy, e)
yield from bps.sleep(2)
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, xbpm = '%4.3f'%bpm)
sample_id(user_name='GF', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
yield from bps.mv(energy, 2500)
yield from bps.mv(energy, 2480)
yield from bps.mv(energy, 2450)
def nexafs_Sedge_Katz_2021_3(t=1):
dets = [pil300KW, pil900KW]
x_piezo = [ 32500]
y_piezo = [ 8000]
z_piezo = [ 2700]
names = ['MWET_09']
energies = np.linspace(2450, 2530, 81)
waxs_arc = [59]
for name, xs, ys, zs in zip(names, x_piezo, y_piezo, z_piezo):
yield from bps.mv(piezo.x, xs)
yield from bps.mv(piezo.y, ys)
yield from bps.mv(piezo.z, zs)
yield from bps.mv(waxs, waxs_arc[0])
det_exposure_time(t,t)
name_fmt = 'nexafs_{sample}_{energy}eV_wa59_bpm{xbpm}'
for e in energies:
yield from bps.mv(energy, e)
yield from bps.sleep(3)
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, xbpm = '%4.3f'%bpm)
sample_id(user_name='GF', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
yield from bps.mv(energy, 2500)
yield from bps.sleep(3)
yield from bps.mv(energy, 2480)
yield from bps.sleep(3)
yield from bps.mv(energy, 2450)
yield from bps.sleep(3)
def nexafs_Sedge_Katz_2021_2(t=1):
dets = [pil900KW]
# names = ['sample1', 'sample2', 'sample3', 'sample4', 'sample5']
# x_piezo = [ 54000, 38000, 18000, 3000, -17000]
# inc_angl = [ -0.6074, -0.4144, 0.185, -0.1982, -2.4638]
# y_piezo = [ 4647.88, 5180.45, 4970.04, 4909.86, 5090.90]
names = [ 'sample4_redo']
x_piezo = [ 3200]
inc_angl = [ -0.1982]
y_piezo = [ 4890.86]
energies = 7 + np.asarray(np.arange(2445, 2470, 5).tolist() + np.arange(2470, 2480, 0.25).tolist() + np.arange(2480, 2490, 1).tolist()+ np.arange(2490, 2501, 5).tolist())
waxs_arc = 60
angle_mes = [0.1]
for name, xs, ys, aiss in zip(names, x_piezo, y_piezo, inc_angl):
yield from bps.mv(piezo.x, xs)
yield from bps.mv(piezo.y, ys)
yield from bps.mv(piezo.th, aiss)
yield from bps.mv(waxs, 59)
det_exposure_time(t,t)
for angle_me in angle_mes:
yield from bps.mv(piezo.th, aiss + angle_me)
name_fmt = 'nexafs_{sample}_{energy}eV_wa60_bpm{xbpm}_ai{ai}'
for e in energies:
yield from bps.mv(energy, e)
yield from bps.sleep(2)
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, xbpm = '%4.3f'%bpm, ai='%1.2f'%angle_me)
sample_id(user_name='GF', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
yield from bps.mv(energy, 2490)
yield from bps.mv(energy, 2470)
yield from bps.mv(energy, 2450)
def nexafs_Caedge_David(t=1):
dets = [pil900KW]
# names = ['sample1', 'sample2', 'sample3', 'sample4', 'sample5']
# x_piezo = [ 54000, 38000, 18000, 3000, -17000]
# inc_angl = [ -0.6074, -0.4144, 0.185, -0.1982, -2.4638]
# y_piezo = 40 + np.asarray([ 4647.88, 5180.45, 4970.04, 4909.86, 5090.90])
names = ['sample3', 'sample4', 'sample5']
x_piezo = [ 18000, 3000, -17000]
inc_angl = [ 0.185, -0.1982, -2.4638]
y_piezo = 40 + np.asarray([ 4970.04, 4909.86, 5090.90])
# names = [ 'sample2', 'sample3', 'sample4', 'sample5']
# x_piezo = [ 38000, 18000, 3000, -17000]
# inc_angl = [ -0.4144, 0.185, -0.1982, -2.4638]
# y_piezo = 40 + np.asarray([ 5180.45, 4970.04, 4909.86, 5090.90])
# energies = np.linspace(4030, 4110, 81)
energies = np.asarray(np.arange(4020, 4035, 5).tolist() + np.arange(4035, 4042, 2).tolist() + np.arange(4042, 4070, 0.5).tolist() + np.arange(4070, 4080, 2).tolist() + np.arange(4080, 4130, 5).tolist())
for name, xs, ys, aiss in zip(names, x_piezo, y_piezo, inc_angl):
yield from bps.mv(piezo.x, xs)
yield from bps.mv(piezo.y, ys)
yield from bps.mv(piezo.th, aiss)
yield from bps.mv(waxs, 59)
det_exposure_time(t,t)
angle_mes = [0.1]
for angle_me in angle_mes:
yield from bps.mv(piezo.th, aiss + angle_me)
name_fmt = 'nexafs_{sample}_{energy}eV_wa60_ai{ai}_bpm{xbpm}'
for e in energies:
yield from bps.mv(energy, e)
yield from bps.sleep(2)
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, ai='%1.2f'%angle_me , xbpm = '%4.3f'%bpm)
sample_id(user_name='GF', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
yield from bps.mv(energy, 4100)
yield from bps.mv(energy, 4080)
yield from bps.mv(energy, 4050)
def nexafs_Caedge_Matt(t=0.5, name='test'):
yield from bps.mv(waxs, 59)
dets = [pil900KW]
energies = np.asarray(np.arange(4020, 4035, 5).tolist() + np.arange(4035, 4042, 2).tolist() + np.arange(4042, 4070, 0.5).tolist() + np.arange(4070, 4080, 2).tolist() + np.arange(4080, 4140, 5).tolist())
samples = ['mwet_01', 'mwet_02', 'mwet_03', 'mwet_04', 'mwet_05', 'mwet_06', 'mwet_07', 'mwet_08', 'mwet_09', 'mwet_10', 'mwet_11']
x_list = [ 46000, 35000, 22500, 11000, 0, -12000, -24000, -35000, 24000, 12000, 0]
y_list = [ -8500, -8500, -8500, -8500, -8500, -8500, -8500, -8500, 4500, 4500, 4500]
for name, x, y in zip(samples, x_list, y_list):
yield from bps.mv(piezo.x, x)
yield from bps.mv(piezo.y, y)
det_exposure_time(t,t)
name_fmt = 'nexafs_{sample}_{energy}eV_wa60_bpm{xbpm}'
for e in energies:
yield from bps.mv(energy, e)
yield from bps.sleep(2)
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, xbpm = '%4.3f'%bpm)
sample_id(user_name='GS', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
yield from bps.mv(energy, 4110)
yield from bps.sleep(2)
yield from bps.mv(energy, 4070)
yield from bps.sleep(2)
yield from bps.mv(energy, 4030)
yield from bps.sleep(2)
sample_id(user_name='test', sample_name='test')
def saxs_prep_multisample(t=1):
dets = [pil900KW, pil1M]
energies = [4030, 4040, 4050, 4055, 4075]
det_exposure_time(t,t)
waxs_range = [0, 2, 19.5, 21.5, 39, 41]
det_exposure_time(t,t)
xpos = [-500, 500, 3]
for wa in waxs_range[::-1]:
yield from bps.mv(waxs, wa)
samples = ['mwet_01', 'mwet_02', 'mwet_03', 'mwet_04', 'mwet_05', 'mwet_06', 'mwet_07', 'mwet_08', 'mwet_09', 'mwet_10', 'mwet_11']
x_list = [ 46000, 35000, 22500, 11000, 0, -12000, -24000, -35000, 24000, 12000, 0]
y_list = 100+ np.asarray([ -8500, -8500, -8500, -8500, -8500, -8500, -8500, -8500, 4500, 4500, 4500])
for name, x, y in zip(samples, x_list, y_list):
yield from bps.mv(piezo.x, x)
yield from bps.mv(piezo.y, y)
for k, e in enumerate(energies):
yield from bps.mv(energy, e)
yield from bps.sleep(2)
yield from bps.mv(piezo.y, y + k * 100)
name_fmt = '{sample}_{energy}eV_xbpm{xbpm}_wa{wa}'
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name,energy='%6.2f'%e, xbpm = '%3.1f'%bpm, wa='%2.1f'%wa)
sample_id(user_name='OS', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.rel_scan(dets, piezo.x, *xpos)
yield from bps.mv(energy, 4050)
yield from bps.mv(energy, 4030)
def nexafs_Caedge_Katz_2021_3(t=1):
dets = [pil900KW]
# names = ['ref_calcite', 'ref_cacooh', 'calcium_01', 'calcium_02', 'calcium_03', 'calcium_04', 'calcium_05', 'calcium_06', 'calcium_07', 'calcium_08', 'calcium_09',
# 'calcium_10', 'calcium_11', 'calcium_12', 'calcium_13','calcium_14']
# xs = [43000, 33000, 21000, 9500, -1000, -13000, -25000, -36000, 45000, 35000, 29500, 24000, 14000, 2000, -10500, -24000]
# ys = [ -500, -500, -500, -500, -500, -500, -500, -1500, 2000, 2000, 1500, 1500, 1500, 1500, 1500, 1500]
# ys_hexa = [-5, -5, -5, -5, -5, -5, -5, -5, 5, 5, 5, 5, 5, 5, 5, 5]
names = ['calcium_13']
xs = [43000]
ys = [ -500]
ys_hexa = [-5]
assert len(xs) == len(names), f'Number of X coordinates ({len(xs)}) is different from number of samples ({len(names)})'
assert len(xs) == len(ys), f'Number of X coordinates ({len(xs)}) is different from number of samples ({len(ys)})'
assert len(xs) == len(ys_hexa), f'Number of X coordinates ({len(xs)}) is different from number of samples ({len(ys_hexa)})'
energies = np.asarray(np.arange(4020, 4035, 5).tolist() + np.arange(4035, 4042, 2).tolist() + np.arange(4042, 4070, 0.5).tolist() + np.arange(4070, 4080, 2).tolist() + np.arange(4080, 4140, 5).tolist())
waxs_arc = [50]
for x, y, y_hexa, name in zip(xs, ys, ys_hexa, names):
yield from bps.mv(piezo.x, x)
yield from bps.mv(piezo.y, y)
yield from bps.mv(stage.y, y_hexa)
yield from bps.mv(waxs, waxs_arc[0])
det_exposure_time(t,t)
name_fmt = 'nexafs_{sample}_{energy}eV_wa50_bpm{xbpm}'
yss = np.linspace(y, y + 500, 80)
xss = np.linspace(x, x, 1)
yss, xss = np.meshgrid(yss, xss)
yss = yss.ravel()
xss = xss.ravel()
for e, xsss, ysss in zip(energies, xss, yss):
yield from bps.mv(energy, e)
yield from bps.sleep(3)
yield from bps.mv(piezo.y, ysss)
yield from bps.mv(piezo.x, xsss)
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, xbpm = '%4.3f'%bpm)
sample_id(user_name='GF', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
yield from bps.mv(energy, 4100)
yield from bps.sleep(3)
yield from bps.mv(energy, 4080)
yield from bps.sleep(3)
yield from bps.mv(energy, 4050)
yield from bps.sleep(3)
def swaxs_Caedge_Katz_2021_3(t=1):
dets = [pil900KW, pil1M]
energies = [4030, 4040, 4050, 4055, 4075]
det_exposure_time(t,t)
waxs_range = [0, 2, 20, 22, 40, 42]
det_exposure_time(t,t)
xpos = [-500, 500, 3]
# names = ['ref_calcite', 'ref_cacooh', 'calcium_01', 'calcium_02', 'calcium_03', 'calcium_04', 'calcium_05', 'calcium_06', 'calcium_07', 'calcium_08', 'calcium_09',
# 'calcium_10', 'calcium_11', 'calcium_12', 'calcium_13','calcium_14']
# xs = 300 + np.asarray([43000, 33000, 21000, 9500, -1000, -13000, -25000, -36000, 45000, 35000, 29500, 24000, 14000, 2000, -10500, -24000])
# ys = [ -500, -500, -500, -500, -500, -500, -500, -1500, 2000, 2000, 1500, 1500, 1500, 1500, 1500, 1500]
# ys_hexa = [-5, -5, -5, -5, -5, -5, -5, -5, 5, 5, 5, 5, 5, 5, 5, 5]
names = ['calcium_13']
xs = 300 + np.asarray([43000])
ys = [ -500]
ys_hexa = [-5]
assert len(xs) == len(names), f'Number of X coordinates ({len(xs)}) is different from number of samples ({len(names)})'
assert len(xs) == len(ys), f'Number of X coordinates ({len(xs)}) is different from number of samples ({len(ys)})'
assert len(xs) == len(ys_hexa), f'Number of X coordinates ({len(xs)}) is different from number of samples ({len(ys_hexa)})'
for wa in waxs_range[::-1]:
yield from bps.mv(waxs, wa)
for x, y, y_hexa, name in zip(xs, ys, ys_hexa, names):
yield from bps.mv(piezo.x, x)
yield from bps.mv(piezo.y, y)
yield from bps.mv(stage.y, y_hexa)
for k, e in enumerate(energies):
yield from bps.mv(energy, e)
yield from bps.sleep(3)
yield from bps.mv(piezo.y, y + k * 100)
name_fmt = '{sample}_{energy}eV_sdd1.7m_xbpm{xbpm}_wa{wa}'
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name,energy='%6.2f'%e, xbpm = '%3.1f'%bpm, wa='%2.1f'%wa)
sample_id(user_name='OS', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.rel_scan(dets, piezo.x, *xpos)
yield from bps.mv(energy, 4050)
yield from bps.sleep(3)
yield from bps.mv(energy, 4030)
yield from bps.sleep(3)
def night_katz(t=1):
proposal_id('2021_3', '307898_Katz')
yield from nexafs_Caedge_Katz_2021_3(t=t)
proposal_id('2021_3', '307898_Katz2')
yield from swaxs_Caedge_Katz_2021_3(t=t)
def nexafs_Agedge_Katz_2021_3(t=1):
dets = [pil900KW]
names = ['silver_01', 'silver_02', 'silver_03', 'silver_04', 'silver_05', 'silver_06', 'silver_07', 'silver_08', 'silver_09', 'silver_10']
xs = [33400, 18000, 6000, -4000, -14000,-27000, 30000, 20000, 5000, -9000]
ys = [ -500, -500, -500, -500, -500, -500, 1500, 1500, 1500, 1500]
ys_hexa = [-5, -5, -5, -5, -5, -5, 5, 5, 5, 5]
assert len(xs) == len(names), f'Number of X coordinates ({len(xs)}) is different from number of samples ({len(names)})'
assert len(xs) == len(ys), f'Number of X coordinates ({len(xs)}) is different from number of samples ({len(ys)})'
assert len(xs) == len(ys_hexa), f'Number of X coordinates ({len(xs)}) is different from number of samples ({len(ys_hexa)})'
energies = np.asarray(np.arange(3300, 3340, 5).tolist() + np.arange(3340, 3350, 2).tolist() + np.arange(3350, 3390, 1).tolist() + np.arange(3390, 3400, 2).tolist() + np.arange(3400, 3450, 5).tolist())
waxs_arc = [40]
for x, y, y_hexa, name in zip(xs, ys, ys_hexa, names):
yield from bps.mv(piezo.x, x)
yield from bps.mv(piezo.y, y)
yield from bps.mv(stage.y, y_hexa)
yield from bps.mv(waxs, waxs_arc[0])
det_exposure_time(t,t)
name_fmt = 'nexafs_{sample}_{energy}eV_wa50_bpm{xbpm}'
yss = np.linspace(y, y + 500, 68)
xss = np.linspace(x, x, 1)
yss, xss = np.meshgrid(yss, xss)
yss = yss.ravel()
xss = xss.ravel()
for e, xsss, ysss in zip(energies, xss, yss):
yield from bps.mv(energy, e)
yield from bps.sleep(3)
yield from bps.mv(piezo.y, ysss)
yield from bps.mv(piezo.x, xsss)
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, xbpm = '%4.3f'%bpm)
sample_id(user_name='GF', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
yield from bps.mv(energy, 3410)
yield from bps.sleep(3)
yield from bps.mv(energy, 3370)
yield from bps.sleep(3)
yield from bps.mv(energy, 3320)
yield from bps.sleep(3)
def swaxs_Agedge_Katz_2021_3(t=1):
dets = [pil900KW, pil1M]
energies = [3300, 3350, 3357, 3367, 3400, 3430]
det_exposure_time(t,t)
waxs_range = [0, 20, 40]
det_exposure_time(t,t)
xpos = [-500, 500, 3]
names = ['silver_01', 'silver_02', 'silver_03', 'silver_04', 'silver_05', 'silver_06', 'silver_ref', 'silver_07', 'silver_08', 'silver_09', 'silver_10']
xs = [33400, 18000, 6000, -4000, -14000,-27000, 43000, 30000, 20000, 5000, -9000]
ys = [ -500, -500, -500, -500, -500, -500, 1500, 1500, 1500, 1500, 1500]
ys_hexa = [-5, -5, -5, -5, -5, -5, 5, 5, 5, 5, 5]
assert len(xs) == len(names), f'Number of X coordinates ({len(xs)}) is different from number of samples ({len(names)})'
assert len(xs) == len(ys), f'Number of X coordinates ({len(xs)}) is different from number of samples ({len(ys)})'
assert len(xs) == len(ys_hexa), f'Number of X coordinates ({len(xs)}) is different from number of samples ({len(ys_hexa)})'
for wa in waxs_range[::-1]:
if wa == 42:
dets = [pil1M]
yield from bps.mv(GV7.open_cmd, 1 )
yield from bps.mv(att2_10.open_cmd, 1)
yield from bps.mv(att2_11.open_cmd, 1)
else:
dets = [pil900KW]
yield from bps.mv(GV7.close_cmd, 1 )
yield from bps.mv(att2_10.close_cmd, 1)
yield from bps.mv(att2_11.close_cmd, 1)
yield from bps.mv(att2_9.open_cmd, 1)
yield from bps.mv(waxs, wa)
for x, y, y_hexa, name in zip(xs, ys, ys_hexa, names):
yield from bps.mv(piezo.x, x)
yield from bps.mv(piezo.y, y)
yield from bps.mv(stage.y, y_hexa)
for k, e in enumerate(energies):
yield from bps.mv(energy, e)
yield from bps.sleep(3)
yield from bps.mv(piezo.y, y + k * 100)
name_fmt = '{sample}_{energy}eV_sdd6.0m_xbpm{xbpm}_wa{wa}'
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name,energy='%6.2f'%e, xbpm = '%3.1f'%bpm, wa='%2.1f'%wa)
sample_id(user_name='OS', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.rel_scan(dets, piezo.x, *xpos)
yield from bps.mv(energy, 3400)
yield from bps.sleep(3)
yield from bps.mv(energy, 3350)
yield from bps.sleep(3)
yield from bps.mv(energy, 3300)
yield from bps.sleep(3)
def alignement_SVA_(t=1):
global names, x_hexa, y_hexa, incident_angles, y_hexa_aligned
names = ['sample1', 'sample4']
x_hexa = [ 16, 22]
y_hexa = [0.6, 0.8]
incident_angles = []
y_hexa_aligned = []
# ai01 = 3.1
# ai02 = 3.1
for name, xs_hexa, ys_hexa in zip(names, x_hexa, y_hexa):
yield from bps.mv(stage.x, xs_hexa)
yield from bps.mv(stage.y, ys_hexa)
yield from alignement_special_hex(angle = 0.45)
incident_angles = incident_angles + [stage.th.position]
y_hexa_aligned = y_hexa_aligned + [stage.y.position]
def nexafs_Sedge_SVA_Katz_2021_3(t=1):
humidity = '%3.2f'%readHumidity(verbosity=0)
dets = [pil900KW]
energies = 7 + np.asarray(np.arange(2445, 2470, 5).tolist() + np.arange(2470, 2480, 0.25).tolist() + np.arange(2480, 2490, 1).tolist()+ np.arange(2490, 2501, 5).tolist())
waxs_arc = 30
angle_mes = [0.7]
for name, xs, aiss, ys in zip(['kapton'], x_hexa, incident_angles, y_hexa_aligned):
# yield from bps.mv(stage.x, xs)
# yield from bps.mv(stage.y, ys)
# yield from bps.mv(stage.th, aiss)
yield from bps.mv(waxs, waxs_arc)
det_exposure_time(t,t)
for angle_me in angle_mes:
# yield from bps.mv(stage.th, aiss + angle_me)
name_fmt = 'nexafs_{sample}_{energy}eV_wa40_bpm{xbpm}_ai{ai}_hum{hum}'
for e in energies:
yield from bps.mv(energy, e)
yield from bps.sleep(2)
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, xbpm = '%4.3f'%bpm, ai='%1.2f'%angle_me, hum = humidity)
sample_id(user_name='GF', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
yield from bps.mv(energy, 2490)
yield from bps.mv(energy, 2470)
yield from bps.mv(energy, 2450)
# # Measure at flow 100 percent
setDryFlow(0)
setWetFlow(5)
yield from bps.sleep(600)
humidity = '%3.2f'%readHumidity(verbosity=0)
dets = [pil900KW]
energies = 7 + np.asarray(np.arange(2445, 2470, 5).tolist() + np.arange(2470, 2480, 0.25).tolist() + np.arange(2480, 2490, 1).tolist()+ np.arange(2490, 2501, 5).tolist())
waxs_arc = 40
angle_mes = [0.1]
for name, xs, aiss, ys in zip(names, x_hexa, incident_angles, y_hexa_aligned):
yield from bps.mv(stage.x, xs)
yield from bps.mv(stage.y, ys)
yield from bps.mv(stage.th, aiss)
yield from bps.mv(waxs, 40)
det_exposure_time(t,t)
for angle_me in angle_mes:
yield from bps.mv(stage.th, aiss + angle_me)
name_fmt = 'nexafs_{sample}_{energy}eV_wa40_bpm{xbpm}_ai{ai}_hum{hum}'
for e in energies:
yield from bps.mv(energy, e)
yield from bps.sleep(2)
bpm = xbpm2.sumX.value
sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, xbpm = '%4.3f'%bpm, ai='%1.2f'%angle_me, hum = humidity)
sample_id(user_name='GF', sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.count(dets, num=1)
yield from bps.mv(energy, 2490)
yield from bps.mv(energy, 2470)
yield from bps.mv(energy, 2450)
setDryFlow(0)
setWetFlow(0)
def saxs_2021_3(t=1):
xlocs = [39500, 28000, 16000, 6000, -6000, -18000, -29000, -41000, 42000, 30000]
ylocs = [-5200, -5200, -5200, -5200, -5200, -5200, -5200, -5200, 7200, 7200]
zlocs = [ 2700, 2700, 2700, 2700, 2700, 2700, 2700, 2700, 2700, 2700]
names = ['sample_01', 'sample_02', 'sample_03', 'sample_04', 'sample_05', 'sample_06', 'sample_07', 'sample_08', 'sample_09', 'sample_10']
user = 'ML'
det_exposure_time(t,t)
assert len(xlocs) == len(names), f'Number of X coordinates ({len(xlocs)}) is different from number of samples ({len(names)})'
# Detectors, motors:
dets = [pil1M]
waxs_range = [30]
ypos = [-200, 200, 3]
for wa in waxs_range[::-1]:
yield from bps.mv(waxs, wa)
for sam, x, y, z in zip(names, xlocs, ylocs, zlocs):
yield from bps.mv(piezo.x, x)
yield from bps.mv(piezo.y, y)
yield from bps.mv(piezo.z, z)
name_fmt = '{sam}_16.1keV_sdd8.3m_wa{waxs}'
sample_name = name_fmt.format(sam=sam, waxs='%2.1f'%wa)
sample_id(user_name=user, sample_name=sample_name)
print(f'\n\t=== Sample: {sample_name} ===\n')
yield from bp.rel_scan(dets, piezo.y, *ypos)
yield from bps.sleep(2)
sample_id(user_name='test', sample_name='test')
det_exposure_time(0.3, 0.3)
| 1.984375 | 2 |
cornflow-dags/DAG/update_all_schemas.py | baobabsoluciones/cornflow | 1 | 12778052 |
# General imports
import importlib as il
import os
import sys
# Partial imports
from airflow.operators.python import PythonOperator
from airflow.models import Variable
from airflow import DAG
from airflow.utils.db import create_session
from datetime import datetime, timedelta
from typing import List
# Import from cornflow environment
from cornflow_client import ApplicationCore
default_args = {
"owner": "baobab",
"depends_on_past": False,
"start_date": datetime(2020, 2, 1),
"email": [""],
"email_on_failure": False,
"email_on_retry": False,
"retries": -1,
"retry_delay": timedelta(minutes=1),
"schedule_interval": "@hourly",
"catchup": False,
}
schemas = ["instance", "solution", "config"]
def get_new_apps() -> List[ApplicationCore]:
# we need to run this to be sure to import modules
import_dags()
new_apps = ApplicationCore.__subclasses__()
return [app_class() for app_class in new_apps]
def import_dags():
sys.path.append(os.path.dirname(__file__))
_dir = os.path.dirname(__file__)
print("looking for apps in dir={}".format(_dir))
files = os.listdir(_dir)
print("Files are: {}".format(files))
# we go file by file and try to import it if matches the filters
for dag_module in files:
filename, ext = os.path.splitext(dag_module)
if ext not in [".py", ""]:
continue
if filename in ["activate_apps"]:
continue
try:
_import_file(filename)
except Exception as e:
continue
def _import_file(filename):
return il.import_module(filename)
def get_schemas_dag_file(_module):
contents = {k: getattr(_module, k) for k in schemas}
return contents
def get_all_schemas():
apps = get_new_apps()
if len(apps):
print("Found the following apps: {}".format([app.name for app in apps]))
else:
print("No apps were found to update")
schemas_new = {app.name: app.get_schemas() for app in apps}
print("Found the following new apps: {}".format([app.name for app in apps]))
return schemas_new
def update_schemas(**kwargs):
schemas = get_all_schemas()
# we update all schemas that we found:
for key, value in schemas.items():
Variable.set(key=key, value=value, serialize_json=True)
# now we clean the variables that do not exist anymore:
with create_session() as session:
current_vars = set(var.key for var in session.query(Variable))
apps_to_delete = current_vars - schemas.keys()
print("About to delete old apps: {}".format(apps_to_delete))
for _var in apps_to_delete:
Variable.delete(_var, session)
dag = DAG(
"update_all_schemas", default_args=default_args, catchup=False, tags=["internal"]
)
update_schema2 = PythonOperator(
task_id="update_all_schemas",
provide_context=True,
python_callable=update_schemas,
dag=dag,
)
if __name__ == "__main__":
update_schemas()
| 2.296875 | 2 |
app/language_r/r_lang_obj.py | jwons/raas | 0 | 12778053 |
<gh_stars>0
import os
import subprocess
import json
import docker
import re
from glob import glob
from app.languageinterface import LanguageInterface
from app.languageinterface import StaticAnalysisResults
from app.language_r.preproc_helpers import all_preproc
from shutil import copy
# Debugging
from celery.contrib import rdb
class RLang(LanguageInterface):
def __init__(self):
self.dataset_dir = None
@staticmethod
def build_docker_package_install(package, version):
"""Outputs formatted dockerfile command to install a specific version
of an R package into a docker image
Parameters
----------
package : string
Name of the R package to be installed
version : string
Version number of the desired package
"""
return 'RUN R -e \"require(\'devtools\'); {install_version(\'' + package + \
'\', version=\'' + version + '\', repos=\'http://cran.rstudio.com\')}"\n'
@staticmethod
def build_docker_package_install_no_version(package):
"""Outputs formatted dockerfile command to install a specific version
of an R package into a docker image
Parameters
----------
package : string
"""
return 'if(!(\'' + package + '\'' \
'%in% rownames(installed.packages()))){install.packages(\'' + package + '\')}\n' + \
'if(!(\'' + package + '\'' \
'%in% rownames(installed.packages()))){BiocManager::install(\'' + package + \
'\', update = F)}\n'
def script_analysis(self, preprocess, dataverse_key='', data_folder='', run_instr='', user_pkg=''):
# This variable controls whether the container is built despite the existence
# of errors detected in the script
build_with_errors = False
dockerfile_dir = self.get_dockerfile_dir(data_folder)
self.dataset_dir = os.path.join(dockerfile_dir, os.listdir(dockerfile_dir)[0])
original_scripts_dir = os.path.join(dockerfile_dir, "__original_scripts__")
static_analysis_dir = os.path.join(dockerfile_dir, "static_analysis")
# ---------- Preprocessing ------------
src_ignore = []
if preprocess:
r_files = [y for x in os.walk(os.path.join(self.dataset_dir)) for y in glob(os.path.join(x[0], '*.R'))]
if not os.path.exists(original_scripts_dir):
os.makedirs(original_scripts_dir)
for r_file in r_files:
r_file = os.path.split(r_file)
sourced_files = all_preproc(r_file[1], r_file[0])
copy(os.path.join(r_file[0], r_file[1]), os.path.join(original_scripts_dir, r_file[1]))
src_ignore.append(os.path.join("/__original_scripts__", r_file[1]))
src_ignore = src_ignore + sourced_files
os.remove(os.path.join(r_file[0], r_file[1]))
pre_files = [y for x in os.walk(os.path.join(self.dataset_dir)) for y in
glob(os.path.join(x[0], '*__preproc__.R'))]
for pre_file in pre_files:
pre_file = os.path.split(pre_file)
filename = re.split(r'__preproc__\.[rR]$', pre_file[1])[0]
os.rename(os.path.join(pre_file[0], pre_file[1]), os.path.join(pre_file[0], filename + ".R"))
# ---------- STATIC ANALYSIS ----------
subprocess.run(['bash', 'app/language_r/static_analysis.sh', self.dataset_dir, static_analysis_dir])
# ---------- PARSING STATIC ANALYSIS ----------
# assemble a set of packages used and get system requirements
sys_reqs = []
used_packages = []
with open(os.path.join(static_analysis_dir, "static_analysis.json")) as json_file:
data = json.load(json_file)
if not build_with_errors:
if data['errors']:
return {'current': 100, 'total': 100,
'status': ['Static analysis found errors in script.', data['errors']]}
used_packages = data['packages']
sys_reqs = data['sys_deps']
sys_reqs.append("libjpeg-dev")
return StaticAnalysisResults(lang_packages=used_packages, sys_libs=sys_reqs, lang_specific={"src_ignore":
src_ignore})
def build_docker_file(self, dir_name, static_results, code_btw, run_instr):
ext_pkgs = code_btw
# TODO: equivalent for install_instructions, is there a difference for R/Python?
special_packages = None
special_install = None
install_instructions = ''
if install_instructions != '':
special_install = json.loads(install_instructions)
special_packages = [special_install["packages"][key][0]
for key in special_install["packages"].keys()]
docker_file_dir = self.get_dockerfile_dir(dir_name)
if not os.path.exists(docker_file_dir):
return {'current': 100, 'total': 100, 'status': 'Directory missing.'}
if len(static_results.lang_specific["src_ignore"]) > 0:
with open(os.path.join(docker_file_dir, '.srcignore'), 'w') as src_ignore_file:
for line in static_results.lang_specific["src_ignore"]:
src_ignore_file.write(line + "\n")
src_ignore_file.write('\n')
with open(os.path.join(docker_file_dir, 'install__packages.R'), 'w') as install_packs:
install_packs.write('require(\'devtools\')\n')
install_packs.write('require(\'BiocManager\')\n')
# perform any pre-specified installs
if special_packages:
for key in special_install["packages"].keys():
instruction = special_install["packages"][key][1] + '"\n'
install_packs.write(instruction)
# install packages
docker_packages = list(set(static_results.lang_packages))
if docker_packages:
for package in docker_packages:
if special_packages and (package not in special_packages):
install_packs.write(self.build_docker_package_install_no_version(package))
if special_packages is None:
install_packs.write(self.build_docker_package_install_no_version(package))
with open(os.path.join(docker_file_dir, 'Dockerfile'), 'w') as new_docker:
new_docker.write('FROM rocker/tidyverse:latest\n')
# install system requirements
sysinstall = "RUN export DEBIAN_FRONTEND=noninteractive; apt-get -y update && apt-get install -y "
if len(static_results.sys_libs) != 0:
new_docker.write(sysinstall + ' '.join(static_results.sys_libs) + '\n')
# perform any pre-specified installs
if special_install:
if "sys-libs" in special_install.keys():
new_docker.write(sysinstall + ' '.join(special_install["sys-libs"]) + '\n')
# Install libraries
new_docker.write('COPY install__packages.R /home/rstudio/\n')
new_docker.write('RUN Rscript /home/rstudio/install__packages.R\n')
# These scripts will execute the analyses and collect provenance. Copy them to the
# Dockerfile directory first since files copied to the image cannot be outside it
copy("app/language_r/get_prov_for_doi.sh", docker_file_dir)
copy("app/language_r/get_dataset_provenance.R", docker_file_dir)
copy("app/language_r/create_report.R", docker_file_dir)
# Add the dataset to the container
new_docker.write('COPY . /home/rstudio/' + dir_name + '\n')
# Add permissions or the scripts will fail
new_docker.write('RUN chown -R rstudio:rstudio /home/rstudio/\n')
# Execute analysis and collect provenance
new_docker.write('RUN /home/rstudio/' + dir_name + '/get_prov_for_doi.sh /home/rstudio/' + dir_name + \
'/' + os.path.basename(self.dataset_dir) + ' ' + '/home/rstudio/' + \
dir_name + '/get_dataset_provenance.R' + '\n')
# Collect installed package information for the report
new_docker.write("RUN Rscript /home/rstudio/" + dir_name + "/create_report.R /home/rstudio/" + dir_name +
"/prov_data \n")
def create_report(self, current_user_id, name, dir_name, time):
# ---------- Generate Report About Build Process ----------
# The report will have various information from the creation of the container
# for the user
# Reconstruct image name from user info
client = docker.from_env()
# to get report we need to run the container
container = client.containers.run(image=self.get_container_tag(current_user_id, name),
environment=["PASSWORD=<PASSWORD>"], detach=True)
# Grab the files from inside the container and the filter to just JSON files
report = json.loads(container.exec_run("cat /home/rstudio/report.json")[1].decode())
report["Additional Information"] = {}
report["Additional Information"]["Container Name"] = self.get_container_tag(current_user_id, name)
report["Additional Information"]["Build Time"] = time
# information from the container is no longer needed
container.kill()
return report
| 2.171875 | 2 |
results/informer_dataset_ftS_sl96_ll48_pl24_dm512_nh8_el2_dl1_df2048_atprob_fc5_ebtimeF_dtTrue_mxTrue_test_0/test.py | LeoYoung1996/Experiment | 0 | 12778054 |
<gh_stars>0
"""
@Time : 2021/12/15 17:23
@Author : Leo
@FileName: test.py
@SoftWare: PyCharm
@description:
"""
import numpy as np
a = np.load('true.npy')
b = np.load('pred.npy')
print(a[0])
print("-----------------------------------------------")
print(b[0])
| 1.960938 | 2 |
python/day1/main.py | kp42/aoc2020 | 0 | 12778055 |
<filename>python/day1/main.py
import functools
import random
def first_part(data):
current = None
used_lines = []
not_found = True
while not_found:
current = None
for line in data:
int_line = int(line)
if current is None and int_line not in used_lines:
current = int_line
used_lines.append(int_line)
elif current is not None:
res = current + int_line
if res == 2020:
print("First part:", current * int_line)
not_found = False
break
def second_part(data):
items = [0, 0, 0]
while sum_of_items(items) != 2020:
items = [random.choice(data), random.choice(data), random.choice(data)]
print("Second part:", functools.reduce(lambda a, b: int(a) * int(b), items))
def sum_of_items(items):
return functools.reduce(lambda a, b: int(a) + int(b), items)
with open("./input.txt") as file:
data = file.readlines()
first_part(data)
second_part(data)
| 3.765625 | 4 |
stimuli/Python/one_file_per_item/en/36_# math_for 18.py | ALFA-group/neural_program_comprehension | 6 | 12778056 |
<reponame>ALFA-group/neural_program_comprehension
start = 3
total = 0
for i in range(start, -1, -1):
total -= i*i
print(total)
| 2.984375 | 3 |
zerobin.py | bmintz/python-snippets | 2 | 12778057 |
<reponame>bmintz/python-snippets<filename>zerobin.py<gh_stars>1-10
#!/usr/bin/env python3
# encoding: utf-8
import asyncio
import base64
import json
import logging
import os
import zlib
import aiohttp
import sjcl
log = logging.getLogger(__name__)
def get_surrogate(cpt):
num = cpt - 0x010000
return ((num & (0x03ff << 10)) >> 10) + 0xd800, (num & 0x03ff) + 0xdc00
def get_surrogates(cpts):
for cpt in cpts:
if cpt < 0x10000:
yield cpt
else:
yield from get_surrogate(cpt)
def mangle_string(thing):
"""Mimics base64.js "convertUTF16ArrayToUTF8Array" """
wew = get_surrogates(map(ord, thing))
result = []
for n in wew:
if n < 0x80:
result.append(n)
elif n < 0x800:
result.append(0xc0 | (n >> 6))
result.append(0x80 | (n & 0x3f))
else:
result.append(0xe0 | ((n >> 12) & 0x0f))
result.append(0x80 | ((n >> 6) & 0x3f))
result.append(0x80 | (n & 0x3f))
return result
global_zerobin_lock = asyncio.Lock()
async def upload_zerobin(string_content, loop=None):
async with global_zerobin_lock:
if not loop:
loop = asyncio.get_event_loop()
encrypted_data, encoded_key = await loop.run_in_executor(
None, make_zerobin_payload, string_content)
payload = json.dumps(
encrypted_data, default=lambda x: x.decode('utf8'))
if len(payload) > 512000:
raise ValueError("Content too big")
tries = 0
async with aiohttp.ClientSession() as c:
while tries < 2:
tries += 1
async with c.post("https://zerobin.net/", data=dict(
data=payload,
expire="never",
burnafterreading="0",
opendiscussion="0",
syntaxcoloring="0")) as resp:
resp_content = await resp.text()
try:
resp_json = json.loads(resp_content)
except json.JSONDecodeError:
log.error(resp_content)
else:
if resp_json['status'] == 0:
log.info("To delete: %s" %
make_zerobin_delete_url(resp_json))
return make_zerobin_url(resp_json, encoded_key)
elif resp_json['status'] == 1: # Rate limited
await asyncio.sleep(10)
raise Exception("Failed uploading to zbin")
def make_zerobin_payload(string_content):
compress = zlib.compressobj(
0, zlib.DEFLATED, -zlib.MAX_WBITS, zlib.DEF_MEM_LEVEL, 4)
compressed_data = compress.compress(bytes(mangle_string(string_content)))
compressed_data += compress.flush()
encoded_data = base64.b64encode(compressed_data)
key = os.urandom(32)
encoded_key = base64.urlsafe_b64encode(key)
encrypted_data = sjcl.SJCL().encrypt(encoded_data, encoded_key)
return encrypted_data, encoded_key
def decrypt_zerobin_payload(encrypted_data, encoded_key):
b64_deflated = sjcl.SJCL().decrypt(encrypted_data, encoded_key)
deflated = base64.urlsafe_b64decode(b64_deflated)
inflater = zlib.decompressobj(-zlib.MAX_WBITS)
data = inflater.decompress(deflated)
return data
def make_zerobin_url(response_json, encoded_key):
return "https://zerobin.net?%s#%s" % (
response_json['id'], encoded_key.decode('utf8'))
def make_zerobin_delete_url(response_json):
return "https://zerobin.net?pasteid=%s&deletetoken=%s" % (
response_json['id'], response_json['deletetoken'])
if __name__ == '__main__':
loop = asyncio.get_event_loop()
print(loop.run_until_complete(upload_zerobin('hello \N{DANGO}')))
| 2.375 | 2 |
satemdata/feature/utils.py | energyandcleanair/satem_data | 0 | 12778058 |
<gh_stars>0
import datetime as dt
from . import DATE_FORMAT
def get_feature_date(feature):
return dt.datetime.strptime(feature['date'], DATE_FORMAT)
def clean_date(date):
if isinstance(date, dt.datetime):
return date
if isinstance(date, dt.date):
return dt.datetime.combine(date, dt.datetime.min.time())
else:
try:
date = dt.datetime.strptime(date, DATE_FORMAT)
except ValueError:
raise ValueError("date should be with format %s (actual value: %s)" % (DATE_FORMAT, date))
return date
def clean_feature(feature):
"""
Clean (and check) a feature
:param feature:
:return: cleaned feature
"""
if not isinstance(feature, dict):
raise ValueError("feature should be a dictionary")
# Ensure date is in the right format
feature['date'] = clean_date(feature['date'])
# Check required fields are here
required_fields = ["location_id", "date", "tropomi_no2"]
missing = set(required_fields) - set(feature.keys())
if missing:
raise ValueError("Missing fields in feature: %s"% (missing,))
return feature
| 2.90625 | 3 |
code/vae_train/vae_encoder.py | GT-SALT/Persuasive-Orderings | 12 | 12778059 |
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from vae_train.vae_utils import *
class Encoder(nn.Module):
def __init__(self, embedding_size=128, n_highway_layers=0, encoder_hidden_size=128, n_class=None, encoder_layers=1, bidirectional=False):
super(Encoder, self).__init__()
self.n_class = n_class
self.encoder_hidden_size = encoder_hidden_size
self.encoder_layers = encoder_layers
self.bidirectional = 2 if bidirectional else 1
if n_class is None:
self.lstm = nn.LSTM(input_size=embedding_size, hidden_size=encoder_hidden_size,
num_layers=encoder_layers, batch_first=True, bidirectional=bidirectional)
else:
self.lstm = nn.LSTM(input_size=embedding_size + n_class, hidden_size=encoder_hidden_size,
num_layers=encoder_layers, batch_first=True, bidirectional=bidirectional)
def forward(self, x, y=None, sent_len=None):
batch_size = x.shape[0]
seq_len = x.shape[1]
if self.n_class is not None and y is not None:
y = torch.cat([y]*seq_len, 1).view(batch_size,
seq_len, self.n_class)
x = torch.cat([x, y], dim=2)
output, (h_n, c_n) = self.lstm(x)
hidden = output[torch.arange(output.shape[0]), sent_len]
return hidden
| 2.609375 | 3 |
fun.py | OfficialZandrex/discord-zeebot | 0 | 12778060 |
<filename>fun.py
import discord
from discord.ext.commands import Bot
from discord.ext import commands
import asyncio
import json
import os
import chalk
import youtube_dl
import random
import io
import aiohttp
import time
import datetime
from datetime import datetime as dt
import logging
import re
from itertools import cycle
class Fun:
def __init__(self, bot):
self.client = bot
choices = [
"It is certain :8ball:",
"It is decidedly so :8ball:",
"Without a doubt :8ball:",
"Yes, definitely :8ball:",
"You may rely on it :8ball:",
"As I see it, yes :8ball:",
"Most likely :8ball:",
"Outlook good :8ball:",
"Yes :8ball:",
"Signs point to yes :8ball:",
"Reply hazy try again :8ball:",
"Ask again later :8ball:",
"Better not tell you now :8ball:",
"Cannot predict now :8ball:",
"Concentrate and ask again :8ball:",
"Don't count on it :8ball:",
"My reply is no :8ball:",
"My sources say no :8ball:",
"Outlook not so good :8ball:",
"Very doubtful :8ball:"
]
@commands.command(pass_context=True, no_pm=True,name="8", aliases=["8ball"])
async def _8ball(self, ctx):
embed = discord.Embed(
title="Magic 8 Ball says: ",
color=discord.Colour.blue()
)
embed.add_field(name="Hmmm", value=str(random.choice(choices)), inline=True)
await self.client.say(embed=embed)
# self.client.say("Magic 8 Ball says: " + random.choice(choices))
########### CoinFlip ###########
@commands.command(pass_context=True, no_pm=True)
async def coinflip(self, ctx):
embed = discord.Embed(
color=discord.Colour.blue()
)
coin = ["Heads :cd: ", "Tails :dvd: "]
# gif = discord.Embed(title='Fliping...', color=discord.Colour.blue())
# gif.set_image(url='https://thumbs.gfycat.com/KeenMeaslyCatbird-max-1mb.gif')
# await self.client.say(embed=gif)
# await asyncio.sleep(2.6)
embed.add_field(name="It is", value=str(random.choice(coin)))
await self.client.say(embed=embed)
########### Roll ###########
@commands.command(pass_context=True, no_pm=True)
async def roll(self, ctx):
r = random.randint(1,100)
embed = discord.Embed(
title="Rolling from 1 to 100 ",
color=discord.Colour.blue()
)
embed.add_field(name="You rolled ", value=":game_die: " + str(r) + "! :game_die:", inline=True)
await self.client.say(embed=embed)
########### Choose ###########
@commands.command(pass_context=True, no_pm=True)
async def choose(self, ctx, *, choices: str):
choicesList = choices.split(",")
chosen = choicesList[random.randrange(len(choicesList))]
await self.client.say(ctx.message.author.mention + ": I chose " + chosen + "")
########### Gif ###########
@commands.command(pass_context=True, no_pm=True)
async def gif(self, ctx, *keywords):
"""Retrieves first search result from giphy"""
if keywords:
keywords = "+".join(keywords)
# else:
# await bot.send_cmd_help(ctx)
# return
GIPHY_API_KEY = "<KEY>"
url = ("http://api.giphy.com/v1/gifs/random?&api_key={}&tag={}".format(GIPHY_API_KEY, keywords))
async with aiohttp.get(url) as r:
result = await r.json()
if r.status == 200:
if result["data"]:
await self.client.say(result["data"]["url"])
else:
await self.client.say("No results found.")
else:
await self.client.say("Error contacting the API")
def setup(bot):
bot.add_cog(Fun(bot))
| 2.71875 | 3 |
cellcutter/alpha/modeling/common.py | jiyuuchc/cellcutter | 5 | 12778061 |
<reponame>jiyuuchc/cellcutter<gh_stars>1-10
import tensorflow as tf
import tensorflow.keras.layers as layers
class BatchConv2D(tf.keras.layers.Layer):
def __init__(self, num_filters, size = 3, activation = 'relu', name=None, **kwargs):
super(BatchConv2D, self).__init__(name=name)
self._config_dict = {
'num_filters': num_filters,
'size': size,
'activation': activation,
'name': name,
}
self._config_dict.update(**kwargs)
conv_kwargs = {
'padding': 'same',
}
conv_kwargs.update(kwargs)
self._conv = layers.Conv2D(num_filters, size, name= 'conv', **conv_kwargs)
self._activation = layers.Activation(activation, name = activation)
self._batchnorm = layers.BatchNormalization(name='norm')
def get_config(self):
return self._config_dict
def call(self, inputs, **kwargs):
x = self._conv(inputs, **kwargs)
x = self._activation(x, **kwargs)
x = self._batchnorm(x, **kwargs)
return x
| 2.21875 | 2 |
gcp_airflow_foundations/source_class/gcs_source.py | badal-io/gcp-airflow-foundations | 3 | 12778062 |
<reponame>badal-io/gcp-airflow-foundations
from dataclasses import fields
from os import X_OK
from urllib.parse import urlparse
from dacite import from_dict
from dataclasses import dataclass
from airflow.operators.dummy import DummyOperator
from airflow.providers.google.cloud.hooks.gcs import GCSHook
from airflow.operators.python_operator import PythonOperator
from airflow.providers.google.cloud.sensors.gcs import GCSObjectExistenceSensor
from gcp_airflow_foundations.operators.api.sensors.gcs_sensor import GCSObjectListExistenceSensor
from gcp_airflow_foundations.operators.api.sensors.gcs_prefix_sensor import GCSObjectPrefixListExistenceSensor
from gcp_airflow_foundations.source_class.generic_file_source import GenericFileIngestionDagBuilder
from gcp_airflow_foundations.common.gcp.load_builder import load_builder
class GCSFileIngestionDagBuilder(GenericFileIngestionDagBuilder):
"""
Builds DAGs to load files from GCS to a BigQuery Table.
For GCS->BQ ingestion, either a metadata file is required or the field templated_file_name must be provided.
If a metadata file is provided, it can be a fixed file or a new file supplied daily.
Airflow context variables are supported for the file naming, e.g.
TABLE_METADATA_FILE_{{ ds }}.csv
for a metadata file supplied daily.
The format of the metadata file should be a csv with one column as follows:
FILE_NAME_1
...
FILE_NAME_N
with all files to ingest
"""
source_type = "GCS"
def flag_file_sensor(self, table_config, taskgroup):
if "flag_file_path" in table_config.extra_options.get("file_table_config"):
flag_file_path = table_config.extra_options.get("file_table_config")["flag_file_path"]
bucket = self.config.source.extra_options["gcs_bucket"]
return GCSObjectExistenceSensor(
task_id="wait_for_flag_file",
bucket=bucket,
object=flag_file_path,
task_group=taskgroup
)
else:
return None
def file_ingestion_task(self, table_config, taskgroup):
"""
No ingestion task is needed - the data is already in GCS, so nothing is returned.
"""
return None
def file_sensor(self, table_config, taskgroup):
"""
Returns an Airflow sensor that waits for the list of files specified by the metadata file provided.
"""
bucket = self.config.source.extra_options["gcs_bucket"]
files_to_wait_for = "{{ ti.xcom_pull(key='file_list', task_ids='ftp_taskgroup.get_file_list') }}"
if self.config.source.extra_options["file_source_config"]["file_prefix_filtering"]:
return GCSObjectPrefixListExistenceSensor(
task_id="wait_for_files_to_ingest",
bucket=bucket,
prefixes=files_to_wait_for,
task_group=taskgroup
)
else:
return GCSObjectListExistenceSensor(
task_id="wait_for_files_to_ingest",
bucket=bucket,
objects=files_to_wait_for,
task_group=taskgroup
)
def delete_files(self, table_config, **kwargs):
ti = kwargs["ti"]
files_to_load = ti.xcom_pull(key='loaded_files', task_ids='ftp_taskgroup.load_gcs_to_landing_zone')
data_source = self.config.source
bucket = data_source.extra_options["gcs_bucket"]
gcs_hook = GCSHook()
for file in files_to_load:
gcs_hook.delete(bucket_name=bucket, object_name=file)
def delete_gcs_files(self, table_config, taskgroup):
return PythonOperator(
task_id="delete_gcs_files",
op_kwargs={"table_config": table_config},
python_callable=self.delete_files,
task_group=taskgroup
)
def validate_extra_options(self):
# GCS Source only requires the checks for the base file_source_config and file_table_configs:
# other sources like SFTP require extra checks
super().validate_extra_options()
| 2.234375 | 2 |
src/features/build_features.py | denizhankara/Multi-class-classification-task | 0 | 12778063 |
<gh_stars>0
#from src.data.make_dataset import main
def f():
lst = [lambda : i**2 for i in range(100)]
return lst[0]()
if __name__ == "__main__":
f()
pass
| 2.03125 | 2 |
code-files/frosch2010_Tabu_language.py | Frosch2010/discord-tabu | 2 | 12778064 |
from frosch2010_Tabu_settings import tabu_settings
class tabu_language:
tabu_wrong_arguments = ""
tabu_game_already_running = ""
tabu_no_game_running = ""
tabu_more_players_needed = ""
tabu_user_already_joined = ""
tabu_user_joined_game = ""
tabu_user_started_game = ""
tabu_game_paused_by = ""
tabu_game_already_paused = ""
tabu_game_unpaused_by = ""
tabu_game_is_not_paused = ""
tabu_search_for_new_terms = ""
tabu_search_for_new_terms_finished = ""
tabu_false_term_format = ""
tabu_false_edit_term_format = ""
tabu_edit_term_not_exist = ""
tabu_edit_term_has_an_editor = ""
tabu_edit_description = ""
tabu_edit_word = ""
tabu_edit_delete_word = ""
tabu_edit_sure_delete_card = ""
tabu_save = ""
tabu_time_left = ""
tabu_time_is_up = ""
tabu_it_will_start_soon = ""
tabu_it_will_start_in = ""
tabu_game_paused = ""
tabu_team_1_won = ""
tabu_team_2_won = ""
tabu_card_term_prefix = ""
tabu_card_term_suffix = ""
tabu_graph_points = ""
tabu_graph_points_per_round = ""
tabu_graph_round = ""
tabu_game_cant_stopped = ""
tabu_game_stopped_by = ""
tabu_no_kick_user = ""
tabu_kick_user_isnt_player = ""
tabu_cant_kick_current_explainer = ""
tabu_user_kicked = ""
tabu_shutdown_bot = ""
tabu_revenge_asking = ""
tabu_revenge_starting = ""
tabu_revenge_canceled = ""
tabu_same_chance =""
| 1.875 | 2 |
controller/controller/manage_pods.py | emattia/sigopt-python | 67 | 12778065 |
<reponame>emattia/sigopt-python
from http import HTTPStatus
from kubernetes.client.exceptions import ApiException as KubernetesApiException
import logging
import signal
import threading
from sigopt.run_context import RunContext
from controller.create_pod import create_run_pod
from controller.event_repeater import EventRepeater
from controller.pod_status import is_pod_active
from controller.refill_pods import RefillExperimentPodsThread
from controller.run_state import RunState
from controller.settings import ExperimentSettings, RunSettings
from controller.watch_pods import WatchPodsThread
def create_run_state(sigopt_settings, pod, k8s_settings):
run_id = pod.metadata.labels["run"]
sigopt_conn = sigopt_settings.conn
run = sigopt_conn.training_runs(run_id).fetch()
run_context = RunContext(sigopt_conn, run)
return RunState(run_context, sigopt_settings, k8s_settings, pod.metadata.name)
def set_events_on_sigterm(events):
def handler(signum, frame):
logging.error("sigterm received")
for event in events:
event.set()
signal.signal(signal.SIGTERM, handler)
class RunPodsManager:
def __init__(self, k8s_settings, run_name, run_id, sigopt_settings):
self.k8s_settings = k8s_settings
self.run_name = run_name
self.run_id = run_id
self.sigopt_settings = sigopt_settings
self.run_states = dict()
self.pod_modified_event = threading.Event()
self.stop_event = threading.Event()
self.watcher_thread = WatchPodsThread(
k8s_settings=self.k8s_settings,
label_selector=f"run-name={self.run_name},type=run",
run_states=self.run_states,
pods_modified_event=self.pod_modified_event,
stop_threads_event=self.stop_event,
)
self.logger = logging.getLogger("controller:RunPodsManager")
@classmethod
def from_env(cls):
s = RunSettings()
return cls(
k8s_settings=s.k8s_settings,
run_name=s.run_name,
run_id=s.run_id,
sigopt_settings=s.sigopt_settings,
)
def start(self):
sigterm_event = threading.Event()
set_events_on_sigterm([sigterm_event, self.stop_event])
try:
pod = self.k8s_settings.api.read_namespaced_pod(self.run_name, self.k8s_settings.namespace)
self.logger.info("found existing pod %s", self.run_name)
run_state = create_run_state(self.sigopt_settings, pod, self.k8s_settings)
except KubernetesApiException as kae:
if kae.status != HTTPStatus.NOT_FOUND:
raise
sigopt_conn = self.sigopt_settings.conn
run = sigopt_conn.training_runs(self.run_id).fetch()
run_context = RunContext(sigopt_conn, run)
run_state = RunState(run_context, self.sigopt_settings, self.k8s_settings, self.run_name)
pod = create_run_pod(
k8s_settings=self.k8s_settings,
run_context=run_context,
)
self.logger.info("created pod %s", pod.metadata.name)
self.run_states.update({self.run_name: run_state})
self.watcher_thread.start()
try:
while not self.stop_event.is_set() and not run_state.is_finished():
try:
self.stop_event.wait(timeout=1)
except TimeoutError:
pass
except KeyboardInterrupt:
pass
self.stop_event.set()
self.watcher_thread.join()
if self.watcher_thread.exception_occurred.is_set():
raise Exception("An exception occurred in the watcher thread")
if sigterm_event.is_set():
raise Exception("Sigterm received")
class ExperimentPodsManager:
def __init__(self, k8s_settings, sigopt_settings, experiment_id):
self.k8s_settings = k8s_settings
self.sigopt_settings = sigopt_settings
self.experiment_id = experiment_id
self.run_label_selector = f"experiment={self.experiment_id},type=run"
self.run_state = dict()
self.manage_pods_event = threading.Event()
self.stop_threads_event = threading.Event()
self.management_event_repeater = EventRepeater(5, self.manage_pods_event)
self.refiller_thread = RefillExperimentPodsThread(
self.k8s_settings,
self.sigopt_settings,
self.experiment_id,
self.run_state,
self.manage_pods_event,
stop_threads_event=self.stop_threads_event,
)
self.watcher_thread = WatchPodsThread(
self.k8s_settings,
self.run_label_selector,
self.run_state,
self.manage_pods_event,
stop_threads_event=self.stop_threads_event,
)
@classmethod
def from_env(cls):
s = ExperimentSettings()
return cls(
k8s_settings=s.k8s_settings,
sigopt_settings=s.sigopt_settings,
experiment_id=s.experiment_id,
)
def start(self):
sigterm_event = threading.Event()
set_events_on_sigterm([sigterm_event, self.stop_threads_event])
self.run_state.update({
pod.metadata.name: create_run_state(self.sigopt_settings, pod, self.k8s_settings)
for pod in self.k8s_settings.api.list_namespaced_pod(
self.k8s_settings.namespace,
label_selector=self.run_label_selector,
).items
if is_pod_active(pod)
})
self.manage_pods_event.set()
threads = [self.refiller_thread, self.watcher_thread]
for thread in threads:
thread.start()
self.management_event_repeater.start()
try:
while not self.stop_threads_event.is_set():
                # Event.wait() returns False on timeout instead of raising, so no handler is needed.
                self.stop_threads_event.wait(timeout=5)
except KeyboardInterrupt:
pass
finally:
self.management_event_repeater.cancel()
self.stop_threads_event.set()
self.manage_pods_event.set()
for thread in threads:
thread.join()
if any(thread.exception_occurred.is_set() for thread in threads):
raise Exception("An exception ocurred in at least 1 thread")
if sigterm_event.is_set():
raise Exception("Sigterm received")
| 1.890625
| 2
|
riscemu/instructions/RV32A.py
|
jodalyst/riscemu
| 9
|
12778066
|
from .InstructionSet import InstructionSet, LoadedInstruction
from ..Exceptions import INS_NOT_IMPLEMENTED
from ..helpers import int_from_bytes, int_to_bytes, to_unsigned, to_signed
class RV32A(InstructionSet):
"""
    The RV32A instruction set. Load-reserved (lr.w) and store-conditional (sc.w) are currently not
    supported due to limitations in the way the MMU is implemented; a later implementation may add
    support for them.
"""
def instruction_lr_w(self, ins: 'LoadedInstruction'):
INS_NOT_IMPLEMENTED(ins)
def instruction_sc_w(self, ins: 'LoadedInstruction'):
INS_NOT_IMPLEMENTED(ins)
def instruction_amoswap_w(self, ins: 'LoadedInstruction'):
dest, addr, val = self.parse_rd_rs_rs(ins)
if dest == 'zero':
            self.mmu.write(addr, int_to_bytes(val, 4))  # store rs2's value even when rd is the zero register
else:
old = int_from_bytes(self.mmu.read(addr, 4))
self.mmu.write(addr, int_to_bytes(val, 4))
self.regs.set(dest, old)
def instruction_amoadd_w(self, ins: 'LoadedInstruction'):
dest, addr, val = self.parse_rd_rs_rs(ins)
old = int_from_bytes(self.mmu.read(addr, 4))
self.mmu.write(addr, int_to_bytes(old + val, 4))
self.regs.set(dest, old)
def instruction_amoand_w(self, ins: 'LoadedInstruction'):
dest, addr, val = self.parse_rd_rs_rs(ins)
old = int_from_bytes(self.mmu.read(addr, 4))
self.mmu.write(addr, int_to_bytes(old & val, 4))
self.regs.set(dest, old)
def instruction_amoor_w(self, ins: 'LoadedInstruction'):
dest, addr, val = self.parse_rd_rs_rs(ins)
old = int_from_bytes(self.mmu.read(addr, 4))
self.mmu.write(addr, int_to_bytes(old | val, 4))
self.regs.set(dest, old)
def instruction_amoxor_w(self, ins: 'LoadedInstruction'):
dest, addr, val = self.parse_rd_rs_rs(ins)
old = int_from_bytes(self.mmu.read(addr, 4))
self.mmu.write(addr, int_to_bytes(old ^ val, 4))
self.regs.set(dest, old)
def instruction_amomax_w(self, ins: 'LoadedInstruction'):
dest, addr, val = self.parse_rd_rs_rs(ins)
old = int_from_bytes(self.mmu.read(addr, 4))
self.mmu.write(addr, int_to_bytes(max(old, val), 4))
self.regs.set(dest, old)
def instruction_amomaxu_w(self, ins: 'LoadedInstruction'):
dest, addr, val = self.parse_rd_rs_rs(ins)
val = to_unsigned(val)
old = int_from_bytes(self.mmu.read(addr, 4), unsigned=True)
self.mmu.write(addr, int_to_bytes(to_signed(max(old, val)), 4))
self.regs.set(dest, old)
def instruction_amomin_w(self, ins: 'LoadedInstruction'):
dest, addr, val = self.parse_rd_rs_rs(ins)
old = int_from_bytes(self.mmu.read(addr, 4))
self.mmu.write(addr, int_to_bytes(min(old, val), 4))
self.regs.set(dest, old)
def instruction_amominu_w(self, ins: 'LoadedInstruction'):
dest, addr, val = self.parse_rd_rs_rs(ins)
val = to_unsigned(val)
old = int_from_bytes(self.mmu.read(addr, 4), unsigned=True)
self.mmu.write(addr, int_to_bytes(to_signed(min(old, val)), 4))
self.regs.set(dest, old)
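# Illustrative note (added comment, not part of the original file): the amoadd/amoand/
# amoor/amoxor/amomax/amomin handlers above all share the same read-modify-write shape
# (the *u variants additionally convert to and from unsigned), so a generic helper could
# look roughly like this, assuming the same mmu/regs interfaces; _amo_rmw is a
# hypothetical name:
#
#     def _amo_rmw(self, ins, op):
#         dest, addr, val = self.parse_rd_rs_rs(ins)
#         old = int_from_bytes(self.mmu.read(addr, 4))
#         self.mmu.write(addr, int_to_bytes(op(old, val), 4))
#         self.regs.set(dest, old)
#
# e.g. instruction_amoadd_w(ins) behaves like _amo_rmw(ins, lambda old, val: old + val).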
| 2.296875
| 2
|
apps/interactor/interactor/commander/animations.py
|
Djelibeybi/photons
| 51
|
12778067
|
from photons_canvas.animations import register, AnimationRunner
from photons_canvas.animations.action import expand
from photons_app.errors import PhotonsAppError
from photons_app import helpers as hp
from delfick_project.option_merge import MergedOptions
from delfick_project.norms import sb
from textwrap import dedent
import logging
import asyncio
import io
log = logging.getLogger("interactor.commander.animations")
def errors(e):
if isinstance(e, KeyboardInterrupt):
return
if not isinstance(e, PhotonsAppError):
log.exception(e)
else:
log.error(e)
class Animation:
def __init__(self, final_future, identity, runner, pauser):
self.runner = runner
self.pauser = pauser
self.paused = False
self.identity = identity
self.final_future = final_future
@property
def info(self):
return self.runner.info
async def pause(self):
if not self.paused:
await self.pauser.acquire()
self.paused = True
async def resume(self):
self.paused = False
self.pauser.release()
async def stop(self):
self.final_future.cancel()
def start(self, tasks, *callbacks):
async def animation():
async with self.runner:
await self.runner.run()
self.task = tasks.add(animation())
for cb in callbacks:
self.task.add_done_callback(cb)
return self
class Animations:
available = register.available_animations()
def __init__(self, final_future, tasks, sender, animation_options):
self.tasks = tasks
self.sender = sender
self.final_future = final_future
self.animation_options = animation_options
self.animations = {}
def info(self, identity=None, expand=False, **extra):
if identity is not None:
if identity not in self.animations:
return
else:
return self.animations[identity].info
animations = {
identity: animation.info for identity, animation in sorted(self.animations.items())
}
if not expand:
animations = sorted(animations)
return {
"animations": animations,
"paused": sorted(
[animation.identity for animation in self.animations.values() if animation.paused]
),
**extra,
}
async def start(
self,
identity,
reference,
*,
run_options=sb.NotSpecified,
animations=sb.NotSpecified,
):
pauser = asyncio.Semaphore()
final_future = hp.ChildOfFuture(
self.final_future, name=f"Animations::start({identity})[final_future]"
)
if run_options is sb.NotSpecified:
run_options = {}
if animations is not sb.NotSpecified:
run_options = MergedOptions.using(run_options, {"animations": animations}).as_dict()
runner = AnimationRunner(
self.sender,
reference,
run_options,
final_future=final_future,
error_catcher=errors,
animation_options=self.animation_options,
)
runner.run_options.pauser = pauser
def remove(res):
if identity in self.animations:
del self.animations[identity]
self.animations[identity] = Animation(final_future, identity, runner, pauser).start(
self.tasks, remove
)
return self.info(started=identity)
async def pause(self, *identities):
return await self.action("pause", "pausing", identities)
async def resume(self, *identities):
return await self.action("resume", "resuming", identities)
async def stop(self, *identities):
return await self.action("stop", "stopping", identities)
async def help(self, animation_name=None):
out = io.StringIO()
def p(s=""):
print(s, file=out)
animation_kls = None
if animation_name in register.animations:
animation_kls = register.animations[animation_name].Animation
if animation_kls is None:
p("Available animations include")
for animation in register.available_animations():
p(f"* {animation}")
p()
p("To see options for a particular animation, run this again")
p("but with the `animation_name` option set to the name of the animation.")
p()
else:
p()
p("-" * 80)
p(f"{animation_name} animation")
p("-" * 80)
p()
expand(dedent(animation_kls.__doc__ or "").strip(), output=out)
out.flush()
out.seek(0)
return out.read()
async def action(self, method, verb, identities):
if not identities:
identities = list(self.animations)
changed = []
async with hp.TaskHolder(self.final_future, name=f"Animations::action({method})[ts]") as ts:
for identity in identities:
if identity in self.animations:
changed.append(identity)
ts.add(getattr(self.animations[identity], method)())
return self.info(**{verb: changed})
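# Added note (not in the original file): pause/resume/stop all delegate to action(),
# which awaits the per-animation coroutines inside a TaskHolder and then returns the
# usual info() payload, e.g. `await animations.pause("one", "two")` yields the info
# dict with a "pausing" key listing the identities that were actually changed.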
| 1.984375
| 2
|
src/uploader.py
|
Marcellofabrizio/Python-Scheduled-Backup-Service
| 0
|
12778068
|
<filename>src/uploader.py
import os
import boto3
import logging
from botocore.exceptions import NoCredentialsError
from botocore.exceptions import ClientError
from file_handler import remove_file
AWS_ACCESS_KEY_ID = 'YOUR AWS ID'
AWS_SECRET_ACCESS_KEY = 'YOUR SUPER SECRET AWS ACCESS KEY'
def upload_to_aws(file_name, bucket, object_name=None, remove_zips=True):
"""
Upload a file to an S3 bucket
Args:
file_name: file to be uploaded.
bucket: bucket to be uploaded.
object_name: the s3 object name. If not specified, it
will have the same name as file_name.
remove_zips: if true, will remove the zips files after
uploading. Only recommend setting it False
for testing reasons.
"""
    if object_name is None:
object_name = file_name
s3_client = boto3.client('s3', aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY)
try:
s3_client.upload_file(file_name, bucket, object_name)
if remove_zips:
remove_file(file_name)
except ClientError as e:
logging.error(e)
return False
except NoCredentialsError as e:
logging.error(e)
return False
except FileNotFoundError as e:
logging.error(e)
return False
return True
def upload_all_files(path_to_dir, bucket):
directory = os.fsencode(path_to_dir)
for file in os.listdir(directory):
filename = os.fsdecode(file)
if filename.endswith('.zip'):
upload_to_aws(path_to_dir + '/' + filename, bucket)
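# Minimal usage sketch (added for illustration; the directory and bucket below are
# placeholders, not values from the original project):
#
#     if __name__ == "__main__":
#         # Upload every .zip archive found in ./backups to the given S3 bucket.
#         upload_all_files("./backups", "my-backup-bucket")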
| 2.703125
| 3
|
lib/python/treadmill/scheduler/zkbackend.py
|
bretttegart/treadmill
| 2
|
12778069
|
<gh_stars>1-10
"""Zookeeper scheduler/master backend.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import kazoo
from treadmill import zknamespace as z
from treadmill import zkutils
from . import backend
_LOGGER = logging.getLogger(__name__)
# ACL which allows all servers in the cell to full control over node.
#
# Set in /finished, /servers
_SERVERS_ACL = zkutils.make_role_acl('servers', 'rwcda')
# Delete only servers ACL
_SERVERS_ACL_DEL = zkutils.make_role_acl('servers', 'd')
class ZkReadonlyBackend(backend.Backend):
"""Implements readonly Zookeeper based storage."""
def __init__(self, zkclient):
self.zkclient = zkclient
super(ZkReadonlyBackend, self).__init__()
def list(self, path):
"""Return path listing."""
try:
return self.zkclient.get_children(path)
except kazoo.client.NoNodeError:
raise backend.ObjectNotFoundError()
def get_default(self, path, default=None):
"""Return stored object or default if not found."""
return zkutils.get_default(self.zkclient, path, default=default)
def get(self, path):
"""Return stored object given path."""
try:
return zkutils.get(self.zkclient, path)
except kazoo.client.NoNodeError:
raise backend.ObjectNotFoundError()
def get_with_metadata(self, path):
"""Return stored object with metadata."""
try:
return zkutils.get_with_metadata(self.zkclient, path)
except kazoo.client.NoNodeError:
raise backend.ObjectNotFoundError()
def exists(self, path):
"""Check if object exists."""
try:
return self.zkclient.exists(path)
except kazoo.client.NoNodeError:
raise backend.ObjectNotFoundError()
class ZkBackend(ZkReadonlyBackend):
"""Implements RW Zookeeper storage."""
def __init__(self, zkclient):
super(ZkBackend, self).__init__(zkclient)
self.acls = {
'/': None,
z.ALLOCATIONS: None,
z.APPMONITORS: None,
z.BUCKETS: None,
z.CELL: None,
z.DISCOVERY: [_SERVERS_ACL],
z.DISCOVERY_STATE: [_SERVERS_ACL],
z.IDENTITY_GROUPS: None,
z.PLACEMENT: None,
z.PARTITIONS: None,
z.SCHEDULED: [_SERVERS_ACL_DEL],
z.SCHEDULED_STATS: None,
z.SCHEDULER: None,
z.SERVERS: None,
z.STATE_REPORTS: None,
z.STRATEGIES: None,
z.FINISHED: [_SERVERS_ACL],
z.FINISHED_HISTORY: None,
z.TRACE: None,
z.TRACE_HISTORY: None,
z.VERSION_ID: None,
z.ZOOKEEPER: None,
z.BLACKEDOUT_SERVERS: [_SERVERS_ACL],
z.ENDPOINTS: [_SERVERS_ACL],
z.path.endpoint_proid('root'): [_SERVERS_ACL],
z.EVENTS: [_SERVERS_ACL],
z.RUNNING: [_SERVERS_ACL],
z.SERVER_PRESENCE: [_SERVERS_ACL],
z.VERSION: [_SERVERS_ACL],
z.VERSION_HISTORY: [_SERVERS_ACL],
z.REBOOTS: [_SERVERS_ACL],
}
for path in z.trace_shards():
self.acls[path] = [_SERVERS_ACL]
def _acl(self, path):
"""Returns ACL of the Zookeeper node."""
if path in self.acls:
return self.acls[path]
if path.startswith(z.path.placement('')):
return [_SERVERS_ACL]
if path.startswith(z.path.reboot('')):
return [_SERVERS_ACL_DEL]
if path.startswith(z.path.finished('')):
return [_SERVERS_ACL]
return None
def put(self, path, value):
"""Store object at a given path."""
return zkutils.put(self.zkclient, path, value, acl=self._acl(path))
def ensure_exists(self, path):
"""Ensure storage path exists."""
return zkutils.ensure_exists(self.zkclient, path, acl=self._acl(path))
def delete(self, path):
"""Delete object given the path."""
return zkutils.ensure_deleted(self.zkclient, path)
def update(self, path, data, check_content=False):
"""Set data into ZK node."""
try:
zkutils.update(self.zkclient, path, data, check_content)
except kazoo.client.NoNodeError:
raise backend.ObjectNotFoundError()
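# Added note (not in the original file): _acl() first checks the exact-path table built
# in __init__ and then falls back to prefix matches, so nodes under z.path.placement('')
# or z.path.finished('') get the full servers ACL, nodes under z.path.reboot('') only
# grant servers delete rights, and everything else uses the default (None) ACL.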
| 1.953125
| 2
|
guillotina_volto/interfaces/image.py
|
enfold/guillotina-volto
| 5
|
12778070
|
<gh_stars>1-10
from zope.interface import Interface
class IHasImage(Interface):
pass
| 0.832031
| 1
|
tests/test_echo_clips.py
|
hainesm6/basicsynbio
| 0
|
12778071
|
<reponame>hainesm6/basicsynbio
from platemap.PlateUtils import add_volume
from platemap.plate import Plate
import basicsynbio as bsb
import zipfile
import os
import pandas as pd
import numpy as np
from pathlib import Path
import pytest
from .test_fixtures import small_build_example
def getLinkerPlate():
linkerPlate = Plate(size=384, well_volume=10, deadspace=0)
add_volume(linkerPlate, "A1", 10, "LMP-P")
add_volume(linkerPlate, "B1", 10, "LMP-S")
add_volume(linkerPlate, "C1", 10, "LMS-P")
add_volume(linkerPlate, "D1", 10, "LMS-S")
return linkerPlate
def getPartPlate():
partPlate = Plate(size=384, well_volume=10, deadspace=0)
add_volume(partPlate, "A1", 10, bsb.BASIC_SEVA_PARTS["v0.1"]["18"].id)
add_volume(partPlate, "B1", 10, bsb.BASIC_CDS_PARTS["v0.1"]["sfGFP"].id)
add_volume(partPlate, "C1", 10, bsb.BASIC_SEVA_PARTS["v0.1"]["26"].id)
partPlate["A1"]["composition"][bsb.BASIC_SEVA_PARTS["v0.1"]["18"].id][
"concentration"
] = 40 # ng / ul
partPlate["B1"]["composition"][bsb.BASIC_CDS_PARTS["v0.1"]["sfGFP"].id][
"concentration"
] = 30 # ng / ul
partPlate["C1"]["composition"][bsb.BASIC_SEVA_PARTS["v0.1"]["26"].id][
"concentration"
] = 50 # ng / ul
return partPlate
def test_echo_instructions_small_build(small_build_example):
linker_plate = getLinkerPlate()
part_plate = getPartPlate()
echo_clips_zippath = bsb.export_echo_clips_instructions(
small_build_example, linker_plate=linker_plate, part_plate=part_plate
)
with zipfile.ZipFile(echo_clips_zippath, "r") as zip_ref:
try:
zip_ref.extractall()
finally:
zip_ref.close()
os.remove(echo_clips_zippath)
stage1 = pd.read_csv(Path.cwd() / "stage_1_half_linkers.csv")
stage2 = pd.read_csv(Path.cwd() / "stage_2_parts.csv")
stage3 = pd.read_csv(Path.cwd() / "stage_3_water_buffer.csv")
os.remove(Path.cwd() / "stage_1_half_linkers.csv")
os.remove(Path.cwd() / "stage_2_parts.csv")
os.remove(Path.cwd() / "stage_3_water_buffer.csv")
expected_stage1 = [
["A1", "C1", 0.7],
["A1", "B1", 0.7],
["B1", "A1", 0.7],
["B1", "D1", 0.7],
["C1", "C1", 0.7],
["C1", "B1", 0.7],
]
expected_stage2 = [
["A1", "A1", 2.7],
["B1", "B1", 3.4],
["C1", "C1", 2.0],
]
expected_stage3 = [
["A1", "A1", 6.7],
["A1", "B1", 9.2],
["B1", "A1", 6.7],
["B1", "B1", 8.5],
["C1", "A1", 6.7],
["C1", "B1", 9.9],
]
assert expected_stage1 == stage1.to_numpy().tolist()
assert expected_stage2 == stage2.to_numpy().tolist()
assert expected_stage3 == stage3.to_numpy().tolist()
def test_echo_instructions_small_build_default_plate(small_build_example):
part_plate = getPartPlate()
echo_clips_zippath = bsb.export_echo_clips_instructions(
small_build_example, part_plate=part_plate
)
with zipfile.ZipFile(echo_clips_zippath, "r") as zip_ref:
try:
zip_ref.extractall()
finally:
zip_ref.close()
os.remove(echo_clips_zippath)
stage1 = pd.read_csv(Path.cwd() / "stage_1_half_linkers.csv")
stage2 = pd.read_csv(Path.cwd() / "stage_2_parts.csv")
stage3 = pd.read_csv(Path.cwd() / "stage_3_water_buffer.csv")
os.remove(Path.cwd() / "stage_1_half_linkers.csv")
os.remove(Path.cwd() / "stage_2_parts.csv")
os.remove(Path.cwd() / "stage_3_water_buffer.csv")
expected_stage1 = [
["A1", "C15", 0.7],
["A1", "A13", 0.7],
["B1", "C13", 0.7],
["B1", "A15", 0.7],
["C1", "C15", 0.7],
["C1", "A13", 0.7],
]
expected_stage2 = [
["A1", "A1", 2.7],
["B1", "B1", 3.4],
["C1", "C1", 2.0],
]
expected_stage3 = [
["A1", "A1", 6.7],
["A1", "B1", 9.2],
["B1", "A1", 6.7],
["B1", "B1", 8.5],
["C1", "A1", 6.7],
["C1", "B1", 9.9],
]
assert expected_stage1 == stage1.to_numpy().tolist()
assert expected_stage2 == stage2.to_numpy().tolist()
assert expected_stage3 == stage3.to_numpy().tolist()
| 2.03125
| 2
|
ret_benchmark/losses/contrastive_loss.py
|
alibaba-edu/Ranking-based-Instance-Selection
| 20
|
12778072
|
<reponame>alibaba-edu/Ranking-based-Instance-Selection<gh_stars>10-100
from __future__ import absolute_import
import torch,pickle
from torch import nn
from torch.autograd import Variable
import numpy as np
from ret_benchmark.losses.registry import LOSS
from ret_benchmark.utils.log_info import log_info
import os
@LOSS.register("contrastive_loss")
class ContrastiveLoss(nn.Module):
def __init__(self, cfg, checked_outlier=None):
super(ContrastiveLoss, self).__init__()
self.margin = 0.5
        self.checked_outlier = None
        self.iteration = 0
        self.name = cfg.NAME
    def forward(self, inputs_col, targets_col, inputs_row, target_row, is_noise=None):
        n = inputs_col.size(0)
        is_batch = (inputs_col.shape[0] == inputs_row.shape[0])
# Compute similarity matrix
sim_mat = torch.matmul(inputs_col, inputs_row.t())
epsilon = 1e-5
loss = list()
pos_mask = targets_col.expand(target_row.shape[0], n).t() == target_row.expand(n, target_row.shape[0])
        neg_mask = (~pos_mask) & (sim_mat > self.margin)
        pos_mask = pos_mask & (sim_mat < (1 - epsilon))
        pos_pair = sim_mat[pos_mask]
        neg_pair = sim_mat[neg_mask]
pos_loss = torch.sum(-pos_pair + 1)
if len(neg_pair) > 0:
neg_loss = torch.sum(neg_pair)
else:
neg_loss = 0
if is_batch:
prefix = "batch_"
else:
prefix = "memory_"
loss = (pos_loss+neg_loss) / n # / all_targets.shape[1]
if not is_batch:
prefix = "xbm_"
log_info[f"{prefix}loss"] = loss.item()
        self.iteration += 1
return loss
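    # For reference (added comment, not in the original file): with similarities
    # s_ij = <x_i, x_j>, the value returned above is
    #     L = (1/n) * ( sum over positive pairs with s_ij < 1 - eps of (1 - s_ij)
    #                 + sum over negative pairs with s_ij > margin of  s_ij )
    # i.e. a margin-based contrastive loss (margin = 0.5) on inner-product similarities.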
| 1.882813
| 2
|
tests/unit/test_table_pandas.py
|
KoffieLabs/python-bigquery
| 1
|
12778073
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import decimal
from unittest import mock
import pyarrow
import pytest
from google.cloud import bigquery
pandas = pytest.importorskip("pandas")
TEST_PATH = "/v1/project/test-proj/dataset/test-dset/table/test-tbl/data"
@pytest.fixture
def class_under_test():
from google.cloud.bigquery.table import RowIterator
return RowIterator
def test_to_dataframe_nullable_scalars(monkeypatch, class_under_test):
# See tests/system/test_arrow.py for the actual types we get from the API.
arrow_schema = pyarrow.schema(
[
pyarrow.field("bignumeric_col", pyarrow.decimal256(76, scale=38)),
pyarrow.field("bool_col", pyarrow.bool_()),
pyarrow.field("bytes_col", pyarrow.binary()),
pyarrow.field("date_col", pyarrow.date32()),
pyarrow.field("datetime_col", pyarrow.timestamp("us", tz=None)),
pyarrow.field("float64_col", pyarrow.float64()),
pyarrow.field("int64_col", pyarrow.int64()),
pyarrow.field("numeric_col", pyarrow.decimal128(38, scale=9)),
pyarrow.field("string_col", pyarrow.string()),
pyarrow.field("time_col", pyarrow.time64("us")),
pyarrow.field(
"timestamp_col", pyarrow.timestamp("us", tz=datetime.timezone.utc)
),
]
)
arrow_table = pyarrow.Table.from_pydict(
{
"bignumeric_col": [decimal.Decimal("123.456789101112131415")],
"bool_col": [True],
"bytes_col": [b"Hello,\x00World!"],
"date_col": [datetime.date(2021, 8, 9)],
"datetime_col": [datetime.datetime(2021, 8, 9, 13, 30, 44, 123456)],
"float64_col": [1.25],
"int64_col": [-7],
"numeric_col": [decimal.Decimal("-123.456789")],
"string_col": ["abcdefg"],
"time_col": [datetime.time(14, 21, 17, 123456)],
"timestamp_col": [
datetime.datetime(
2021, 8, 9, 13, 30, 44, 123456, tzinfo=datetime.timezone.utc
)
],
},
schema=arrow_schema,
)
nullable_schema = [
bigquery.SchemaField("bignumeric_col", "BIGNUMERIC"),
bigquery.SchemaField("bool_col", "BOOLEAN"),
bigquery.SchemaField("bytes_col", "BYTES"),
bigquery.SchemaField("date_col", "DATE"),
bigquery.SchemaField("datetime_col", "DATETIME"),
bigquery.SchemaField("float64_col", "FLOAT"),
bigquery.SchemaField("int64_col", "INT64"),
bigquery.SchemaField("numeric_col", "NUMERIC"),
bigquery.SchemaField("string_col", "STRING"),
bigquery.SchemaField("time_col", "TIME"),
bigquery.SchemaField("timestamp_col", "TIMESTAMP"),
]
mock_client = mock.create_autospec(bigquery.Client)
mock_client.project = "test-proj"
mock_api_request = mock.Mock()
mock_to_arrow = mock.Mock()
mock_to_arrow.return_value = arrow_table
rows = class_under_test(mock_client, mock_api_request, TEST_PATH, nullable_schema)
monkeypatch.setattr(rows, "to_arrow", mock_to_arrow)
df = rows.to_dataframe()
# Check for expected dtypes.
# Keep these in sync with tests/system/test_pandas.py
assert df.dtypes["bignumeric_col"].name == "object"
assert df.dtypes["bool_col"].name == "boolean"
assert df.dtypes["bytes_col"].name == "object"
assert df.dtypes["date_col"].name == "dbdate"
assert df.dtypes["datetime_col"].name == "datetime64[ns]"
assert df.dtypes["float64_col"].name == "float64"
assert df.dtypes["int64_col"].name == "Int64"
assert df.dtypes["numeric_col"].name == "object"
assert df.dtypes["string_col"].name == "object"
assert df.dtypes["time_col"].name == "dbtime"
assert df.dtypes["timestamp_col"].name == "datetime64[ns, UTC]"
# Check for expected values.
assert df["bignumeric_col"][0] == decimal.Decimal("123.456789101112131415")
assert df["bool_col"][0] # True
assert df["bytes_col"][0] == b"Hello,\x00World!"
# object is used by default, but we can use "datetime64[ns]" automatically
# when data is within the supported range.
# https://github.com/googleapis/python-bigquery/issues/861
assert df["date_col"][0] == datetime.date(2021, 8, 9)
assert df["datetime_col"][0] == pandas.to_datetime("2021-08-09 13:30:44.123456")
assert df["float64_col"][0] == 1.25
assert df["int64_col"][0] == -7
assert df["numeric_col"][0] == decimal.Decimal("-123.456789")
assert df["string_col"][0] == "abcdefg"
# Pandas timedelta64 might be a better choice for pandas time columns. Then
# they can more easily be combined with date columns to form datetimes.
# https://github.com/googleapis/python-bigquery/issues/862
assert df["time_col"][0] == datetime.time(14, 21, 17, 123456)
assert df["timestamp_col"][0] == pandas.to_datetime("2021-08-09 13:30:44.123456Z")
def test_to_dataframe_nullable_scalars_with_custom_dtypes(
monkeypatch, class_under_test
):
"""Passing in explicit dtypes is merged with default behavior."""
arrow_schema = pyarrow.schema(
[
pyarrow.field("int64_col", pyarrow.int64()),
pyarrow.field("other_int_col", pyarrow.int64()),
]
)
arrow_table = pyarrow.Table.from_pydict(
{"int64_col": [1000], "other_int_col": [-7]},
schema=arrow_schema,
)
nullable_schema = [
bigquery.SchemaField("int64_col", "INT64"),
bigquery.SchemaField("other_int_col", "INT64"),
]
mock_client = mock.create_autospec(bigquery.Client)
mock_client.project = "test-proj"
mock_api_request = mock.Mock()
mock_to_arrow = mock.Mock()
mock_to_arrow.return_value = arrow_table
rows = class_under_test(mock_client, mock_api_request, TEST_PATH, nullable_schema)
monkeypatch.setattr(rows, "to_arrow", mock_to_arrow)
df = rows.to_dataframe(dtypes={"other_int_col": "int8"})
assert df.dtypes["int64_col"].name == "Int64"
assert df["int64_col"][0] == 1000
assert df.dtypes["other_int_col"].name == "int8"
assert df["other_int_col"][0] == -7
def test_to_dataframe_arrays(monkeypatch, class_under_test):
arrow_schema = pyarrow.schema(
[pyarrow.field("int64_repeated", pyarrow.list_(pyarrow.int64()))]
)
arrow_table = pyarrow.Table.from_pydict(
{"int64_repeated": [[-1, 0, 2]]},
schema=arrow_schema,
)
nullable_schema = [
bigquery.SchemaField("int64_repeated", "INT64", mode="REPEATED"),
]
mock_client = mock.create_autospec(bigquery.Client)
mock_client.project = "test-proj"
mock_api_request = mock.Mock()
mock_to_arrow = mock.Mock()
mock_to_arrow.return_value = arrow_table
rows = class_under_test(mock_client, mock_api_request, TEST_PATH, nullable_schema)
monkeypatch.setattr(rows, "to_arrow", mock_to_arrow)
df = rows.to_dataframe()
assert df.dtypes["int64_repeated"].name == "object"
assert tuple(df["int64_repeated"][0]) == (-1, 0, 2)
| 2.140625
| 2
|
common/aist_common/grammar/observation_in_collection.py
|
sfahad1414/AGENT
| 15
|
12778074
|
from aist_common.grammar.observation import Observation
class ObservationInCollection(Observation):
def __init__(self):
super().__init__()
self.capture = None
def with_capture(self, capture):
self.capture = capture
def __str__(self):
output = "OBSERVE" if self.observe else "NOTOBSERVE"
if self.capture:
output += " " + str(self.capture)
output += " IN COLLECTION"
return output
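# Example (added for illustration): with observe set on the base Observation and a
# capture attached via with_capture(), str(obs) renders as
# "OBSERVE <capture> IN COLLECTION"; without observe it starts with "NOTOBSERVE".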
| 2.796875
| 3
|
handlers/users/change_datas.py
|
KARTASAR/DatingBot
| 12
|
12778075
|
<reponame>KARTASAR/DatingBot
from keyboards.inline.lifestyle_choice_inline import lifestyle_inline_kb
from keyboards.inline.change_profile_inline import change_profile_kb
from aiogram.utils.exceptions import MessageToReplyNotFound
from aiogram.types import CallbackQuery, ContentType
from keyboards.inline.main_menu import inline_start
from states.new_data_state import NewData
from aiogram.dispatcher import FSMContext
from loader import dp, bot, db
from aiogram import types
@dp.callback_query_handler(text='change_profile')
async def start_change_data(call: CallbackQuery):
await call.answer(cache_time=60)
await bot.send_message(call.from_user.id, f'Выберите, что вы хотите изменить: ', reply_markup=change_profile_kb)
@dp.message_handler(text='Имя')
async def change_name(message: types.Message):
await message.reply(f'Введите новое имя')
await NewData.name.set()
@dp.message_handler(state=NewData.name)
async def change_name(message: types.Message, state: FSMContext):
try:
await db.update_user_varname(varname=message.text, telegram_id=message.from_user.id)
await message.reply(f'Ваше новое имя: <b>{message.text}</b>', reply_markup=types.ReplyKeyboardRemove())
await state.reset_state()
except MessageToReplyNotFound:
await message.reply(f'Произошла неизвестная ошибка')
await state.reset_state()
await state.reset_state()
await message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
"<b>🤝 Сотрудничество: </b>\n"
"Если у вас есть предложение о сотрудничестве, пишите сюда - "
"@DRomanovizc", reply_markup=inline_start)
@dp.message_handler(text='Возраст')
async def change_age(message: types.Message):
await message.reply(f'Введите новый возраст')
await NewData.age.set()
@dp.message_handler(state=NewData.age)
async def change_age(message: types.Message, state: FSMContext):
try:
await db.update_user_age(age=message.text, telegram_id=message.from_user.id)
await message.reply(f'Ваш новый возраст: <b>{message.text}</b>', reply_markup=types.ReplyKeyboardRemove())
await state.reset_state()
except MessageToReplyNotFound:
await message.reply(f'Произошла неизвестная ошибка')
await state.reset_state()
await state.reset_state()
await message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
"<b>🤝 Сотрудничество: </b>\n"
"Если у вас есть предложение о сотрудничестве, пишите сюда - "
"@DRomanovizc", reply_markup=inline_start)
@dp.message_handler(text='Национальность')
async def change_nationality(message: types.Message):
await message.reply(f'Введите новую национальность')
await NewData.nationality.set()
@dp.message_handler(state=NewData.nationality)
async def change_nationality(message: types.Message, state: FSMContext):
try:
await db.update_user_national(national=message.text, telegram_id=message.from_user.id)
await message.reply(f'Ваша новая национальность: <b>{message.text}</b>',
reply_markup=types.ReplyKeyboardRemove())
await state.reset_state()
except MessageToReplyNotFound:
await message.reply(f'Произошла неизвестная ошибка')
await state.reset_state()
await state.reset_state()
await message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
"<b>🤝 Сотрудничество: </b>\n"
"Если у вас есть предложение о сотрудничестве, пишите сюда - "
"@DRomanovizc", reply_markup=inline_start)
@dp.message_handler(text='Город')
async def change_city(message: types.Message):
await message.reply(f'Введите новый город')
await NewData.city.set()
@dp.message_handler(state=NewData.city)
async def change_city(message: types.Message, state: FSMContext):
try:
await db.update_user_city(city=message.text, telegram_id=message.from_user.id)
await message.reply(f'Ваш новый город: <b>{message.text}</b>', reply_markup=types.ReplyKeyboardRemove())
await state.reset_state()
except MessageToReplyNotFound:
await message.reply(f'Произошла неизвестная ошибка')
await state.reset_state()
await state.reset_state()
await message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
"<b>🤝 Сотрудничество: </b>\n"
"Если у вас есть предложение о сотрудничестве, пишите сюда - "
"@DRomanovizc", reply_markup=inline_start)
@dp.message_handler(text='Пол')
async def change_sex(message: types.Message):
keyboard = types.InlineKeyboardMarkup()
btn1 = types.InlineKeyboardButton(text='Мужской', callback_data='male')
keyboard.add(btn1)
btn2 = types.InlineKeyboardButton(text='Женский', callback_data='female')
keyboard.add(btn2)
await message.reply(f'Выберите новый пол: ', reply_markup=keyboard)
await NewData.sex.set()
@dp.callback_query_handler(text='male', state=NewData.sex)
@dp.callback_query_handler(text='female', state=NewData.sex)
async def change_sex(call: CallbackQuery, state: FSMContext):
await call.answer(cache_time=60)
if call.data == 'male':
try:
await db.update_user_sex(sex='Мужской', telegram_id=call.from_user.id)
await bot.send_message(call.from_user.id, f'Ваш новый пол: <b>Мужской</b>',
reply_markup=types.ReplyKeyboardRemove())
await state.reset_state()
except MessageToReplyNotFound:
await bot.send_message(call.from_user.id, f'Произошла неизвестная ошибка')
await state.reset_state()
if call.data == 'female':
try:
await db.update_user_sex(sex='Женский', telegram_id=call.from_user.id)
await bot.send_message(call.from_user.id, f'Ваш новый пол: <b>Женский</b>',
reply_markup=types.ReplyKeyboardRemove())
await state.reset_state()
except MessageToReplyNotFound:
await bot.send_message(call.from_user.id, f'Произошла неизвестная ошибка')
await state.reset_state()
await state.reset_state()
await call.message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
"<b>🤝 Сотрудничество: </b>\n"
"Если у вас есть предложение о сотрудничестве, пишите сюда - "
"@DRomanovizc", reply_markup=inline_start)
@dp.message_handler(text='Машина')
async def change_car(message: types.Message):
keyboard = types.InlineKeyboardMarkup()
btn1 = types.InlineKeyboardButton(text='Есть', callback_data='true')
keyboard.add(btn1)
btn2 = types.InlineKeyboardButton(text='Нет', callback_data='false')
keyboard.add(btn2)
await message.reply(f'Есть ли у Вас машина?: ', reply_markup=keyboard)
await NewData.car.set()
@dp.callback_query_handler(text='true', state=NewData.car)
@dp.callback_query_handler(text='false', state=NewData.car)
async def change_car(call: CallbackQuery, state: FSMContext):
await call.answer(cache_time=60)
if call.data == 'true':
try:
await db.update_user_car(car=True, telegram_id=call.from_user.id)
await bot.send_message(call.from_user.id, f'Теперь у вас: <b>есть</b> машина',
reply_markup=types.ReplyKeyboardRemove())
await state.reset_state()
except MessageToReplyNotFound:
await bot.send_message(call.from_user.id, f'Произошла неизвестная ошибка')
await state.reset_state()
if call.data == 'false':
try:
await db.update_user_car(car=False, telegram_id=call.from_user.id)
await bot.send_message(call.from_user.id, f'Теперь у вас: <b>нет</b> машины',
reply_markup=types.ReplyKeyboardRemove())
await state.reset_state()
except MessageToReplyNotFound:
await bot.send_message(call.from_user.id, f'Произошла неизвестная ошибка')
await state.reset_state()
await state.reset_state()
await call.message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
"<b>🤝 Сотрудничество: </b>\n"
"Если у вас есть предложение о сотрудничестве, пишите сюда - "
"@DRomanovizc", reply_markup=inline_start)
@dp.message_handler(text='Дети')
async def change_kids(message: types.Message):
keyboard = types.InlineKeyboardMarkup()
btn1 = types.InlineKeyboardButton(text='Есть', callback_data='true')
keyboard.add(btn1)
btn2 = types.InlineKeyboardButton(text='Нет', callback_data='false')
keyboard.add(btn2)
await message.reply(f'Есть ли у Вас дети?: ', reply_markup=keyboard)
await NewData.child.set()
@dp.callback_query_handler(text='true', state=NewData.child)
@dp.callback_query_handler(text='false', state=NewData.child)
async def change_children(call: CallbackQuery, state: FSMContext):
await call.answer(cache_time=60)
if call.data == 'true':
try:
await db.update_user_kids(kids=True, telegram_id=call.from_user.id)
await bot.send_message(call.from_user.id, f'Теперь у вас: <b>есть</b> дети',
reply_markup=types.ReplyKeyboardRemove())
await state.reset_state()
except MessageToReplyNotFound:
await bot.send_message(call.from_user.id, f'Произошла неизвестная ошибка')
await state.reset_state()
if call.data == 'false':
try:
await db.update_user_kids(kids=False, telegram_id=call.from_user.id)
await bot.send_message(call.from_user.id, f'Теперь у вас: <b>нет</b> детей',
reply_markup=types.ReplyKeyboardRemove())
await state.reset_state()
except MessageToReplyNotFound:
await bot.send_message(call.from_user.id, f'Произошла неизвестная ошибка')
await state.reset_state()
await state.reset_state()
await call.message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
"<b>🤝 Сотрудничество: </b>\n"
"Если у вас есть предложение о сотрудничестве, пишите сюда - "
"@DRomanovizc", reply_markup=inline_start)
@dp.message_handler(text='Жилье')
async def change_home(message: types.Message):
keyboard = types.InlineKeyboardMarkup()
btn1 = types.InlineKeyboardButton(text='Есть', callback_data='true')
keyboard.add(btn1)
btn2 = types.InlineKeyboardButton(text='Нет', callback_data='false')
keyboard.add(btn2)
await message.reply(f'Есть ли у Вас квартира: ', reply_markup=keyboard)
await NewData.own_home.set()
@dp.callback_query_handler(text='true', state=NewData.own_home)
@dp.callback_query_handler(text='false', state=NewData.own_home)
async def change_home(call: CallbackQuery, state: FSMContext):
await call.answer(cache_time=60)
if call.data == 'true':
try:
await db.update_user_apartment(apartment=True, telegram_id=call.from_user.id)
await bot.send_message(call.from_user.id, f'Теперь у вас: <b>есть</b> квартира',
reply_markup=types.ReplyKeyboardRemove())
await state.reset_state()
except MessageToReplyNotFound:
await bot.send_message(call.from_user.id, f'Произошла неизвестная ошибка')
await state.reset_state()
if call.data == 'false':
try:
await db.update_user_apartment(apartment=False, telegram_id=call.from_user.id)
await bot.send_message(call.from_user.id, f'Теперь у вас: <b>нет</b> квартиры',
reply_markup=types.ReplyKeyboardRemove())
await state.reset_state()
except MessageToReplyNotFound:
await bot.send_message(call.from_user.id, f'Произошла неизвестная ошибка')
await state.reset_state()
await state.reset_state()
await call.message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
"<b>🤝 Сотрудничество: </b>\n"
"Если у вас есть предложение о сотрудничестве, пишите сюда - "
"@DRomanovizc", reply_markup=inline_start)
@dp.message_handler(text='Образование')
async def change_education(message: types.Message):
keyboard = types.InlineKeyboardMarkup()
btn1 = types.InlineKeyboardButton(text='Высшее', callback_data='higher_edu')
keyboard.add(btn1)
btn2 = types.InlineKeyboardButton(text='Среднее', callback_data='secondary_edu')
keyboard.add(btn2)
await message.reply(f'Какое у Вас образование: ', reply_markup=keyboard)
await NewData.education.set()
@dp.callback_query_handler(text='higher_edu', state=NewData.education)
@dp.callback_query_handler(text='secondary_edu', state=NewData.education)
async def change_education(call: CallbackQuery, state: FSMContext):
await call.answer(cache_time=60)
if call.data == 'higher_edu':
try:
await db.update_user_apartment(apartment=True, telegram_id=call.from_user.id)
await bot.send_message(call.from_user.id, f'Теперь у вас: <b>Высшее</b> образование',
reply_markup=types.ReplyKeyboardRemove())
await state.reset_state()
except MessageToReplyNotFound:
await bot.send_message(call.from_user.id, f'Произошла неизвестная ошибка')
await state.reset_state()
if call.data == 'secondary_edu':
try:
await db.update_user_apartment(apartment=False, telegram_id=call.from_user.id)
await bot.send_message(call.from_user.id, f'Теперь у вас: <b>Среднее</b> образование',
reply_markup=types.ReplyKeyboardRemove())
await state.reset_state()
except MessageToReplyNotFound:
await bot.send_message(call.from_user.id, f'Произошла неизвестная ошибка')
await state.reset_state()
await state.reset_state()
await call.message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
"<b>🤝 Сотрудничество: </b>\n"
"Если у вас есть предложение о сотрудничестве, пишите сюда - "
"@DRomanovizc", reply_markup=inline_start)
@dp.message_handler(text='Занятие')
async def change_style(message: types.Message):
await message.reply(f'Чем вы занимаетесь?', reply_markup=lifestyle_inline_kb)
await NewData.hobbies.set()
@dp.callback_query_handler(state=NewData.hobbies,
text_contains=['study_lifestyle'])
@dp.callback_query_handler(state=NewData.hobbies,
text_contains=['work_lifestyle'])
@dp.callback_query_handler(state=NewData.hobbies,
text_contains=['job_find_lifestyle'])
@dp.callback_query_handler(state=NewData.hobbies,
text_contains=['householder_lifestyle'])
async def change_style(call: CallbackQuery, state: FSMContext):
await call.answer(cache_time=60)
if call.data == 'study_lifestyle':
try:
await db.update_user_lifestyle(lifestyle='Учусь', telegram_id=call.from_user.id)
await bot.send_message(call.from_user.id, f'Теперь вы учитесь!',
reply_markup=types.ReplyKeyboardRemove())
await state.reset_state()
except MessageToReplyNotFound:
await bot.send_message(call.from_user.id, f'Произошла неизвестная ошибка')
await state.reset_state()
elif call.data == 'work_lifestyle':
try:
await db.update_user_lifestyle(lifestyle='Работаю', telegram_id=call.from_user.id)
await bot.send_message(call.from_user.id, f'Теперь вы работаете!',
reply_markup=types.ReplyKeyboardRemove())
await state.reset_state()
except MessageToReplyNotFound:
await bot.send_message(call.from_user.id, f'Произошла неизвестная ошибка')
await state.reset_state()
elif call.data == 'job_find_lifestyle':
try:
await db.update_user_lifestyle(lifestyle='Ищу работу', telegram_id=call.from_user.id)
await bot.send_message(call.from_user.id, f'Теперь вы ищете работу!',
reply_markup=types.ReplyKeyboardRemove())
await state.reset_state()
except MessageToReplyNotFound:
await bot.send_message(call.from_user.id, f'Произошла неизвестная ошибка')
await state.reset_state()
elif call.data == 'householder_lifestyle':
try:
await db.update_user_lifestyle(lifestyle='Домохозяйка/Домохозяин', telegram_id=call.from_user.id)
await bot.send_message(call.from_user.id, f'Теперь вы домохозяин/домохозяйка!',
reply_markup=types.ReplyKeyboardRemove())
await state.reset_state()
except MessageToReplyNotFound:
await bot.send_message(call.from_user.id, f'Произошла неизвестная ошибка')
await state.reset_state()
await state.reset_state()
await call.message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
"<b>🤝 Сотрудничество: </b>\n"
"Если у вас есть предложение о сотрудничестве, пишите сюда - "
"@DRomanovizc", reply_markup=inline_start)
@dp.message_handler(text='Фото')
async def new_photo(message: types.Message):
await message.reply(f'Отправьте мне новую фотографию')
await NewData.photo.set()
@dp.message_handler(content_types=ContentType.PHOTO, state=NewData.photo)
async def update_photo_complete(message: types.Message, state: FSMContext):
file_id = message.photo[0].file_id
try:
await db.update_user_photo_id(photo_id=file_id, telegram_id=message.from_user.id)
await message.reply(f'Фото принято!',
reply_markup=types.ReplyKeyboardRemove())
await state.reset_state()
except MessageToReplyNotFound:
await message.reply(f'Произошла ошибка! Попробуйте еще раз либо отправьте другую фотографию. \n'
f'Если ошибка осталась, напишите системному администратору.')
await state.reset_state()
await message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
"<b>🤝 Сотрудничество: </b>\n"
"Если у вас есть предложение о сотрудничестве, пишите сюда - "
"@DRomanovizc", reply_markup=inline_start)
@dp.message_handler(text='О себе')
async def new_comment(message: types.Message):
await message.reply(f'Отправьте мне новое описание анкеты: ')
await NewData.commentary.set()
@dp.message_handler(state=NewData.commentary)
async def update_comment_complete(message: types.Message, state: FSMContext):
try:
await db.update_user_commentary(commentary=message.text, telegram_id=message.from_user.id)
await message.reply(f'Комментарий принят!',
reply_markup=types.ReplyKeyboardRemove())
await state.reset_state()
except MessageToReplyNotFound:
await message.reply(f'Произошла ошибка! Попробуйте еще раз изменить описание. '
f'Возможно, Ваше сообщение слишком большое\n'
f'Если ошибка осталась, напишите системному администратору.')
await state.reset_state()
await message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
"<b>🤝 Сотрудничество: </b>\n"
"Если у вас есть предложение о сотрудничестве, пишите сюда - "
"@DRomanovizc", reply_markup=inline_start)
| 2.21875
| 2
|
backend/app/app/db/init_db.py
|
reppertj/earworm
| 18
|
12778076
|
<reponame>reppertj/earworm
from sqlalchemy.orm import Session
from app import crud, schemas
from app.core.config import settings
from app.db import base # noqa: F401
# make sure all SQL Alchemy models are imported (app.db.base) before initializing DB
# otherwise, SQL Alchemy might fail to initialize relationships properly
# for more details: https://github.com/tiangolo/full-stack-fastapi-postgresql/issues/28
def init_db(db: Session) -> None:
user = crud.user.get_by_email(db, email=settings.FIRST_SUPERUSER)
if not user:
user_in = schemas.UserCreate(
email=settings.FIRST_SUPERUSER,
            password=settings.FIRST_SUPERUSER_PASSWORD,
is_superuser=True,
)
crud.user.create(db, obj_in=user_in)
embedding_model = crud.embedding_model.get_by_name(db, name=settings.ACTIVE_MODEL_NAME)
if not embedding_model:
model_in = schemas.EmbeddingModelCreate(
name=settings.ACTIVE_MODEL_NAME
)
crud.embedding_model.create(db, obj_in=model_in)
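# Usage sketch (added for illustration; `SessionLocal` is an assumption about the
# project's session factory, following the full-stack-fastapi-postgresql template
# referenced in the comment above):
#
#     from app.db.session import SessionLocal
#
#     def init() -> None:
#         db = SessionLocal()
#         init_db(db)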
| 2
| 2
|
train/basketball/multi.py
|
jypark0/mrtl
| 10
|
12778077
|
<reponame>jypark0/mrtl
import logging
import os
import time
from math import ceil
import torch
import utils
from config import config
from train.basketball import model
from train.basketball.model import DataParallelPassthrough
logger = logging.getLogger(config.parent_logger_name).getChild(__name__)
class BasketballMulti:
def __init__(self, device):
# Model
self.dims = []
self.scale = None
self.model = None
self.loss_fn = None
self.optimizer = None
self.scheduler = None
self.device = device
# Hyperparameters
self.params = None
self.K_B = None
self.K_C = None
# Data
self.train_loader = None
self.val_loader = None
self.train_T = None
self.eval_T = None
# Results
self.train_times = []
self.val_times = []
self.train_loss = []
self.val_loss = []
self.accum_gradients = []
self.gradients = []
self.grad_norms = []
self.grad_entropies = []
self.grad_vars = []
# Metrics
self.decision_threshold = 0.5
self.val_conf_matrix = []
self.val_acc = []
self.val_precision = []
self.val_recall = []
self.val_F1 = []
# Best
self.best_epochs = 0
self.best_model_dict = None
self.best_lr = 0
self.best_val_conf_matrix = None
self.best_val_acc = None
self.best_F1 = -1.
self.best_val_loss = float('inf')
def init_full_model(self, train_set):
counts = utils.class_counts(train_set)
self.dims = [train_set.b_dims, train_set.c_dims]
self.model = model.Full(train_set.a_dims, train_set.b_dims,
train_set.c_dims, counts)
if torch.cuda.device_count() > 1:
logger.info(f'Using {torch.cuda.device_count()} GPUs')
self.model = DataParallelPassthrough(self.model)
self.model.to(self.device)
self.accum_gradients.append(
torch.zeros_like(self.model.W.cpu(),
dtype=torch.float64).to(self.device))
self.gradients.append(
torch.zeros_like(self.model.W.cpu(),
dtype=torch.float64).to(self.device))
self.scale = (train_set.b_dims[1] / 5.) * (train_set.c_dims[0] / 6.)
def init_low_model(self, train_set, K):
counts = utils.class_counts(train_set)
self.dims = [train_set.b_dims, train_set.c_dims]
self.model = model.Low(train_set.a_dims, train_set.b_dims,
train_set.c_dims, K, counts)
if torch.cuda.device_count() > 1:
logger.info(f'Using {torch.cuda.device_count()} GPUs')
self.model = DataParallelPassthrough(self.model)
self.model.to(self.device)
self.accum_gradients.append(
torch.zeros_like(self.model.A,
dtype=torch.float64).to(self.device))
self.accum_gradients.append(
torch.zeros_like(self.model.B,
dtype=torch.float64).to(self.device))
self.accum_gradients.append(
torch.zeros_like(self.model.C,
dtype=torch.float64).to(self.device))
self.gradients.append(
torch.zeros_like(self.model.A,
dtype=torch.float64).to(self.device))
self.gradients.append(
torch.zeros_like(self.model.B,
dtype=torch.float64).to(self.device))
self.gradients.append(
torch.zeros_like(self.model.C,
dtype=torch.float64).to(self.device))
self.scale = (train_set.b_dims[1] / 5.) * (train_set.c_dims[0] / 6.)
def init_params(self, **kwargs):
self.params = kwargs.copy()
assert 'lr' in self.params, "lr is a required param"
# Optimizer and scheduler
self.optimizer = torch.optim.Adam(self.model.parameters(),
lr=self.params['lr'])
if 'step_size' in self.params and 'gamma' in self.params:
self.scheduler = torch.optim.lr_scheduler.StepLR(
self.optimizer,
step_size=self.params['step_size'],
gamma=self.params['gamma'])
if 'sigma' in self.params:
# Precompute kernel matrices
self.K_B = utils.create_kernel(self.dims[0], self.params['sigma'],
self.device)
self.K_C = utils.create_kernel(self.dims[1], self.params['sigma'],
self.device)
def init_loaders(self, train_set, val_set):
# Pos_weight
self.train_loader = torch.utils.data.DataLoader(
train_set,
batch_size=self.params['batch_size'],
shuffle=True,
num_workers=config.num_workers)
self.val_loader = torch.utils.data.DataLoader(
val_set,
batch_size=self.params['batch_size'],
shuffle=False,
num_workers=config.num_workers)
self.train_T = ceil(
len(self.train_loader.sampler) / (self.params['batch_size'] * 12))
self.eval_T = ceil(
len(self.val_loader.sampler) / (self.params['batch_size'] * 4))
counts = utils.class_counts(train_set)
self.loss_fn = torch.nn.BCEWithLogitsLoss(
pos_weight=torch.tensor(counts[0] / counts[1]))
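        # Added note: BCEWithLogitsLoss scales the positive-class term by pos_weight,
        # so, assuming class_counts() returns (negative, positive) counts, the ratio
        # counts[0] / counts[1] up-weights the rarer positive class to offset the
        # imbalance in train_set.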
def train_and_evaluate(self, save_dir=None):
logger.info('TRAIN BEGIN | {0}: {1},{2},{3}'.format(
type(self.model.module).__name__, self.model.a_dims, self.dims[0],
self.dims[1]))
logger.info('TRAIN | Optim: {0}, params:{1}'.format(
type(self.optimizer).__name__, self.params))
logger.info('TRAIN | Nonneg:{0}'.format(
self.params.get('nonnegative_weights')))
if self.scheduler is not None:
logger.info(
'TRAIN | sched: {0}, step_size: {1}, gamma: {2}'.format(
type(self.scheduler).__name__, self.scheduler.step_size,
self.params.get('gamma')))
epochs = 0
prev = float('inf')
start_time = time.time()
while epochs < config.max_epochs:
logger.info('TRAIN | lr: {0}'.format(
self.optimizer.param_groups[0]['lr']))
self.train_one_epoch(start_time)
self.evaluate_one_epoch(start_time)
epochs += 1
logger.info(
'[{0:.2f}s] Epoch: {1} | Train loss={2:0.6f}, Val loss={3:0.6f}'
.format(time.time() - start_time, epochs,
self.train_loss[-1][-1], self.val_loss[-1][-1]))
logger.info(
'[{0:.2f}s] Epoch: {1} | Val Acc=[{2:0.6f}, {3:0.6f}], Val F1={4:0.6f}'
.format(time.time() - start_time, epochs,
self.val_acc[-1][-1][0], self.val_acc[-1][-1][1],
self.val_F1[-1][-1]))
logger.info(
'[{0:.2f}s] Epoch: {1} | GN={2:0.6e}, GE={3:0.6f}, GV={4:0.6e}'
.format(time.time() - start_time, epochs,
self.grad_norms[-1][-1], self.grad_entropies[-1][-1],
self.grad_vars[-1][-1]))
# Create model checkpoints every 10 epochs
if save_dir is not None and epochs % 10 == 0:
torch.save(
self.best_model_dict,
os.path.join(
save_dir, "model_{0}_{1},{2}_epoch{3}.pt".format(
type(self.model.module).__name__.lower(),
utils.size_to_str(self.dims[0]),
utils.size_to_str(self.dims[1]), epochs)))
# Save best model (Val F1)
if self.val_F1[-1][-1] > self.best_F1:
logger.info('[{0:.2f}s] Max F1: {1:0.6f}'.format(
time.time() - start_time, self.val_F1[-1][-1]))
self.best_val_loss = self.val_loss[-1][-1]
self.best_epochs = epochs
self.best_model_dict = self.model.state_dict()
self.best_lr = self.optimizer.param_groups[0]['lr']
self.best_val_conf_matrix = self.val_conf_matrix[-1]
self.best_val_acc = self.val_acc[-1][-1]
self.best_F1 = self.val_F1[-1][-1]
if save_dir is not None:
torch.save(
self.best_model_dict,
os.path.join(
save_dir, "model_{0}_{1},{2}_best.pt".format(
type(self.model.module).__name__.lower(),
utils.size_to_str(self.dims[0]),
utils.size_to_str(self.dims[1]))))
# # Save best model (Val loss)
# if self.val_loss[-1] < self.best_val_loss:
# logger.info('[{0:.2f}s] Min Val Loss: {1:0.6f}'.format(time.time() - start_time, self.val_loss[-1]))
# self.best_val_loss = self.val_loss[-1]
# self.best_epochs = epochs
# self.best_model_dict = self.model.state_dict()
# self.best_lr = self.optimizer.param_groups[0]['lr']
# self.best_val_conf_matrix = self.val_conf_matrix[-1]
# self.best_val_acc = self.val_acc[-1]
# self.best_F1 = self.val_F1[-1]
# torch.save(self, os.path.join(save_dir, "multi_{0}_{1},{2}_best.pt".format(
# type(self.model.module).__name__.lower(),
# utils.size_to_str(self.dims[0]), utils.size_to_str(self.dims[1]))))
if type(self.model.module).__name__.startswith('Full'):
# # Number of epochs = 5
# if epochs >= 5:
# logger.info('TRAIN FINISH | {0}: {1},{2} | Epochs: {3}'.format(
# type(self.model.module).__name__, self.dims[0], self.dims[1], epochs))
# break
# if epochs >= 2:
# break
if self.params['stop_cond'] == 'val_loss_increase':
if self.val_loss[-1][-1] > prev:
logger.info(
'TRAIN FINISH | {0}: {1},{2} | Epochs: {3} | Stop criterion: {4}'
.format(
type(self.model.module).__name__, self.dims[0],
self.dims[1], epochs,
self.params['stop_cond']))
break
else:
prev = self.val_loss[-1][-1]
elif self.params['stop_cond'] == 'gradient_entropy':
if self.grad_entropies[-1][-1] > prev:
logger.info(
'TRAIN FINISH | {0}: {1},{2} | Epochs: {3} | Stop criterion: {4}'
.format(
type(self.model.module).__name__, self.dims[0],
self.dims[1], epochs,
self.params['stop_cond']))
break
else:
prev = self.grad_entropies[-1][-1]
elif self.params['stop_cond'] == 'gradient_norm':
if self.grad_norms[-1][-1] > prev:
logger.info(
'TRAIN FINISH | {0}: {1},{2} | Epochs: {3} | Stop criterion: {4}'
.format(
type(self.model.module).__name__, self.dims[0],
self.dims[1], epochs,
self.params['stop_cond']))
break
else:
prev = self.grad_norms[-1][-1]
elif self.params['stop_cond'] == 'gradient_variance':
if self.grad_vars[-1][-1] > prev:
logger.info(
'TRAIN FINISH | {0}: {1},{2} | Epochs: {3} | Stop criterion: {4}'
.format(
type(self.model.module).__name__, self.dims[0],
self.dims[1], epochs,
self.params['stop_cond']))
break
else:
prev = self.grad_vars[-1][-1]
else:
# Min epochs and val F1 decreases on average
# if epochs >= 10 and np.mean(self.val_F1[-3:]) < np.mean(self.val_F1[-6:-3]):
# logger.info('TRAIN FINISH | {0}: {1},{2} | Epochs: {3}'.format(
# type(self.model.module).__name__, self.dims[0], self.dims[1], epochs))
# break
# # Min epochs and val_loss increases on average
# if epochs >= 10 and np.mean(self.val_loss[-3:]) > np.mean(self.val_loss[-6:-3]):
# logger.info('TRAIN FINISH | {0}: {1},{2} | Epochs: {3}'.format(
# type(self.model.module).__name__, self.dims[0], self.dims[1], epochs))
# break
# if epochs >= 2:
# break
# Loss convergence
if self.params['stop_cond'] == 'val_loss_increase':
if self.val_loss[-1][-1] > prev:
logger.info(
'TRAIN FINISH | {0}: {1},{2} | Epochs: {3} | Stop criterion: {4}'
.format(
type(self.model.module).__name__, self.dims[0],
self.dims[1], epochs,
self.params['stop_cond']))
break
else:
prev = self.val_loss[-1][-1]
elif self.params['stop_cond'] == 'gradient_entropy':
if self.grad_entropies[-1][-1] > prev:
logger.info(
'TRAIN FINISH | {0}: {1},{2} | Epochs: {3} | Stop criterion: {4}'
.format(
type(self.model.module).__name__, self.dims[0],
self.dims[1], epochs,
self.params['stop_cond']))
break
else:
prev = self.grad_entropies[-1][-1]
elif self.params['stop_cond'] == 'gradient_norm':
if self.grad_norms[-1][-1] > prev:
logger.info(
'TRAIN FINISH | {0}: {1},{2} | Epochs: {3} | Stop criterion: {4}'
.format(
type(self.model.module).__name__, self.dims[0],
self.dims[1], epochs,
self.params['stop_cond']))
break
else:
prev = self.grad_norms[-1][-1]
elif self.params['stop_cond'] == 'gradient_variance':
if self.grad_vars[-1][-1] > prev:
logger.info(
'TRAIN FINISH | {0}: {1},{2} | Epochs: {3} | Stop criterion: {4}'
.format(
type(self.model.module).__name__, self.dims[0],
self.dims[1], epochs,
self.params['stop_cond']))
break
else:
prev = self.grad_vars[-1][-1]
if self.scheduler is not None:
self.scheduler.step()
if save_dir is not None:
torch.save(
self,
os.path.join(
save_dir, "multi_{0}_{1},{2}.pt".format(
type(self.model.module).__name__.lower(),
utils.size_to_str(self.dims[0]),
utils.size_to_str(self.dims[1]))))
def train_one_epoch(self, start_time):
train_loss = 0.0
self.train_times.append([])
self.train_loss.append([])
self.grad_norms.append([])
self.grad_entropies.append([])
self.grad_vars.append([])
# Zero out gradients
for i in range(len(self.gradients)):
self.accum_gradients[i].zero_()
self.gradients[i].zero_()
self.model.train()
for i, (a, bh_pos, def_pos, y) in enumerate(self.train_loader):
a = a.to(self.device)
bh_pos = bh_pos.to(self.device)
def_pos = def_pos.to(self.device)
y = y.to(self.device)
# zero the parameter gradients
self.optimizer.zero_grad()
# Forward pass
outputs = self.model(a, bh_pos, def_pos)
# Compute loss
loss = self.loss_fn(outputs, y.float())
if self.params.get('reg_coeff'):
if type(self.model.module).__name__.startswith('Full'):
reg = self.params['reg_coeff'] * (
utils.bball_spatial_regularizer(
self.model, self.K_B, self.K_C, self.device) +
utils.l2_regularizer(self.model, self.device))
else:
reg = self.params['reg_coeff'] * (
utils.bball_spatial_regularizer(
self.model, self.K_B, self.K_C, self.device) +
utils.l2_regularizer(self.model, self.device))
if i == 0:
logger.info(
"TRAIN | Step {0} | Loss={1:0.6f}, Reg={2:0.6f}".
format(i + 1, loss, reg))
loss = loss + reg
loss.backward()
# Accumulate gradients
utils.accum_grad(self.accum_gradients, self.model)
self.optimizer.step()
# Constrain weights
if type(self.model.module).__name__.startswith(
'Low') and self.params.get('nonnegative_weights'):
with torch.no_grad():
self.model.constrain()
# Aggregate train_loss across batches
train_loss += loss.item()
# Log interval
# if i % self.train_T == self.train_T - 1:
# curr_loss = train_loss / (i + 1)
# logger.debug('TRAIN | Step {0} | Loss={1:0.6f}'.format(i + 1, curr_loss))
if i == 2 or i % self.train_T == self.train_T - 1 or i == len(
self.train_loader) - 1:
curr_loss = train_loss / (i + 1)
self.train_times[-1].append(time.time() - start_time)
self.train_loss[-1].append(curr_loss)
# Average gradients
for p in range(len(self.gradients)):
self.gradients[p] = self.accum_gradients[p].div(i + 1)
# Calculate gradient statistics
grad_norm, grad_entropy, grad_var = utils.grad_stats(
self.gradients)
self.grad_norms[-1].append(grad_norm)
self.grad_entropies[-1].append(grad_entropy)
self.grad_vars[-1].append(grad_var)
logger.info(
'TRAIN | Step {0} | Loss={1:0.6f}, GN={2:0.6e}, GE={3:0.6f}, GV={4:0.6e}'
.format(i + 1, curr_loss, grad_norm, grad_entropy,
grad_var))
def evaluate_one_epoch(self, start_time):
val_loss = 0.0
tn = 0
fn = 0
fp = 0
tp = 0
self.val_times.append([])
self.val_loss.append([])
self.val_acc.append([])
self.val_precision.append([])
self.val_recall.append([])
self.val_F1.append([])
self.model.eval()
with torch.no_grad():
for i, (a, bh_pos, def_pos, y) in enumerate(self.val_loader):
a = a.to(self.device)
bh_pos = bh_pos.to(self.device)
def_pos = def_pos.to(self.device)
y = y.to(self.device)
outputs = self.model(a, bh_pos, def_pos)
# Compute loss
loss = self.loss_fn(outputs, y.float())
if self.params.get('reg_coeff'):
                    # Both model variants use the same spatial + L2 regularizer
                    # (earlier experiments also tried l1_regularizer, plain
                    # spatial_regularizer, and combinations of these).
                    reg = self.params['reg_coeff'] * (
                        utils.bball_spatial_regularizer(
                            self.model, self.K_B, self.K_C, self.device) +
                        utils.l2_regularizer(self.model, self.device))
if i == 0:
logger.info(
"VAL | Step {0} | Loss={1:0.6f}, Reg={2:0.6f}".
format(i + 1, loss, reg))
loss = loss + reg
# Aggregate train_loss across batches
val_loss += loss.item()
# Update confusion matrix
preds = (outputs > self.decision_threshold).bool()
tn += torch.sum((preds == 0) & (y == 0)).item()
fn += torch.sum((preds == 0) & (y == 1)).item()
fp += torch.sum((preds == 1) & (y == 0)).item()
tp += torch.sum((preds == 1) & (y == 1)).item()
# Log interval
# if i % self.eval_T == self.eval_T - 1:
# logger.debug('VAL | Step {0} | Loss={1:0.6f}'.format(i + 1, curr_loss))
if i == 2 or i % self.eval_T == self.eval_T - 1 or i == len(
self.val_loader) - 1:
curr_loss = val_loss / (i + 1)
self.val_times[-1].append(time.time() - start_time)
self.val_loss[-1].append(curr_loss)
# Class accuracy
self.val_acc[-1].append([tn / (tn + fp), tp / (tp + fn)])
F1, precision, recall = utils.calc_F1(fp, fn, tp)
self.val_precision[-1].append(precision)
self.val_recall[-1].append(recall)
self.val_F1[-1].append(F1)
logger.info(
'VAL | Step {0} | Loss={1:0.6f}, P={2:0.6f}, R={3:0.6f}, F1={4:0.6f}'
.format(i + 1, curr_loss, precision, recall, F1))
# Conf Matrix
self.val_conf_matrix.append([[tn, fp], [fn, tp]])
logger.info("VAL | Loss={0:6f}, Conf. matrix={1}".format(
self.val_loss[-1][-1], [[tn, fp], [fn, tp]]))
def test(self, test_set):
test_loader = torch.utils.data.DataLoader(
test_set,
batch_size=self.params['batch_size'],
shuffle=False,
num_workers=config.num_workers)
tn = 0
fn = 0
fp = 0
tp = 0
out = []
labels = []
self.model.eval()
with torch.no_grad():
for i, (a, bh_pos, def_pos, y) in enumerate(test_loader):
a = a.to(self.device)
bh_pos = bh_pos.to(self.device)
def_pos = def_pos.to(self.device)
y = y.to(self.device)
outputs = self.model(a, bh_pos, def_pos)
# Update confusion matrix
preds = (outputs > self.decision_threshold).bool()
tn += torch.sum((preds == 0) & (y == 0)).item()
fn += torch.sum((preds == 0) & (y == 1)).item()
fp += torch.sum((preds == 1) & (y == 0)).item()
tp += torch.sum((preds == 1) & (y == 1)).item()
out.append(outputs)
labels.append(y)
# Class accuracy
test_acc = [tn / (tn + fp), tp / (tp + fn)]
F1, precision, recall = utils.calc_F1(fp, fn, tp)
logger.info(
"TEST | Conf. matrix={0}, Acc=[{1:0.6f}, {2:0.6f}], P={3:0.6f}, R={4:0.6f}, F1={5:0.6f}"
.format([[tn, fp], [fn, tp]], test_acc[0], test_acc[1], precision,
recall, F1))
        return [[tn, fp], [fn, tp]], test_acc, precision, recall, F1, out, labels
| 2.203125
| 2
|
fonts/font10mono.py
|
robert-hh/SSD1963-TFT-Library-for-PyBoard
| 16
|
12778078
|
<reponame>robert-hh/SSD1963-TFT-Library-for-PyBoard<filename>fonts/font10mono.py
# Code generated by cfonts_to_trans_py.py
import TFTfont
_font10mono = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x02\x00\x80\x20\x08\x02\x00\x80\x20\x08\x02\x00\x80\x00\x08\x00\x00\x00\x00'\
b'\x00\x05\x01\x40\x50\x14\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x04\x81\x20\x48\x12\x1f\xc1\x20\x90\x7f\x09\x02\x40\x90\x24\x00\x00\x00\x00'\
b'\x08\x0f\x84\x91\x24\x48\x12\x03\x80\x38\x09\x02\x44\x91\x24\x3e\x02\x00\x80\x00'\
b'\x00\x1c\x08\x82\x22\x89\x22\x87\x40\x7c\x28\x92\x28\x88\x22\x07\x00\x00\x00\x00'\
b'\x00\x0e\x04\x41\x10\x44\x0a\x03\x01\x42\x89\x21\x48\x21\x14\x38\x80\x00\x00\x00'\
b'\x00\x02\x00\x80\x20\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x02\x01\x00\x40\x10\x08\x02\x00\x80\x20\x08\x02\x00\x80\x20\x04\x01\x00\x40\x08'\
b'\x10\x02\x00\x80\x20\x04\x01\x00\x40\x10\x04\x01\x00\x40\x10\x08\x02\x00\x80\x40'\
b'\x00\x02\x00\x80\xf8\x08\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x08\x02\x00\x81\xfc\x08\x02\x00\x80\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x02\x00\x80\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1c\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00'\
b'\x00\x00\x80\x20\x08\x04\x01\x00\x40\x20\x08\x02\x01\x00\x40\x10\x00\x00\x00\x00'\
b'\x00\x07\x02\x21\x04\x41\x10\x44\x91\x04\x41\x10\x44\x10\x88\x1c\x00\x00\x00\x00'\
b'\x00\x01\x00\xc0\x50\x24\x01\x00\x40\x10\x04\x01\x00\x40\x10\x04\x00\x00\x00\x00'\
b'\x00\x07\x02\x21\x04\x01\x00\x40\x10\x08\x04\x06\x02\x01\x00\x7f\x00\x00\x00\x00'\
b'\x00\x06\x02\x41\x08\x02\x01\x01\x80\x18\x01\x00\x44\x10\x88\x1c\x00\x00\x00\x00'\
b'\x00\x00\x80\x60\x28\x0a\x04\x81\x20\x88\x42\x1f\xc0\x20\x08\x02\x00\x00\x00\x00'\
b'\x00\x1f\x84\x01\x00\x40\x17\x06\x20\x04\x01\x00\x44\x10\x88\x1c\x00\x00\x00\x00'\
b'\x00\x07\x82\x11\x04\x40\x17\x06\x21\x04\x41\x10\x44\x10\x88\x1c\x00\x00\x00\x00'\
b'\x00\x1f\xc0\x10\x08\x04\x01\x00\x40\x20\x08\x02\x01\x00\x40\x10\x00\x00\x00\x00'\
b'\x00\x07\x02\x21\x04\x41\x08\x81\xc0\x88\x41\x10\x44\x10\x88\x1c\x00\x00\x00\x00'\
b'\x00\x07\x02\x21\x04\x41\x10\x44\x10\x8c\x1d\x00\x44\x11\x08\x3c\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x08\x02\x00\x80\x00'\
b'\x00\x00\x00\x10\x18\x18\x08\x04\x00\x80\x18\x01\x80\x10\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x07\xf0\x00\x00\x1f\xc0\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x04\x00\xc0\x0c\x00\x80\x10\x08\x0c\x0c\x04\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x07\x82\x11\x02\x40\x80\x20\x10\x08\x02\x01\x00\x40\x00\x04\x00\x00\x00\x00'\
b'\x00\x07\x02\x21\x04\x41\x13\xc5\x11\x44\x4f\x10\x02\x00\x80\x1e\x00\x00\x00\x00'\
b'\x00\x02\x01\x40\x50\x14\x08\x82\x20\x88\x7f\x10\x44\x12\x02\x80\x80\x00\x00\x00'\
b'\x00\x1f\x04\x21\x08\x42\x10\x87\xc1\x08\x41\x10\x44\x11\x08\x7c\x00\x00\x00\x00'\
b'\x00\x07\x02\x21\x04\x40\x10\x04\x01\x00\x40\x10\x04\x10\x88\x1c\x00\x00\x00\x00'\
b'\x00\x1e\x04\x41\x08\x42\x10\x44\x11\x04\x41\x10\x84\x21\x10\x78\x00\x00\x00\x00'\
b'\x00\x1f\xc4\x01\x00\x40\x10\x07\xe1\x00\x40\x10\x04\x01\x00\x7f\x00\x00\x00\x00'\
b'\x00\x1f\xc4\x01\x00\x40\x10\x07\xe1\x00\x40\x10\x04\x01\x00\x40\x00\x00\x00\x00'\
b'\x00\x07\x02\x21\x04\x40\x10\x04\x01\x1c\x41\x10\x44\x10\x84\x1e\x00\x00\x00\x00'\
b'\x00\x10\x44\x11\x04\x41\x10\x47\xf1\x04\x41\x10\x44\x11\x04\x41\x00\x00\x00\x00'\
b'\x00\x0f\x80\x80\x20\x08\x02\x00\x80\x20\x08\x02\x00\x80\x20\x3e\x00\x00\x00\x00'\
b'\x00\x00\x40\x10\x04\x01\x00\x40\x10\x04\x01\x00\x44\x11\x08\x3c\x00\x00\x00\x00'\
b'\x00\x10\x24\x11\x08\x44\x12\x05\x01\xa0\x44\x10\x84\x21\x04\x40\x80\x00\x00\x00'\
b'\x00\x10\x04\x01\x00\x40\x10\x04\x01\x00\x40\x10\x04\x01\x00\x7f\x00\x00\x00\x00'\
b'\x00\x10\x46\x31\x8c\x55\x15\x44\x91\x24\x41\x10\x44\x11\x04\x41\x00\x00\x00\x00'\
b'\x00\x10\x46\x11\x84\x51\x14\x44\x91\x24\x45\x11\x44\x31\x0c\x41\x00\x00\x00\x00'\
b'\x00\x07\x02\x21\x04\x41\x10\x44\x11\x04\x41\x10\x44\x10\x88\x1c\x00\x00\x00\x00'\
b'\x00\x1f\x04\x21\x04\x41\x10\x87\xc1\x00\x40\x10\x04\x01\x00\x40\x00\x00\x00\x00'\
b'\x00\x07\x02\x21\x04\x41\x10\x44\x11\x04\x41\x10\x44\xd0\x88\x1f\x00\x20\x00\x00'\
b'\x00\x1f\x04\x21\x04\x41\x10\x87\xc1\x10\x42\x10\x84\x11\x04\x40\x80\x00\x00\x00'\
b'\x00\x0f\x84\x11\x04\x40\x08\x01\x80\x10\x02\x00\x44\x11\x04\x3e\x00\x00\x00\x00'\
b'\x00\x3f\xe0\x80\x20\x08\x02\x00\x80\x20\x08\x02\x00\x80\x20\x08\x00\x00\x00\x00'\
b'\x00\x10\x44\x11\x04\x41\x10\x44\x11\x04\x41\x10\x44\x10\x88\x1c\x00\x00\x00\x00'\
b'\x00\x20\x28\x09\x04\x41\x10\x42\x20\x88\x22\x05\x01\x40\x50\x08\x00\x00\x00\x00'\
b'\x00\x20\x28\x0a\x02\x80\x92\x44\x91\x24\x55\x15\x45\x50\x88\x22\x00\x00\x00\x00'\
b'\x00\x20\x24\x10\x88\x22\x05\x00\x80\x20\x14\x08\x82\x21\x04\x80\x80\x00\x00\x00'\
b'\x00\x20\x24\x11\x04\x22\x08\x81\x40\x20\x08\x02\x00\x80\x20\x08\x00\x00\x00\x00'\
b'\x00\x0f\xc0\x10\x08\x02\x01\x00\x80\x20\x10\x04\x02\x01\x00\x7f\x00\x00\x00\x00'\
b'\x00\x03\x80\x80\x20\x08\x02\x00\x80\x20\x08\x02\x00\x80\x20\x08\x02\x00\x80\x38'\
b'\x00\x04\x01\x00\x40\x08\x02\x00\x80\x10\x04\x01\x00\x20\x08\x02\x00\x00\x00\x00'\
b'\x00\x07\x00\x40\x10\x04\x01\x00\x40\x10\x04\x01\x00\x40\x10\x04\x01\x00\x40\x70'\
b'\x00\x02\x01\x40\x50\x22\x08\x84\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xfc\x00'\
b'\x00\x06\x00\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x3e\x10\x40\x10\x7c\x21\x10\x44\x11\x0c\x3d\x00\x00\x00\x00'\
b'\x00\x10\x04\x01\x00\x5c\x18\x84\x11\x04\x41\x10\x44\x11\x88\x5c\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x1c\x08\x84\x11\x00\x40\x10\x04\x10\x88\x1c\x00\x00\x00\x00'\
b'\x00\x00\x40\x10\x04\x1d\x08\xc4\x11\x04\x41\x10\x44\x10\x8c\x1d\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x1c\x08\x84\x11\x04\x7f\x10\x04\x10\x88\x1c\x00\x00\x00\x00'\
b'\x00\x01\xe0\x80\x20\x3f\x02\x00\x80\x20\x08\x02\x00\x80\x20\x08\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x1d\x08\xc4\x11\x04\x41\x10\x44\x10\x8c\x1d\x00\x44\x20\xf0'\
b'\x00\x10\x04\x01\x00\x5c\x18\x84\x11\x04\x41\x10\x44\x11\x04\x41\x00\x00\x00\x00'\
b'\x00\x01\x00\x00\x00\x3c\x01\x00\x40\x10\x04\x01\x00\x40\x10\x04\x00\x00\x00\x00'\
b'\x00\x01\x00\x00\x00\x3c\x01\x00\x40\x10\x04\x01\x00\x40\x10\x04\x01\x00\x41\xe0'\
b'\x00\x10\x04\x01\x00\x42\x11\x04\x81\x40\x68\x11\x04\x21\x04\x40\x80\x00\x00\x00'\
b'\x00\x0f\x00\x40\x10\x04\x01\x00\x40\x10\x04\x01\x00\x40\x10\x04\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x5b\x1b\x44\x91\x24\x49\x12\x44\x91\x24\x49\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x5c\x18\x84\x11\x04\x41\x10\x44\x11\x04\x41\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x1c\x08\x84\x11\x04\x41\x10\x44\x10\x88\x1c\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x5c\x18\x84\x11\x04\x41\x10\x44\x11\x88\x5c\x10\x04\x01\x00'\
b'\x00\x00\x00\x00\x00\x1d\x08\xc4\x11\x04\x41\x10\x44\x10\x8c\x1d\x00\x40\x10\x04'\
b'\x00\x00\x00\x00\x00\x6e\x0c\x42\x00\x80\x20\x08\x02\x00\x80\x20\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x3e\x10\x44\x10\xc0\x0c\x00\x84\x11\x04\x3e\x00\x00\x00\x00'\
b'\x00\x04\x01\x00\x40\x7e\x04\x01\x00\x40\x10\x04\x01\x00\x40\x0f\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x41\x10\x44\x11\x04\x41\x10\x44\x10\x8c\x1d\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x80\x90\x44\x10\x88\x22\x08\x81\x40\x50\x08\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x88\xa7\x29\x49\x54\x55\x15\x45\x50\x88\x22\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x41\x08\x81\x40\x50\x08\x05\x01\x40\x88\x41\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x41\x10\x42\x20\x88\x14\x05\x01\x40\x20\x08\x04\x01\x01\x80'\
b'\x00\x00\x00\x00\x00\x7f\x80\x20\x10\x08\x04\x02\x01\x00\x80\x7f\x80\x00\x00\x00'\
b'\x00\x01\x80\x80\x20\x08\x02\x00\x80\x60\x30\x06\x00\x80\x20\x08\x02\x00\x80\x18'\
b'\x00\x02\x00\x80\x20\x08\x02\x00\x80\x20\x08\x02\x00\x80\x20\x08\x02\x00\x80\x20'\
b'\x00\x06\x00\x40\x10\x04\x01\x00\x40\x18\x03\x01\x80\x40\x10\x04\x01\x00\x40\x60'\
b'\x00\x00\x00\x00\x00\x00\x00\x03\x91\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x03\xe0\x88\x22\x08\x82\x20\x88\x22\x08\x82\x20\x88\x22\x08\x83\xe0\x00'\
_font10mono_index = b'\x00\x00\x14\x00\x28\x00\x3c\x00\x50\x00\x64\x00\x78\x00\x8c\x00'\
b'\xa0\x00\xb4\x00\xc8\x00\xdc\x00\xf0\x00\x04\x01\x18\x01\x2c\x01'\
b'\x40\x01\x54\x01\x68\x01\x7c\x01\x90\x01\xa4\x01\xb8\x01\xcc\x01'\
b'\xe0\x01\xf4\x01\x08\x02\x1c\x02\x30\x02\x44\x02\x58\x02\x6c\x02'\
b'\x80\x02\x94\x02\xa8\x02\xbc\x02\xd0\x02\xe4\x02\xf8\x02\x0c\x03'\
b'\x20\x03\x34\x03\x48\x03\x5c\x03\x70\x03\x84\x03\x98\x03\xac\x03'\
b'\xc0\x03\xd4\x03\xe8\x03\xfc\x03\x10\x04\x24\x04\x38\x04\x4c\x04'\
b'\x60\x04\x74\x04\x88\x04\x9c\x04\xb0\x04\xc4\x04\xd8\x04\xec\x04'\
b'\x00\x05\x14\x05\x28\x05\x3c\x05\x50\x05\x64\x05\x78\x05\x8c\x05'\
b'\xa0\x05\xb4\x05\xc8\x05\xdc\x05\xf0\x05\x04\x06\x18\x06\x2c\x06'\
b'\x40\x06\x54\x06\x68\x06\x7c\x06\x90\x06\xa4\x06\xb8\x06\xcc\x06'\
b'\xe0\x06\xf4\x06\x08\x07\x1c\x07\x30\x07\x44\x07\x58\x07\x6c\x07'\
b'\x80\x07'
font10mono = TFTfont.TFTFont(_font10mono, _font10mono_index, 16, 10, 96)
fonts = {"font10mono":font10mono,
}
| 1.882813
| 2
|
starter_code/organism/utilities.py
|
mbchang/societal-decision-making
| 38
|
12778079
|
<gh_stars>10-100
from collections import OrderedDict
import numpy as np
def get_second_highest_bid(bids, winner):
if len(bids) == 1:
second_highest_bid = 0
else:
second_highest_bid = -np.inf
for index, b in bids.items():
if b > second_highest_bid and index != winner:
second_highest_bid = b
second_highest_bid_index = index
assert bids[second_highest_bid_index] == second_highest_bid <= bids[winner]
return second_highest_bid
def vickrey_utilities(utility_args, args):
adjusted_gamma = args.gamma**(utility_args.end_time-utility_args.start_time)
second_highest_bid = get_second_highest_bid(
utility_args.bids, utility_args.winner)
utilities = OrderedDict()
for a_id in utility_args.bids:
        if a_id == utility_args.winner:
lookahead = utility_args.next_winner_bid
revenue = utility_args.reward + adjusted_gamma*lookahead
utilities[a_id] = revenue - second_highest_bid
else:
utilities[a_id] = 0
return utilities
def credit_conserving_vickrey_utilities(utility_args, args):
adjusted_gamma = args.gamma**(utility_args.end_time-utility_args.start_time)
second_highest_bid = get_second_highest_bid(
utility_args.bids, utility_args.winner)
utilities = OrderedDict()
for a_id in utility_args.bids:
if a_id == utility_args.winner:
lookahead = utility_args.next_second_highest_bid
revenue = utility_args.reward + adjusted_gamma*lookahead
utilities[a_id] = revenue - second_highest_bid
else:
utilities[a_id] = 0
return utilities
def bucket_brigade_utilities(utility_args, args):
adjusted_gamma = args.gamma**(utility_args.end_time-utility_args.start_time)
utilities = OrderedDict()
for a_id in utility_args.bids:
if a_id == utility_args.winner:
revenue = utility_args.reward + adjusted_gamma*utility_args.next_winner_bid
utilities[a_id] = revenue - utility_args.bids[utility_args.winner]
else:
utilities[a_id] = 0
return utilities
def environment_reward_utilities(utility_args, args):
utilities = OrderedDict()
for a_id in utility_args.bids:
if a_id == utility_args.winner:
utilities[a_id] = utility_args.reward
else:
utilities[a_id] = 0
return utilities
| 2.515625
| 3
|
data/load_local_dataset.py
|
marridG/2020-EI339
| 0
|
12778080
|
import os
import inspect
from tqdm import tqdm
import numpy as np
import typing
import cv2
import torchvision
import torch
from PIL import Image
from torch.utils.data import Dataset, DataLoader
# root directory of this file (correct even when the module is imported or called from elsewhere)
CRT_ABS_PATH = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
# keys of dataset
KEYS = ["MNIST", "EI339", "combined"]
# relative to root/
PATH_TO_DATASET = {"MNIST": "MNIST/",
"EI339": "EI339-CN dataset sjtu/",
"MNIST+EI339": "MNIST+EI339/", }
# relative to root/PATH_TO_DATASET
DATASET_MAPPING_FN = {"MNIST": None,
"combined": None,
"EI339": {"train": {"data": "mapping/train_data.npy",
"label": "mapping/train_label.npy"},
"test": {"data": "mapping/test_data.npy",
"label": "mapping/test_label.npy"}, }, }
# relative to root/PATH_TO_DATASET
DATASET_SPLITS = {"MNIST": {"raw": "raw/",
"train": "processed/training.pt",
"test": "processed/test.pt"},
"EI339": {"raw": "",
"train": "processed/training.pt",
"test": "processed/test.pt"},
"MNIST+EI339": {"raw": None,
"train": "training.pt",
"test": "test.pt"}, }
"""
~ root (CRT_ABS_PATH)
+ --- PATH_TO_DATASET
+ --- DATASET_MAPPING_FN
+ --- DATASET_SPLITS
"""
def __ei339_generate_raw_mappings__() -> \
typing.Tuple[typing.Tuple[np.ndarray, np.ndarray],
typing.Tuple[np.ndarray, np.ndarray]]:
abs_train_data_fn = os.path.join(
CRT_ABS_PATH, PATH_TO_DATASET["EI339"], DATASET_MAPPING_FN["EI339"]["train"]["data"])
abs_train_label_fn = os.path.join(
CRT_ABS_PATH, PATH_TO_DATASET["EI339"], DATASET_MAPPING_FN["EI339"]["train"]["label"])
abs_test_data_fn = os.path.join(
CRT_ABS_PATH, PATH_TO_DATASET["EI339"], DATASET_MAPPING_FN["EI339"]["test"]["data"])
abs_test_label_fn = os.path.join(
CRT_ABS_PATH, PATH_TO_DATASET["EI339"], DATASET_MAPPING_FN["EI339"]["test"]["label"])
if os.path.exists(path=abs_train_data_fn) and os.path.exists(path=abs_train_label_fn) \
and os.path.exists(path=abs_test_data_fn) and os.path.exists(path=abs_test_label_fn):
# print("Mappings Loaded from File")
return (np.load(abs_train_data_fn), np.load(abs_train_label_fn)), \
(np.load(abs_test_data_fn), np.load(abs_test_label_fn))
__ensure_path_validation__(abs_train_data_fn)
__ensure_path_validation__(abs_train_label_fn)
__ensure_path_validation__(abs_test_data_fn)
__ensure_path_validation__(abs_test_label_fn)
train_data_map, train_label_map = [], []
test_data_map, test_label_map = [], []
for label_num in tqdm(range(1, 10 + 1)):
# print("Mapping Images of Label %d" % label_num)
abs_path_to_file_folder = os.path.join(
CRT_ABS_PATH, PATH_TO_DATASET["EI339"],
DATASET_SPLITS["EI339"]["raw"], str(label_num))
abs_path_to_tr_files = os.path.join(abs_path_to_file_folder, "training/")
path_to_test_files = os.path.join(abs_path_to_file_folder, "testing/")
save_label_num = 0 if 10 == label_num else label_num
save_label_num += 10
# Training Data
for file in os.listdir(abs_path_to_tr_files):
abs_path_to_tr_file = os.path.join(abs_path_to_tr_files, file)
train_data_map.append(abs_path_to_tr_file)
train_label_map.append(save_label_num)
# Test Data
for file in os.listdir(path_to_test_files):
abs_path_to_test_file = os.path.join(path_to_test_files, file)
test_data_map.append(abs_path_to_test_file)
test_label_map.append(save_label_num)
train_data_map = np.array(train_data_map) # (cnt,) <str> as <U129>
train_label_map = np.array(train_label_map) # (cnt,) <np.int32>
train_idx = np.arange(train_label_map.size)
np.random.shuffle(train_idx)
train_data_map = train_data_map[train_idx]
train_label_map = train_label_map[train_idx]
print("EI339: Train Data Mapping Shuffled")
test_data_map = np.array(test_data_map) # (cnt,) <str> as <U129>
test_label_map = np.array(test_label_map) # (cnt,) <int>
test_idx = np.arange(test_label_map.size)
np.random.shuffle(test_idx)
test_data_map = test_data_map[test_idx]
test_label_map = test_label_map[test_idx]
print("EI339: Test Data Mapping Shuffled")
np.save(arr=train_data_map, file=abs_train_data_fn)
np.save(arr=train_label_map, file=abs_train_label_fn)
np.save(arr=test_data_map, file=abs_test_data_fn)
np.save(arr=test_label_map, file=abs_test_label_fn)
return (train_data_map, train_label_map), (test_data_map, test_label_map)
def __ei339_load_raw_image__(path: str) -> np.ndarray:
img = cv2.imread(path, cv2.IMREAD_GRAYSCALE)
img = cv2.resize(img, dsize=(28, 28))
# _, img = cv2.threshold(img, thresh=128, maxval=255, type=cv2.THRESH_BINARY)
img = 255 - img
return img
def __ensure_path_validation__(filename_with_path: str) -> None:
path = os.path.split(filename_with_path)[0]
if not os.path.exists(path):
os.mkdir(path)
assert os.path.exists(path), "[Error] Access to Directory \"%s\" is Denied" % path
def __ei339_process_raw_data__() -> None:
abs_train_dataset_path = os.path.join(
CRT_ABS_PATH, PATH_TO_DATASET["EI339"], DATASET_SPLITS["EI339"]["train"])
abs_test_dataset_path = os.path.join(
CRT_ABS_PATH, PATH_TO_DATASET["EI339"], DATASET_SPLITS["EI339"]["test"])
if os.path.exists(abs_train_dataset_path) and os.path.exists(abs_test_dataset_path):
return
__ensure_path_validation__(abs_train_dataset_path)
__ensure_path_validation__(abs_test_dataset_path)
(train_data_fn, train_label), (test_data_fn, test_label) = \
__ei339_generate_raw_mappings__()
# train data
train_data = []
for file in tqdm(train_data_fn):
train_data.append(__ei339_load_raw_image__(path=file))
train_data = np.array(train_data)
train_data = torch.from_numpy(train_data) # torch.Size([7385, 28, 28])
train_label = torch.from_numpy(train_label).long() # torch.Size([7385])
# print(train_data.shape, train_label.shape)
# test data
test_data = []
for file in tqdm(test_data_fn):
test_data.append(__ei339_load_raw_image__(path=file))
test_data = np.array(test_data)
test_data = torch.from_numpy(test_data) # torch.Size([2034, 28, 28])
test_label = torch.from_numpy(test_label).long() # torch.Size([2034])
# print(test_data.shape, test_label.shape)
torch.save((train_data, train_label), f=abs_train_dataset_path)
torch.save((test_data, test_label), f=abs_test_dataset_path)
print("EI339: Train & Test Data Saved")
def __combine_dataset__(data_fn_list: list, output_filename: str) -> None:
    assert len(data_fn_list) > 1, "[Error] Given to-combine list must contain at least 2 files"
if os.path.exists(output_filename):
return
__ensure_path_validation__(output_filename)
for file in data_fn_list:
if not os.path.exists(file):
raise RuntimeError("[Error] File \"%s\" NOT Exist" % file)
data_list, targets_list = [], []
for file in data_fn_list:
_data, _target = torch.load(file)
data_list.append(_data)
targets_list.append(_target)
data = torch.cat(data_list, dim=0)
targets = torch.cat(targets_list, dim=0)
torch.save((data, targets), f=output_filename)
print("Dataset Combined")
for file in data_fn_list:
print("\tFrom \"%s\"" % file)
print("\tTo \"%s\"" % output_filename)
class TorchLocalDataLoader(Dataset):
def __init__(self, train: bool = True,
transform: torchvision.transforms.transforms.Compose = None,
mnist: bool = False, ei339: bool = False):
        assert mnist or ei339, "[Error] No Dataset is Selected"
self.transform = transform
self.mnist_train_path = os.path.join(
CRT_ABS_PATH, PATH_TO_DATASET["MNIST"], DATASET_SPLITS["MNIST"]["train"])
self.mnist_test_path = os.path.join(
CRT_ABS_PATH, PATH_TO_DATASET["MNIST"], DATASET_SPLITS["MNIST"]["test"])
self.ei339_train_path = os.path.join(
CRT_ABS_PATH, PATH_TO_DATASET["EI339"], DATASET_SPLITS["EI339"]["train"])
self.ei339_test_path = os.path.join(
CRT_ABS_PATH, PATH_TO_DATASET["EI339"], DATASET_SPLITS["EI339"]["test"])
self.combined_train_path = os.path.join(
CRT_ABS_PATH, PATH_TO_DATASET["MNIST+EI339"], DATASET_SPLITS["MNIST+EI339"]["train"])
self.combined_test_path = os.path.join(
CRT_ABS_PATH, PATH_TO_DATASET["MNIST+EI339"], DATASET_SPLITS["MNIST+EI339"]["test"])
# initialize dataset: MNIST, EI339, combined
torchvision.datasets.MNIST(CRT_ABS_PATH, train=True, download=True)
torchvision.datasets.MNIST(CRT_ABS_PATH, train=False, download=True)
__ei339_process_raw_data__()
__combine_dataset__([self.mnist_train_path, self.ei339_train_path],
self.combined_train_path)
__combine_dataset__([self.mnist_test_path, self.ei339_test_path],
self.combined_test_path)
# get data from file, save to self.data, self.targets (type Tensor)
if mnist is True and ei339 is True:
data_file = self.combined_train_path if train else self.combined_test_path
self.data, self.targets = torch.load(data_file)
elif mnist is True:
data_file = self.mnist_train_path if train else self.mnist_test_path
self.data, self.targets = torch.load(data_file)
else: # ei339 is True
data_file = self.ei339_train_path if train else self.ei339_test_path
self.data, self.targets = torch.load(data_file)
def __len__(self):
return len(self.targets)
def __getitem__(self, idx):
if torch.is_tensor(idx):
idx = idx.tolist()
img, target = self.data[idx], int(self.targets[idx])
# doing this so that it is consistent with all other datasets
# to return a PIL Image
img = Image.fromarray(img.numpy(), mode='L')
if self.transform is not None:
img = self.transform(img)
return img, target
if "__main__" == __name__:
# # see MNIST processed file data structure
# # Tuple[Tensor(Size([60000, 28, 28])), Tensor(Size([60000]))]
# a = torch.load(os.path.join(PATH_TO_DATASET["MNIST"], DATASET_SPLITS["MNIST"]["train"]))
# print(type(a))
# print(a[0].shape)
# print(type(a[0][0]))
# print(a[1].shape)
# print(type(a[1][0]))
# __ei339_process_raw_data__()
loader = TorchLocalDataLoader(
train=True,
transform=torchvision.transforms.Compose([
torchvision.transforms.ToTensor(),
torchvision.transforms.Normalize((0.1307,), (0.3081,)), ]),
mnist=True,
ei339=True
)
train_loader = DataLoader(dataset=loader, batch_size=30, shuffle=True)
| 2.3125
| 2
|
gym_flock/__init__.py
|
katetolstaya/gym-flock
| 19
|
12778081
|
<filename>gym_flock/__init__.py
from gym.envs.registration import register
register(
id='ExploreEnv-v0',
entry_point='gym_flock.envs.spatial:ExploreEnv',
max_episode_steps=100000,
)
register(
id='ExploreFullEnv-v0',
entry_point='gym_flock.envs.spatial:ExploreFullEnv',
max_episode_steps=100000,
)
register(
id='ExploreEnv-v1',
entry_point='gym_flock.envs.spatial:ExploreEnv',
max_episode_steps=100000,
)
register(
id='CoverageFull-v0',
entry_point='gym_flock.envs.spatial:CoverageFullEnv',
max_episode_steps=100000,
)
register(
id='CoverageARL-v1',
entry_point='gym_flock.envs.spatial:CoverageARLEnv',
max_episode_steps=100000,
)
register(
id='CoverageARL-v0',
entry_point='gym_flock.envs.spatial:CoverageARLEnv',
max_episode_steps=100000,
)
register(
id='Coverage-v0',
entry_point='gym_flock.envs.spatial:CoverageEnv',
max_episode_steps=75,
)
register(
id='Shepherding-v0',
entry_point='gym_flock.envs.shepherding:ShepherdingEnv',
max_episode_steps=1000,
)
register(
id='Flocking-v0',
entry_point='gym_flock.envs.flocking:FlockingEnv',
max_episode_steps=1000,
)
register(
id='FlockingRelative-v0',
entry_point='gym_flock.envs.flocking:FlockingRelativeEnv',
max_episode_steps=1000,
)
register(
id='FlockingLeader-v0',
entry_point='gym_flock.envs.flocking:FlockingLeaderEnv',
max_episode_steps=200,
)
register(
id='FlockingObstacle-v0',
entry_point='gym_flock.envs.flocking:FlockingObstacleEnv',
max_episode_steps=200,
)
register(
id='FormationFlying-v0',
entry_point='gym_flock.envs.formation:FormationFlyingEnv',
max_episode_steps=500,
)
register(
id='FlockingStochastic-v0',
entry_point='gym_flock.envs.flocking:FlockingStochasticEnv',
max_episode_steps=500,
)
register(
id='FlockingTwoFlocks-v0',
entry_point='gym_flock.envs.flocking:FlockingTwoFlocksEnv',
max_episode_steps=500,
)
try:
import airsim
register(
id='FlockingAirsimAccel-v0',
entry_point='gym_flock.envs.flocking:FlockingAirsimAccelEnv',
max_episode_steps=200,
)
register(
id='MappingAirsim-v0',
entry_point='gym_flock.envs.spatial:MappingAirsimEnv',
max_episode_steps=100000,
)
except ImportError:
print('AirSim not installed.')
| 1.5625
| 2
|
divide_them_all.py
|
SamTech803/Miscellaneous-Tasks
| 0
|
12778082
|
def center(s):
if s[0] == 1:
x = (s[1] + s[5]) / 2
y = (s[2] + s[6]) / 2
return [x, y]
elif s[0]==0:
return [s[2], s[3]]
else:
print("Invalid Inputs!")
n = int(input("Enter Number of Targets:"))
lst = []
if n >= 1 and n <= 100000:
for i in range(n):
j = input()
temp = j.split(" ")
temp = list(map(float, temp))
lst.append(temp)
print(lst)
try:
if len(lst) != 1:
a = []
for i in lst:
a.append(center(i))
            # All centers must lie on one line for a single cut to divide every
            # target. Compare directions with a cross product so vertical lines
            # do not raise ZeroDivisionError, and use a tolerance instead of
            # exact float equality.
            dx0 = a[1][0] - a[0][0]
            dy0 = a[1][1] - a[0][1]
            flag = "Yes"
            for i in range(len(a) - 1):
                dx = a[i + 1][0] - a[i][0]
                dy = a[i + 1][1] - a[i][1]
                if abs(dx0 * dy - dy0 * dx) > 1e-9:
                    flag = "No"
            print(flag)
        else:
            print("Yes")
    except Exception:
        # Malformed input (e.g. an unknown shape type in center()) ends up here.
        pass
| 3.3125
| 3
|
testme.py
|
linrio/WhetherOrNotMe
| 0
|
12778083
|
# -*- coding: utf-8 -*-
import cv2
import os
import numpy as np
from sklearn.model_selection import train_test_split
import random
import tensorflow as tf
def read_data(img_path, image_h = 64, image_w = 64):
image_data = []
label_data = []
image = cv2.imread(img_path)
#cv2.namedWindow("Image")
#cv2.imshow("Image",image)
#cv2.waitKey(0)
h,w,_ = image.shape
longest_edge = max(h,w)
top, bottom, left, right = (0, 0, 0, 0)
dh,dw = (0,0)
if h < longest_edge:
dh = longest_edge - h
top = dh // 2
bottom = dh - top
elif w < longest_edge:
dw = longest_edge - w
left = dw // 2
right = dw - left
else:
pass
image_pad = cv2.copyMakeBorder(image, top, bottom, left, right, cv2.BORDER_CONSTANT, value=[0, 0, 0])
image = cv2.resize(image_pad, (image_h, image_w))
image_data.append(image)
label_data.append(img_path)
image_data = np.array(image_data)
train_x, test_x, train_y, test_y = train_test_split(image_data, label_data, test_size=0.05,
random_state=random.randint(0, 100))
X = tf.placeholder(tf.float32,[None, 64, 64, 3])
Y = tf.placeholder(tf.float32, [None, 2])
return Y
#img_path = '4833.jpg'
#print(read_data(img_path))
x_data = np.float32(np.random.rand(2,100))
y_data = np.dot([0.100, 0.200], x_data) + 0.300
b = tf.Variable(tf.zeros([1]), name='B')
W = tf.Variable(tf.random_uniform([1, 2], -1.0, 1.0), name='W')
y = tf.add(tf.matmul(W, x_data, name='MatMul'), b ,name='add')
loss = tf.reduce_mean(tf.square(tf.subtract(y, y_data, name='Sub'), name='Square'), name='ReduceMean')
optimizer = tf.train.GradientDescentOptimizer(0.001, name='Optimizer')
train = optimizer.minimize(loss, name='minimize')
summaries = [tf.summary.histogram('W',W), tf.summary.histogram('b', b), tf.summary.scalar('loss', loss)]
summary_op = tf.summary.merge(summaries)
print(summary_op)
| 3.0625
| 3
|
examples/urlopen.py
|
mikelolasagasti/bandit
| 4,016
|
12778084
|
''' Example dangerous usage of urllib[2] opener functions
The urllib and urllib2 opener functions and object can open http, ftp,
and file urls. Often, the ability to open file urls is overlooked leading
to code that can unexpectedly open files on the local server. This
could be used by an attacker to leak information about the server.
'''
import urllib
import urllib2
# Python 3
import urllib.request
# Six
import six
def test_urlopen():
# urllib
url = urllib.quote('file:///bin/ls')
urllib.urlopen(url, 'blah', 32)
urllib.urlretrieve('file:///bin/ls', '/bin/ls2')
opener = urllib.URLopener()
opener.open('file:///bin/ls')
opener.retrieve('file:///bin/ls')
opener = urllib.FancyURLopener()
opener.open('file:///bin/ls')
opener.retrieve('file:///bin/ls')
# urllib2
handler = urllib2.HTTPBasicAuthHandler()
handler.add_password(realm='test',
uri='http://mysite.com',
user='bob')
opener = urllib2.build_opener(handler)
urllib2.install_opener(opener)
urllib2.urlopen('file:///bin/ls')
urllib2.Request('file:///bin/ls')
# Python 3
urllib.request.urlopen('file:///bin/ls')
urllib.request.urlretrieve('file:///bin/ls', '/bin/ls2')
opener = urllib.request.URLopener()
opener.open('file:///bin/ls')
opener.retrieve('file:///bin/ls')
opener = urllib.request.FancyURLopener()
opener.open('file:///bin/ls')
opener.retrieve('file:///bin/ls')
# Six
six.moves.urllib.request.urlopen('file:///bin/ls')
six.moves.urllib.request.urlretrieve('file:///bin/ls', '/bin/ls2')
opener = six.moves.urllib.request.URLopener()
opener.open('file:///bin/ls')
opener.retrieve('file:///bin/ls')
opener = six.moves.urllib.request.FancyURLopener()
opener.open('file:///bin/ls')
opener.retrieve('file:///bin/ls')
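# A minimal mitigation sketch (not one of the flagged examples above): reject
# non-HTTP(S) schemes before handing untrusted input to an opener, so
# 'file://' URLs can never reach the local filesystem.
def open_http_only(url):
    from urllib.parse import urlparse
    if urlparse(url).scheme not in ('http', 'https'):
        raise ValueError('refusing to open non-HTTP(S) URL: %r' % url)
    return urllib.request.urlopen(url)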
| 3.5
| 4
|
src/feature.py
|
junha-l/DHVR
| 28
|
12778085
|
import logging
import os
from abc import ABC
import gin
import MinkowskiEngine as ME
import numpy as np
import open3d as o3d
import torch
from src.models import get_model
class BaseFeatureExtractor(ABC):
def __init__(self):
logging.info(f"Initialize {self.__class__.__name__}")
def extract_feature(self, xyz):
raise NotImplementedError("Feature should implement extract_feature method.")
@gin.configurable()
class FCGF(BaseFeatureExtractor):
def __init__(self, voxel_size, checkpoint_path, device):
super().__init__()
self.voxel_size = voxel_size
self.device = device
        assert os.path.exists(checkpoint_path), f"{checkpoint_path} does not exist"
MODEL = get_model("ResUNetBN2C")
feat_model = MODEL(
1, 32, bn_momentum=0.05, conv1_kernel_size=7, normalize_feature=True
).to(device)
checkpoint = torch.load(checkpoint_path)
feat_model.load_state_dict(checkpoint["state_dict"])
self.feat_model = feat_model
self.feat_model.eval()
def freeze(self):
for param in self.feat_model.parameters():
param.requires_grad = False
def extract_feature(self, xyz, coords=None, feats=None):
if coords is None or feats is None:
# quantize input xyz.
coords, sel = ME.utils.sparse_quantize(
xyz / self.voxel_size, return_index=True
)
# make sparse tensor.
coords = ME.utils.batched_coordinates([coords])
feats = torch.ones((coords.shape[0], 1)).float()
sinput = ME.SparseTensor(
feats.to(self.device), coordinates=coords.to(self.device)
)
if isinstance(xyz, np.ndarray):
xyz = torch.from_numpy(xyz)
xyz = xyz[sel].float().to(self.device)
else:
sinput = ME.SparseTensor(coordinates=coords, features=feats)
# extract feature.
F = self.feat_model(sinput).F
return F, xyz
@gin.configurable()
class FPFH(BaseFeatureExtractor):
def __init__(self, voxel_size, device):
        # BaseFeatureExtractor.__init__ takes no arguments, so store the
        # parameters here instead of forwarding them.
        super().__init__()
        self.voxel_size = voxel_size
        self.device = device
def extract_feature(self, xyz):
voxel_size = self.voxel_size
if isinstance(xyz, torch.Tensor):
xyz = xyz.numpy()
# downsample
pcd = o3d.geometry.PointCloud()
pcd.points = o3d.utility.Vector3dVector(xyz)
pcd = pcd.voxel_down_sample(voxel_size)
# calculate normals
radius_normal = voxel_size * 2.0
pcd.estimate_normals(
o3d.geometry.KDTreeSearchParamHybrid(radius=radius_normal, max_nn=30)
)
# calculate features
radius_feature = voxel_size * 5.0
pcd_fpfh = o3d.pipelines.registration.compute_fpfh_feature(
pcd, o3d.geometry.KDTreeSearchParamHybrid(radius=radius_feature, max_nn=100)
)
xyz = torch.from_numpy(np.asarray(pcd.points)).float()
F = torch.from_numpy(pcd_fpfh.data.copy().T).float().contiguous()
return F, xyz
MODELS = [FPFH, FCGF]
@gin.configurable()
def get_feature(name):
# Find the model class from its name
all_models = MODELS
mdict = {model.__name__: model for model in all_models}
if name not in mdict:
logging.info(f"Invalid model index. You put {name}. Options are:")
# Display a list of valid model names
for model in all_models:
logging.info("\t* {}".format(model.__name__))
return None
NetClass = mdict[name]
return NetClass
| 2.109375
| 2
|
wrex/meeting/section_kind.py
|
mikiTesf/wrex-py
| 1
|
12778086
|
<filename>wrex/meeting/section_kind.py
from enum import Enum
class SectionKind(Enum):
TREASURES = "TREASURES"
IMPROVE_IN_MINISTRY = "IMPROVE_IN_MINISTRY"
CHRISTIAN_LIVING = "CHRISTIAN_LIVING"
| 2.1875
| 2
|
src/datamodules/mouse_datamodule.py
|
Jaakik/hydra-ml
| 0
|
12778087
|
<reponame>Jaakik/hydra-ml<filename>src/datamodules/mouse_datamodule.py
from typing import Optional, Tuple
from .datasets.mouse_dataset import MouseDataset
from pytorch_lightning import LightningDataModule
from torch.utils.data import DataLoader, Dataset, random_split
from torchvision.transforms import transforms
class MouseDataModule(LightningDataModule):
"""
    LightningDataModule for the Caltech Mouse Social Interactions (CalMS21) dataset.
    This DataModule implements 5 key methods:
- prepare_data (things to do on 1 GPU/TPU, not on every GPU/TPU in distributed mode)
- setup (things to do on every accelerator in distributed mode)
- train_dataloader (the training dataloader)
- val_dataloader (the validation dataloader(s))
- test_dataloader (the test dataloader(s))
This allows you to share a full dataset without explaining how to download,
split, transform and process the data.
Read the docs:
https://pytorch-lightning.readthedocs.io/en/latest/extensions/datamodules.html
"""
def __init__(
self,
data_train_dir: str = "/Users/marouanejaakik/Desktop/git-explore/hydra-ml/data/mouse/train/train_features.npy",
data_test_dir: str = "/Users/marouanejaakik/Desktop/git-explore/hydra-ml/data/mouse/train/train_features.npy",
ann_dir: str = "/Users/marouanejaakik/Desktop/git-explore/hydra-ml/data/mouse/annotation /train.npy",
train_val_split: Tuple[int, int] = (55, 15),
batch_size: int = 4,
num_workers: int = 0,
pin_memory: bool = False,
):
super().__init__()
self.ann_dir = ann_dir
self.data_train_dir = data_train_dir
self.data_test_dir = data_test_dir
self.train_val_split = train_val_split
self.batch_size = batch_size
self.num_workers = num_workers
self.pin_memory = pin_memory
self.data_train: Optional[Dataset] = None
self.data_val: Optional[Dataset] = None
self.data_test: Optional[Dataset] = None
@property
def num_classes(self) -> int:
return 4
def prepare_data(self):
# add the script used to directly download the dataset from aicrowd platform
pass
def setup(self, stage: Optional[str] = None):
self.data_test = MouseDataset(self.data_test_dir)
train_set = MouseDataset(self.data_train_dir, self.ann_dir)
self.data_train, self.data_val = random_split(train_set, self.train_val_split)
def train_dataloader(self):
return DataLoader(
dataset=self.data_train,
batch_size=self.batch_size,
num_workers=self.num_workers,
pin_memory=self.pin_memory,
shuffle=True,
)
def val_dataloader(self):
return DataLoader(
dataset=self.data_val,
batch_size=self.batch_size,
num_workers=self.num_workers,
pin_memory=self.pin_memory,
shuffle=False,
)
def test_dataloader(self):
return DataLoader(
dataset=self.data_test,
batch_size=self.batch_size,
num_workers=self.num_workers,
pin_memory=self.pin_memory,
shuffle=False,
)
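# A minimal usage sketch (constructor defaults assumed to point at valid files;
# in a real run the Lightning Trainer calls prepare_data/setup itself):
#
#   dm = MouseDataModule(batch_size=4)
#   dm.prepare_data()
#   dm.setup()
#   train_loader = dm.train_dataloader()
#   val_loader = dm.val_dataloader()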
| 2.53125
| 3
|
.github/scripts/process_commit.py
|
vitaut/pytorch
| 1
|
12778088
|
<gh_stars>1-10
#!/usr/bin/env python3
"""
This script finds the merger responsible for labeling a PR by a commit SHA. It is used by the workflow in
'.github/workflows/pr-labels.yml'. If there exists no PR associated with the commit or the PR is properly labeled,
this script is a no-op.
Note: we ping the merger only, not the reviewers, as the reviewers can sometimes be external to pytorch
with no labeling responsibility, so we don't want to bother them.
This script is based on: https://github.com/pytorch/vision/blob/main/.github/process_commit.py
"""
import sys
from typing import Any, Set, Tuple
import requests
# For a PR to be properly labeled it should have release notes label and one topic label
PRIMARY_LEBEL_FILTER = "release notes:"
SECONDARY_LEBELS = {
"topic: bc_breaking",
"topic: deprecation",
"topic: new feature",
"topic: improvements",
"topic: bug fixes",
"topic: performance",
"topic: documentation",
"topic: developer feature",
"topic: non-user visible",
}
def query_pytroch(cmd: str, *, accept: str) -> Any:
response = requests.get(f"https://api.github.com/repos/pytorch/pytorch/{cmd}", headers=dict(Accept=accept))
return response.json()
def get_pr_number(commit_hash: str) -> Any:
# See https://docs.github.com/en/rest/reference/repos#list-pull-requests-associated-with-a-commit
data = query_pytroch(f"commits/{commit_hash}/pulls", accept="application/vnd.github.groot-preview+json")
if not data:
return None
return data[0]["number"]
def get_pr_merger_and_labels(pr_number: int) -> Tuple[str, Set[str]]:
# See https://docs.github.com/en/rest/reference/pulls#get-a-pull-request
data = query_pytroch(f"pulls/{pr_number}", accept="application/vnd.github.v3+json")
merger = data["merged_by"]["login"]
labels = {label["name"] for label in data["labels"]}
return merger, labels
if __name__ == "__main__":
commit_hash = sys.argv[1]
pr_number = get_pr_number(commit_hash)
if not pr_number:
sys.exit(0)
merger, labels = get_pr_merger_and_labels(pr_number)
    response = query_pytroch("labels", accept="application/json")
    # query_pytroch already returns the decoded JSON, so don't call .json() again
    response_labels = [str(label["name"]) for label in response]
primary_labels = set(filter(lambda x: x.startswith(PRIMARY_LEBEL_FILTER), response_labels))
is_properly_labeled = bool(primary_labels.intersection(labels) and SECONDARY_LEBELS.intersection(labels))
if not is_properly_labeled:
print(f"@{merger}")
| 2.171875
| 2
|
src/Core/BetaFunctions/YukawaCouplings.py
|
NuxDD/pyrate
| 7
|
12778089
|
<gh_stars>1-10
# -*- coding: utf-8 -*-
from sympy import transpose, Rational as r
from .BetaFunction import BetaFunction
from Definitions import tensorContract
class YukawaBetaFunction(BetaFunction):
def compute(self, a,i,j, nLoops):
ret = self.Beta(a,i,j, nLoops=nLoops)
if i!=j:
ret += transpose(self.Beta(a,j,i, nLoops=nLoops))
else:
ret += transpose(ret)
return r(1,2)*ret
def fDefinitions(self):
""" Functions definition """
for i in range(self.nLoops):
self.functions.append([])
count = 1
            while True:
                try:
                    # getattr is clearer and safer than eval for looking up
                    # the y<loop>_<count> member functions
                    self.functions[i].append(getattr(self, f"y{i+1}_{count}"))
                    count += 1
                except AttributeError:
                    break
def cDefinitions(self):
""" Coefficients definition """
## 1-loop
self.coefficients.append( [r(0), r(-6), r(2), r(1), r(1,2)] )
## 2-loop
self.coefficients.append( [r(-21,2), r(12), r(0), r(-3), r(49,4), r(-1,4),
r(-1,2), r(-97,3), r(11,6), r(5,3), r(1,12), r(12),
r(0), r(6), r(-12), r(10), r(6), r(5,2),
r(9), r(-1,2), r(-7,2), r(0), r(-2), r(2),
r(0), r(-2), r(0), r(-1,2), r(-2), r(-1,4),
r(-3,4), r(-1), r(-3,4)] )
######################
# 1-loop functions #
######################
def y1_1(self, a,i,j):
return tensorContract(self.C2S(b_,a),
self.y(b_,i,j))
def y1_2(self, a,i,j):
return tensorContract(self.C2F(k_,j),
self.y(a,i,k_))
def y1_3(self, a,i,j):
return tensorContract(self.y(b_,i,k_),
self.yt(a,k_,l_),
self.y(b_,l_,j))
def y1_4(self, a,i,j):
return tensorContract(self.y(a,i,k_),
self.Y2Ft(k_,j))
def y1_5(self, a,i,j):
return tensorContract(self.Y2S(b_,a),
self.y(b_,i,j))
######################
# 2-loop functions #
######################
def y2_1(self, a,i,j):
return tensorContract(self.C2S(b_,a),
self.C2S(c_,b_),
self.y(c_,i,j))
def y2_2(self, a,i,j):
return tensorContract(self.C2S(b_,a),
self.C2F(k_,j),
self.y(b_,i,k_))
def y2_3(self, a,i,j):
return tensorContract(self.C2Ft(i,k_),
self.C2F(l_,j),
self.y(a,k_,l_))
def y2_4(self, a,i,j):
return tensorContract(self.C2F(l_,j),
self.C2F(k_,l_),
self.y(a,i,k_))
def y2_5(self, a,i,j):
return tensorContract(self.C2SG(b_,a),
self.y(b_,i,j))
def y2_6(self, a,i,j):
return tensorContract(self.C2SS(b_,a),
self.y(b_,i,j))
def y2_7(self, a,i,j):
return tensorContract(self.C2SF(b_,a),
self.y(b_,i,j))
def y2_8(self, a,i,j):
return tensorContract(self.y(a,i,k_),
self.C2FG(k_,j))
def y2_9(self, a,i,j):
return tensorContract(self.y(a,i,k_),
self.C2FS(k_,j))
def y2_10(self, a,i,j):
return tensorContract(self.y(a,i,k_),
self.C2FF(k_,j))
def y2_11(self, a,i,j):
return tensorContract(self.y(b_,i,j),
self.l(c_,d_,e_,a),
self.l(b_,c_,d_,e_))
def y2_12(self, a,i,j):
return tensorContract(self.y(b_,i,k_),
self.T(A_,k_,l_),
self.G(A_,B_),
self.yt(b_,l_,m_),
self.T(B_,n_,j),
self.y(a,m_,n_))
def y2_13(self, a,i,j):
        return tensorContract(self.Y2F(i,k_),
self.Tt(A_,k_,l_),
self.G(A_,B_),
self.y(a,l_,m_),
self.T(B_,m_,j))
def y2_14(self, a,i,j):
return tensorContract(self.y(b_,i,k_),
self.yt(a,k_,l_),
self.C2S(b_,c_),
self.y(c_,l_,j))
def y2_15(self, a,i,j):
return tensorContract(self.C2S(c_,a),
self.y(b_,i,k_),
self.yt(c_,k_,l_),
self.y(b_,l_,j))
def y2_16(self, a,i,j):
return tensorContract(self.y(b_,i,k_),
self.C2F(k_,l_),
self.yt(a,l_,m_),
self.y(b_,m_,j))
def y2_17(self, a,i,j):
return tensorContract(self.y(b_,i,k_),
self.yt(a,k_,l_),
self.y(b_,l_,m_),
self.C2F(m_,j))
def y2_18(self, a,i,j):
return tensorContract(self.Y2SCF(b_,a),
self.y(b_,i,j))
def y2_19(self, a,i,j):
return tensorContract(self.Y2FCS(i,k_),
self.y(a,k_,j))
def y2_20(self, a,i,j):
return tensorContract(self.Y2FCF(i,k_),
self.y(a,k_,j))
def y2_21(self, a,i,j):
return tensorContract(self.y(a,i,k_),
self.Y2Ft(k_,l_),
self.C2F(l_,j))
def y2_22(self, a,i,j):
return tensorContract(self.C2S(b_,a),
self.Y2S(c_,b_),
self.y(c_,i,j))
def y2_23(self, a,i,j):
return tensorContract(self.l(b_,c_,d_,a),
self.y(b_,i,k_),
self.yt(c_,k_,l_),
self.y(d_,l_,j))
def y2_24(self, a,i,j):
return tensorContract(self.y(b_,i,k_),
self.yt(c_,k_,l_),
self.y(a,l_,m_),
self.yt(b_,m_,n_),
self.y(c_,n_,j))
def y2_25(self, a,i,j):
return tensorContract(self.y(b_,i,k_),
self.yt(a,k_,l_),
self.y(c_,l_,m_),
self.yt(b_,m_,n_),
self.y(c_,n_,j))
def y2_26(self, a,i,j):
return tensorContract(self.y(b_,i,k_),
self.yt(c_,k_,l_),
self.y(a,l_,m_),
self.yt(c_,m_,n_),
self.y(b_,n_,j))
def y2_27(self, a,i,j):
return tensorContract(self.Y4F(i,k_),
self.y(a,k_,j))
def y2_28(self, a,i,j):
return tensorContract(self.Y4S(b_,a),
self.y(b_,i,j))
def y2_29(self, a,i,j):
return tensorContract(self.y(b_,i,k_),
self.yt(a,k_,l_),
self.Y2F(l_,m_),
self.y(b_,m_,j))
def y2_30(self, a,i,j):
return tensorContract(self.y(a,i,k_),
self.Y2FYFt(k_,j))
def y2_31(self, a,i,j):
return tensorContract(self.Y2SYF(b_,a),
self.y(b_,i,j))
def y2_32(self, a,i,j):
return tensorContract(self.y(b_,i,k_),
self.Y2S(b_,c_),
self.yt(a,k_,l_),
self.y(c_,l_,j))
def y2_33(self, a,i,j):
return tensorContract(self.Y2FYS(i,k_),
self.y(a,k_,j))
| 2.484375
| 2
|
dynamic/hanoi_dp.py
|
goldsborough/algs4
| 17
|
12778090
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import math
import copy
from collections import namedtuple
Move = namedtuple('Move', 'source, target, disc')
def hanoi(discs):
seen = set()
def __solve(rods, depth=0):
if len(rods[2]) == discs:
return []
if rods in seen:
return None
seen.add(rods)
best = None
least_moves = math.inf
for source in range(len(rods)):
if not rods[source]:
continue
disc = rods[source][-1]
for target in range(len(rods)):
if ((source == target) or
(rods[target] and disc > rods[target][-1])):
continue
copied = []
for i, p in enumerate(rods):
if i == source:
copied.append(p[:-1])
elif i == target:
copied.append(tuple(list(p) + [disc]))
else:
copied.append(p)
moves = __solve(tuple(copied), depth + 1)
if moves is not None and len(moves) < least_moves:
best = [Move(source, target, disc)] + moves
least_moves = len(moves)
return best
return __solve(tuple([
tuple(n for n in range(discs - 1, -1, -1)),
tuple([]),
tuple([])
]))
def main():
print(len(hanoi(3)))
if __name__ == '__main__':
main()
| 3.71875
| 4
|
registrator.py
|
anbo-de/PythonClientForSpringBootAdmin
| 2
|
12778091
|
import threading
import time
import requests
import json
import logging
from requests.auth import AuthBase, HTTPBasicAuth
class Registrator(threading.Thread):
"""
class running as thread to contact the Spring Boot Admin Server
"""
jsonHeaders = {"Content-type": "application/json",
"Accept": "application/json"}
adminServerURL = None
adminServerUser = None
    adminServerPassword = None
registration = None # passed dict containing relevant information
interval = None # in seconds
def __init__(self, adminServerURL, adminServerUser, adminServerPassword, registration, interval=10):
threading.Thread.__init__(self)
self.adminServerURL = adminServerURL + "/instances"
self.adminServerUser = adminServerUser
        self.adminServerPassword = adminServerPassword
self.registration = registration
self.interval = interval
logging.basicConfig(level=logging.DEBUG)
def run(self):
while True:
self.callAdminServer()
time.sleep(self.interval)
def callAdminServer(self):
try:
# prepare POST request data (None values should not been send)
requestData = {k: v for k, v in vars(
self.registration).items() if v is not None}
response = requests.post(url=self.adminServerURL, headers=self.jsonHeaders, data=json.dumps(
requestData), auth=HTTPBasicAuth(self.adminServerUser, self.adminServerPassword))
if response:
logging.debug("registration: ok on %s (%d)" %
(self.adminServerURL, response.status_code))
else:
logging.warning("registration: failed at %s with HTTP status code %d" % (
self.adminServerURL, response.status_code))
except Exception as e:
logging.warning("registration: failed at %s with exception \"%s\"" % (
self.adminServerURL, e))
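# A minimal usage sketch (hypothetical values; `registration` is expected to be
# an object whose non-None attributes, collected via vars(), are POSTed as JSON
# to <adminServerURL>/instances):
#
#   registrator = Registrator("http://localhost:8080", "admin", "secret",
#                             registration=my_registration, interval=10)
#   registrator.daemon = True
#   registrator.start()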
| 2.65625
| 3
|
Python/JSON/readCPU.py
|
Mastermindzh/Code-examples
| 0
|
12778092
|
<reponame>Mastermindzh/Code-examples<gh_stars>0
# Python read JSON example :)
# execute: clear && python3 readCPU.py
# make sure "example.json" resides in the same directory as this file
import json
from pprint import pprint
# read the json file (or run the script that creates it)
with open('example.json') as inputFile:
data = json.load(inputFile)
print("json object array: ")
pprint(data)
print() # empty line for easier visual output
print("First core: {}".format(data["0"]))
print("Second core: {}".format(data["1"]))
print("Third core: {}".format(data["2"]))
print("Fourth core: {}".format(data["3"]))
print() # empty line for easier visual output
print("Number of items in array: {}".format(len(data)))
for x in range(0,len(data)):
print("core {} temp: {}".format(x,data[str(x)]))
| 3.453125
| 3
|
backend/player_management/models.py
|
flokain/ulti-players
| 0
|
12778093
|
<filename>backend/player_management/models.py
from datetime import date
from random import choices
from django.contrib.auth.models import User
from django.db import models
from django.contrib.auth import models as authModels
# Create your models here.
class Person(models.Model):
SEX = [
['male']*2,
['female']*2,
]
""" personal information"""
firstname = models.CharField(max_length=200)
lastname = models.CharField(max_length=200)
birthdate = models.DateField()
sex = models.CharField(max_length=5, choices=SEX)
""" contact information"""
email = models.EmailField()
zip = models.PositiveIntegerField()
""" An organization is an abstract concept for people or parties
who organize themselves for a specific purpose. Teams, clubs
and associations are the 3 different organization types in this model"""
class Organisation(models.Model):
name = models.CharField(max_length=300)
founded_on = models.DateField()
disolved_on = models.DateField() # TODO: proper english
description = models.TextField()
"""A Team is an organization owned by a Club. it consists of a list
of players which is antemporary assignment of a player to a team"""
class Team(models.Model):
pass
class Club(models.Model):
pass
class Association(models.Model):
pass
"""A player is a role of aperson in context of the sport.
it holds"""
class Player(models.Model):
person = models.ForeignKey(Person, on_delete=models.CASCADE)
number = models.PositiveIntegerField()
""" A membership connects an organization with another organozation
or peraon. It is reported by, and confirmed by a person
it my have a from and until date. missing values asumen an infinite Membership period"""
class Membership(models.Model):
valid_until = models.DateField()
valid_from = models.DateField()
reporter: User = models.ForeignKey(
authModels.User,
on_delete=models.CASCADE,
related_name="reported_%(class)ss",
related_query_name="%(class)s_reporter")
approved_by: User = models.ForeignKey(
authModels.User,
on_delete=models.CASCADE,
related_name="approved_%(class)ss",
related_query_name="%(class)s_approver")
class Meta:
abstract = True
def is_active(self) -> bool:
        # datetime.date has no now(); today() is the correct call here
        return self.valid_from <= date.today() <= self.valid_until
class PlayerToTeamMembership(Membership):
player = models.ForeignKey(Person, on_delete=models.CASCADE)
team = models.ForeignKey(Team, on_delete=models.CASCADE)
class Meta(Membership.Meta):
db_table = 'PlayerToTeamMembership'
"""p """
class TeamToClubTeamMembership(Membership):
team = models.ForeignKey(Team, on_delete=models.CASCADE)
club = models.ForeignKey(Club, on_delete=models.CASCADE)
class Meta(Membership.Meta):
db_table = 'TeamToClubTeamMembership'
class ClubToAssociationMembership(Membership):
team = models.ForeignKey(Club, on_delete=models.CASCADE)
association = models.ForeignKey(Association, on_delete=models.CASCADE)
class Meta(Membership.Meta):
db_table = 'ClubToAssociationMembership'
class PersonToAssociationMembership(Membership):
ASSOCIATION_ROLES = (
['President']*2,
['Vicepresident']*2,
['Treasurer']*2,
['secretary']*2,
['Member']*2,
)
person = models.ForeignKey(Person, on_delete=models.CASCADE)
association = models.ForeignKey(Association, on_delete=models.CASCADE)
role = models.CharField(max_length=300, choices=ASSOCIATION_ROLES)
class Meta(Membership.Meta):
db_table = 'PersonToAssociationMembership'
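# A minimal sketch of the membership-validity rule described above (hypothetical
# dates, not persisted): a membership is active when today falls inside the
# [valid_from, valid_until] interval.
#
#   m = PlayerToTeamMembership(valid_from=date(2024, 1, 1), valid_until=date(2024, 12, 31))
#   m.is_active()  # True at any point during 2024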
| 3.0625
| 3
|
WebBrickLibs/EventHandlers/HVAC.py
|
AndyThirtover/wb_gateway
| 0
|
12778094
|
# Copyright L.P.Klyne 2013
# Licenced under 3 clause BSD licence
# $Id: HVAC.py 3201 2009-06-15 15:21:25Z philipp.schuster $
#
# Heating, Ventilation and Air Conditioning File
#
# This file includes the following classes to create a full HVAC solution
# class HeatingVentilationAC( BaseHandler ):
# class Zone(object):
# class MultiZone(object):
# class ZoneGroup(object):
# class ZoneMaster(object):
# class HeatSourceBoiler(object):
# class HeatSourceMultipleBoiler(object):
# class HeatSourceGround(object):
# class HeatSourceSolar(object):
# class HeatSourceMultiSolar(object):
# class WeatherCompensation(object):
#
# Functional dependencies:
# - pair of input and output.xml files for each zone
# - pair of input and output.xml files for each zone group
# - pair of input and output.xml files for each heat source
# - pair of input and output.xml files for the zone master
# - input.xml file for weather compensation
#
# Author(s): LPK, PS
#
# Last Edit: 19/07/2008
# Last Edit: 01/08/2008
# convert input temp values and settings to float. The event payloads are not always of correct data type.
#
import logging
from EventLib.Event import Event
from EventLib.Status import StatusVal
from EventLib.SyncDeferred import makeDeferred
from EventHandlers.BaseHandler import BaseHandler
# make logging global to module.
_log = None
#------------------------------------------------------------------------------------------------------------
#
# class Zone(object):
# Class to manage a single heating zone
#
# Evaluates whether a zone needs to be heated by comparing the current temperature with a setpoint,
# which is set either by a scheduled event or by a manual user input.
# If the zone requires heating, a run event is issued; see sendRun(self).
#
# Future Improvements:
# - 'Comfort Zone' setting to specify a band of temperatures rather than a single setpoint
# - PID controller to account for lead and lag of the zone heating dynamics and prevent overshoot
#
#------------------------------------------------------------------------------------------------------------
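# A minimal sketch of the demand test described above (an assumption for
# illustration only; the shipped decision is made inside Zone.checkRun(), which
# also uses the 0.5 degree hysteresis band set in Zone.__init__):
#
#   def zone_needs_heat(zone_temp, target, hysteresis=0.5, heating_now=False):
#       if zone_temp is None:
#           return False              # no sensor reading yet
#       if zone_temp < target - hysteresis:
#           return True               # clearly below the band: demand heat
#       if zone_temp > target + hysteresis:
#           return False              # clearly above the band: stop
#       return heating_now            # inside the band: keep the current state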
ZONE_DISABLED = 0
ZONE_ENABLED = 1
ZONE_STATE_UNKNOWN = 0
ZONE_STATE_IDLE = 1
ZONE_STATE_DEMAND = 2
ZONEGROUP_STATE_UNKNOWN = 0
ZONEGROUP_STATE_STOPPED = 1
ZONEGROUP_STATE_RUNNING = 2
ZONEMASTER_STATE_UNKNOWN = 0
ZONEMASTER_STATE_STOPPED = 1
ZONEMASTER_STATE_RUNNING = 2
# values for state and request state
HEATSOURCE_UNKNOWN = 0
HEATSOURCE_STOP = 1
HEATSOURCE_RUN = 2
WEATHER_C_STATE_HOLDOFF = 0
WEATHER_C_STATE_RUN = 1
# LPK: As these are used as comparisons against zone state
# lets make sure they stay the same values, or should they be the same literals as well.
#ACTUATOR_STATE_UNKNOWN = ZONE_STATE_UNKNOWN
#ACTUATOR_STATE_OFF = ZONE_STATE_IDLE
#ACTUATOR_STATE_ON = ZONE_STATE_DEMAND
class Zone(object):
def __init__(self, parent, zone_cfg):
self._started = False
self._parent = parent
self._zone_key = zone_cfg["key"]
self._name = zone_cfg["name"]
self._minzonetemp = 10.0
self._target = self._minzonetemp
self._schedulesetpoint = 0
self._followOccupancy = 0
self._occupied = 1
self._wcselect = 0
self._manualsetpoint = None
self._zoneTemp = None
self._enabled = ZONE_DISABLED
self._cmdsource = 'Frost'
self._status = 'Idle'
self._hysteresis = 0.5 # half a degree
        # TODO PS: changed from '' to 'Idle' for UI reasons; will have to consider whether this is correct
self._heatsource = 'Idle'
# start zone as stopped and actuator as unknown. Then stop issued.
self._state = ZONE_STATE_IDLE
self._actuatorstate = ZONE_STATE_UNKNOWN
def zone_key(self):
return self._zone_key
def start(self):
self._started = True
self.sendStop()
self._state = ZONE_STATE_IDLE
self.sendTarget()
self.sendState()
#------------------------------------------------------------------------------------------------------------
# Functions to handle relevant incomming events
#------------------------------------------------------------------------------------------------------------
def doHandleGet(self, key, inEvent):
if key == "enabled":
self._enabled = int(inEvent.getPayload()["val"])
self.doEvaluateZone()
self.sendState()
elif key == "matStat":
self._minzonetemp = float(inEvent.getPayload()["val"])
self.doEvaluateZone()
self.sendState()
elif key == "occupancy":
self._followOccupancy = int(inEvent.getPayload()["val"])
self.doEvaluateZone()
self.sendState()
elif key == "wcselect":
self._wcselect = int(inEvent.getPayload()["val"])
self.doEvaluateZone()
self.sendState()
# elif key == "manualsetpoint":
# self._manualsetpoint = float(inEvent.getPayload()["val"])
# self.doTargetManual(self._manualsetpoint)
# self.sendState()
elif key == "schedulesetpoint":
if not self._started:
self._schedulesetpoint = float(inEvent.getPayload()["val"])
self.doEvaluateZone()
self.sendState()
else:
_log.info( "Unexpected configuration value for zone %s - key %s value %s", self._zone_key, key, inEvent )
def setOccupied(self, isOccupied):
self._occupied = isOccupied
self.doEvaluateZone()
self.sendState()
def doHandleScheduleControl(self, newSetpoint):
if self._schedulesetpoint <> newSetpoint or self._manualsetpoint is not None:
self._manualsetpoint = None
if self._schedulesetpoint <> newSetpoint:
self._schedulesetpoint = newSetpoint
self.saveScheduleSetpoint(newSetpoint)
self.doEvaluateZone()
self.sendState()
def doHandleManual(self, newSetpoint):
_log.debug( "doHandleManual %s %s", newSetpoint, self._manualsetpoint )
if newSetpoint <> self._manualsetpoint:
self._manualsetpoint = float(newSetpoint)
self.doEvaluateZone()
self.sendState()
def doHandleSensor(self, newTemp):
self._zoneTemp = newTemp
        # LPK: should we back this off so it is done on the minute tick? That would achieve a minimum run time.
self.checkRun()
self.sendState()
def doHandleWeather(self, wNr):
if self._wcselect == wNr:
self.doEvaluateZone()
#self.checkRun()
if wNr <> 0:
self.sendState()
def doHandleHeatSource(self, heatsource):
self._heatsource = heatsource
#self.sendState()
def doHandleActuatorState(self, state):
        # sets the actuator state depending on the running/stopped event, which is triggered by a DO event from the webbrick
if state == "running":
if self._actuatorstate <> ZONE_STATE_DEMAND:
_log.debug( "zone %s - started", self._zone_key )
self._actuatorstate = ZONE_STATE_DEMAND
elif state == "stopped":
if self._actuatorstate <> ZONE_STATE_IDLE:
_log.debug( "zone %s - stopped", self._zone_key )
self._actuatorstate = ZONE_STATE_IDLE
elif state == "stop":
# we normally create this
pass
elif state == "run":
# we normally create this
pass
#------------------------------------------------------------------------------------------------------------
# Functions to evaluate actions based on internal states
#------------------------------------------------------------------------------------------------------------
def doEvaluateZone(self):
_log.debug( "doEvaluateZone self._manualsetpoint %s", self._manualsetpoint )
if self._manualsetpoint is None:
# there is no manual setpoint
if self._schedulesetpoint > self._minzonetemp:
# the scheduled setpoint is above the min zone temp
if self._enabled:
# zone is enabled
if self._parent._weathercompensation[self._wcselect]._istate == WEATHER_C_STATE_RUN:
if self._followOccupancy:
# does follow occupancy
if self._occupied:
# Home is occupied
if self._target <> self._schedulesetpoint:
self._target = self._schedulesetpoint
self._cmdsource = 'Schedule'
self.sendTarget()
self.checkRun()
elif self._target <> self._minzonetemp:
self._target = self._minzonetemp
self._cmdsource = 'Frost'
self.sendTarget()
self.checkRun()
elif self._target <> self._schedulesetpoint:
# does not follow occupancy (do not care if occupied)
self._target = self._schedulesetpoint
self._cmdsource = 'Schedule'
self.sendTarget()
self.checkRun()
elif self._schedulesetpoint < self._target:
self._target = self._schedulesetpoint
self.sendTarget()
elif self._target <> self._minzonetemp:
self._target = self._minzonetemp
self._cmdsource = 'Frost'
self.sendTarget()
self.checkRun()
elif self._target <> self._minzonetemp:
self._target = self._minzonetemp
self._cmdsource = 'Frost'
self.sendTarget()
self.checkRun()
elif self._manualsetpoint > self._minzonetemp:
_log.debug( "doEvaluateZone.2 %s %s", self._manualsetpoint, self._minzonetemp )
if self._enabled:
if self._parent._weathercompensation[self._wcselect]._istate == WEATHER_C_STATE_RUN:
_log.debug( "doEvaluateZone.3 %s %s", self._manualsetpoint, self._minzonetemp )
if self._followOccupancy:
_log.debug( "doEvaluateZone.4 %s %s", self._manualsetpoint, self._minzonetemp )
# does follow occupancy
if self._occupied:
# Need to set new target
if self._target <> self._manualsetpoint:
self._target = self._manualsetpoint
self._cmdsource = 'Manual'
self.sendTarget()
self.checkRun()
elif self._target <> self._minzonetemp:
self._target = self._minzonetemp
self._cmdsource = 'Frost'
self.sendTarget()
self.checkRun()
elif self._target <> self._manualsetpoint:
_log.debug( "doEvaluateZone.5 %s %s", self._manualsetpoint, self._minzonetemp )
# does not follow occupancy (do not care if occupied)
self._target = self._manualsetpoint
self._cmdsource = 'Manual'
self.sendTarget()
self.checkRun()
elif self._manualsetpoint < self._target:
self._target = self._manualsetpoint
self.sendTarget()
elif self._target <> self._minzonetemp:
self._target = self._minzonetemp
self._cmdsource = 'Frost'
self.sendTarget()
self.checkRun()
elif self._target <> self._minzonetemp:
self._target = self._minzonetemp
self._cmdsource = 'Frost'
self.sendTarget()
self.checkRun()
def checkRun(self):
# hold off zone start until zone temp is a little below target
if self._zoneTemp is not None and (self._target - self._hysteresis) > self._zoneTemp:
if self._parent._weathercompensation[self._wcselect]._istate == WEATHER_C_STATE_RUN:
if self._state <> ZONE_STATE_DEMAND:
self._status = 'Demand'
self._state = ZONE_STATE_DEMAND
elif self._state <> ZONE_STATE_IDLE:
# PS
                # Code now accessible; there is no test case for checking whether it works correctly!!!
                # Required to respond to the 'hold off' state in weather compensation.
self._status = 'Idle'
self._state = ZONE_STATE_IDLE
# hold off zone stop until zone temp is a little above target
elif self._zoneTemp is not None and (self._target + self._hysteresis) < self._zoneTemp:
if self._state == ZONE_STATE_DEMAND:
self._status = 'Idle'
self._state = ZONE_STATE_IDLE
# moved out of tests above to make clearer
if self._state <> self._actuatorstate:
if self._state == ZONE_STATE_DEMAND:
self.sendRun()
else:
self.sendStop()
#------------------------------------------------------------------------------------------------------------
# Functions to send/publish Events for external interaction
#------------------------------------------------------------------------------------------------------------
def saveScheduleSetpoint(self, newSetpoint):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/events/config/set",
"%s/schedulesetpoint"%self._zone_key, {'val': newSetpoint} ) )
def sendTarget(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/zone",
"%s/targetset"%self._zone_key, {'val': self._target} ) )
def sendRun(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/zone",
"%s/run"%self._zone_key, None ) )
def sendStop(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/zone",
"%s/stop"%self._zone_key, None ) )
def sendName(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zone/name",
"%s/name"%self._zone_key,
{'name': self._name} ) )
def sendState(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/zone",
"%s/state"%self._zone_key,
{'status': self._status,
'manualsetpoint': self._manualsetpoint,
'zoneTemp': self._zoneTemp,
'weather1': self._parent._weathercompensation[1]._state,
'weather2': self._parent._weathercompensation[2]._state,
'weather3': self._parent._weathercompensation[3]._state,
'weathercompensation': self._parent._weathercompensation[self._wcselect]._istate,
'cmdsource': self._cmdsource,
'enabled': self._enabled,
'wcselect': self._wcselect,
'schedulesetpoint': self._schedulesetpoint,
'state': self._state,
'actuatorstate': self._actuatorstate,
'targetsetpoint': self._target,
'zonesource': self._heatsource,
'minzonetemp': self._minzonetemp,
'occupied': self._occupied,
'zoneenabled': '',
'followoccupancy': self._followOccupancy } ) )
#
#------------------------------------------------------------------------------------------------------------
#
# class MultiZone(object):
# Class to manage cases where a single area in a house has two heating zones.
# This may be the case when a single room has both underfloor heating and radiators, or
# when a water vessel has a top and a bottom coil.
#
# Acts as a container for multiple zones.
#
# Note: only one zoneControl page is shown for a MultiZone, hence different Kid
# templates are required to set these MultiZones up.
#
#------------------------------------------------------------------------------------------------------------
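# Illustrative configuration sketch only - the keys and names below are hypothetical:
#
#   mz_cfg = {"key": "lounge", "name": "Lounge",
#             "parts": [{"key": "lounge_ufh",  "name": "Lounge UFH"},
#                       {"key": "lounge_rads", "name": "Lounge radiators"}]}
#   mz = MultiZone(hvac, mz_cfg)   # builds one contained Zone per entry in "parts"
#   mz.start()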
class MultiZone(object):
def __init__(self, parent, mz_cfg):
self._started = False
self._parent = parent
self._mz_key = mz_cfg["key"]
self._name = mz_cfg["name"]
        self._zones = {}
        for zone_cfg in mz_cfg["parts"]:
            newZone = Zone(parent, zone_cfg)
            self._zones[newZone.zone_key()] = newZone
# PS:
# some of the below may be used at a later stage
# self._occupied = 1
# self._wcselect = 0
# self._manualsetpoint = None
# self._zoneTemp = None
# self._enabled = 0
# self._cmdsource = 'Frost'
# self._status = 'Idle'
# self._state = ZONE_STATE_UNKNOWN
# self._heatsource = 'Idle'
# self._actuatorstate = ZONE_STATE_UNKNOWN
def mz_key(self):
return self._mz_key
    def start(self):
        self._started = True
        # MultiZone has no actuator of its own; starting it starts each contained Zone
        for zkey in self._zones:
            self._zones[zkey].start()
#------------------------------------------------------------------------------------------------------------
#
# class ZoneGroup(object):
# Class to group zones
#
# Zones are combined into Zone Groups. If any Zone that belongs to a Zone Group is 'running',
# the Zone Group will issue a 'run' event. If none of the zones in a zone group are running, the zone group
# will issue a 'stop' event.
# Zone groups are used to control actuators that are common to multiple zones, such as:
# - Circulation Pumps
# - Manifold Valves etc.
#
#------------------------------------------------------------------------------------------------------------
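# Illustrative sketch only - the group key, zone key and Event sources are hypothetical,
# and 'hvac' is assumed to be a handler that already knows about the 'kitchen' zone:
#
#   zg = ZoneGroup(hvac, {"key": "1"})
#   zg.doHandleGet("groupnumber",
#                  Event("http://id.webbrick.co.uk/events/config/get",
#                        "kitchen/groupnumber", {"val": "1"}))   # zone joins group 1
#   zg.doState("kitchen", "running")   # a member zone has demand -> 'zonegroup/1/run'
#   zg.doState("kitchen", "stopped")   # no member running        -> 'zonegroup/1/stop'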
class ZoneGroup(object):
def __init__(self, parent, zg_cfg):
self._parent = parent
self._zg_key = zg_cfg["key"]
self._zones = {}
        # TODO PS: changed from '' to 'Idle' for UI reasons; will have to consider if this is correct
        # LK - changed back; let zonemaster notify us.
self._heatsource = ''
self._state = ZONEGROUP_STATE_STOPPED
        self._actuatorstate = ZONEGROUP_STATE_UNKNOWN # Has to be modified to start in unknown state, same with _state
def start(self):
self.sendStop()
def zg_key(self):
return self._zg_key
#------------------------------------------------------------------------------------------------------------
    # Functions to handle relevant incoming events
#------------------------------------------------------------------------------------------------------------
def doHandleGet(self, key, inEvent):
if key == "groupnumber":
src = inEvent.getSource().split("/")
idx = inEvent.getPayload()["val"]
if idx == self._zg_key:
if src[0] not in self._zones:
_log.debug("Adding to zonegroup %s zone %s", self._zg_key, src[0])
self._zones[src[0]] = ZONE_STATE_IDLE
elif src[0] in self._zones:
_log.debug("Removing from zonegroup %s zone %s", self._zg_key, src[0])
del self._zones[src[0]]
def doHandleHeatSource(self, heatsource):
# only handle if changed
if self._heatsource <> heatsource:
self._heatsource = heatsource
for zkey in self._zones:
                # TODO PS: changed from '' to 'Idle' for UI reasons; will have to consider if this is correct
if self._heatsource <> 'Idle':
# The line below can significantly reduce the number of events being sent
#if self._zones[zkey] == ZONE_STATE_DEMAND:
self.sendHeatSource(zkey)
else:
self.sendHeatSource(zkey)
def doState(self, key, cmd):
if key in self._zones and cmd in ["running","stop","stopped"]:
if cmd == "running":
_log.debug("zonegroup %s zone %s running", self._zg_key, key)
if self._zones[key] <> ZONE_STATE_DEMAND:
self._zones[key] = ZONE_STATE_DEMAND
self.checkRun()
elif cmd == "stop" or cmd == "stopped":
_log.debug("zonegroup %s zone %s stop", self._zg_key, key)
if self._zones[key] <> ZONE_STATE_IDLE:
self._zones[key] = ZONE_STATE_IDLE
self.checkRun()
def doHandleActuatorState(self, state):
        # sets the actuator state depending on the running/stopped event, which is triggered by a DO event from the webbrick
if state == "running":
_log.debug("zonegroup %s actuator running", self._zg_key)
self._actuatorstate = ZONEGROUP_STATE_RUNNING
elif state == "stopped":
_log.debug("zonegroup %s actuator stopped", self._zg_key)
self._actuatorstate = ZONEGROUP_STATE_STOPPED
#------------------------------------------------------------------------------------------------------------
# Functions to evaluate actions based on internal states
#------------------------------------------------------------------------------------------------------------
def checkRun(self):
if ZONE_STATE_DEMAND in self._zones.values():
self._state = ZONEGROUP_STATE_RUNNING
else:
self._state = ZONEGROUP_STATE_STOPPED
if self._state <> self._actuatorstate:
if self._state == ZONEGROUP_STATE_RUNNING:
self.sendRun()
else:
self.sendStop()
# LPK Up logging to track a problem
for k in self._zones:
zon = self._parent._zones[k]
if self._zones[k] == ZONE_STATE_DEMAND:
if zon._state <> ZONE_STATE_DEMAND or zon._actuatorstate <> ZONE_STATE_DEMAND:
_log.info("zonegroup %s thinks zone active and zone thinks not state %s actuatorstate %s", self._zg_key, zon._state, zon._actuatorstate)
else:
if zon._state <> ZONE_STATE_IDLE or zon._actuatorstate <> ZONE_STATE_IDLE:
_log.info("zonegroup %s thinks zone inactive and zone thinks otherwise state %s actuatorstate %s", self._zg_key, zon._state, zon._actuatorstate)
#------------------------------------------------------------------------------------------------------------
# Functions to send/publish Events for external interaction
#------------------------------------------------------------------------------------------------------------
def sendRun(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/zonegroup",
"zonegroup/%s/run"%self._zg_key, None ) )
def sendStop(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/zonegroup",
"zonegroup/%s/stop"%self._zg_key, None ) )
def sendHeatSource(self, zkey):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/zone/heatsource",
"%s/heatsource"%zkey, {'name': self._heatsource} ) )
#
#------------------------------------------------------------------------------------------------------------
#
# class ZoneMaster(object):
# Class to create link between zonegroups and heatsources
#
# The Zone Master uses persisted data (heatsource priorities, heatsource enabled) and heatsource
# availability information to request the run of the best heatsource on a zonegroup basis, i.e. it responds to
# zone group running events to start a heatsource.
# The Zone Master is used to control actuators that are specific to a mapping of heatsource and zonegroup:
# - 3 way valves
# - circulation pumps
#
#------------------------------------------------------------------------------------------------------------
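# Illustrative sketch only - the zonegroup/heatsource keys and priority value are hypothetical:
#
#   zm = ZoneMaster(hvac)
#   zm.configure({"zonegroups":  [{"key": "1"}],
#                 "heatsources": [{"key": "oil"}]})
#   zm.doHandleGet("priority",
#                  Event("http://id.webbrick.co.uk/events/config/get",
#                        "zonegroup1/heatsourceoil/priority", {"val": "2"}))
#   # once the 'oil' heatsource reports a non-zero availability and zonegroup 1 reports
#   # 'running', checkRun() sends 'heatsource/oil/requestrun' and the linking actuator events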
class ZoneMaster(object):
def __init__(self, parent):
self._parent = parent
self._availability = 0
self._enabled = ZONE_ENABLED
#self._requeststate = 0
self._state = ZONEMASTER_STATE_STOPPED # 0=Stopped; 1=Running
self._heatsourceavailibilities = {}
self._zonegrouppriorities = {}
self._bestheatsourceforzonegroup = {}
self._zonegroupstate = {}
#------------------------------------------------------------------------------------------------------------
    # Functions to handle relevant incoming events
#------------------------------------------------------------------------------------------------------------
def configure( self, cfgDict ):
if cfgDict.has_key("zonegroups"):
for ZGCfg in cfgDict["zonegroups"]:
try:
newZG = {}
for HSCfg in cfgDict["heatsources"]:
#newHS = {newHS.hs_key():newHS._availability}
newZG[HSCfg["key"]] = 0
self._zonegrouppriorities[ZGCfg["key"]] = newZG
self._bestheatsourceforzonegroup[ZGCfg["key"]] = None # Not decided yet
self._zonegroupstate[ZGCfg["key"]] = ZONEGROUP_STATE_STOPPED
except:
_log.exception("ZoneMaster zonegroup error %s", ZGCfg)
if cfgDict.has_key("heatsources"):
for HSCfg in cfgDict["heatsources"]:
try:
self._heatsourceavailibilities[HSCfg["key"]] = 0
except:
_log.exception("ZoneMaster heatsource error %s", HSCfg)
def doHandleGet(self, key, inEvent):
if key == "priority":
src = inEvent.getSource().split("/")
            # -------------- This is very likely to change once the xml version of zone heating is phased out; it is only done this way for backwards compatibility!!!
zg_key = src[0][9:]
hs_key = src[1][10:]
if zg_key in self._zonegrouppriorities:
if hs_key in self._zonegrouppriorities[zg_key]:
self._zonegrouppriorities[zg_key][hs_key] = int(inEvent.getPayload()["val"])
self.doBestHeatSource()
def anyHeatSourceActive(self):
for hs_key in self._parent._heatsources:
if self._parent._heatsources[hs_key]._state == HEATSOURCE_RUN:
return True
return False
def anyZoneGroupActive(self):
return ZONEGROUP_STATE_RUNNING in self._zonegroupstate.values()
def doHandleHeatSource(self, key, inEvent):
if key == "availability":
src = inEvent.getSource().split("/")
if src[1] in self._heatsourceavailibilities:
self._heatsourceavailibilities[src[1]] = int(inEvent.getPayload()["availability"])
_log.debug("ZoneMaster doHandleHeatSource %s %s", src[1], self._heatsourceavailibilities[src[1]])
self.doBestHeatSource()
elif key == "running":
            # No longer needed; rely on the heat sources themselves
pass
elif key == "stopped":
if not self.anyHeatSourceActive() and not self.anyZoneGroupActive() and self._state <> ZONEMASTER_STATE_STOPPED:
self._state = ZONEMASTER_STATE_STOPPED
self.sendStop()
def doHandleZoneGroup(self, key, cmd):
if key in self._zonegroupstate:
if cmd == "running":
self._zonegroupstate[key] = ZONEGROUP_STATE_RUNNING
elif cmd == "stop" or cmd == "stopped":
self._zonegroupstate[key] = ZONEGROUP_STATE_STOPPED
self.checkRun(key)
else:
_log.info("Unrecognised zonegroup %s (%s)", key, cmd )
#------------------------------------------------------------------------------------------------------------
# Functions to evaluate actions based on internal states
#------------------------------------------------------------------------------------------------------------
def doHSName(self, hs_key):
for zg_key in self._zonegroupstate:
if self._zonegroupstate[zg_key] == 1 and self._bestheatsourceforzonegroup[zg_key] == hs_key:
self.sendHSName(zg_key, self._parent._heatsources[hs_key]._name)
def doBestHeatSource(self):
for zg_key in self._zonegrouppriorities:
currenths = self._bestheatsourceforzonegroup[zg_key]
higestpriority = 0
self._bestheatsourceforzonegroup[zg_key] = None
for hs_key in self._heatsourceavailibilities:
if self._heatsourceavailibilities[hs_key] <> 0:
if self._zonegrouppriorities[zg_key][hs_key] > higestpriority:
higestpriority = self._zonegrouppriorities[zg_key][hs_key]
self._bestheatsourceforzonegroup[zg_key] = hs_key
_log.debug("doBestHeatSource %s old heatsource %s new heatsource %s",zg_key,currenths,self._bestheatsourceforzonegroup[zg_key])
# check if there has been a change
if self._bestheatsourceforzonegroup[zg_key] <> currenths:
# best heatsource for a zonegroup has changed, is the zonegroup running
if self._zonegroupstate[zg_key] == ZONEGROUP_STATE_RUNNING:
# check if current hs is None
if currenths is not None:
                        # stop the actuators associated with the combo of ZG and current HS
self.sendZoneGroupHeatSourceStop(zg_key, currenths)
                        # is the HS used solely by this ZG so that it can be switched off?
inUse = 0
for zonegroup in self._bestheatsourceforzonegroup:
if self._bestheatsourceforzonegroup[zonegroup] == currenths and self._zonegroupstate[zonegroup] == ZONEGROUP_STATE_RUNNING:
_log.debug("doBestHeatSource %s heatsource in use by", currenths, zonegroup )
inUse = 1
if inUse == 0 and self._parent._heatsources[currenths]._requeststate == HEATSOURCE_RUN:
self.sendHSRequestStop(currenths)
else:
_log.debug("doBestHeatSource leave %s heatsource running", currenths )
                        # TODO PS: changed from '' to 'Idle' for UI reasons; will have to consider if this is correct
self.sendHSName(zg_key, 'Idle')
self.checkRun(zg_key)
def checkRun(self, key):
# PS
        # self._zonegroupstate is a dictionary containing the state of the zonegroups, i.e. running or stopped.
if ZONEGROUP_STATE_RUNNING in self._zonegroupstate.values():
if self._state == ZONEMASTER_STATE_STOPPED:
self._state = ZONEMASTER_STATE_RUNNING
self.sendRun()
hs_key = self._bestheatsourceforzonegroup[key]
if hs_key is None:
# PS:
            # NOTE: this may be due to the heatsource no longer being available (example: Solar)
            # changed logging priority to debug since having no suitable heatsource is not necessarily an error (example: Solar)
_log.debug("No suitable heatsource for zonegroup %s", key)
elif self._zonegroupstate[key] == ZONEGROUP_STATE_RUNNING:
if self._parent._heatsources[hs_key]._requeststate == HEATSOURCE_STOP:
                # heatsource could be running because another zonegroup also requests it
self.sendHSRequestRun(hs_key)
# have to adjust linking actuators (three way valves etc)
self.sendZoneGroupHeatSourceRun(key, hs_key)
            # have to send zonegroup heatsource name
self.sendHSName(key, self._parent._heatsources[hs_key]._name)
else:
# have to check if no other zonegroups needs the heatsource
inUse = 0
for zonegroup in self._bestheatsourceforzonegroup:
if self._bestheatsourceforzonegroup[zonegroup] == hs_key and self._zonegroupstate[zonegroup] == ZONEGROUP_STATE_RUNNING:
inUse = 1
if inUse == 0 and self._parent._heatsources[hs_key]._requeststate == HEATSOURCE_RUN:
self.sendHSRequestStop(hs_key)
self.sendZoneGroupHeatSourceStop(key, hs_key)
                    # TODO PS: changed from '' to 'Idle' for UI reasons; will have to consider if this is correct
self.sendHSName(key, 'Idle')
def sanityCheck(self):
# called once a minute to perform some sanity checks.
# LPK Up logging to track a problem
for k in self._zonegroupstate:
zg = self._parent._zonegroups[k]
if self._zonegroupstate[k] == ZONEGROUP_STATE_RUNNING:
if zg._state <> ZONEGROUP_STATE_RUNNING or zg._actuatorstate <> ZONEGROUP_STATE_RUNNING:
_log.info("zonemaster thinks zonegroup %s active and zonegroup thinks not state %s actuatorstate %s", k, zg._state, zg._actuatorstate)
else:
if zg._state <> ZONEGROUP_STATE_STOPPED or zg._actuatorstate <> ZONEGROUP_STATE_STOPPED:
_log.info("zonemaster thinks zonegroup %s inactive and zonegroup thinks otherwise state %s actuatorstate %s", k, zg._state, zg._actuatorstate)
#------------------------------------------------------------------------------------------------------------
# Functions to send/publish Events for external interaction
#------------------------------------------------------------------------------------------------------------
def sendHSName(self, zg_key, hs_key):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/group/heatsource",
"zonegroup/%s/heatsource"%zg_key,
{'name': hs_key}) )
def sendRun(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/master",
"zonemaster/run", None ) )
def sendStop(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/master",
"zonemaster/stop", None ) )
def sendHSRequestRun(self, hs_key):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/%s/requestrun"%hs_key, None ) )
def sendHSRequestStop(self, hs_key):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/%s/requeststop"%hs_key, None ) )
def sendZoneGroupHeatSourceRun(self, zg_key, hs_key):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/master",
"zonemaster/zonegroup%s/heatsource%s/run"% (zg_key, hs_key), None ) )
def sendZoneGroupHeatSourceStop(self, zg_key, hs_key):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/master",
"zonemaster/zonegroup%s/heatsource%s/stop"% (zg_key, hs_key), None ) )
#
#------------------------------------------------------------------------------------------------------------
#
# class HeatSourceBoiler(object):
# Class to manage Oil or Gas Boiler
#
# The Boiler is the simplest heatsource; it is always available.
# When requested to run (requestrun event) it will start within a minute and issue a 'dorun' event
# every minute. When requested to stop, it will stop within a minute and issue a 'dostop' event.
#
#------------------------------------------------------------------------------------------------------------
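# Illustrative sketch only - the heatsource key/name and the handler 'hvac' are hypothetical:
#
#   boiler = HeatSourceBoiler(hvac, {"key": "oil", "name": "Oil boiler", "type": "boiler"})
#   boiler.doHandleHeatSource(None, "requestrun", None)   # zone master asks it to run
#   boiler.checkRun(minuteEvent)                          # on the minute tick a 'dorun' event is sent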
class HeatSourceBoiler(object):
def __init__(self, parent, hs_cfg):
self._parent = parent
self._hs_key = hs_cfg["key"]
self._name = hs_cfg["name"]
self._type = hs_cfg["type"]
self._availability = 2
self._enabled = ZONE_ENABLED
self._requeststate = HEATSOURCE_STOP
self._state = HEATSOURCE_UNKNOWN
def hs_key(self):
return self._hs_key
def getType(self):
return self._type
#------------------------------------------------------------------------------------------------------------
    # Functions to handle relevant incoming events
#------------------------------------------------------------------------------------------------------------
def doHandleGet(self, key, inEvent):
if key == "enabled":
self._enabled = int(inEvent.getPayload()["val"])
self.sendState()
def doHandleSensor(self, key, inEvent):
# Does not have input from sensors
pass
def doHandleHeatSource(self, part, cmd, inEvent):
if cmd == "requestrun":
self._requeststate = HEATSOURCE_RUN
elif cmd == "requeststop":
self._requeststate = HEATSOURCE_STOP
#------------------------------------------------------------------------------------------------------------
# Functions to evaluate actions based on internal states
#------------------------------------------------------------------------------------------------------------
def checkRun(self, minuteEvent):
        # currently ignores availability!
if self._enabled == ZONE_ENABLED:
if self._requeststate == HEATSOURCE_RUN:
self.sendRun()
self._state = HEATSOURCE_RUN
elif self._state <> HEATSOURCE_STOP:
self.sendStop()
self._state = HEATSOURCE_STOP
#------------------------------------------------------------------------------------------------------------
# Functions to send/publish Events for external interaction
#------------------------------------------------------------------------------------------------------------
def sendState(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/%s/availability"%self._hs_key,
{'availability': self._availability
,'enabled': self._enabled
,'name': self._name} ) )
def sendRun(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/%s/dorun"%self._hs_key, None ) )
def sendStop(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/%s/dostop"%self._hs_key, None ) )
#
#------------------------------------------------------------------------------------------------------------
#
# class HeatSourceMultipleBoiler(object):
# Class to manage multiple Oil or Gas Boiler
#
# When requested to run (requestrun event) it will start within a minute and issue a 'dorun' event
# every minute. When requested to stop, it will stop within a minute and issue a 'dostop' event.
#
# It manages multiple boilers; the choice of the initial boiler depends on the week of the year, so as to
# rotate the usage. After a configured interval the flow and return temperatures are compared: if the
# difference is too large an additional boiler will be started, and if it is too small a boiler may be stopped.
# The aim is to keep the boilers working in their most efficient range.
#
#------------------------------------------------------------------------------------------------------------
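# Illustrative configuration sketch only - the keys, names and thresholds below are hypothetical:
#
#   hs_cfg = {"key": "bank", "name": "Boiler bank", "type": "multiboiler",
#             "boilers": [{"key": "b1", "name": "Boiler 1"},
#                         {"key": "b2", "name": "Boiler 2"}],
#             "flowreturnmin": "5", "flowreturnmax": "15", "checkinterval": "5"}
#   hs = HeatSourceMultipleBoiler(hvac, hs_cfg)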
class Boiler(object):
def __init__(self, bl_cfg):
self._key = bl_cfg["key"]
self._name = bl_cfg["name"]
self._active = False
def set_active(self):
self._active = True
def set_inactive(self):
self._active = False
def isactive(self):
return self._active
class HeatSourceMultipleBoiler(object):
def __init__(self, parent, hs_cfg):
self._parent = parent
self._hs_key = hs_cfg["key"]
self._name = hs_cfg["name"]
self._type = hs_cfg["type"]
# TODO may want to process this better
# such as use the boiler keys
self._boilers = []
if hs_cfg.has_key("boilers"):
for bl_cfg in hs_cfg["boilers"]:
self._boilers.append(Boiler(bl_cfg))
else:
_log.error("No boilers defined in %s", hs_cfg )
self._frmin = 5.0
self._frmax = 15.0
self._checkinterval = 5
if hs_cfg.has_key("flowreturnmin"):
self._frmin = float(hs_cfg["flowreturnmin"])
if hs_cfg.has_key("flowreturnmax"):
self._frmax = float(hs_cfg["flowreturnmax"])
if hs_cfg.has_key("checkinterval"):
self._checkinterval = float(hs_cfg["checkinterval"])
self._availability = 2
self._enabled = ZONE_ENABLED
self._requeststate = HEATSOURCE_STOP
self._state = HEATSOURCE_UNKNOWN
self._flow_temp = 0
self._return_temp = 0
self._check_counter = 0
self._active_count = 0
def hs_key(self):
return self._hs_key
def getType(self):
return self._type
#------------------------------------------------------------------------------------------------------------
    # Functions to handle relevant incoming events
#------------------------------------------------------------------------------------------------------------
def doHandleGet(self, key, inEvent):
if key == "enabled":
self._enabled = int(inEvent.getPayload()["val"])
self.sendState()
def doHandleSensor(self, key, inEvent):
_log.debug("doHandleSensor %s", inEvent)
# save for later evaluation
if key == "flow":
self._flow_temp = float(inEvent.getPayload()["val"])
elif key == "return":
self._return_temp = float(inEvent.getPayload()["val"])
def doHandleHeatSource(self, part, cmd, inEvent):
if cmd == "requestrun":
if self._requeststate <> HEATSOURCE_RUN:
self._requeststate = HEATSOURCE_RUN
self._active_count = 1
self._check_counter = self._checkinterval
elif cmd == "requeststop":
self._requeststate = HEATSOURCE_STOP
elif cmd == "running":
self._boilers[int(part)-1].set_active()
if self._state <> HEATSOURCE_RUN:
self._state = HEATSOURCE_RUN
self.sendRunning()
elif cmd == "stopped":
self._boilers[int(part)-1].set_inactive()
self._state = HEATSOURCE_STOP # assume all stopped
for idx in range(0,len(self._boilers)):
if self._boilers[idx].isactive():
self._state = HEATSOURCE_RUN
if self._state == HEATSOURCE_STOP:
self.sendStopped()
#------------------------------------------------------------------------------------------------------------
# Functions to evaluate actions based on internal states
#------------------------------------------------------------------------------------------------------------
def checkRun(self, minuteEvent):
# Called once a minute
        # currently ignores availability!
# TODO re-evaluate and decide how many boilers to request
#
if self._enabled == ZONE_ENABLED:
if self._requeststate == HEATSOURCE_RUN:
if self._check_counter > 0:
self._check_counter = self._check_counter - 1
if self._check_counter <= 0:
self._check_counter = self._checkinterval
delta = self._flow_temp - self._return_temp
_log.debug("checkRun delta %s", delta)
if delta > self._frmax and self._active_count < len(self._boilers):
# start another boiler
self._active_count = self._active_count+1
                    elif delta < self._frmin and self._active_count > 0:
                        # flow/return differential too small - stop a boiler
self._active_count = self._active_count-1
_log.debug("checkRun %s", self._active_count)
base = minuteEvent.getPayload()["week"] % len(self._boilers)
for idx in range(0,self._active_count):
self.sendRun((base+idx)%len(self._boilers))
for idx in range(self._active_count, len(self._boilers)):
self.sendStop((base+idx)%len(self._boilers))
elif self._state <> HEATSOURCE_STOP:
for idx in range(0,len(self._boilers)):
self.sendStop(idx)
self._state = HEATSOURCE_STOP
#------------------------------------------------------------------------------------------------------------
# Functions to send/publish Events for external interaction
#------------------------------------------------------------------------------------------------------------
def sendState(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/%s/availability"%self._hs_key,
{'availability': self._availability
,'enabled': self._enabled
,'name': self._name} ) )
def sendRun(self, idx):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/%s/%s/dorun"%(self._hs_key,idx+1), None ) )
def sendRunning(self):
# we need to amalgamate multiple running
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/%s/running"%self._hs_key, None ) )
def sendStop(self, idx):
if self._boilers[idx].isactive():
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/%s/%s/dostop"%(self._hs_key,idx+1), None ) )
def sendStopped(self):
# we need to amalgamate multiple stopped
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/%s/stopped"%self._hs_key, None ) )
#
#------------------------------------------------------------------------------------------------------------
#
# class HeatSourceGround(object):
# Class to manage Ground Source Heatpump
#
# Currently operates exactly like a Boiler; in future, however, it should use further inputs to adjust its availability.
# Default availability is 0, i.e. at the moment the zone master will not consider it a viable heatsource!!!
#
#------------------------------------------------------------------------------------------------------------
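# Illustrative sketch only - the key/name below are hypothetical. Note that with the default
# availability of 0 the zone master will never pick this heatsource until doAvailability()
# is implemented and raises it:
#
#   gshp = HeatSourceGround(hvac, {"key": "gshp", "name": "Ground source", "type": "ground"})
#   gshp.sendState()   # publishes availability 0 / enabled 0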
class HeatSourceGround(object):
def __init__(self, parent, hs_cfg):
self._parent = parent
self._hs_key = hs_cfg["key"]
self._name = hs_cfg["name"]
self._type = hs_cfg["type"]
self._availability = 0
self._enabled = ZONE_DISABLED
self._requeststate = HEATSOURCE_STOP
self._state = HEATSOURCE_UNKNOWN
self._manifoldTemp = None
self._heatexTemp = None
def hs_key(self):
return self._hs_key
def getType(self):
return self._type
#------------------------------------------------------------------------------------------------------------
    # Functions to handle relevant incoming events
#------------------------------------------------------------------------------------------------------------
def doHandleGet(self, key, inEvent):
if key == "enabled":
self._enabled = int(inEvent.getPayload()["val"])
self.sendState()
def doHandleSensor(self, key, inEvent):
if key == "manifold":
self._manifoldTemp = float(inEvent.getPayload()["val"])
elif key == "heatex":
self._heatexTemp = float(inEvent.getPayload()["val"])
        # is Ground Source availability really dependent on these temperatures (taken from xml) ....
# self.doAvailability()
self.sendState()
def doHandleHeatSource(self, part, cmd, inEvent):
if cmd == "requestrun":
self._requeststate = HEATSOURCE_RUN
elif cmd == "requeststop":
self._requeststate = HEATSOURCE_STOP
#------------------------------------------------------------------------------------------------------------
# Functions to evaluate actions based on internal states
#------------------------------------------------------------------------------------------------------------
def doAvailability(self):
# to be implemented once it is clear what influences the availability
pass
def checkRun(self, minuteEvent):
        # currently ignores availability!
if self._enabled == ZONE_ENABLED:
if self._requeststate == HEATSOURCE_RUN:
self.sendRun()
if self._state <> HEATSOURCE_RUN:
self._state = HEATSOURCE_RUN
elif self._state <> HEATSOURCE_STOP:
self.sendStop()
self._state = HEATSOURCE_STOP
#------------------------------------------------------------------------------------------------------------
# Functions to send/publish Events for external interaction
#------------------------------------------------------------------------------------------------------------
def sendState(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/%s/availability"%self._hs_key,
{'availability': self._availability
,'enabled': self._enabled
,'name': self._name} ) )
def sendRun(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/%s/dorun"%self._hs_key, None ) )
def sendStop(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/%s/dostop"%self._hs_key, None ) )
#
#------------------------------------------------------------------------------------------------------------
#
# class HeatSourceSolar(object):
# Class to manage a single solar panel elevation
#
# Uses the difference between the panel temperature and the temperature at the heat exchanger to
# calculate its availability.
# If the temperature difference is greater than 8C the availability will be 1; if the panel temperature
# is also above 50C the availability will be 2. If the temperature difference drops below 4C the availability is
# set to 0.
#
#------------------------------------------------------------------------------------------------------------
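# Illustrative sketch only - the key/name, Event sources and temperatures below are hypothetical:
#
#   solar = HeatSourceSolar(hvac, {"key": "solar", "name": "Solar", "type": "solar"})
#   solar.doHandleSensor("panel",
#       Event("http://id.webbrick.co.uk/zones/heatsource/sensor", "heatsource/solar/panel", {"val": "62.0"}))
#   solar.doHandleSensor("heatex",
#       Event("http://id.webbrick.co.uk/zones/heatsource/sensor", "heatsource/solar/heatex", {"val": "45.0"}))
#   # 62 - 45 = 17C > 8C and the panel is above 50C, so availability becomes 2 and a state event is sent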
class HeatSourceSolar(object):
def __init__(self, parent, hs_cfg):
self._parent = parent
self._hs_key = hs_cfg["key"]
self._name = hs_cfg["name"]
self._type = hs_cfg["type"]
self._availability = 0
self._enabled = ZONE_ENABLED
self._requeststate = HEATSOURCE_STOP
self._state = HEATSOURCE_UNKNOWN
self._panelTemp = None
self._heatexTemp = None
def hs_key(self):
return self._hs_key
def getType(self):
return self._type
def doHandleGet(self, key, inEvent):
if key == "enabled":
self._enabled = int(inEvent.getPayload()["val"])
self.sendState()
def doHandleSensor(self, key, inEvent):
if key == "panel":
self._panelTemp = float(inEvent.getPayload()["val"])
elif key == "heatex":
self._heatexTemp = float(inEvent.getPayload()["val"])
self.doAvailability()
def doAvailability(self):
if not None in (self._panelTemp, self._heatexTemp):
oldavailability = self._availability
if (self._panelTemp - self._heatexTemp) > 8.0: # Solar will be available
if self._panelTemp > 50:
self._availability = 2
else:
self._availability = 1
elif (self._panelTemp - self._heatexTemp) < 4.0: # Solar will not be available
self._availability = 0
if oldavailability <> self._availability :
self.sendState()
def doHandleHeatSource(self, part, cmd, inEvent):
if cmd == "requestrun":
self._requeststate = HEATSOURCE_RUN
elif cmd == "requeststop":
self._requeststate = HEATSOURCE_STOP
def checkRun(self, minuteEvent):
        # currently ignores availability! (Availability should be processed by the zone master, not internally??? Other thoughts??)
if self._enabled == ZONE_ENABLED:
if self._requeststate == HEATSOURCE_RUN:
self.sendRun()
if self._state <> HEATSOURCE_RUN:
self._state = HEATSOURCE_RUN
elif self._state <> HEATSOURCE_STOP:
self.sendStop()
self._state = HEATSOURCE_STOP
def sendState(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/%s/availability"%self._hs_key,
{'availability': self._availability
,'enabled': self._enabled
,'name': self._name} ) )
def sendRun(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/%s/dorun"%self._hs_key, None ) )
def sendStop(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/%s/dostop"%self._hs_key, None ) )
#
#------------------------------------------------------------------------------------------------------------
#
# class SolarElevation(object):
# Class to manage a Solar Elevation that is part of a Multi Solar Heatsource
#
# Uses the difference between the panel temperature and the temperature at the heat exchanger to
# calculate its availability.
# If the temperature difference is greater than 8C the availability will be 1; if the panel temperature
# is also above 50C the availability will be 2. If the temperature difference drops below 4C the availability is
# set to 0.
#
#------------------------------------------------------------------------------------------------------------
class SolarElevation(object):
def __init__(self, parent, el_cfg):
self._parent = parent
self._el_key = el_cfg["key"]
self._name = el_cfg["name"]
self._type = el_cfg["type"]
self._availability = 0
self._enabled = ZONE_ENABLED
self._requeststate = HEATSOURCE_STOP
self._state = HEATSOURCE_UNKNOWN
self._panelTemp = None
self._heatexTemp = None
def el_key(self):
return self._el_key
def getType(self):
return self._type
def doHandleGet(self, key, inEvent):
if key == "enabled":
self._enabled = int(inEvent.getPayload()["val"])
self.sendState()
def setElevationTemp(self, value):
self._panelTemp = float(value)
self.doAvailability()
def setHeatexTemp(self, value):
self._heatexTemp = float(value)
self.doAvailability()
def doAvailability(self):
if not None in (self._panelTemp, self._heatexTemp):
oldavailability = self._availability
if (self._panelTemp - self._heatexTemp) > 8.0: # Solar will be available
if self._panelTemp > 50:
self._availability = 2
else:
self._availability = 1
elif (self._panelTemp - self._heatexTemp) < 4.0: # Solar will not be available
self._availability = 0
if oldavailability <> self._availability :
                # only used to update the UI; the value is passed internally via the call below
self.sendState()
                # function call to update the availability stored in the multiSolar class
self._parent.setElevationAvailability(self._el_key, self._availability)
def doHandleHeatSource(self, part, cmd, inEvent):
if cmd == "requestrun":
self._requeststate = HEATSOURCE_RUN
elif cmd == "requeststop":
self._requeststate = HEATSOURCE_STOP
def checkRun(self, minuteEvent):
# PS:
        # currently ignores availability! (Availability should be processed by the zone master, not internally??? Other thoughts??)
if self._enabled == ZONE_ENABLED:
if self._requeststate == HEATSOURCE_RUN:
self.sendRun()
if self._state <> HEATSOURCE_RUN:
self._state = HEATSOURCE_RUN
elif self._state <> HEATSOURCE_STOP:
self.sendStop()
self._state = HEATSOURCE_STOP
#------------------------------------------------------------------------------------------------------------
# Functions to send/publish Events for external interaction
#------------------------------------------------------------------------------------------------------------
def sendState(self):
self._parent.sendElevationState(self._el_key, self._availability, self._enabled, self._name)
def sendRun(self):
self._parent.sendElevationRun(self._el_key)
def sendStop(self):
self._parent.sendElevationStop(self._el_key)
#
#------------------------------------------------------------------------------------------------------------
#
# class HeatSourceMultiSolar(object):
# Class to manage multiple solar panel elevations
#
#------------------------------------------------------------------------------------------------------------
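# Illustrative configuration sketch only - the keys and names below are hypothetical:
#
#   hs_cfg = {"key": "solar", "name": "Solar array", "type": "multisolar",
#             "elevations": [{"key": "east", "name": "East roof", "type": "solar"},
#                            {"key": "west", "name": "West roof", "type": "solar"}]}
#   ms = HeatSourceMultiSolar(hvac, hs_cfg)
#   # a 'heatex' reading is fanned out to every elevation; an 'elevation' reading is routed to
#   # the elevation named in the fourth element of the event source (assumed here to be its key)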
class HeatSourceMultiSolar(object):
def __init__(self, parent, hs_cfg):
self._parent = parent
self._hs_key = hs_cfg["key"]
self._name = hs_cfg["name"]
self._type = hs_cfg["type"]
self._elevations = {}
self._elevationavailabilities = {}
for el_cfg in hs_cfg["elevations"]:
self.addElevation(SolarElevation(self, el_cfg))
self._availability = 0
self._enabled = ZONE_ENABLED
self._requeststate = HEATSOURCE_STOP
self._state = HEATSOURCE_UNKNOWN
def hs_key(self):
return self._hs_key
def getType(self):
return self._type
def addElevation(self, elevation):
self._elevations[elevation.el_key()] = elevation
newElevation = {elevation.el_key():0}
self._elevationavailabilities.update(newElevation)
def setElevationAvailability(self, el_key, value):
_log.debug("setElevationAvailability %s %s", el_key, value)
if el_key in self._elevations:
self._elevationavailabilities[el_key] = int(value)
self.doAvailability()
#------------------------------------------------------------------------------------------------------------
    # Functions to handle relevant incoming events
#------------------------------------------------------------------------------------------------------------
def doHandleGet(self, key, inEvent):
if key == "enabled":
self._enabled = int(inEvent.getPayload()["val"])
for el_key in self._elevations:
self._elevations[el_key].doHandleGet(key, inEvent)
self.sendState()
def doHandleSensor(self, key, inEvent):
if key == "elevation":
src = inEvent.getSource().split("/")
if src[3] in self._elevations:
self._elevations[src[3]].setElevationTemp(inEvent.getPayload()["val"])
elif key == "heatex":
for el_key in self._elevations:
self._elevations[el_key].setHeatexTemp(inEvent.getPayload()["val"])
def doHandleHeatSource(self, part, cmd, inEvent):
if cmd == "requestrun":
self._requeststate = HEATSOURCE_RUN
for elevation in self._elevationavailabilities:
if self._elevationavailabilities[elevation] == self._availability:
self._elevations[elevation]._requeststate = self._requeststate
elif cmd == "requeststop":
self._requeststate = HEATSOURCE_STOP
for elevation in self._elevationavailabilities:
self._elevations[elevation]._requeststate = self._requeststate
#------------------------------------------------------------------------------------------------------------
# Functions to evaluate actions based on internal states
#----------------------------------------------------------------------------------------------------------
def doAvailability(self):
oldavailability = self._availability
if 2 in self._elevationavailabilities.values():
self._availability = 2
elif 1 in self._elevationavailabilities.values():
self._availability = 1
else:
self._availability = 0
        # re-evaluate whether a panel should still be running given its current availability
for elevation in self._elevationavailabilities:
if self._elevationavailabilities[elevation] == self._availability:
self._elevations[elevation]._requeststate = self._requeststate
else:
self._elevations[elevation]._requeststate = HEATSOURCE_STOP
# has overall availability changed?
if oldavailability <> self._availability :
_log.debug("doAvailability %s %s", self._hs_key, self._availability)
self.sendState()
def checkRun(self, minuteEvent):
        # currently ignores availability! (Availability should be processed by the zone master, not internally??? Other thoughts??)
if self._enabled == ZONE_ENABLED:
if self._requeststate == HEATSOURCE_RUN:
self.sendRun()
if self._state <> HEATSOURCE_RUN:
self._state = HEATSOURCE_RUN
elif self._state <> HEATSOURCE_STOP:
self.sendStop()
self._state = HEATSOURCE_STOP
for elevation in self._elevations:
self._elevations[elevation].checkRun(minuteEvent)
#------------------------------------------------------------------------------------------------------------
# Functions to send/publish Events for external interaction
#------------------------------------------------------------------------------------------------------------
def sendState(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/%s/availability"%self._hs_key,
{'availability': self._availability
,'enabled': self._enabled
,'name': self._name} ) )
def sendRun(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/%s/dorun"%self._hs_key, None ) )
def sendStop(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/%s/dostop"%self._hs_key, None ) )
def sendElevationState(self, el_key, availability, enabled, name):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/%s/%s/availability"%(self._hs_key, el_key),
{'availability': availability
,'enabled': enabled
,'name': name} ) )
def sendElevationRun(self, el_key):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/%s/%s/dorun"%(self._hs_key, el_key), None ) )
def sendElevationStop(self, el_key):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/%s/%s/dostop"%(self._hs_key, el_key), None ) )
#
#------------------------------------------------------------------------------------------------------------
#
# class WeatherCompensation(object):
# Class to provide weather compensation
#
# Evaluates the temperature trend over the past 10 minutes and sets the state/istate variables
# based on high and low thresholds to indicate whether heating should run or be held off.
# Operates on a Zone basis, i.e. a zone has to be configured to follow a set of weather
# compensation settings, with associated thresholds. Weather compensation 0 is a special case, which is
# effectively no weather compensation.
#
# Future Improvements:
# - RSS Feed interface for weather forecast
#
#------------------------------------------------------------------------------------------------------------
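# Illustrative sketch only - the weather key, thresholds and temperature below are hypothetical:
#
#   wc = WeatherCompensation(hvac, '1')
#   wc.doHandleGet("rising",
#       Event("http://id.webbrick.co.uk/events/config/get", "weather/1/rising", {"val": "14.0"}))
#   wc.doHandleGet("falling",
#       Event("http://id.webbrick.co.uk/events/config/get", "weather/1/falling", {"val": "12.0"}))
#   wc.doHandleWeather("outsideTemp",
#       Event("http://id.webbrick.co.uk/zones/weather", "weather/outsideTemp", {"val": "15.5"}))
#   # doTrend() is called every 10 minutes; once two readings are held and the trend is 'Up'
#   # with 15.5C above the rising threshold, the state becomes 'HoldOff'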
class WeatherCompensation(object):
def __init__(self, parent, wkey):
self._parent = parent
self._started = False
self._weatherkey = wkey
self._trend = ''
self._state = "Run"
self._istate = WEATHER_C_STATE_RUN # 0 = HoldOff; 1 = Run
self._tstate = 1 # 0=Down; 1=Level; 2=Up
self._previousTemp = None
self._currentTemp = None
self._risingThres = None
self._fallingThres = None
def start(self):
self._started = True
self.sendState()
def istate(self):
return self._istate
#------------------------------------------------------------------------------------------------------------
    # Functions to handle relevant incoming events
#------------------------------------------------------------------------------------------------------------
def doHandleWeather(self, key, inEvent):
if key == "outsideTemp":
self._currentTemp = float(inEvent.getPayload()["val"])
def doHandleGet(self, key, inEvent):
if key == "rising":
self._risingThres = float(inEvent.getPayload()["val"])
self.doTrend()
self.sendState()
elif key == "falling":
self._fallingThres = float(inEvent.getPayload()["val"])
self.doTrend()
self.sendState()
#------------------------------------------------------------------------------------------------------------
# Functions to evaluate actions based on internal states
#------------------------------------------------------------------------------------------------------------
def doTrend(self):
if not None in (self._currentTemp, self._previousTemp, self._risingThres, self._fallingThres):
if self._currentTemp > self._previousTemp:
self._trend = 'Up'
if self._currentTemp > self._risingThres:
self._state = 'HoldOff'
self._istate = WEATHER_C_STATE_HOLDOFF
else:
self._state = 'Run'
self._istate = WEATHER_C_STATE_RUN
else:
self._trend = 'Down'
if self._currentTemp > self._fallingThres:
self._state = 'HoldOff'
self._istate = WEATHER_C_STATE_HOLDOFF
else:
self._state = 'Run'
self._istate = WEATHER_C_STATE_RUN
self.sendState()
        # Special case to create 'global/trend'; this is only created by weather 0
if self._weatherkey == '0':
if not None in (self._currentTemp, self._previousTemp):
if self._currentTemp > self._previousTemp :
self._trend = 'Up'
self._tstate = 2
elif self._currentTemp < self._previousTemp:
self._trend = 'Down'
self._tstate = 0
else:
self._trend = 'Level'
self._tstate = 1
self.sendGlobalTrend()
self._previousTemp = self._currentTemp
#------------------------------------------------------------------------------------------------------------
# Functions to send/publish Events for external interaction
#------------------------------------------------------------------------------------------------------------
    # does this even need to be sent out??? At the moment YES - it indirectly triggers checkRun for Zones
def sendState(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/weather",
"weather/%s"%self._weatherkey,
{'state': self._state,
'trend': self._trend,
'istate': self._istate,
} ) )
def sendGlobalTrend(self):
self._parent.sendEvent( Event("http://id.webbrick.co.uk/zones/weather",
"weather/global",
{'trend': self._trend,
'tstate': self._tstate,
'curtemp': self._currentTemp,
'prevtemp': self._previousTemp
} ) )
#
#------------------------------------------------------------------------------------------------------------
#
# HeatingVentilationAC class
#
#------------------------------------------------------------------------------------------------------------
class HeatingVentilationAC( BaseHandler ):
"""
    This event interface is used to create a full heating, ventilation and air conditioning solution
    that evaluates the demand of 'heating zones' and triggers actuators and heatsources to fulfil the
    demand where appropriate.
    The configuration for a HeatingVentilationAC entry is as follows:
    <eventInterface module='EventHandlers.hvac' name='HeatingVentilationAC'>
    </eventInterface>
    eventtype, eventsource, event and params are as per BaseHandler.
    Additionally there are one or more newEvent elements that define the new event to be issued. The type and source
    attributes of the newEvent element specify the event type and source.
"""
def __init__ (self, localRouter):
super(HeatingVentilationAC,self).__init__(localRouter)
global _log
_log = self._log # make global
self._subscribeTime = 30
self._zones = {}
self._zonegroups = {}
self._heatsources = {}
self._weathercompensation = [
WeatherCompensation(self, '0'),
WeatherCompensation(self, '1'),
WeatherCompensation(self, '2'),
WeatherCompensation(self, '3') ]
self._zonemaster = ZoneMaster(self)
def start(self):
self._log.debug( 'start' )
self._localRouter.subscribe( self._subscribeTime, self, 'http://id.webbrick.co.uk/events/config/get' )
self._localRouter.subscribe( self._subscribeTime, self, 'http://id.webbrick.co.uk/events/time/minute' )
self._localRouter.subscribe( self._subscribeTime, self, 'http://id.webbrick.co.uk/events/time/runtime' )
self._localRouter.subscribe( self._subscribeTime, self, 'http://id.webbrick.co.uk/zones/weather' )
self._localRouter.subscribe( self._subscribeTime, self, 'http://id.webbrick.co.uk/events/schedule/control' )
self._localRouter.subscribe( self._subscribeTime, self, 'http://id.webbrick.co.uk/events/zones/manual' )
self._localRouter.subscribe( self._subscribeTime, self, 'http://id.webbrick.co.uk/zones/sensor' )
self._localRouter.subscribe( self._subscribeTime, self, 'http://id.webbrick.co.uk/zones/zone/heatsource' )
self._localRouter.subscribe( self._subscribeTime, self, 'http://id.webbrick.co.uk/zones/group/heatsource' )
self._localRouter.subscribe( self._subscribeTime, self, 'http://id.webbrick.co.uk/zones/zone' )
self._localRouter.subscribe( self._subscribeTime, self, 'http://id.webbrick.co.uk/zones/heatsource' )
self._localRouter.subscribe( self._subscribeTime, self, 'http://id.webbrick.co.uk/zones/heatsource/sensor' )
self._localRouter.subscribe( self._subscribeTime, self, 'http://id.webbrick.co.uk/zones/zonegroup' )
def stop(self):
self._log.debug( 'stop' )
self._localRouter.unsubscribe( self, 'http://id.webbrick.co.uk/events/config/get' )
self._localRouter.unsubscribe( self, 'http://id.webbrick.co.uk/events/time/minute' )
self._localRouter.unsubscribe( self, 'http://id.webbrick.co.uk/events/time/runtime' )
self._localRouter.unsubscribe( self, 'http://id.webbrick.co.uk/zones/weather' )
self._localRouter.unsubscribe( self, 'http://id.webbrick.co.uk/events/schedule/control' )
self._localRouter.unsubscribe( self, 'http://id.webbrick.co.uk/events/zones/manual' )
self._localRouter.unsubscribe( self, 'http://id.webbrick.co.uk/zones/sensor' )
self._localRouter.unsubscribe( self, 'http://id.webbrick.co.uk/zones/zone/heatsource' )
self._localRouter.unsubscribe( self, 'http://id.webbrick.co.uk/zones/group/heatsource' )
self._localRouter.unsubscribe( self, 'http://id.webbrick.co.uk/zones/zone' )
self._localRouter.unsubscribe( self, 'http://id.webbrick.co.uk/zones/heatsource' )
self._localRouter.unsubscribe( self, 'http://id.webbrick.co.uk/zones/heatsource/sensor' )
self._localRouter.unsubscribe( self, 'http://id.webbrick.co.uk/zones/zonegroup' )
    def getWCState(self, wc_key):
        return self._weathercompensation[wc_key].istate()
def configure( self, cfgDict ):
super(HeatingVentilationAC,self).configure(cfgDict)
if cfgDict.has_key("zones"):
for zoneCfg in cfgDict["zones"]:
try:
if not zoneCfg.has_key("type") or zoneCfg["type"] == "single":
newZone = Zone(self, zoneCfg)
self._zones[newZone.zone_key()] = newZone
elif zoneCfg["type"] == "multi":
newMultiZone = MultiZone(self, zoneCfg)
self._zones[newMultiZone.mz_key()] = newMultiZone
except:
_log.exception("Error configuring zone %s", zoneCfg)
if cfgDict.has_key("zonegroups"):
for ZGCfg in cfgDict["zonegroups"]:
try:
newZG = ZoneGroup(self, ZGCfg)
self._zonegroups[newZG.zg_key()] = newZG
except:
_log.exception("Error configuring zonegroup %s", ZGCfg)
if cfgDict.has_key("heatsources"):
for HSCfg in cfgDict["heatsources"]:
try:
if HSCfg["type"] == "boiler":
newHS = HeatSourceBoiler(self, HSCfg)
self._heatsources[newHS.hs_key()] = newHS
elif HSCfg["type"] == "multiboiler":
newHS = HeatSourceMultipleBoiler(self, HSCfg)
self._heatsources[newHS.hs_key()] = newHS
elif HSCfg["type"] == "ground":
newHS = HeatSourceGround(self, HSCfg)
self._heatsources[newHS.hs_key()] = newHS
elif HSCfg["type"] == "solar":
newHS = HeatSourceSolar(self, HSCfg)
self._heatsources[newHS.hs_key()] = newHS
elif HSCfg["type"] == "multisolar":
newHS = HeatSourceMultiSolar(self, HSCfg)
self._heatsources[newHS.hs_key()] = newHS
# for ElevationCfg in HSCfg["elevations"]:
# newElevation = HeatSourceSolar(self, ElevationCfg)
# self._heatsources[newHS.hs_key()].addElevation(newElevation)
except:
_log.exception("Error configuring heatsource %s", HSCfg)
if cfgDict.has_key("zonemaster"):
try:
if int(cfgDict["zonemaster"]["active"]) == 1:
self._zonemaster.configure(cfgDict)
except:
_log.exception("Error configuring zonemaster %s", cfgDict["zonemaster"])
def doHandleRuntime( self, inEvent ):
od = inEvent.getPayload()
if int(od["elapsed"]) == 10:
for weather in self._weathercompensation:
weather.start()
elif int(od["elapsed"]) == 20:
for heatsource in self._heatsources:
self._heatsources[heatsource].sendState()
elif int(od["elapsed"]) == 30:
for zone in self._zones:
self._zones[zone].start()
self._zones[zone].sendName()
elif int(od["elapsed"]) == 45:
self.sendNumberOfZones(len(self._zones))
self.sendNumberOfZoneGroups(len(self._zonegroups))
self.sendNumberOfHeatsources(len(self._heatsources))
for zone in self._zones:
self._zones[zone].sendName()
elif int(od["elapsed"]) == 60:
self.sendNumberOfZones(len(self._zones))
self.sendNumberOfZoneGroups(len(self._zonegroups))
self.sendNumberOfHeatsources(len(self._heatsources))
for zone in self._zones:
self._zones[zone].sendName()
def doHandleMinute( self, inEvent ):
od = inEvent.getPayload()
        curMin = int(od["minute"])
for heatsource in self._heatsources:
self._heatsources[heatsource].checkRun(inEvent)
for zone in self._zones:
self._zones[zone].checkRun()
for zonegroup in self._zonegroups:
self._zonegroups[zonegroup].checkRun()
if (curMin % 10) == 0:
for wc in self._weathercompensation:
wc.doTrend()
self._zonemaster.sanityCheck()
def doHandleGet( self, inEvent ):
src = inEvent.getSource().split("/")
if src[0] in self._zones:
if src[1] == "groupnumber":
for zonegroup in self._zonegroups:
self._zonegroups[zonegroup].doHandleGet(src[1], inEvent)
else:
self._zones[src[0]].doHandleGet(src[1], inEvent)
elif src[0] == "weather":
idx = int(src[1])
self._weathercompensation[idx].doHandleGet(src[2], inEvent)
#legacy zoneheatsource should be heatsource....
elif src[0] == "zoneheatsource":
if src[1] in self._heatsources:
self._heatsources[src[1]].doHandleGet(src[2], inEvent)
elif inEvent.getSource() == "occupants/home":
for zkey in self._zones:
self._zones[zkey].setOccupied(int(inEvent.getPayload()["val"]))
if len(src) > 2 and src[2] == "priority":
try:
self._zonemaster.doHandleGet(src[2], inEvent)
except:
_log.exception("ZoneMaster doHandleGet %s", inEvent)
def doHandleWeather( self, inEvent ):
src = inEvent.getSource().split("/")
if src[1] in ["previous","global","current","outsideTemp"]:
for wc in self._weathercompensation:
wc.doHandleWeather(src[1], inEvent)
else:
w_key = int(src[1])
for zkey in self._zones:
self._zones[zkey].doHandleWeather(w_key)
def doHandleScheduleControl( self, inEvent ):
src = inEvent.getSource().split("/")
if src[0] in self._zones:
self._zones[src[0]].doHandleScheduleControl(float(inEvent.getPayload()["val"]))
def doHandleManual( self, inEvent ):
src = inEvent.getSource().split("/")
if src[0] in self._zones:
self._zones[src[0]].doHandleManual(float(inEvent.getPayload()["val"]))
def doHandleSensor( self, inEvent ):
src = inEvent.getSource().split("/")
if src[0] in self._zones:
self._zones[src[0]].doHandleSensor(float(inEvent.getPayload()["val"]))
elif src[0] == "heatsource":
if src[1] in self._heatsources:
self._heatsources[src[1]].doHandleSensor(src[2], inEvent)
def doHandleGroupHeatSource( self, inEvent ):
src = inEvent.getSource().split("/")
if src[1] in self._zonegroups:
self._zonegroups[src[1]].doHandleHeatSource(inEvent.getPayload()["name"])
def doHandleZoneHeatSource( self, inEvent ):
src = inEvent.getSource().split("/")
if src[0] in self._zones:
self._zones[src[0]].doHandleHeatSource(inEvent.getPayload()["name"])
def doHandleZone( self, inEvent ):
src = inEvent.getSource().split("/")
if src[0] in self._zones:
            # let the zone decide whether this is valid
self._zones[src[0]].doHandleActuatorState(src[1])
for zonegroup in self._zonegroups:
self._zonegroups[zonegroup].doState(src[0], src[1])
def doHandleZoneGroup( self, inEvent ):
src = inEvent.getSource().split("/")
if src[1] in self._zonegroups:
self._zonegroups[src[1]].doHandleActuatorState(src[2])
self._zonemaster.doHandleZoneGroup(src[1], src[2])
def doHandleHeatSource(self, inEvent):
# TODO this could be tidied up more.
src = inEvent.getSource().split("/")
if len(src) > 3:
# This is a multipart heat source
# They see the distinct running, stopped and generate the overall running/stopped events
if src[1] in self._heatsources:
self._heatsources[src[1]].doHandleHeatSource(src[2], src[3], inEvent)
elif src[1] in self._heatsources:
if src[2] in ["requestrun","requeststop"]:
self._heatsources[src[1]].doHandleHeatSource(None, src[2], inEvent)
elif src[2] in ["availability", "running", "stopped"]:
self._zonemaster.doHandleHeatSource(src[2], inEvent)
def doHandleEvent( self, handler, inEvent ):
try:
if inEvent.getType() == 'http://id.webbrick.co.uk/zones/sensor':
self.doHandleSensor( inEvent )
elif inEvent.getType() == "http://id.webbrick.co.uk/events/time/minute" :
self.doHandleMinute( inEvent )
elif inEvent.getType() == "http://id.webbrick.co.uk/zones/zone" :
self.doHandleZone( inEvent )
elif inEvent.getType() == 'http://id.webbrick.co.uk/zones/weather':
self.doHandleWeather( inEvent )
elif inEvent.getType() == 'http://id.webbrick.co.uk/events/schedule/control':
self.doHandleScheduleControl( inEvent )
elif inEvent.getType() == 'http://id.webbrick.co.uk/events/zones/manual':
self.doHandleManual( inEvent )
elif inEvent.getType() == 'http://id.webbrick.co.uk/zones/heatsource':
self.doHandleHeatSource( inEvent )
elif inEvent.getType() == 'http://id.webbrick.co.uk/zones/heatsource/sensor':
self.doHandleSensor( inEvent )
elif inEvent.getType() == 'http://id.webbrick.co.uk/zones/zone/heatsource':
self.doHandleZoneHeatSource( inEvent )
elif inEvent.getType() == 'http://id.webbrick.co.uk/zones/group/heatsource':
self.doHandleGroupHeatSource( inEvent )
elif inEvent.getType() == 'http://id.webbrick.co.uk/events/config/get':
self.doHandleGet( inEvent )
elif inEvent.getType() == 'http://id.webbrick.co.uk/zones/zonegroup':
self.doHandleZoneGroup( inEvent )
elif inEvent.getType() == "http://id.webbrick.co.uk/events/time/runtime" :
self.doHandleRuntime( inEvent )
else:
# unexpected
self._log.error( "Not expecting this event %s", inEvent.getType() )
except Exception, ex:
self._log.exception(ex)
return makeDeferred(StatusVal.OK)
def sendNumberOfZones(self, count):
self.sendEvent( Event("http://id.webbrick.co.uk/zones/zone",
"zone/count",
{'val': count} ) )
def sendNumberOfZoneGroups(self, count):
self.sendEvent( Event("http://id.webbrick.co.uk/zones/zonegroup",
"zonegroup/count",
{'val': count} ) )
def sendNumberOfHeatsources(self, count):
self.sendEvent( Event("http://id.webbrick.co.uk/zones/heatsource",
"heatsource/count",
{'val': count} ) )
# $Id: HVAC.py 3201 2009-06-15 15:21:25Z philipp.schuster $
| 2.390625
| 2
|
modules/structure.py
|
zhester/hzpy
| 3
|
12778095
|
##############################################################################
#
# structure.py - Structure Data Access Class
#
##############################################################################
import struct
#=============================================================================
class structure:
"""
Cleanly implements redundant methods to access structured data in binary
streams.
"""
#=========================================================================
def __init__( self, format, fields ):
"""
Constructor
@param format Packed data format string
@param fields List (or tuple) of parsed field name strings
"""
self.format = format
self.fields = fields
self.sizeof = struct.calcsize( format )
#=========================================================================
def load_data( self, obj, data ):
"""
Load parsed data into specified object.
@param obj Dictionary (or object) to which data is loaded
@param data Binary data string from which to load
@return True if successful
"""
# Ensure data is correctly specified
if ( data is None ) or ( len( data ) != self.sizeof ):
return False
# Unpack the data string into primitive types
parts = struct.unpack( self.format, data )
# Iterate over each parsed field
for i in range( len( parts ) ):
# The field name is specified
if ( len( self.fields ) > i ) and ( self.fields[ i ] is not None ):
# Assign this data to the dictionary field
if type( obj ) is dict:
obj[ self.fields[ i ] ] = parts[ i ]
# Assign this data to a member of the field's name
else:
setattr( obj, self.fields[ i ], parts[ i ] )
# The field name is not specified
else:
# Assign this data to the dictionary field
if type( obj ) is dict:
obj[ '_anon_%d' % i ] = parts[ i ]
# Assign this data to a generated field name
else:
                    setattr( obj, '_anon_%d' % i, parts[ i ] )
# Data properly loaded
return True
#=========================================================================
def load_from_handle( self, obj, handle ):
"""
Load parsed data into specified object from a file handle.
@param obj Dictionary (or object) to which data is loaded
@param handle File handle from which data is read
@return True if successful
"""
return self.load_data( obj, handle.read( self.sizeof ) )
#=========================================================================
def pack( self, *args ):
"""
Pack data into string representation.
@param *args Data values to pack according to structure format
@return Byte string of packed data
"""
return struct.pack( self.format, *args )
#=========================================================================
def pack_from_object( self, obj ):
"""
Pack data into string representation from a compatible object.
@param obj Dictionary (or object) from which data is extracted
@return Byte string of packed data
"""
args = []
for field in self.fields:
if type( obj ) is dict:
args.append( obj[ field ] )
else:
args.append( getattr( obj, field ) )
return self.pack( *args )
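#=============================================================================
# (Added usage sketch, not part of the original module.) A minimal example of
# how the structure class above might be used to parse a little-endian header.
# The format string and field names are illustrative assumptions only.
def _example_usage():
    """ Round-trips a hypothetical 3-field header through structure """
    hdr = structure( '<IHH', ( 'magic', 'version', 'length' ) )
    packed = struct.pack( '<IHH', 0xCAFEBABE, 2, 128 )
    record = {}
    if hdr.load_data( record, packed ):
        # Fields are now addressable by name; round-trip back to bytes.
        assert record[ 'magic' ] == 0xCAFEBABE
        assert hdr.pack_from_object( record ) == packed
    return record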
#=============================================================================
def main( argv ):
""" Test script execution entry point """
print "structure module test not yet implemented. Sorry."
##test = structure()
#=============================================================================
if __name__ == "__main__":
import sys
sys.exit( main( sys.argv ) )
| 3.0625
| 3
|
python_web/config.py
|
LouisYZK/Frodo
| 123
|
12778096
|
import os
import configparser
import yaml
import ast
from pathlib import Path
HERE = Path(__file__).parent.absolute()
print(HERE)
config_dir = HERE / 'config/config.ini.model'
config = configparser.ConfigParser()
config.read(config_dir)
ACCESS_TOKEN_EXPIRE_MINUTES = config.get('security', 'access_token_expire_minutes')
JWT_ALGORITHM = config.get('security', 'jwt_algorithm')
OAUTH_REDIRECT_PATH = config.get('github', 'oauth_redirect_path')
REDIRECT_URI = config.get('github', 'redirect_uri')
CLIENT_ID = config.get('github', 'client_id')
CLIENT_SECRET = config.get('github', 'client_secret')
HOST_PATH = config.get('global', 'host_path')
WEB_PORT = config.get('port', "fastapi")
# DB_URL = os.getenv('DB_URL', config.get('database', 'db_url'))
db_host = config.get('database', 'host')
db_username = config.get('database', 'username')
db_pwd = config.get('database', 'password')
db_port = config.get('database', 'port')
db = config.get('database', 'db')
charset = config.get('database', 'charset')
DB_URL = f'mysql+pymysql://{db_username}:{db_pwd}@{db_host}:{db_port}/{db}?charset={charset}'
print(DB_URL)
REDIS_URL = os.getenv('REDIS_URL',
config.get('redis', 'redis_url'))
DEBUG = os.getenv('DEBUG', config.get('global', 'debug')).lower() \
in ('true', 'y', 'yes', '1')
WTF_CSRF_SECRET_KEY = 123
AUTH_LOGIN_ENDPOINT = 'index.login'
MEMCACHED_HOST = os.getenv('MEMCACHED_HOST',
config.get('memcached', 'memcached_host'))
MEMCACHED_PORT = config.get('memcached', 'memcached_port')
oauth_redirect_path = '/oauth'
redirect_uri = 'http://127.0.0.1:8000/oauth'
client_id = "098a2e6da880878e05da"
client_secret = "<KEY>"
REACT_PROMPT = '喜欢这篇文章吗? 记得给我留言或订阅哦'
PLOAD_FOLDER = HERE / 'static/upload'
AUTHOR = 'zhikai'
SITE_TITLE = 'Zhikai-Yang Space'
PER_PAGE = 10
GOOGLE_ANALYTICS = ''
SENTRY_DSN = ''
REQUEST_TIMEOUT = 15
SHOW_PAGEVIEW = True
PERMALINK_TYPE = 'slug'  # options: id, slug, title
# [(Endpoint, Name, IconName, Color), ...]
# SITE_NAV_MENUS = [('blog.index', '首页'), ('blog.topics', '专题'),
# ('blog.archives', '归档'), ('blog.tags', '标签'),
# ('index.search', '搜索'), ('/page/aboutme', '关于我'),
# ('index.feed', 'RSS', 'rss', '#fc6423')]
SITE_NAV_MENUS = [('blog.index', '首页'),
('blog.activities', '动态'),
('blog.tags', '标签'),
('index.search', '搜索'),
('blog.archives', '归档'),
('/post/aboutme', '关于我')
]
BEIAN_ID = ''
JWT_SECRET = config.get('security', 'jwt_secret')
EXPIRATION_DELTA = 60 * 60
WTF_CSRF_ENABLED = False
MAIL_SERVER = 'smtp.qq.com'
MAIL_PORT = 465
MAIL_USERNAME = ''
MAIL_PASSWORD = ''
BLOG_URL = 'https://example.com'
UPLOAD_FOLDER = HERE / 'static/upload'
# Redis sentinel
REDIS_SENTINEL_SERVICE_HOST = None
REDIS_SENTINEL_SERVICE_PORT = 26379
SHOW_AUTHOR = True
class AttrDict(dict):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.__dict__ = self
try:
with open(HERE / 'config.yaml') as f:
yaml_content = f.read()
    partials = AttrDict(yaml.load(yaml_content, Loader=yaml.SafeLoader)).partials
USE_YAML = True
except FileNotFoundError:
USE_YAML = False
partials = {}
try:
from local_settings import * # noqa
except ImportError:
pass
K_POST = 1001
K_COMMENT = 1002
ONE_MINUTE = 60
ONE_HOUR = ONE_MINUTE * 60
ONE_DAY = ONE_HOUR * 24
K_STATUS = 1003
K_ACTIVITY = 1004
CDN_DOMAIN = ''
USE_FFMPEG = False
STATIC_FILE_TYPES = ('jpg', 'png', 'webp', 'gif', 'mp4', 'css', 'js')
| 2.0625
| 2
|
examples/gurobipy/metrorail/create_xls_file.py
|
adampkehoe/ticdat
| 15
|
12778097
|
<filename>examples/gurobipy/metrorail/create_xls_file.py
# Use this file to convert the metrorail_sample_data.json data set
# to Excel format.
#
# python create_xls_file.py
#
# will create a file named Metro Rail Data.xlsx.
#
# metrorail.py will produce the same result regardless of whether
# it is run on metrorail_sample_data.json or Metro Rail Data.xlsx.
# This result is largely consistent with the heat map result from
# https://orbythebeach.wordpress.com/2018/03/01/buying-metrorail-tickets-in-miami/
# with the exception that we find only two infeasible sub-models.
from metrorail import input_schema
dat = input_schema.json.create_tic_dat("metrorail_sample_data.json")
input_schema.xls.write_file(dat, "Metro Rail Data.xlsx", allow_overwrite=True,
case_space_sheet_names=True)
| 2.828125
| 3
|
flydra_core/flydra_core/align.py
|
elhananby/flydra
| 45
|
12778098
|
<filename>flydra_core/flydra_core/align.py
from __future__ import print_function
import numpy as np
import scipy.linalg
def estsimt(X1, X2):
# from estsimt.m in MultiCameSelfCal
# ESTimate SIMilarity Transformation
#
# [s,R,T] = estsimt(X1,X2)
#
# X1,X2 ... 3xN matrices with corresponding 3D points
#
# X2 = s*R*X1 + T
# s ... scalar scale
# R ... 3x3 rotation matrix
# T ... 3x1 translation vector
#
# This is done according to the paper:
# "Least-Squares Fitting of Two 3-D Point Sets"
# by <NAME>, <NAME> and <NAME>
N = X1.shape[1]
if N != X2.shape[1]:
raise ValueError("both X1 and X2 must have same number of points")
X1cent = np.mean(X1, axis=1)
X2cent = np.mean(X2, axis=1)
# normalize coordinate systems for both set of points
x1 = X1 - X1cent[:, np.newaxis]
x2 = X2 - X2cent[:, np.newaxis]
# mutual distances
d1 = x1[:, 1:] - x1[:, :-1]
d2 = x2[:, 1:] - x2[:, :-1]
ds1 = np.sqrt(np.sum(d1 ** 2, axis=0))
ds2 = np.sqrt(np.sum(d2 ** 2, axis=0))
# print 'ds1'
# print ds1
scales = ds2 / ds1
s = np.median(scales)
# print 's', s
# undo scale
x1s = s * x1
# finding rotation
H = np.zeros((3, 3))
for i in range(N):
tmp1 = x1s[:, i, np.newaxis]
# print 'tmp1',tmp1
tmp2 = x2[np.newaxis, :, i]
# print 'tmp2',tmp2
tmp = np.dot(tmp1, tmp2)
# print 'tmp'
# print tmp
H += tmp
# print 'H'
# print H
U, S, Vt = scipy.linalg.svd(H)
# print 'U'
# print U
# print 'S'
# print S
# print 'Vt'
# print Vt
V = Vt.T
X = np.dot(V, U.T)
R = X
T = X2cent - s * np.dot(R, X1cent)
return s, R, T
def build_xform(s, R, t):
    T = np.zeros((4, 4), dtype=float)
T[:3, :3] = R
T = s * T
T[:3, 3] = t
T[3, 3] = 1.0
return T
def align_points(s, R, T, X):
assert X.ndim == 2
assert X.shape[0] in [3, 4] # either 3D or 3D homogeneous
T = build_xform(s, R, T)
if X.shape[0] == 3:
# make homogeneous
Xnew = np.ndarray((4, X.shape[1]), dtype=X.dtype)
Xnew[3, :].fill(1)
Xnew[:3, :] = X
X = Xnew
X = np.dot(T, X)
return X
def align_pmat(s, R, T, P):
T = build_xform(s, R, T)
P = np.dot(P, scipy.linalg.inv(T))
return P
def align_pmat2(M, P):
    P = np.dot(P, scipy.linalg.inv(M))
return P
def test_align():
orig_points = np.array(
[
[3.36748406, 1.61036404, 3.55147255],
[3.58702265, 0.06676394, 3.64695356],
[0.28452026, -0.11188296, 3.78947735],
[0.25482713, 1.57828256, 3.6900808],
[3.54938525, 1.74057692, 5.13329681],
[3.6855626, 0.10335229, 5.26344841],
[0.25025385, -0.06146044, 5.57085135],
[0.20742481, 1.71073272, 5.41823085],
]
).T
ft2inch = 12.0
inch2cm = 2.54
cm2m = 0.01
ft2m = ft2inch * inch2cm * cm2m
x1, y1, z1 = 0, 0, 0
x2, y2, z2 = np.array([10, 5, 5]) * ft2m
new_points = np.array(
[
[x2, y2, z2],
[x2, y1, z2],
[x1, y1, z2],
[x1, y2, z2],
[x2, y2, z1],
[x2, y1, z1],
[x1, y1, z1],
[x1, y2, z1],
]
).T
print(orig_points.T)
print(new_points.T)
s, R, t = estsimt(orig_points, new_points)
print("s=%s" % repr(s))
print("R=%s" % repr(R.tolist()))
print("t=%s" % repr(t.tolist()))
Xnew = align_points(s, R, t, orig_points)
# measure distance between elements
mean_absdiff = np.mean(abs(Xnew[:3] - new_points).flatten())
assert mean_absdiff < 0.05
    pmat_orig = np.array([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]], dtype=float)
print("Xnew.T")
print(Xnew.T)
pmat_new = align_pmat(s, R, t, pmat_orig)
print("pmat_new")
print(pmat_new)
## print 's',s
## print 'R'
## print R
## print 'T'
## print T
| 3.171875
| 3
|
src/base/XVII/getLastEventID.py
|
sockball/logistics
| 3
|
12778099
|
<gh_stars>1-10
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# pip3 install requests PyExecJS
# linux下同时需要nodejs环境或其他JS Runtime
import execjs
import sys, getopt
import re, requests, json
def getWaybillNo ():
    # sys.argv[1:] takes the values after index 1; index 0 is the script name
    # 'c:'    means the short option -c expects an argument
    # 'code=' means the long option --code expects an argument
    # options holds the parsed (option, value) pairs; args holds the remaining arguments
options, args = getopt.getopt(sys.argv[1:], 'c:', ['code='])
for option, value in options:
if option in ('-c', '--code'):
return value
def getClassName ():
home = 'https://t.17track.net/zh-cn'
res = requests.get(home)
return re.findall(r'<li id="jcHeaderInput" class="(.+?)">', res.text)[0]
def main ():
js = sys.path[0] + '/track.js'
with open(js, 'r', encoding = 'utf-8') as f:
track_js = f.read()
data = '{"data":[{"num":"%s","fc":0,"sc":0}],"guid":"","timeZoneOffset":-480}' % getWaybillNo()
ctx = execjs.compile(track_js)
return ctx.call('get_cookie', data, getClassName())
print(main())
| 2.546875
| 3
|
ros_system_ws/src/vector79/scripts/light_system.py
|
DrClick/ARCRacing
| 7
|
12778100
|
<filename>ros_system_ws/src/vector79/scripts/light_system.py<gh_stars>1-10
#!/usr/bin/env python
import rospy
import socket
from std_msgs.msg import String
import serial
import time
import subprocess
#TODO: figure out how to figure out this port programatically
_serial = serial.Serial('/dev/ttyUSB0', 115200, timeout=.1)
time.sleep(5)
light_state = {
"brakes": False,
"left": False,
"right": False,
"auto": False,
"pilot": False,
"warn": False
}
def callback(data):
message_type, message = data.data.split(":")
    message = message.strip()
if message_type == "INF":
print(message)
if message == "Entering Auto mode":
light_state["auto"] = True
write_light("GREEN_ON")
write_light("RED_OFF")
write_light("BLUE_ON")
if message == "Enable Auto mode to start pilot":
light_state["pilot"] = True
light_state["auto"] = True
write_light("GREEN_OFF")
write_light("BLUE_ON")
write_light("RED_OFF")
if message == "Manual mode triggered from TX":
light_state["auto"] = False
light_state["pilot"] = False
write_light("GREEN_ON")
write_light("RED_OFF")
write_light("BLUE_OFF")
if message_type == "WRN":
light_state["warn"] = True
write_light("RED_ON")
write_light("BLUE_OFF")
write_light("GREEN_OFF")
# write brakes
if message_type == "THR":
throttle = int(message)
if throttle < -8:
write_light("BRAKE_ON")
light_state["brakes"] = True
else:
if light_state["brakes"]:
light_state["brakes"] = False
write_light("BRAKE_OFF")
# write turn signals
if message_type == "STR":
turn = int(message)
if turn < -10:
write_light("LEFT_ON")
light_state["left"] = True
else:
if light_state["left"]:
light_state["left"] = False
write_light("LEFT_OFF")
if turn > 10:
write_light("RIGHT_ON")
light_state["right"] = True
else:
if light_state["right"]:
light_state["right"] = False
write_light("RIGHT_OFF")
def write_light(message):
light_commands = {
"LEFT_ON": "RIGHT_OFF\r\nLEFT_ON\r\n",
"LEFT_OFF": "LEFT_OFF\r\n",
"RIGHT_ON": "LEFT_OFF\r\nRIGHT_ON\r\n",
"RIGHT_OFF": "RIGHT_OFF\r\n",
"BRIGHTS": "BRIGHT_ON\r\n",
"RUNNING": "BRIGHT_OFF\r\nRUNNING_ON\r\n",
"BRAKE_ON": "BRAKE_ON\r\n",
"BRAKE_OFF": "BRAKE_OFF\r\n",
"GREEN_ON": "STATUS_ON\r\nGREEN_ON\r\n",
"GREEN_OFF": "GREEN_OFF\r\n",
"RED_ON": "STATUS_ON\r\nRED_ON\r\n",
"RED_OFF": "RED_OFF\r\n",
"BLUE_ON": "STATUS_ON\r\nBLUE_ON\r\n",
"BLUE_OFF": "BLUE_OFF\r\n",
"OFF": "LEFT_OFF\r\nRUNNING_OFF\r\nBRIGHT_OFF\r\nRIGHT_OFF\r\n"
}
if message in light_commands:
command = light_commands[message]
_serial.write(command)
def light_system():
rospy.init_node('light_system')
rospy.Subscriber('bus_comm', String, callback)
write_light("OFF")
time.sleep(1)
write_light("RUNNING")
time.sleep(1)
write_light("GREEN_ON")
time.sleep(1)
write_light("GREEN_OFF")
write_light("BLUE_ON")
time.sleep(1)
write_light("BLUE_OFF")
write_light("RED_ON")
time.sleep(1)
write_light("RED_OFF")
rospy.spin()
if __name__ == '__main__':
light_system()
| 2.25
| 2
|
data_collection/misc/manual_ir_test.py
|
PUTvision/thermo-presence
| 0
|
12778101
|
<gh_stars>0
"""
Code example from
https://makersportal.com/blog/2020/6/8/high-resolution-thermal-camera-with-raspberry-pi-and-mlx90640
"""
##########################################
# MLX90640 Thermal Camera w Raspberry Pi
# -- 2Hz Sampling with Simple Routine
##########################################
#
import time,board,busio
import numpy as np
import adafruit_mlx90640
import matplotlib.pyplot as plt
import time
i2c = busio.I2C(board.SCL, board.SDA, frequency=400000) # setup I2C
mlx = adafruit_mlx90640.MLX90640(i2c) # begin MLX90640 with I2C comm
mlx.refresh_rate = adafruit_mlx90640.RefreshRate.REFRESH_2_HZ # set refresh rate
mlx_shape = (24,32)
# setup the figure for plotting
plt.ion() # enables interactive plotting
fig,ax = plt.subplots(figsize=(12,7))
therm1 = ax.imshow(np.zeros(mlx_shape),vmin=0,vmax=60) #start plot with zeros
cbar = fig.colorbar(therm1) # setup colorbar for temps
cbar.set_label(r'Temperature [$^{\circ}$C]',fontsize=14) # colorbar label
frame = np.zeros((24*32,)) # setup array for storing all 768 temperatures
t_array = []
while True:
t1 = time.monotonic()
try:
mlx.getFrame(frame) # read MLX temperatures into frame var
data_array = (np.reshape(frame,mlx_shape)) # reshape to 24x32
therm1.set_data(np.fliplr(data_array)) # flip left to right
therm1.set_clim(vmin=np.min(data_array),vmax=np.max(data_array)) # set bounds
cbar.on_mappable_changed(therm1) # update colorbar range
plt.pause(0.001) # required
t_array.append(time.monotonic()-t1)
print('Sample Rate: {0:2.1f}fps'.format(len(t_array)/np.sum(t_array)))
time.sleep(0.05)
except ValueError:
continue # if error, just read again
| 2.859375
| 3
|
src/core.py
|
OdatNurd/OdatNurdTestPackage
| 1
|
12778102
|
import sublime
import sublime_plugin
import os
from ..lib import log, setup_log_panel, yte_setting, dotty
from ..lib import select_video, select_playlist, select_tag, select_timecode
from ..lib import Request, NetworkManager, stored_credentials_path, video_sort
# TODO:
# - Hit the keyword in the first few lines and 2-3 times total
# - The first few lines (how much?) are shown above the fold
# - Tags is 500 characters long, no more than 30 characters per tag
# - Tags with spaces may count as having a length + 2 because internally
# they're wrapped in quotes and that counts against the length
# - Tags should include brand-related and channel tags for more relevance
# - Chapters: first must be at 0:00; there has to be at least 3 in ascending
# order, and the minimum length of a chapter is 10 seconds. There is no
# official doc on what the text should look like, but observably it seems to
#   ignore leading punctuation, as in "00:00 - Introduction" the " - " is
# skipped (though starting it with a literal " gets it added, so there's
# that)
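# (Added sketch, not part of the original plugin.) A hypothetical helper that
# checks a parsed chapter list against the constraints noted in the TODO above:
# first chapter at 0:00, at least three chapters in ascending order, and a
# minimum chapter length of 10 seconds (the last chapter is measured against
# the overall video length).
def _chapters_look_valid(chapters, video_length):
    """chapters is a list of (start_seconds, title) tuples; video_length is in seconds."""
    if len(chapters) < 3 or chapters[0][0] != 0:
        return False
    starts = [start for start, _ in chapters] + [video_length]
    return all(later - earlier >= 10 for earlier, later in zip(starts, starts[1:]))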
###----------------------------------------------------------------------------
# Our global network manager object
netManager = None
###----------------------------------------------------------------------------
# The uploads playlist doesn't appear in the list of playlists associated with
# a user because it's channel specific and not user specific. This is a sample
# dotty entry with just enough information to allow for populating that
# playlist into a chooser.
#
# The actual ID of the placeholder needs to be established at the point where
# the data is actually collected.
_upload_template = {
"id": "placeholder",
"snippet": {
"title": "Uploaded Videos"
},
"status": {
"privacyStatus": "private",
},
"contentDetails": {
# We don't know how many items are in the uploads playlist until we
# fetch the contents of it. The display code in the chooser will use
# markup to tell the user the list size is unknown in this case.
# "itemCount": 0
}
}
###----------------------------------------------------------------------------
def loaded():
"""
Initialize our plugin state on load.
"""
global netManager
for window in sublime.windows():
setup_log_panel(window)
log("PKG: YouTubeEditor loaded")
yte_setting.obj = sublime.load_settings("YouTubeEditor.sublime-settings")
yte_setting.default = {
"camtasia_folder": os.path.expanduser("~"),
"auto_show_panel": 2,
"report_output_to_view": False,
"cache_downloaded_data": True,
"encrypt_cache": False,
"client_id": "",
"client_secret": "",
"auth_uri": "",
"token_uri": ""
}
netManager = NetworkManager()
def unloaded():
"""
Clean up plugin state on unload.
"""
global netManager
if netManager is not None:
netManager.shutdown()
netManager = None
def youtube_has_credentials():
"""
Determine if there are stored credentials for a YouTube login; this
indicates that the user has previously gone through the login steps to
authorize the plugin with YouTube.
"""
return netManager.has_credentials()
def youtube_is_authorized():
"""
Determine if the plugin is currently authorized or not. This indicates not
    only that the user has previously authorized the plugin on YouTube, but
also that a request has been made that has validated (and potentially
refreshed) our access token. If this is not the case, requests will fail.
"""
return netManager.is_authorized()
def youtube_request(request, handler, reason, callback, **kwargs):
"""
Dispatch a request to collect data from YouTube, invoking the given
callback when the request completes. The request will store the given
handler and all remaining arguments as arguments to the request dispatched.
"""
netManager.request(Request(request, handler, reason, **kwargs), callback)
###----------------------------------------------------------------------------
class YoutubeRequest():
"""
This class abstracts away the common portions of using the NetworkManager
to make requests and get responses back.
A request can be made via the `request()` method, and the result will
be automatically directed to a method in the class. The default handler
    is the name of the request preceded by an underscore.
"""
auth_req = None
auth_resp = None
run_args = None
def run(self, **kwargs):
self.run_args = kwargs
if not youtube_is_authorized():
self.request("authorize", "_internal_auth", "Authorizing")
else:
self._authorized(self.auth_req, self.auth_resp)
def _internal_auth(self, request, result):
self.auth_req = request
self.auth_resp = result
self._authorized(self.auth_req, self.auth_resp)
def request(self, request, handler=None, reason=None, **kwargs):
youtube_request(request, handler, reason, self.result, **kwargs)
def result(self, request, success, result):
attr = request.handler if success else "_error"
if not hasattr(self, attr):
raise RuntimeError("'%s' has no handler for request '%s'" % (
self.name(), request.name))
handler = getattr(self, attr)
handler(request, result)
def _error(self, request, result):
log("Err: in '{0}': {2} (code={1})", request.name,
result['error.code'], result['error.message'], display=True)
# Assume that most commands want to only enable themselves when there are
# credentials; commands that are responsible for obtaining credentials
# override this method.
def is_enabled(self, **kwargs):
return youtube_has_credentials()
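# (Added sketch, not part of the original plugin.) A minimal illustration of
# the dispatch convention documented in YoutubeRequest above: a request named
# "channel_list" is routed to the method "_channel_list" because no explicit
# handler name is passed to request(). The class and log text are hypothetical
# and nothing in the package instantiates this class.
class ExampleChannelInfo(YoutubeRequest):
    def _authorized(self, request, result):
        # Runs once authorization succeeds; kick off the data request.
        self.request("channel_list", reason="Get Channel Info")

    def _channel_list(self, request, result):
        # Received by default handler naming: "_" + request name.
        log("Example: received {0} channel record(s)", len(result))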
###----------------------------------------------------------------------------
class YouTubeVideoSelect(YoutubeRequest):
"""
This class is a specialization on YoutubeRequest that specifically presumes
that the ultimate goal is to have the user select a video for some purpose.
The sequence of items here is:
- Gather channel information
- Gather list of playlists and prompt (or; assume uploads playlist)
- Gather contents of selected playlist
- Prompt by tags on videos in the playlist (optional based on args)
- Prompt for a video (either in the tags or in the playlist)
- Prompt for a timecode in the video (if any)
"""
# These values control what the placeholder text in the various quick
# panels will be for each of the given operations. The default value of
# None defers the placeholder to the utility functions in the utils.py
# file.
#
# video_tag_placeholder takes an optional format of {tag} to specify the
# tag that was chosen to get to this video list.
#
# timecode_placeholder takes an optional format of {title} to specify the
# title of the video the user is selecting a timecode from.
playlist_placeholder = None
tag_placeholder = None
video_placeholder = None
video_tag_placeholder = None
timecode_placeholder = None
def _authorized(self, request, result):
self.use_tags = self.run_args.get("by_tags", False)
self.use_playlists = self.run_args.get("by_playlists", False)
self.request("channel_list", reason="Get Channel Info")
def _channel_list(self, request, result):
self.channel = result[0]
# Make a fake playlist from a template; populate it with the public
# video count. The count will be adjusted later if/when the user
# browses into the Uploads playlist.
self.uploads_playlist = dotty.dotty(_upload_template)
self.uploads_playlist['contentDetails.itemCount'] = self.channel['statistics.videoCount']
self.uploads_playlist['id'] = self.channel['contentDetails.relatedPlaylists.uploads']
if self.use_playlists:
self.request("playlist_list", channel_id=self.channel['id'],
reason="Get user playlists")
else:
self.pick_playlist(self.uploads_playlist)
def _playlist_list(self, request, result):
self.playlists = video_sort(result, 'snippet.title')
self.playlists.insert(0, self.uploads_playlist)
select_playlist(self.playlists, self.pick_playlist,
placeholder=self.playlist_placeholder)
def _playlist_contents(self, request, result):
if self.use_tags:
select_tag(result, self.pick_tag, show_back=self.use_playlists,
placeholder=self.tag_placeholder)
else:
# If this is the uploads playlist, update the video count to
# include non-public videos.
if request["playlist_id"] == self.uploads_playlist['id']:
self.uploads_playlist['contentDetails.itemCount'] = len(result)
# Pass the video list as the tag_list to the lambda so it can be
# picked up and used again if the user goes back while editing the
# timecode.
videos = video_sort(result, "statistics.viewCount", int, True)
select_video(videos, lambda vid: self.select_video(vid, None, videos),
show_back=self.use_playlists,
placeholder=self.video_placeholder)
def pick_playlist(self, playlist):
if playlist != None:
self.request("playlist_contents",
reason="Get playlist contents",
playlist_id=playlist['id'])
def pick_tag(self, tag, tag_list):
if tag is not None:
if tag == "_back":
if self.use_playlists:
return select_playlist(self.playlists, self.pick_playlist,
placeholder=self.playlist_placeholder)
videos = video_sort(tag_list[tag], "statistics.viewCount", int, True)
# Use the default, unless we have a specific placeholder for this.
placeholder = (None if not self.video_tag_placeholder else
self.video_tag_placeholder.format(tag=tag))
# Video ID is in contentDetails.videoId for short results or id for
# full details (due to it being a different type of request)
select_video(videos, lambda vid: self.select_video(vid, tag, tag_list),
show_back=True, placeholder=placeholder)
def select_video(self, video, tag, tag_list):
if video is None:
return
if video['id'] == "_back":
# When using both tags and playlists, the browse order should send
# us back to tags first and from there to playlists.
if self.use_tags:
return select_tag(None, self.pick_tag, self.use_playlists, tag_list,
placeholder=self.tag_placeholder)
return select_playlist(self.playlists, self.pick_playlist,
placeholder=self.playlist_placeholder)
self.picked_video(video, tag, tag_list)
def pick_toc(self, timecode, text, video, tag, tag_list):
if timecode != None:
if timecode == "_back":
if self.use_tags:
return self.pick_tag(tag, tag_list)
else:
return select_video(tag_list, lambda vid: self.select_video(vid, None, None),
show_back=self.use_playlists,
placeholder=self.video_placeholder)
self.picked_toc(timecode, text, video)
def picked_video(self, video, tag, tag_list):
"""
Override this if you want to know what video the user selected; the
default will continue on to prompt the user for a timecode contained
in the video instead.
video represents the video chosen by the user, and tag is the tag they
chose (if prompted; otherwise it is None). The tag_list argument should
be ignored by outside code, as its value and use changes depending on
how the user is browsing around in the content.
"""
placeholder = (None if not self.timecode_placeholder else
self.timecode_placeholder.format(title=video['snippet.title']))
select_timecode(video, lambda a, b: self.pick_toc(a, b, video, tag, tag_list),
show_back=True, placeholder=placeholder)
def picked_toc(self, timecode, text, video):
"""
Override this if you want to know what timecode the user selected from
the table of contents of their selected video. You get told the
timecode string, the text of the TOC entry associated with it, and the
information on the video the user selected.
"""
pass
###----------------------------------------------------------------------------
| 1.671875
| 2
|
src/client.py
|
tomkcook/tunnel-server
| 3
|
12778103
|
#!/usr/bin/env python3
from argparse import ArgumentParser
from util import startTunnel, stopTunnel, addressesForInterface, srcAddressForDst
import logging
import signal
import requests
import socket
def main():
parser = ArgumentParser()
parser.add_argument("--bridge", type=str)
parser.add_argument("remoteIP", type=str)
args = parser.parse_args()
try:
args.remoteIP = socket.gethostbyname(args.remoteIP)
except:
logging.error("Unabled to resolve remote host: {}".format(args.remoteIP))
return
src = srcAddressForDst(args.remoteIP)
if src is None:
logging.error("Could not determine source address for destination {}.".format(args.remoteIP))
return
response = requests.get("http://{}:5000/connect".format(args.remoteIP))
if response.status_code != 200:
logging.error("Could not connect to server: HTTP {}: {}", response.status_code, response.text)
return
startTunnel(args.remoteIP, src, args.bridge)
try:
signal.pause()
except KeyboardInterrupt:
stopTunnel(args.remoteIP)
if __name__ == "__main__":
main()
| 3.046875
| 3
|
release/stubs.min/System/Windows/Forms/__init___parts/KeysConverter.py
|
tranconbv/ironpython-stubs
| 0
|
12778104
|
<reponame>tranconbv/ironpython-stubs
class KeysConverter(TypeConverter,IComparer):
"""
Provides a System.ComponentModel.TypeConverter to convert System.Windows.Forms.Keys objects to and from other representations.
KeysConverter()
"""
def Instance(self):
""" This function has been arbitrarily put into the stubs"""
return KeysConverter()
def CanConvertFrom(self,*__args):
"""
CanConvertFrom(self: KeysConverter,context: ITypeDescriptorContext,sourceType: Type) -> bool
Returns a value indicating whether this converter can convert an object in the specified source type to the native type of the converter using the specified context.
context: An System.ComponentModel.ITypeDescriptorContext that provides a format context,which can be used to extract additional information about the environment this converter is being invoked from. This parameter or properties of this parameter can be null.
sourceType: The System.Type to convert from.
Returns: true if the conversion can be performed; otherwise,false.
"""
pass
def CanConvertTo(self,*__args):
"""
CanConvertTo(self: KeysConverter,context: ITypeDescriptorContext,destinationType: Type) -> bool
Returns a value indicating whether this converter can convert an object in the specified source type to the native type of the converter using the specified context.
context: An System.ComponentModel.ITypeDescriptorContext that provides a format context,which can be used to extract additional information about the environment this converter is being invoked from. This parameter or properties of this parameter can be null.
destinationType: The System.Type to convert to.
Returns: true if the conversion can be performed; otherwise,false.
"""
pass
def Compare(self,a,b):
"""
Compare(self: KeysConverter,a: object,b: object) -> int
Compares two key values for equivalence.
a: An System.Object that represents the first key to compare.
b: An System.Object that represents the second key to compare.
Returns: An integer indicating the relationship between the two parameters.Value Type Condition A negative integer. a is less than b. zero a equals b. A positive integer. a is greater than b.
"""
pass
def ConvertFrom(self,*__args):
"""
ConvertFrom(self: KeysConverter,context: ITypeDescriptorContext,culture: CultureInfo,value: object) -> object
Converts the specified object to the converter's native type.
context: An ITypeDescriptorContext that provides a format context,which can be used to extract additional information about the environment this converter is being invoked from. This parameter or properties of this parameter can be null.
culture: A CultureInfo object to provide locale information.
value: The object to convert.
Returns: An object that represents the converted value.
"""
pass
def ConvertTo(self,*__args):
"""
ConvertTo(self: KeysConverter,context: ITypeDescriptorContext,culture: CultureInfo,value: object,destinationType: Type) -> object
Converts the specified object to the specified destination type.
context: An System.ComponentModel.ITypeDescriptorContext that provides a format context,which can be used to extract additional information about the environment this converter is being invoked from. This parameter or properties of this parameter can be null.
culture: A System.Globalization.CultureInfo to provide locale information.
value: The System.Object to convert.
destinationType: The System.Type to convert the object to.
Returns: An System.Object that represents the converted value.
"""
pass
def GetConvertFromException(self,*args):
"""
GetConvertFromException(self: TypeConverter,value: object) -> Exception
Returns an exception to throw when a conversion cannot be performed.
value: The System.Object to convert,or null if the object is not available.
Returns: An System.Exception that represents the exception to throw when a conversion cannot be performed.
"""
pass
def GetConvertToException(self,*args):
"""
GetConvertToException(self: TypeConverter,value: object,destinationType: Type) -> Exception
Returns an exception to throw when a conversion cannot be performed.
value: The System.Object to convert,or null if the object is not available.
destinationType: A System.Type that represents the type the conversion was trying to convert to.
Returns: An System.Exception that represents the exception to throw when a conversion cannot be performed.
"""
pass
def GetStandardValues(self,context=None):
"""
GetStandardValues(self: KeysConverter,context: ITypeDescriptorContext) -> StandardValuesCollection
Returns a collection of standard values for the data type that this type converter is designed for when provided with a format context.
context: An System.ComponentModel.ITypeDescriptorContext that provides a format context,which can be used to extract additional information about the environment this converter is being invoked from. This parameter or properties of this parameter can be null.
Returns: A System.ComponentModel.TypeConverter.StandardValuesCollection that holds a standard set of valid values,which can be empty if the data type does not support a standard set of values.
"""
pass
def GetStandardValuesExclusive(self,context=None):
"""
GetStandardValuesExclusive(self: KeysConverter,context: ITypeDescriptorContext) -> bool
Determines if the list of standard values returned from GetStandardValues is an exclusive list using the specified System.ComponentModel.ITypeDescriptorContext.
context: A formatter context. This object can be used to extract additional information about the environment this converter is being invoked from. This may be null,so you should always check. Also,properties on the context object may also return null.
        Returns: true if the collection returned from Overload:System.Windows.Forms.KeysConverter.GetStandardValues is an exhaustive list of possible values; otherwise,false if other values are possible. The default implementation for this method always returns false.
"""
pass
def GetStandardValuesSupported(self,context=None):
"""
GetStandardValuesSupported(self: KeysConverter,context: ITypeDescriptorContext) -> bool
Gets a value indicating whether this object supports a standard set of values that can be picked from a list.
context: An System.ComponentModel.ITypeDescriptorContext that provides a format context,which can be used to extract additional information about the environment this converter is being invoked from. This parameter or properties of this parameter can be null.
Returns: Always returns true.
"""
pass
def SortProperties(self,*args):
"""
SortProperties(self: TypeConverter,props: PropertyDescriptorCollection,names: Array[str]) -> PropertyDescriptorCollection
Sorts a collection of properties.
props: A System.ComponentModel.PropertyDescriptorCollection that has the properties to sort.
names: An array of names in the order you want the properties to appear in the collection.
Returns: A System.ComponentModel.PropertyDescriptorCollection that contains the sorted properties.
"""
pass
def __cmp__(self,*args):
""" x.__cmp__(y) <==> cmp(x,y) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
| 3.015625
| 3
|
erasmus/protocols.py
|
gpontesss/Erasmus
| 10
|
12778105
|
from __future__ import annotations
from typing import Protocol
from .data import Passage, SearchResults, VerseRange
class Bible(Protocol):
command: str
name: str
abbr: str
service: str
service_version: str
rtl: bool | None
books: int
class Service(Protocol):
async def get_passage(self, bible: Bible, verses: VerseRange, /) -> Passage:
...
async def search(
self,
bible: Bible,
terms: list[str],
/,
*,
limit: int = ...,
offset: int = ...,
) -> SearchResults:
...
| 2.921875
| 3
|
tensorflow/ac_to_tf.py
|
nimish15shah/AC_GPU_profiling
| 0
|
12778106
|
## This file converts ac to tensorflow graph
## It takes as input a pickle file which contains the AC as a dictionary
## Each value in the dictionary is node_obj class object from Nimish's graph_analysis project
import tensorflow as tf
import pickle
import networkx as nx
import random
import numpy as np
def load_ac(ac):
fname= './gr_files/' + ac + '.p'
with open(fname, 'rb') as fp:
graph= pickle.load(fp, encoding='latin1')
fp.close()
fname= './gr_nx_files/' + ac + '_gr.p_nx5ALL_0.00.05.03333_33322_332222_3222222_22222222'
with open(fname, 'rb') as fp:
graph_nx= pickle.load(fp)
return graph, graph_nx
def ac_to_tf(ac, batch_size):
"""
Reads pickled ac, converts it to tf graph
"""
print('Constructing TF graph from AC')
graph, graph_nx= load_ac(ac)
#-- Convert ac to tf
tf_dict= {}
root= None
num_ops= 0
print("total node in AC:", graph_nx.number_of_nodes())
weight_cnt= 0
ind_cnt= 0
for node in nx.topological_sort(graph_nx):
# print(node, end=',')
obj= graph[node]
if obj.is_leaf():
assert len(list(graph_nx.in_edges(node))) == 0
# if len(list(graph_nx.in_edges(node))) == 0: # Leaf node
# curr= tf.Variable(tf.random_normal([batch_size,batch_size]), name= 'in')
# curr= tf.Variable(tf.convert_to_tensor([[[random.random()]*batch_size]*batch_size]), name= 'in')
# curr= tf.Variable(tf.convert_to_tensor(np.full((batch_size, batch_size), random.random())), name= 'in')
leaf_type= None
IND= 0
WEIGHT= 1
siblings= set([ch for parent in obj.parent_key_list for ch in graph[parent].child_key_list])
siblings= siblings - set([node])
siblings_WEIGHT= False
siblings_INDICATOR= False
for sib in siblings:
if graph[sib].is_weight():
siblings_WEIGHT= True
if graph[sib].is_indicator():
siblings_INDICATOR= True
if siblings_INDICATOR == True and siblings_WEIGHT == True:
break
# assert not (siblings_WEIGHT == True and siblings_INDICATOR == True)
if siblings_WEIGHT == True:
leaf_type= IND
elif siblings_INDICATOR == True:
leaf_type= WEIGHT
if leaf_type== None:
if len(obj.parent_key_list) == 1:
leaf_type= WEIGHT
else:
leaf_type= IND
if leaf_type == IND:
ind_cnt += 1
obj.leaf_type= obj.LEAF_TYPE_INDICATOR
curr= tf.Variable(tf.convert_to_tensor(np.full((1, batch_size), random.random(), dtype= np.float32)), name= 'ind')
elif leaf_type== WEIGHT:
weight_cnt += 1
obj.leaf_type= obj.LEAF_TYPE_WEIGHT
curr= tf.constant([random.random()], name= 'weight')
else:
assert 0
else: # sum or product
# assert len(obj.child_key_list) == 2, "AC should be binary"
# ch_0= tf_dict[obj.child_key_list[0]]
# ch_1= tf_dict[obj.child_key_list[1]]
#
# if obj.operation_type == 1:
# curr= tf.multiply(ch_0, ch_1, 'mul')
# elif obj.operation_type == 2:
# curr= tf.add(ch_0, ch_1, 'mul')
# else:
# assert 0
#
# if len(obj.parent_key_list) == 0:
# assert root== None
# root= node
# tf_root= curr
children= list(graph_nx.predecessors(node))
parents= list(graph_nx.successors(node))
ch_0= tf_dict[children[0]]
ch_1= tf_dict[children[1]]
if random.randint(0,2):
curr= tf.multiply(ch_0, ch_1, 'mul')
else:
curr= tf.add(ch_0, ch_1, 'add')
if len(parents) == 0:
assert root == None
root= node
tf_root= curr
num_ops += 1
tf_dict[node]= curr
print("Indicator cnt, Weight Cnt:", ind_cnt, weight_cnt)
assert root != None
assert len(tf_dict) == len(graph_nx)
return tf_root, num_ops
| 2.765625
| 3
|
deepcave/runs/converters/bohb.py
|
PhMueller/DeepCAVE
| 0
|
12778107
|
import os
import json
import glob
import pandas as pd
from typing import Dict, Type, Any
import ConfigSpace
from deepcave.runs.run import Status
from deepcave.runs.converters.converter import Converter
from deepcave.runs.run import Run
from deepcave.runs.objective import Objective
from deepcave.utils.hash import file_to_hash
class BOHB(Converter):
@staticmethod
def name() -> str:
return "BOHB"
def get_run_id(self, working_dir, run_name) -> str:
"""
        An id derived from the files in working_dir/run_name/*; for example, a results file can be read and hashed.
        The idea: if the id changes, the cached trials have to be updated.
"""
        # Use hash of results.json as id
return file_to_hash(os.path.join(working_dir, run_name, "results.json"))
def get_run(self, working_dir, run_name) -> Run:
"""
Based on working_dir/run_name/*, return a new trials object.
"""
base = os.path.join(working_dir, run_name)
# Read configspace
from ConfigSpace.read_and_write import json as cs_json
with open(os.path.join(base, 'configspace.json'), 'r') as f:
configspace = cs_json.read(f.read())
# Read objectives
# We have to define it ourselves, because we don't know the type of the objective
# Only lock lower
objective = Objective("Cost", lower=0)
run = Run(
configspace=configspace,
objectives=objective,
meta={}
)
from hpbandster.core.result import logged_results_to_HBS_result
bohb = logged_results_to_HBS_result(base)
first_starttime = None
for bohb_run in bohb.get_all_runs():
times = bohb_run.time_stamps
starttime = times["started"]
endtime = times["finished"]
if first_starttime is None:
first_starttime = starttime
starttime = starttime - first_starttime
endtime = endtime - first_starttime
cost = bohb_run.loss
budget = bohb_run.budget
config = bohb_run.info["config"]
# Convert str to dict
config = json.loads(config)
origin = None
additional = {}
status = bohb_run.info["state"]
# QUEUED, RUNNING, CRASHED, REVIEW, TERMINATED, COMPLETED, SUCCESS
if "SUCCESS" in status or "TERMINATED" in status or "COMPLETED" in status:
status = Status.SUCCESS
elif "RUNNING" in status or "QUEUED" in status or "REVIEW" in status:
status = Status.RUNNING
else:
status = Status.CRASHED
if status != Status.SUCCESS:
# We don't want cost included which are failed
cost = None
run.add(
costs=[cost], # Having only single objective here
config=config,
budget=budget,
start_time=starttime,
end_time=endtime,
status=status,
origin=origin,
additional=additional,
)
# Save for sanity check
# run.save(os.path.join(base, "run"))
return run
| 2.296875
| 2
|
utils.py
|
minsukchang/exponential_family_embeddings
| 86
|
12778108
|
<filename>utils.py
# The functions below are copied from this tutorial:
# https://github.com/tensorflow/tensorflow/blob/r0.11/tensorflow/examples/tutorials/word2vec/word2vec_basic.py
#
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import collections
import matplotlib.pyplot as plt
import os
import tensorflow as tf
import zipfile
from six.moves import urllib
def maybe_download(url, filename, expected_bytes):
"""Download a file if not present, and make sure it's the right size."""
if not os.path.exists(filename):
filename, _ = urllib.request.urlretrieve(url + filename, filename)
statinfo = os.stat(filename)
if statinfo.st_size == expected_bytes:
print('Found and verified', filename)
else:
print(statinfo.st_size)
raise Exception(
'Failed to verify ' + filename + '. Can you get to it with a browser?')
return filename
def read_data(filename):
"""Extract the first file enclosed in a zip file as a list of words"""
with zipfile.ZipFile(filename) as f:
data = tf.compat.as_str(f.read(f.namelist()[0])).split()
return data
def plot_with_labels(low_dim_embs, labels, fname):
plt.figure(figsize=(28, 28))
for i, label in enumerate(labels):
x, y = low_dim_embs[i, :]
plt.scatter(x, y)
plt.annotate(label,
xy=(x, y),
xytext=(5, 2),
textcoords='offset points',
ha='right',
va='bottom')
plt.savefig(fname)
plt.close()
def variable_summaries(summary_name, var):
with tf.name_scope(summary_name):
mean = tf.reduce_mean(var)
tf.summary.scalar('mean', mean)
with tf.name_scope('stddev'):
stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean)))
tf.summary.scalar('stddev', stddev)
tf.summary.scalar('max', tf.reduce_max(var))
tf.summary.scalar('min', tf.reduce_min(var))
| 2.6875
| 3
|
tests/io/polling/serial/endpoints_console.py
|
ethanjli/phyllo-python
| 0
|
12778109
|
<reponame>ethanjli/phyllo-python<filename>tests/io/polling/serial/endpoints_console.py
"""Expose an example endpoints text console on the polling I/O implementation."""
# Builtins
import logging
import time
# Packages
from phylline.links.clocked import ClockedLink, LinkClockRequest
from phylline.util.timing import TimeoutTimer
from phyllo.io.cli.args.args import parse_args
from phyllo.io.cli.args.protocol import (
args_application_stack, args_transport_logical_substack, group_protocol_stack
)
from phyllo.io.cli.stdin import input_literal
from phyllo.io.polling.serial import PollingSerialAdapter, connect_serial_retry
from phyllo.protocol.application.endpoints import EndpointHandler, SingleEndpointHandler
from phyllo.protocol.communication import AutomaticStack, CommunicationLinkData
from phyllo.protocol.communication import DATA_TYPES
from phyllo.protocol.stacks import make_preset_stack
from phyllo.util.logging import config_logging
from tests.io.polling.serial.console import ReceiveLoggerLink
from tests.io.polling.serial.receiver import receive_once_blocking, run_once
config_logging()
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
class PingPongHandler(EndpointHandler, ClockedLink):
"""Handler for playing ping-pong in the background."""
def __init__(self, ping_name=b'ping', pong_name=b'pong'):
"""Initialize members."""
self.ping_name = ping_name
self.pong_name = pong_name
self.ping_counter = 0
self.active = False
super().__init__()
self.pong_timer = TimeoutTimer(clock=self.clock)
def make_ping(self):
"""Make a ping."""
self.ping_counter += 1
logger.info('Ping-pong sending ping {}'.format(self.ping_counter))
return (self.ping_name, self.ping_counter)
# Implement DataUnitLink
def on_internal_data(self, parse_result, event):
"""Implement DataUnitLink.on_internal_data."""
self.update_clock_time(event)
if self.pong_timer.timed_out:
self.pong_timer.reset_and_stop()
self.directly_to_send_data(self.make_ping())
# Implement EndpointHandler
def match_receiver(self, endpoint_name):
"""Implement EndpointHandler.match_receiver."""
return endpoint_name == self.pong_name
def on_receiver_event(self, endpoint_name, data, source_event):
"""Implement EndpointHandler.on_receiver_event."""
if endpoint_name == self.pong_name:
logger.info('Ping-pong received pong {}'.format(data))
self.pong_timer.start()
clock_request = self.make_clock_request(self.pong_timer.timeout_time)
if clock_request is not None:
yield clock_request
def on_sender_event(self, send_data):
"""Implement EndpointHandler.on_sender_event."""
if send_data:
logger.info('Starting ping-pong with interval of {} sec!'.format(send_data))
self.pong_timer.timeout = send_data
yield self.make_ping()
else:
logger.info('Stopping ping-pong.')
self.pong_timer.reset_and_stop()
class LoggingHandler(SingleEndpointHandler):
"""Handler for logging send and receive data."""
# Implement EndpointHandler
def on_receiver_event(self, endpoint_name, data, source_event):
"""Implement EndpointHandler.on_receiver_event."""
logger.info('{} response on {}: {}'.format(self.name, endpoint_name, data))
yield from []
def on_sender_event(self, send_data):
"""Implement EndpointHandler.on_receiver_event."""
logger.info('{} requesting on {}: {}'.format(self.name, self.endpoint_name, send_data))
yield (send_data, {})
def print_example_data():
"""Print examples of send data for handlers."""
print('Example send data for echo handler: (\'echo\', (1, True, ("hello", None)))')
print('Example send data for copy: (\'copy\', (1, True, ("hello", None)))')
print('Example send data for reply: (\'reply\', True)')
print('Example send data for reply: \'reply\'')
print('Example send data for prefix: (\'prefix\', (\'world!\', \'hello, \'))')
print('Example send data for blink: (\'blink\', True)')
print('Example send data for blink: (\'blink\', False)')
print('Example send data for ping-pong: (\'ping-pong\', 1.0)')
print('Example send data for ping-pong: (\'ping-pong\', False)')
def send_data(data, handlers):
"""Send the data to its corresponding handler."""
try:
try:
(handler_name, handler_args) = data
except (ValueError, TypeError):
handler_name = data
handler_args = ()
handler = handlers[handler_name]
handler.send(CommunicationLinkData(
handler_args, type=DATA_TYPES[('presentation', 'document')],
direction='down', instance='console input', previous=data
))
except (KeyError, IndexError, TypeError, ValueError):
logger.exception('Ignored invalid input: {}'.format(data))
def run_console(stack, receiver, handlers):
"""Run the echo test."""
print_example_data()
while True:
stack.update_clock()
data = input_literal('Send: ')
if data is not None:
send_data(data, handlers)
for i in range(2): # read twice for starting and ending delimiters
for received in receiver(stack):
if isinstance(received, LinkClockRequest):
stack.update_clock_request(received)
else:
print('Received: {}'.format(received))
def main(transport_logical_preset, application_preset):
"""Run echo test."""
connection = connect_serial_retry(baudrate=115200, timeout=0.05)
if connection is None:
logger.fatal('Failed to establish serial connection!')
return
handlers = {
'echo': LoggingHandler(b'echo', name='Echo'),
'copy': LoggingHandler(b'copy', name='Copy'),
'reply': LoggingHandler(b'reply', name='Reply'),
'prefix': LoggingHandler(b'prefix', name='Prefix'),
'blink': LoggingHandler(b'blink', name='Blink'),
'ping-pong': PingPongHandler()
}
protocol = AutomaticStack(make_preset_stack(
transport_logical=transport_logical_preset, application=application_preset
), list(handlers.values()), ReceiveLoggerLink())
stack = PollingSerialAdapter(connection, protocol)
logger.info(stack)
time.sleep(1.0)
run_once(run_console, stack, receive_once_blocking, handlers)
logger.info('Quitting!')
if __name__ == '__main__':
args = parse_args(
grouped_args={
group_protocol_stack: (
args_transport_logical_substack, args_application_stack
)
},
description='Run console test.'
)
main(args.logical, args.application)
| 2.234375
| 2
|
pandas_api/mit_model.py
|
cfong32/lpp
| 0
|
12778110
|
import pandas as pd
from sklearn.preprocessing import MinMaxScaler
from xgboost import XGBRegressor
import os
from django.conf import settings
import numpy as np
from functools import lru_cache
RANDOM_STATE = 42
def get_path(course, file):
return os.path.join(settings.PROJECT_ROOT, '..', 'pandas_api', 'static', 'mit', course, file)
@lru_cache(maxsize=32)
def load_data(course):
# Loading the final grade and the student list
final_grades = pd.read_csv(get_path(course, 'final_grades.csv'), index_col='user_id')
course_feature = pd.read_csv(get_path(course, 'coursewised_feature.csv'),
index_col='user_id').fillna(0)
# cg = pd.read_csv(get_path(course, 'chapter_grades.csv'))
# cg = cg.pivot(index='user_id', columns='chapter_mid', values='chgrade').fillna(0)
cv = pd.read_csv(get_path(course, 'chapter_videos.csv'))
cv = cv.pivot(index='user_id', columns='chapter_name', values='video_count').fillna(0)
# note that the above dfs have same index 'user_id'
# # merge the course_videos and course_grades
# features = \
# cg.join(cv, on=None, how='outer', lsuffix='_grade', rsuffix='_video_count').fillna(0)
features = cv
# full outer join on cv.user_id = course_feature.user_id
features = features.join(course_feature, how='outer').fillna(0)
# final_grades is y-data => left outer join on final_grades.user_id = features.user_id
df = final_grades.join(features, how='left').fillna(0)
# exclude the 'final_grade' and 'nproblem_check'
X = df.drop(['final_grade', 'nproblem_check', 'username'], axis=1)
y = df['final_grade']
return X, y
def get_user_chapter_grades(course, user_id):
    # use a distinct name for the frame so the loop variable does not shadow it,
    # and Series.items() instead of the deprecated iteritems()
    chapter_grades = pd.read_csv(get_path(course, 'chapter_grade.csv'), index_col=['user_id', 'chapter_id'])
    result = []
    for chapter_id, chapter_grade in chapter_grades.loc[user_id]['chapter_grade'].items():
        result.append({"name": "Chapter " + str(chapter_id), "score": chapter_grade})
    return result
def main():
course = 'VJx__VJx_2__3T2016'
filename = 'model.xgb'
X, y = load_data(course)
# Normalization
scaler = MinMaxScaler()
scaler.fit(X)
X = scaler.transform(X)
model = XGBRegressor()
if os.path.isfile(filename):
model.load_model(filename)
else:
model.fit(X, y)
model.save_model(filename)
y_ = model.predict(X)
print(y_)
model_cache = {}
data_transformer = {}
def predict(course_code, user_id):
filename = get_path(course_code, '%s_model.xgb' % course_code)
X, y = load_data(course_code)
    user_X = X.loc[[user_id]]  # keep a one-row DataFrame so the model sees 2-D input
# Normalization
if course_code not in data_transformer:
scaler = MinMaxScaler()
scaler.fit(X)
data_transformer[course_code] = scaler
scaler = data_transformer[course_code]
if course_code not in model_cache:
model = XGBRegressor()
if os.path.isfile(filename):
model.load_model(filename)
else:
            # fit on a scaled copy; do not reassign X here or it would be scaled twice below
            model.fit(scaler.transform(X), y)
model.save_model(filename)
model_cache[course_code] = model
model = model_cache[course_code]
X = scaler.transform(X)
y_ = model.predict(X)
hist, bin_edges = np.histogram(y_, bins=10, range=[0, 1])
return {
"classFinalExamDistribution": hist.tolist(),
"myChapterScore": get_user_chapter_grades(course_code, user_id),
"myPredictedFinalExamScore": float(model.predict(user_X)[0])
}
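# Usage sketch (illustrative only; the user id below is hypothetical, and the CSVs under
# static/mit/<course>/ plus the Django settings must already be in place):
#
#     summary = predict('VJx__VJx_2__3T2016', user_id=12345)
#     summary['myPredictedFinalExamScore']   # predicted final grade for this user
#     summary['classFinalExamDistribution']  # 10-bin histogram of predictions over [0, 1]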
if __name__ == '__main__':
main()
| 2.53125
| 3
|
BKGLycanExtractor/attic/basenamefrompath.py
|
glygen-glycan-data/GlycanImageExtract
| 0
|
12778111
|
<reponame>glygen-glycan-data/GlycanImageExtract
import ntpath
path = "test/p19-578.png"
basename = ntpath.basename(path).split('.')[0]
print(basename)
| 1.90625
| 2
|
vedastr/metrics/__init__.py
|
csmasters/vedastr
| 475
|
12778112
|
from .accuracy import Accuracy
from .builder import build_metric
| 1.070313
| 1
|
c14/p274_test1442.py
|
pkingpeng/-python-
| 0
|
12778113
|
import json
pythonValueDic = {
'name': 'zhangsan',
'isCat': True,
'miceCaught': 0
}
data = json.dumps(pythonValueDic)
print(data)
"""
{"name": "zhangsan", "isCat": true, "miceCaught": 0}
"""
| 2.765625
| 3
|
demo/user_roles/endpoints/admin.py
|
seijihirao/apys
| 4
|
12778114
|
filters = [
'check_params',
['auth.is_owner', 'auth.is_admin']
]
def post(req, api):
"""
return success if user has rights to access server
user needs to have role owner or admin
Input:
role: string
Output:
result: string
"""
return {
'result': 'success'
}
| 2.140625
| 2
|
alipay/aop/api/domain/ApInvoiceBillLinkOrderRequest.py
|
antopen/alipay-sdk-python-all
| 213
|
12778115
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi
class ApInvoiceBillLinkOrderRequest(object):
def __init__(self):
self._amt = None
self._daily_bill_dimension = None
self._monthly_bill_no = None
@property
def amt(self):
return self._amt
@amt.setter
def amt(self, value):
if isinstance(value, MultiCurrencyMoneyOpenApi):
self._amt = value
else:
self._amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value)
@property
def daily_bill_dimension(self):
return self._daily_bill_dimension
@daily_bill_dimension.setter
def daily_bill_dimension(self, value):
self._daily_bill_dimension = value
@property
def monthly_bill_no(self):
return self._monthly_bill_no
@monthly_bill_no.setter
def monthly_bill_no(self, value):
self._monthly_bill_no = value
def to_alipay_dict(self):
params = dict()
if self.amt:
if hasattr(self.amt, 'to_alipay_dict'):
params['amt'] = self.amt.to_alipay_dict()
else:
params['amt'] = self.amt
if self.daily_bill_dimension:
if hasattr(self.daily_bill_dimension, 'to_alipay_dict'):
params['daily_bill_dimension'] = self.daily_bill_dimension.to_alipay_dict()
else:
params['daily_bill_dimension'] = self.daily_bill_dimension
if self.monthly_bill_no:
if hasattr(self.monthly_bill_no, 'to_alipay_dict'):
params['monthly_bill_no'] = self.monthly_bill_no.to_alipay_dict()
else:
params['monthly_bill_no'] = self.monthly_bill_no
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = ApInvoiceBillLinkOrderRequest()
if 'amt' in d:
o.amt = d['amt']
if 'daily_bill_dimension' in d:
o.daily_bill_dimension = d['daily_bill_dimension']
if 'monthly_bill_no' in d:
o.monthly_bill_no = d['monthly_bill_no']
return o
| 2.171875
| 2
|
packages/pyright-internal/src/tests/samples/paramSpec4.py
|
Jasha10/pyright
| 3,934
|
12778116
|
# This sample tests the type checker's handling of ParamSpec
# and Concatenate as described in PEP 612.
from typing import Callable, Concatenate, ParamSpec, TypeVar
P = ParamSpec("P")
R = TypeVar("R")
class Request:
...
def with_request(f: Callable[Concatenate[Request, P], R]) -> Callable[P, R]:
def inner(*args: P.args, **kwargs: P.kwargs) -> R:
return f(Request(), *args, **kwargs)
return inner
@with_request
def takes_int_str(request: Request, x: int, y: str) -> int:
# use request
return x + 7
takes_int_str(1, "A")
# This should generate an error because the first arg
# is the incorrect type.
takes_int_str("B", "A")
# This should generate an error because there are too
# many parameters.
takes_int_str(1, "A", 2)
# This should generate an error because a ParamSpec can appear
# only within the last type arg for Concatenate
def decorator1(f: Callable[Concatenate[P, P], int]) -> Callable[P, int]:
...
# This should generate an error because the last type arg
# for Concatenate should be a ParamSpec.
def decorator2(f: Callable[Concatenate[int, int], int]) -> Callable[P, int]:
...
# This should generate an error because Concatenate is missing
# its type arguments.
def decorator3(f: Callable[Concatenate, int]) -> Callable[P, int]:
...
def decorator4(func: Callable[P, None]) -> Callable[Concatenate[int, P], None]:
def wrapper(x: int, /, *args: P.args, **kwargs: P.kwargs) -> None:
...
return wrapper
def func1(func: Callable[Concatenate[int, P], None]) -> Callable[P, None]:
...
def func2(a: int, b: str, c: str) -> None:
...
def func3(a: int, /, b: str, c: str) -> None:
...
def func4(a: int, b: str, /, c: str) -> None:
...
v1 = func1(func2)
reveal_type(v1, expected_text="(b: str, c: str) -> None")
v2 = func1(func3)
reveal_type(v2, expected_text="(b: str, c: str) -> None")
v3 = func1(func4)
reveal_type(v3, expected_text="(b: str, /, c: str) -> None")
def func5(__fn: Callable[P, R], *args: P.args, **kwargs: P.kwargs) -> R:
...
def func6(name: str, *args: str):
...
v5 = func5(func6, "a", "b", "c")
# This should generate an error because 1 isn't assignable to str.
v6 = func5(func6, "a", "b", "c", 1)
def func7(name: str, **kwargs: str):
...
v7 = func5(func7, "a", b="b", c="c")
# This should generate an error because 1 isn't assignable to str.
v8 = func5(func7, "a", b="b", c=1)
| 3.234375
| 3
|
src/Actions/While.py
|
willfleetw/Joy
| 2
|
12778117
|
<gh_stars>1-10
from Actions.Action import Action
from Actions.Condition import ConditionEvaluator, ConditionSet
class While(Action, ConditionEvaluator):
_actions: list[Action]
    def __init__(self, condition_sets: list[ConditionSet] = None, actions: list[Action] = None) -> None:
        # default to None instead of a mutable [] so instances never share the same list
        self._condition_sets = condition_sets if condition_sets is not None else []
        self._actions = actions if actions is not None else []
        super().__init__()
def execute(self, cmd_context) -> None:
while self.evaluate(cmd_context):
for action in self._actions:
action.execute(cmd_context)
return super().execute(cmd_context)
| 2.5625
| 3
|
PyCTPM/core/config.py
|
sinagilassi/CTPM
| 1
|
12778118
|
# CONFIG APP
# -----------
# import packages/modules
import enum
# app config
appConfig = {
"calculation": {
"roundAccuracy": 2,
"roundAccuracyRoot": 4
}
}
# round function accuracy
ROUND_FUN_ACCURACY = appConfig['calculation']['roundAccuracy']
# eos root accuracy
EOS_ROOT_ACCURACY = appConfig['calculation']['roundAccuracyRoot']
| 2.0625
| 2
|
manage.py
|
Semprini/cbe-telco
| 1
|
12778119
|
<reponame>Semprini/cbe-telco
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "utilities.settings")
if len(sys.argv) == 5 and sys.argv[1] == "createsuperuser":
# when used as python manage.py createsuperuser <username> <email> <password>
import django
django.setup()
from django.contrib.auth.models import User
superuser = User.objects.create_superuser(sys.argv[2], sys.argv[3], sys.argv[4])
elif len(sys.argv) == 5 and sys.argv[1] == "getorcreatesuperuser":
# when used as python manage.py getorcreatesuperuser <username> <email> <password>
import django
django.setup()
from django.contrib.auth.models import User
users = User.objects.filter(username=sys.argv[2])
if len(users) == 0:
superuser = User.objects.create_superuser(sys.argv[2], sys.argv[3], sys.argv[4])
print( "New superuser created" )
else:
superuser = users[0]
print( "Existing superuser found" )
else:
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| 2.171875
| 2
|
dataset/test.py
|
DavidZechm/BikeSight
| 0
|
12778120
|
<reponame>DavidZechm/BikeSight<gh_stars>0
import cv2
def click_event(event, x, y, flags, param):
global img
if event == cv2.EVENT_LBUTTONDOWN:
print(x,y)
cv2.circle(img, (x, y), 10, (0, 0, 255), -1)
cv2.imshow('image', img)
if event == cv2.EVENT_RBUTTONDBLCLK:
img = cv2.imread(img_path)
print("cleaned")
cv2.imshow('image', img)
img_path = "/media/davidzechm/LaCie/dataset/labeled/right/0a7b4aeb1649d8.jpg"
global img
img = cv2.imread(img_path)
while(1):
cv2.setMouseCallback('image', click_event)
cv2.imshow('image', img)
k=cv2.waitKey(1) & 0xFF
if k==27: #Escape KEY
break
cv2.imshow('image', img)
cv2.destroyAllWindows()
| 2.71875
| 3
|
pyjobs/core/migrations/0015_job_ad_interested.py
|
Mdslino/PyJobs
| 132
|
12778121
|
<filename>pyjobs/core/migrations/0015_job_ad_interested.py<gh_stars>100-1000
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-11-04 18:42
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("core", "0014_auto_20180511_2122")]
operations = [
migrations.AddField(
model_name="job",
name="ad_interested",
field=models.BooleanField(
default=False,
verbose_name="Interessado em ficar em destaque no PyJobs?",
),
)
]
| 1.40625
| 1
|
src/protocols/create_water_container/protocol.py
|
scottbecker/delve_tx_public
| 2
|
12778122
|
<reponame>scottbecker/delve_tx_public
from __future__ import print_function
from transcriptic_tools.utils import ml, ul
from transcriptic_tools.harness import run
from transcriptic_tools.custom_protocol import CustomProtocol as Protocol
from autoprotocol.protocol import Container
def main(p, params):
assert isinstance(p, Protocol)
#create a container
container = p.ref(params['container_name'], cont_type=params['container_type'],
storage=params['storage_conditions'], discard=False)
p.provision_by_name('water',container.all_wells(), ul(params['volume_ul']))
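# Params sketch (illustrative values only; valid container types and storage conditions
# come from the Transcriptic/autoprotocol catalogue, not from this file):
#
#     params = {"container_name": "water_stock", "container_type": "96-deep",
#               "storage_conditions": "cold_4", "volume_ul": 100}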
if __name__ == '__main__':
run(main, "CreateWaterContainer")
| 2.109375
| 2
|
grip/model/test/test_dependency.py
|
Eugeny/grip
| 16
|
12778123
|
import pkg_resources
import unittest
from grip.model import Dependency, Version, Package
class TestDependency(unittest.TestCase):
def test_ctor_str(self):
dep = Dependency('django==2.0')
self.assertEqual(dep.name, 'django')
self.assertTrue(dep.matches_version('2.0'))
self.assertFalse(dep.matches_version('2.1'))
def test_ctor_pkgr_req(self):
req = pkg_resources.Requirement('django==2.0')
dep = Dependency(req)
self.assertEqual(dep.name, 'django')
self.assertTrue(dep.matches_version('2.0'))
self.assertFalse(dep.matches_version('2.1'))
def test_ctor_err(self):
with self.assertRaises(TypeError):
Dependency(2)
def test_matching(self):
dep = Dependency('celery>=3,<5')
self.assertFalse(dep.matches_version('2.7'))
self.assertTrue(dep.matches_version(Version('3.0')))
self.assertTrue(dep.matches_version('3.2'))
self.assertFalse(dep.matches_version('5'))
def test_compare(self):
a = Dependency('celery>=3,<5')
b = Dependency('django==2')
self.assertGreater(b, a)
self.assertLess(a, b)
def test_exact(self):
a = Dependency('django==2')
b = Dependency.exact(Package('django' ,'2'))
self.assertEqual(a.name, b.name)
self.assertEqual(a.specifier, b.specifier)
def test_str(self):
dep = Dependency.exact(Package('django' ,'2'))
self.assertEqual(str(dep), 'django==2')
dep = Dependency('django==2')
dep.url = 'git+git@github.com:a/b.git'
self.assertEqual(str(dep), dep.url + '#egg=django==2')
| 2.546875
| 3
|
modules/sensor_stat/api_controllers.py
|
srcc-msu/job_statistics
| 0
|
12778124
|
from flask import Blueprint, Response, request, jsonify
from sqlalchemy import func
from application.database import global_db
from application.helpers import crossdomain, gen_csv_response
from core.monitoring.models import SENSOR_CLASS_MAP
sensor_stat_api_pages = Blueprint('sensor_stat_api', __name__
, template_folder='templates', static_folder='static')
@sensor_stat_api_pages.route("/avg/<string:sensor>")
@crossdomain(origin='*')
def get_sensor_stat(sensor: str) -> Response:
try:
t_from = request.args["t_from"]
t_to = request.args["t_to"]
sensor_class = SENSOR_CLASS_MAP[sensor]
except KeyError as e:
raise e
query = (global_db.session.query(
sensor_class.time
, func.count(sensor_class.time).label("working_nodes")
, func.avg(sensor_class.avg).label("avg"))
.filter(sensor_class.time > t_from)
.filter(sensor_class.time < t_to)
.group_by(sensor_class.time)
.order_by(sensor_class.time))
return gen_csv_response(query.column_descriptions, query.all())
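# Request sketch (sensor name and timestamps are illustrative; the full URL prefix
# depends on how sensor_stat_api_pages is registered on the Flask app):
#
#     GET /avg/<sensor>?t_from=1514764800&t_to=1514851200
#
# responds with CSV rows of (time, working_nodes, avg) grouped per timestamp.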
| 2.21875
| 2
|
src/blueprints/legal/__init__.py
|
primeithard/yandex-disk-telegram-bot
| 15
|
12778125
|
<reponame>primeithard/yandex-disk-telegram-bot
from .bp import bp as legal_blueprint
from . import views
| 1.015625
| 1
|
spongeauth/accounts/migrations/0004_create_dummy_group.py
|
felixoi/SpongeAuth
| 10
|
12778126
|
<reponame>felixoi/SpongeAuth
# -*- coding: utf-8 -*-
# Creates "Dummy" group, used to flag dummy accounts created by API.
from __future__ import unicode_literals
from django.db import migrations
def forwards_func(apps, schema_editor):
Group = apps.get_model("accounts", "Group")
db_alias = schema_editor.connection.alias
Group.objects.using(db_alias).bulk_create([Group(name="Dummy")])
def reverse_func(apps, schema_editor):
Group = apps.get_model("accounts", "Group")
db_alias = schema_editor.connection.alias
Group.objects.using(db_alias).filter(name="Dummy").delete()
class Migration(migrations.Migration):
dependencies = [("accounts", "0003_create_group_model")]
operations = [migrations.RunPython(forwards_func, reverse_func)]
| 2.25
| 2
|
pollsapp/tests.py
|
queenfiona/polls
| 0
|
12778127
|
"""docstring for pollsapp tests."""
import datetime
from django.test import TestCase, Client
from django.utils import timezone
from django.urls import reverse
from .models import Question
client = Client()
def create_question(question_text, days):
"""Create a question and add no. of days to now."""
time = timezone.now() + datetime.timedelta(days=days)
return Question.objects.create(question_text=question_text, pub_date=time)
class QuestionModelTests(TestCase):
"""docstring for QuestionModelTests."""
def test_was_published_recently_with_future_question(self):
"""Should return false."""
time = timezone.now() + datetime.timedelta(days=30)
future_question = Question(pub_date=time)
self.assertIs(future_question.was_published_recently(), False)
def test_was_published_recently_with_old_question(self):
"""Func returns False for questions whose pub-date is older than 1 day."""
time = timezone.now() - datetime.timedelta(days=1, seconds=1)
old_question = Question(pub_date=time)
self.assertIs(old_question.was_published_recently(), False)
def test_was_published_recently_with_recent_question(self):
"""Func returns True for questions whose pub-date is within the last day."""
time = timezone.now() - datetime.timedelta(hours=23, minutes=59, seconds=59)
recent_question = Question(pub_date=time)
self.assertIs(recent_question.was_published_recently(), True)
class QuestionIndexViewTests(TestCase):
"""docstring for QuestionIndexViewTests."""
def test_no_questions(self):
"""Display appropriate msg if no questions exist."""
response = self.client.get(reverse('pollsapp:index'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "No polls are available")
self.assertQuerysetEqual(response.context['latest_question_list'], [])
def test_past_question(self):
"""Display questions with a pub_date in the past on the index page."""
create_question(question_text="Past question.", days=-30)
response = self.client.get(reverse('pollsapp:index'))
self.assertQuerysetEqual(
response.context['latest_question_list'],
['<Question: Past question.>']
)
def test_future_question(self):
"""Don't display questions with a pub_date in the future on the index page."""
create_question(question_text="Future question.", days=30)
response = self.client.get(reverse('pollsapp:index'))
self.assertContains(response, "No polls are available")
self.assertQuerysetEqual(response.context['latest_question_list'], [])
def test_future_question_and_past_question(self):
"""Display past questions even if both past and future questions exist."""
create_question(question_text="Past question.", days=-30)
create_question(question_text="Future question.", days=30)
response = self.client.get(reverse('pollsapp:index'))
self.assertQuerysetEqual(
response.context['latest_question_list'],
['<Question: Past question.>']
)
def test_two_past_questions(self):
"""The questions index page may display multiple questions."""
create_question(question_text="Past question 1.", days=-30)
create_question(question_text="Past question 2.", days=-5)
response = self.client.get(reverse('pollsapp:index'))
self.assertQuerysetEqual(
response.context['latest_question_list'],
['<Question: Past question 2.>', '<Question: Past question 1.>']
)
class QuestionDetailViewTests(TestCase):
"""docstring for QuestionDetailViewTests."""
def test_future_question(self):
"""Return 404 if pub-date is in the future."""
future_question = create_question(
question_text='Future question.', days=30)
url = reverse("pollsapp:detail", args=(future_question.id,))
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
def test_past_question(self):
"""Question text should be displayed."""
past_question = create_question(question_text='Past question', days=-5)
url = reverse("pollsapp:detail", args=(past_question.id))
response = self.client.get(url)
self.assertContains(response, past_question.question_text)
| 2.828125
| 3
|
autokeras/__init__.py
|
MustafaKadioglu/autokeras
| 1
|
12778128
|
<gh_stars>1-10
from autokeras.image.image_supervised import ImageClassifier, ImageRegressor
from autokeras.text.text_supervised import TextClassifier, TextRegressor
from autokeras.tabular.tabular_supervised import TabularClassifier, TabularRegressor
from autokeras.net_module import CnnGenerator, MlpModule
| 1.210938
| 1
|
members/views.py
|
KonichiwaKen/band-dashboard
| 0
|
12778129
|
import json
from django.forms import model_to_dict
from rest_framework import status
from rest_framework import views
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from attendance.models import Attendance
from attendance.models import Event
from attendance.permissions import IsAttendanceAdmin
from attendance.permissions import IsAttendanceAdminOrReadOnly
from authentication.models import Account
from authentication.permissions import IsAccountAdminOrAccountOwner
from members.models import Band
from members.models import BandMember
from members.serializers import BandMemberSerializer
from members.serializers import BandSerializer
class BandViewSet(viewsets.ModelViewSet):
queryset = Band.objects.all()
serializer_class = BandSerializer
permission_classes = (IsAuthenticated, IsAttendanceAdminOrReadOnly,)
class BandAssignmentView(views.APIView):
permission_classes = (IsAuthenticated, IsAttendanceAdminOrReadOnly,)
def get(self, request, format=None):
band_assignments = {}
for band in Band.objects.all():
member_assignments = {}
member_assignments["assigned"] = []
member_assignments["unassigned"] = []
for assigned_member in band.assigned_members.all():
member_assignment = {
"id": assigned_member.id,
"name": assigned_member.full_name,
"section": assigned_member.section_display
}
member_assignments["assigned"].append(member_assignment)
for unassigned_member in band.unassigned_members.all():
member_assignment = {
"id": unassigned_member.id,
"name": unassigned_member.full_name,
"section": unassigned_member.section_display
}
member_assignments["unassigned"].append(member_assignment)
band_assignments[band.id] = member_assignments
return Response(band_assignments)
def post(self, request, format=None):
data = json.loads(request.body)
member_id = data.get('member', None)
band_id = data.get('band', None)
action = data.get('action', None)
if member_id and band_id and action:
band_member = BandMember.objects.get(id=member_id)
band = Band.objects.get(id=band_id)
if action == 'assign':
band.unassigned_members.remove(band_member)
band.assigned_members.add(band_member)
for event in band.events.all():
try:
attendance = Attendance.objects.get(event=event, member=band_member)
if attendance.points is None:
is_modified = False
if not attendance.assigned:
attendance.assigned = True
is_modified = True
if not attendance.is_active:
attendance.is_active = True
is_modified = True
if is_modified:
attendance.save()
except Attendance.DoesNotExist:
Attendance.objects.create(event=event, member=band_member, assigned=True)
elif action == 'unassign':
band.unassigned_members.add(band_member)
band.assigned_members.remove(band_member)
for event in band.events.all():
try:
attendance = Attendance.objects.get(
event=event,
member=band_member,
points__isnull=True,
assigned=True)
attendance.assigned = False
attendance.is_active = False
attendance.save()
except Attendance.DoesNotExist:
pass
band.save()
return Response()
else:
return Response({
'status': 'Bad request',
'message': 'Missing parameter in request',
}, status=status.HTTP_400_BAD_REQUEST)
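# Request body handled by BandAssignmentView.post above (sketch; the ids are illustrative):
#
#     {"member": 12, "band": 3, "action": "assign"}    # or "action": "unassign"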
class BandMemberViewSet(viewsets.ModelViewSet):
queryset = BandMember.objects.all()
serializer_class = BandMemberSerializer
permission_classes = (IsAuthenticated, IsAccountAdminOrAccountOwner,)
class UnassignedMembersView(views.APIView):
permission_classes = (IsAuthenticated, IsAttendanceAdmin,)
def get(self, request, format=None):
event_id = self.request.query_params.get('event_id', None)
if event_id:
try:
event = Event.objects.get(id=event_id)
except Event.DoesNotExist:
return Response({
'status': 'Bad request',
'message': 'Could not find event from event_id',
}, status=status.HTTP_400_BAD_REQUEST)
existing_unassigned_members = Attendance.objects.filter(
event=event,
assigned=False,
is_active=True,
).values_list('member_id', flat=True).distinct()
band = event.band
if band:
unassigned_members_queryset = band.unassigned_members
else:
unassigned_members_queryset = BandMember.objects.filter(account__is_active=True)
unassigned_members = unassigned_members_queryset.exclude(
id__in=existing_unassigned_members).all()
unassigned_members_dicts = []
for unassigned_member in unassigned_members:
full_name = unassigned_member.full_name
member_dict = model_to_dict(unassigned_member)
member_dict['full_name'] = full_name
unassigned_members_dicts.append(member_dict)
return Response(unassigned_members_dicts)
return Response({
'status': 'Bad request',
'message': 'No event_id in request',
}, status=status.HTTP_400_BAD_REQUEST)
| 1.96875
| 2
|
cascad/server/routes/home.py
|
Will-Holden/cascadv2
| 0
|
12778130
|
<filename>cascad/server/routes/home.py
from flask import Blueprint, render_template, url_for, request
from cascad.models.datamodel import AgentTypeModel, ComputeExperimentModel, ComputeExperimentTypeModel, AgentModel
from pyecharts import options as opts
from pyecharts.charts import Bar, Scatter
from jinja2 import Markup
from cascad.experiment.token_sender import ERC20TokenWorld
home_bp = Blueprint('home_bp', __name__)
def bar_base() -> Scatter:
c = (
Scatter()
.add_xaxis(["衬衫", "羊毛衫", "雪纺衫", "裤子", "高跟鞋", "袜子"])
.add_yaxis("商家A", [5, 20, 36, 10, 75, 90])
.add_yaxis("商家B", [15, 25, 16, 55, 48, 8])
.set_global_opts(title_opts=opts.TitleOpts(title="代币分布图"))
)
return c
def token_discribute(max_step, world_id) -> Scatter:
agent_models = AgentModel.objects(step=max_step, world_id=world_id)
result = [
(str(agent_model.unique_id)[-4:], agent_model.state['token']) for agent_model in agent_models
]
c = (
Scatter()
.add_xaxis([x[0] for x in result])
.add_yaxis("代币数量", [x[1] for x in result])
.set_global_opts(title_opts=opts.TitleOpts(title="代币分布图"))
)
return c
@home_bp.route('/', methods=['GET', 'POST'])
def index():
return render_template('index.html')
@home_bp.route("/compute_experiment", methods=['GET', 'POST'])
def compute():
experiments = ComputeExperimentModel.objects.all()
return render_template('compute_experiment.html', experiments=experiments)
@home_bp.route("/agents", methods=['GET', 'POST'])
def agent():
if request.method == 'POST':
agent_type = request.form['agent_type']
else:
agents = AgentTypeModel.objects.all()
return render_template('agent.html', agents=agents)
@home_bp.route("/config_experiment", methods=["GET", "POST"])
@home_bp.route("/config_experiment/<step>", methods=["GET", "POST"])
def config_experiment(step=0):
step = int(step)
if request.method == 'POST':
experiment_type = request.form['experiment_type']
if step == 1:
agent_types = AgentTypeModel.objects.all()
return render_template(
'config_1.html',
experiment_type = experiment_type,
agent_types = agent_types
)
elif step == 2:
selected_agents = request.form.getlist('agent_types')
experiment_type = request.form['experiment_type']
experiment_params = ComputeExperimentTypeModel.objects.get(experiment_type=experiment_type).experiment_params
agent_types = AgentTypeModel.objects.all()
selected_agent_types = zip(agent_types, selected_agents)
return render_template(
'config_2.html',
experiment_type = experiment_type,
experiment_params = experiment_params,
agent_types = agent_types,
selected_agents = selected_agents,
selected_agent_types = selected_agent_types
)
elif step == 3:
selected_agents = request.form.getlist('agent_types')
experiment_type = request.form['experiment_type']
experiment_params = ComputeExperimentTypeModel.objects.get(experiment_type=experiment_type).experiment_params
agent_types = AgentTypeModel.objects.all()
selected_agent_types = zip(agent_types, selected_agents)
params_result = {
param: request.form[param] for param in experiment_params
}
if experiment_type == '_erc20_token':
erc20_token_world = ERC20TokenWorld(
float(params_result['AgentRadio']),
int(params_result['AgentNumber']),
int(params_result['IterNumbers']),
)
world_id = erc20_token_world.unique_id
max_step = int(params_result['IterNumbers']) - 1
erc20_token_world.run()
            else:
                # no runner implemented for other experiment types; fall back to
                # defaults so the render_template() call below does not raise NameError
                world_id = None
                max_step = 0
return render_template(
'config_3.html',
experiment_type = experiment_type,
experiment_params = experiment_params,
agent_types = agent_types,
selected_agents = selected_agents,
selected_agent_types = selected_agent_types,
params_result = params_result,
world_id = world_id,
max_step = max_step
)
else:
if step == 0:
experiment_types = ComputeExperimentTypeModel.objects.all()
return render_template('config_0.html', experiment_types=experiment_types)
@home_bp.route("/tokens/<max_step>/<world_id>", methods=["GET", "POST"])
def token_data(max_step, world_id):
c = token_discribute(max_step, world_id)
return c.dump_options_with_quotes()
@home_bp.route("/barChart")
def get_bar_chart():
c = bar_base()
return c.dump_options_with_quotes()
@home_bp.route("/bar")
def get_bar_index():
return render_template("bart.html")
| 2.1875
| 2
|
application/profiles/manage.py
|
kendog/coalman
| 3
|
12778131
|
<reponame>kendog/coalman
"""Routes for user authentication."""
from flask import redirect, render_template, flash, Blueprint, request, url_for
from flask_login import login_required
from flask import current_app as app
from flask_security import roles_required, current_user
#from .assets import compile_auth_assets
#from .forms import LoginForm, SignupForm
from ..db import db
from ..models import User, Profile, Account
# Blueprint Configuration
manage_profiles_bp = Blueprint('manage_profiles_bp', __name__,
template_folder='templates',
static_folder='static',
url_prefix='/manage')
@manage_profiles_bp.route('/profiles')
@roles_required('super-admin')
def profiles():
profiles = Profile.query.all()
return render_template('/profiles/manage/list.html', profiles=profiles)
@manage_profiles_bp.route('/profile/add', methods=['POST', 'GET'])
@roles_required('super-admin')
def profile_add():
if 'submit-add' in request.form:
profile = Profile(
name=request.form['name'],
bio=request.form['bio'],
address1=request.form['address1'],
address2=request.form['address2'],
city=request.form['city'],
state=request.form['state'],
zip=request.form['zip'],
phone=request.form['phone'],
user_id=request.form['user_id'],
account_id=request.form['account_id'],
creator_id=current_user.id)
db.session.add(profile)
db.session.commit()
return redirect(url_for('manage_profiles_bp.profiles'))
accounts = Account.query.all()
users = User.query.all()
profile = []
return render_template('/profiles/manage/form.html', template_mode='add', profile=profile, accounts=accounts, users=users)
@manage_profiles_bp.route('/profile/edit/<id>', methods=['POST', 'GET'])
@roles_required('super-admin')
def profile_edit(id):
profile = Profile.query.filter_by(id=id).first()
if 'submit-edit' in request.form:
if profile:
profile.name = request.form.get('name')
profile.bio = request.form.get('bio')
profile.address1 = request.form.get('address1')
profile.address2 = request.form.get('address2')
profile.city = request.form.get('city')
profile.state = request.form.get('state')
profile.zip = request.form.get('zip')
profile.phone = request.form.get('phone')
profile.user_id = request.form.get('user_id')
profile.account_id = request.form.get('account_id')
db.session.commit()
return redirect(url_for('manage_profiles_bp.profiles'))
accounts = Account.query.all()
users = User.query.all()
return render_template('/profiles/manage/form.html', template_mode='edit', profile=profile, accounts=accounts, users=users)
@manage_profiles_bp.route('/profile/delete/<id>', methods=['POST', 'GET'])
@roles_required('super-admin')
def profile_delete(id):
profile = Profile.query.filter_by(id=id).first()
if 'submit-delete' in request.form:
if profile:
db.session.delete(profile)
db.session.commit()
return redirect(url_for('manage_profiles_bp.profiles'))
accounts = Account.query.all()
users = User.query.all()
return render_template('/profiles/manage/form.html', template_mode='delete', profile=profile, accounts=accounts, users=users)
| 2.46875
| 2
|
task1/clean.py
|
Save404/captcha
| 3
|
12778132
|
#-*- coding: utf-8 -*-
import os
from PIL import Image, ImageDraw, ImageEnhance
def denoise(img):
im = Image.open(img)
enhancer = ImageEnhance.Contrast(im)
im = enhancer.enhance(3)
im = im.convert('1')
data = im.getdata()
w, h = im.size
for x in range(1, w-1):
l = []
y = 1
while(y < h-1):
m = y
count = 0
while(m < h-1 and im.getpixel((x, m)) == 0):
count = count + 1
m = m + 1
if(count <= 2 and count > 0):
c = count
while c > 0:
l.append(m - c)
c = c - 1
y = y + count + 1
if len(l) != 0:
i = 1
while i < len(l):
data.putpixel((x, l[i]), 255)
i = i + 1
for y in range(1, h-1):
l = []
x = 1
while(x < w-1):
m = x
count = 0
while(m < w-1 and im.getpixel((m, y)) == 0):
count = count + 1
m = m + 1
if(count <= 2 and count > 0):
c = count
while c > 0:
l.append(m - c)
c = c - 1
x = x + count + 1
if len(l) != 0:
i = 1
while i < len(l):
data.putpixel((l[i], y), 255)
i = i + 1
return im
def NaiveRemoveNoise(im, pnum=5):
    w, h = im.size
    white = 255
    black = 0
    # clear the one-pixel border
    for i in range(0, w):
        im.putpixel((i, 0), white)
        im.putpixel((i, h - 1), white)
    for i in range(0, h):
        im.putpixel((0, i), white)
        im.putpixel((w - 1, i), white)
    for i in range(1, w - 1):
        for j in range(1, h - 1):
            val = im.getpixel((i, j))
            # black pixel: whiten it if it has fewer than pnum black neighbours
            if val == black:
                cnt = 0
                for ii in range(-1, 2):
                    for jj in range(-1, 2):
                        if im.getpixel((i + ii, j + jj)) == black:
                            cnt += 1
                if cnt < pnum:
                    im.putpixel((i, j), white)
            # white pixel: blacken it if almost all of its neighbourhood is black
            else:
                cnt = 0
                for ii in range(-1, 2):
                    for jj in range(-1, 2):
                        if im.getpixel((i + ii, j + jj)) == black:
                            cnt += 1
                if cnt >= 7:
                    im.putpixel((i, j), black)
if __name__ == '__main__':
img = 'test/0004.jpg'
new = denoise(img)
new.save('clean1.jpg')
NaiveRemoveNoise(new)
new.save('clean2.jpg')
| 3.171875
| 3
|
abagen/allen.py
|
abkosar/abagen
| 0
|
12778133
|
# -*- coding: utf-8 -*-
"""
Functions for mapping the AHBA microarray dataset to atlases and parcellations
in MNI space
"""
from functools import reduce
from nilearn._utils import check_niimg_3d
import numpy as np
import pandas as pd
from scipy.spatial.distance import cdist
from abagen import datasets, io, process, utils
def _assign_sample(sample, atlas, sample_info=None, atlas_info=None,
tolerance=2):
"""
Determines which parcel `sample` belongs to in `atlas`
Parameters
----------
sample : (1, 3) array_like
Coordinates (ijk) of microarray sample in `atlas` space
atlas : niimg-like object
ROI image, where each ROI should be identified with a unique
integer ID
sample_info : pandas.DataFrame
A single row of an `annotation` file, corresponding to the given sample
atlas_info : pandas.DataFrame,
Dataframe containing information about the specified `atlas`. Must have
_at least_ columns 'id', 'hemisphere', and 'structure' containing
information mapping atlas IDs to hemisphere and broad structural class
(i.e., "cortex", "subcortex", "cerebellum"). Default: None
tolerance : int, optional
Distance (in mm) that a sample must be from a parcel for it to be
matched to that parcel. This is only considered if the sample is not
directly within a parcel. Default: 2
Returns
-------
label : int
Parcel label of `sample`
"""
# pull relevant info from atlas
label_data = check_niimg_3d(atlas).get_data()
# expand provided coordinates to include those w/i `tolerance` of `coords`
# set a hard euclidean distance limit to account for different voxel sizes
coords = utils.expand_roi(sample, dilation=tolerance, return_array=True)
coords = coords[cdist(sample, coords).squeeze() < tolerance]
# grab non-zero labels for expanded coordinates
possible_labels = label_data[coords[:, 0], coords[:, 1], coords[:, 2]]
nz_labels = possible_labels[possible_labels.nonzero()]
labels, counts = np.unique(nz_labels, return_counts=True)
# if atlas_info and sample_info are provided, drop potential labels who
# don't match hemisphere or structural class defined in `sample_info`
if atlas_info is not None and sample_info is not None:
for old_label in labels:
new_label = _check_label(old_label, sample_info, atlas_info)
if old_label != new_label:
nz_labels[nz_labels == old_label] = new_label
labels, counts = np.unique(nz_labels[nz_labels.nonzero()],
return_counts=True)
# if there is still nothing in the vicinity, return 0
if labels.size == 0:
return 0
# if there is only one ROI in the vicinity, use that
elif labels.size == 1:
return labels[0]
# if more than one ROI in the vicinity, return the most frequent
indmax, = np.where(counts == counts.max())
if indmax.size == 1:
return labels[indmax[0]]
# if two or more parcels tied for neighboring frequency, use ROI
# with closest centroid to `coords`
centroids = utils.get_centroids(atlas, labels)
return labels[utils.closest_centroid(sample, centroids)]
def _check_label(label, sample_info, atlas_info):
"""
Checks that `label` defined by `sample_info` is coherent with `atlas_info`
Parameters
----------
label : int
        Tentative label for sample described by `sample_info`
sample_info : pandas.DataFrame
A single row of an `annotation` file, corresponding to the given sample
atlas_info : pandas.DataFrame,
Dataframe containing information about the atlas of interest. Must have
_at least_ columns 'id', 'hemisphere', and 'structure' containing
information mapping atlas IDs to hemisphere and broad structural class
(i.e., "cortex", "subcortex", "cerebellum"). Default: None
Returns
-------
label : int
New label for sample
"""
cols = ['hemisphere', 'structure']
if label != 0:
sample_info = sample_info[cols]
atlas_info = atlas_info.loc[label][cols]
if not np.all(sample_info.values == atlas_info.values):
label = 0
return label
def label_samples(annotation, atlas, atlas_info=None, tolerance=2):
"""
Matches all microarray samples in `annotation` to parcels in `atlas`
Attempts to place each sample provided in `annotation` into a parcel in
`atlas`, where the latter is a 3D niimg-like object that contains parcels
each idnetified by a unique integer ID.
The function tries to best match samples in `annotation` to parcels defined
in `atlas` by:
1. Determining if the sample falls directly within a parcel,
2. Checking to see if there are nearby parcels by slowly expanding the
search space to include nearby voxels, up to a specified distance
(specified via the `tolerance` parameter),
3. Assigning the sample to the closest parcel if there are multiple
nearby parcels, where closest is determined by the parcel centroid.
If at any step a sample can be assigned to a parcel the matching process is
terminated. If there is still no parcel for a given sample after this
process the sample is provided a label of 0.
Parameters
----------
annotation : (S, 13) pandas.DataFrame
Pre-loaded annotation information for a given AHBA donor
atlas : niimg-like object
A parcellation image in MNI space, where each parcel is identified by a
unique integer ID
atlas_info : pandas.DataFrame, optional
Filepath to or pre-loaded dataframe containing information about
`atlas`. Must have _at least_ columns 'id', 'hemisphere', and
'structure' containing information mapping atlas IDs to hemisphere and
broad structural class (i.e., "cortex", "subcortex", "cerebellum").
Default: None
tolerance : int, optional
Distance (in mm) that a sample must be from a parcel for it to be
matched to that parcel. This is only considered if the sample is not
directly within a parcel. Default: 2
Returns
-------
labels : (S, 1) pandas.DataFrame
Dataframe with parcel labels for each of `S` samples
"""
# get annotation and atlas data
annotation = io.read_annotation(annotation)
atlas = check_niimg_3d(atlas)
label_data, affine = atlas.get_data(), atlas.affine
# load atlas_info, if provided
if atlas_info is not None:
atlas_info = utils.check_atlas_info(atlas, atlas_info)
# get ijk coordinates for microarray samples and find labels
g_ijk = utils.xyz_to_ijk(annotation[['mni_x', 'mni_y', 'mni_z']], affine)
labelled_samples = label_data[g_ijk[:, 0], g_ijk[:, 1], g_ijk[:, 2]]
# if sample coordinates aren't directly inside a parcel, increment radius
# around sample up to `tolerance` to try and find nearby parcels.
# if still no parcel, then ignore this sample
for idx in np.where(labelled_samples == 0)[0]:
label, tol = labelled_samples[idx], 1
while label == 0 and tol <= tolerance:
label = _assign_sample(g_ijk[[idx]], atlas,
sample_info=annotation.iloc[idx],
atlas_info=atlas_info,
tolerance=tol)
tol += 1
labelled_samples[idx] = label
return pd.DataFrame(labelled_samples, dtype=int,
columns=['label'], index=annotation.index)
def group_by_label(microarray, sample_labels, labels=None, metric='mean'):
"""
Averages expression data in `microarray` over samples with same label
Parameters
----------
microarray : (S, G) pandas.DataFrame
Microarray expression data, where `S` is samples and `G` is genes
sample_labels : (S, 1) pandas.DataFrame
Parcel labels for `S` samples, as returned by e.g., `label_samples()`
labels : (L,) array_like, optional
All possible labels for parcellation (to account for possibility that
some parcels have NO expression data). Default: None
metric : str or func, optional
Mechanism by which to collapse across samples within a parcel. If a
str, should be in ['mean', 'median']; if a function, should be able to
accept an `N`-dimensional input and the `axis` keyword argument and
return an `N-1`-dimensional output. Default: 'mean'
Returns
-------
gene_by_label : (L, G) pandas.DataFrame
Microarray expression data
"""
# get combination function
metric = utils.check_metric(metric)
# get missing labels
if labels is not None:
missing = np.setdiff1d(labels, sample_labels)
labels = pd.DataFrame(columns=microarray.columns,
index=pd.Series(missing, name='label'))
gene_by_label = (microarray.merge(sample_labels,
left_index=True,
right_index=True)
.groupby('label')
.aggregate(metric)
.append(labels)
.drop([0])
.sort_index()
.rename_axis('label'))
return gene_by_label
def get_expression_data(atlas, atlas_info=None, *, exact=True,
tolerance=2, metric='mean', ibf_threshold=0.5,
corrected_mni=True, reannotated=True,
return_counts=False, return_donors=False,
donors='all', data_dir=None):
"""
Assigns microarray expression data to ROIs defined in `atlas`
This function aims to provide a workflow for generating pre-processed,
    microarray expression data for arbitrary `atlas` designations. First, some
basic filtering of genetic probes is performed, including:
1. Intensity-based filtering of microarray probes to remove probes that
do not exceed a certain level of background noise (specified via the
`ibf_threshold` parameter), and
2. Selection of a single, representative probe for each gene via a
differential stability metric, wherein the probe that has the most
consistent regional variation across donors is retained.
Tissue samples are then matched to parcels in the defined `atlas` for each
donor. If `atlas_info` is provided then this matching is constrained by
both hemisphere and tissue class designation (e.g., cortical samples from
the left hemisphere are only matched to ROIs in the left cortex,
subcortical samples from the right hemisphere are only matched to ROIs in
the left subcortex); see the `atlas_info` parameter description for more
information.
Matching of microarray samples to parcels in `atlas` is done via a multi-
step process:
1. Determine if the sample falls directly within a parcel,
2. Check to see if there are nearby parcels by slowly expanding the
search space to include nearby voxels, up to a specified distance
(specified via the `tolerance` parameter),
3. If there are multiple nearby parcels, the sample is assigned to the
closest parcel, as determined by the parcel centroid.
If at any step a sample can be assigned to a parcel the matching process is
    terminated. If multiple samples are assigned to the same parcel they are
aggregated with the metric specified via the `metric` parameter. More
control over the sample matching can be obtained by setting the `exact`
parameter; see the parameter description for more information.
Once all samples have been matched to parcels for all supplied donors, the
microarray expression data are normalized within-donor via a scaled robust
sigmoid (SRS) procedure before being combined across donors via the
supplied `metric`.
Parameters
----------
atlas : niimg-like object
A parcellation image in MNI space, where each parcel is identified by a
unique integer ID
atlas_info : str or :class:`pandas.DataFrame`, optional
Filepath to or pre-loaded dataframe containing information about
`atlas`. Must have at least columns 'id', 'hemisphere', and 'structure'
containing information mapping atlas IDs to hemisphere (i.e, "L", "R")
and broad structural class (i.e., "cortex", "subcortex", "cerebellum").
Default: None
exact : bool, optional
Whether to use exact matching of donor tissue samples to parcels in
`atlas`. If True, this function will match tissue samples to parcels
within `threshold` mm of the sample; any samples that are beyond
`threshold` mm of a parcel will be discarded. This may result in some
parcels having no assigned sample / expression data. If False, the
default matching procedure will be performed and followed by a check
for parcels with no assigned samples; any such parcels will be matched
to the nearest sample (nearest defined as the sample with the closest
Euclidean distance to the parcel centroid). Default: True
tolerance : int, optional
Distance (in mm) that a sample must be from a parcel for it to be
matched to that parcel. This is only considered if the sample is not
directly within a parcel. Default: 2
metric : str or func, optional
Mechanism by which to collapse across donors, if input `files` provides
multiple donor datasets. If a str, should be in ['mean', 'median']; if
a function, should be able to accept an `N`-dimensional input and the
`axis` keyword argument and return an `N-1`-dimensional output.
Default: 'mean'
ibf_threshold : [0, 1] float, optional
        Threshold for intensity-based filtering. This number should
specify the ratio of samples, across all supplied donors, for which a
probe must have signal above background noise in order to be retained.
Default: 0.5
corrected_mni : bool, optional
Whether to use the "corrected" MNI coordinates shipped with the
`alleninf` package instead of the coordinates provided with the AHBA
data when matching tissue samples to anatomical regions. Default: True
reannotated : bool, optional
Whether to use reannotated probe information provided by [1]_ instead
of the default probe information from the AHBA dataset. Using
reannotated information will discard probes that could not be reliably
matched to genes. Default: True
return_counts : bool, optional
Whether to return how many samples were assigned to each parcel in
`atlas` for each donor. Default: False
return_donors : bool, optional
Whether to return donor-level expression arrays instead of aggregating
expression across donors with provided `metric`. Default: False
donors : list, optional
List of donors to use as sources of expression data. Can be either
donor numbers or UID. If not specified will use all available donors.
Default: 'all'
data_dir : str, optional
Directory where expression data should be downloaded (if it does not
already exist) / loaded. If not specified will use the current
directory. Default: None
Returns
-------
expression : (R, G) :class:`pandas.DataFrame`
Microarray expression for `R` regions in `atlas` for `G` genes,
aggregated across donors, where the index corresponds to the unique
integer IDs of `atlas` and the columns are gene names.
counts : (R, D) :class:`pandas.DataFrame`
Number of samples assigned to each of `R` regions in `atlas` for each
of `D` donors (if multiple donors were specified); only returned if
`return_counts=True`.
References
----------
.. [1] <NAME>., <NAME>., & <NAME>. (2019). A
practical guide to linking brain-wide gene expression and neuroimaging
data. NeuroImage, 189, 353-367.
.. [2] <NAME>. et al. (2012) An anatomically comprehensive atlas of
the adult human transcriptome. Nature, 489, 391-399.
"""
# fetch files
files = datasets.fetch_microarray(data_dir=data_dir, donors=donors)
for key in ['microarray', 'probes', 'annotation', 'pacall', 'ontology']:
if key not in files:
raise KeyError('Provided `files` dictionary is missing {}. '
'Please check inputs.'.format(key))
# load atlas_info, if provided
atlas = check_niimg_3d(atlas)
if atlas_info is not None:
atlas_info = utils.check_atlas_info(atlas, atlas_info)
# get combination functions
metric = utils.check_metric(metric)
# get some info on the number of subjects, labels in `atlas_img`
num_subj = len(files.microarray)
all_labels = utils.get_unique_labels(atlas)
if not exact:
centroids = utils.get_centroids(atlas, labels=all_labels)
# reannotate probes based on updates from Arnatkeviciute et al., 2018 then
# perform intensity-based filter of probes and select probe with highest
# differential stability for each gene amongst remaining probes
if reannotated:
probes = process.reannotate_probes(files.probes[0])
else:
probes = io.read_probes(files.probes[0])
probes = process.filter_probes(files.pacall, probes,
threshold=ibf_threshold)
probes = process.get_stable_probes(files.microarray, files.annotation,
probes)
expression, missing = [], []
counts = pd.DataFrame(np.zeros((len(all_labels) + 1, num_subj)),
index=np.append([0], all_labels))
for subj in range(num_subj):
# get rid of samples whose coordinates don't match ontological profile
annotation = process.drop_mismatch_samples(files.annotation[subj],
files.ontology[subj],
corrected=corrected_mni)
# subset representative probes + samples from microarray data
microarray = io.read_microarray(files.microarray[subj])
samples = microarray.loc[probes.index, annotation.index].T
samples.columns = probes.gene_symbol
# assign samples to regions and aggregate samples w/i the same region
sample_labels = label_samples(annotation, atlas,
atlas_info=atlas_info,
tolerance=tolerance)
expression += [group_by_label(samples, sample_labels,
all_labels, metric=metric)]
# get counts of samples collapsed into each ROI
labs, num = np.unique(sample_labels, return_counts=True)
counts.loc[labs, subj] = num
# if we don't want to do exact matching then cache which parcels are
# missing data and the expression data for the closest sample to that
# parcel; we'll use this once we've iterated through all donors
if not exact:
coords = utils.xyz_to_ijk(annotation[['mni_x', 'mni_y', 'mni_z']],
atlas.affine)
empty = ~np.in1d(all_labels, labs)
closest, dist = utils.closest_centroid(coords, centroids[empty],
return_dist=True)
closest = samples.loc[annotation.iloc[closest].index]
empty = all_labels[empty]
closest.index = pd.Series(empty, name='label')
missing += [(closest, dict(zip(empty, np.diag(dist))))]
# check for missing ROIs and fill in, as needed
if not exact:
# find labels that are missing across all donors
empty = reduce(set.intersection, [set(f.index) for f, d in missing])
for roi in empty:
# find donor with sample closest to centroid of empty parcel
ind = np.argmin([d.get(roi) for f, d in missing])
# assign expression data from that sample and add to count
expression[ind].loc[roi] = missing[ind][0].loc[roi]
counts.loc[roi, ind] += 1
# normalize data with SRS and aggregate across donors
expression = [process.normalize_expression(e) for e in expression]
if not return_donors:
expression = process.aggregate_donors(expression, metric)
if return_counts:
return expression, counts.iloc[1:]
return expression
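# Usage sketch (not part of the module; the file names are illustrative). Assuming
# `atlas.nii.gz` is a parcellation image in MNI space and `atlas_info.csv` carries the
# 'id', 'hemisphere' and 'structure' columns described above:
#
#     from abagen import allen
#     expression = allen.get_expression_data('atlas.nii.gz',
#                                            atlas_info='atlas_info.csv',
#                                            exact=False, metric='mean')
#     # -> (regions x genes) DataFrame indexed by the atlas' integer labels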
| 2.53125
| 3
|
python_design_patterns/singleton.py
|
johanvergeer/python-design-patterns
| 0
|
12778134
|
"""
Singleton pattern ensures that the class can have only one existing instance per Java
classloader instance and provides global access to it.
One of the risks of this pattern is that bugs resulting from setting a singleton up in a
distributed environment can be tricky to debug, since it will work fine if you debug with a
single classloader. Additionally, these problems can crop up a while after the implementation of
a singleton, since they may start out synchronous and only become async with time, so you it may
not be clear why you are seeing certain changes in behaviour.
There are many ways to implement the Singleton. The first one is the eagerly initialized
instance in :class:`IvoryTower`. Eager initialization implies that the implementation is thread
safe. If you can afford giving up control of the instantiation moment, then this implementation
will suit you fine.
"""
from typing import Optional, cast
class IvoryTower:
"""Singleton class example"""
_instance: Optional["IvoryTower"] = None
def __new__(cls, *args, **kwargs):
if cls._instance:
return cls._instance
cls._instance = cast(
"IvoryTower", super(IvoryTower, cls).__new__(cls, *args, **kwargs)
)
return cls._instance
def __str__(self):
return f"The id of this Ivory Tower is {id(self)}"
if __name__ == "__main__":
tower_1 = IvoryTower()
tower_2 = IvoryTower()
print(tower_1)
print(tower_2)
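    # both names point to the same cached instance, so this prints True
    print(tower_1 is tower_2)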
| 3.921875
| 4
|
src/python/Tools/vcfcallerinfo.py
|
Steven-N-Hart/hap.py
| 0
|
12778135
|
# coding=utf-8
#
# Copyright (c) 2010-2015 Illumina, Inc.
# All rights reserved.
#
# This file is distributed under the simplified BSD license.
# The full text can be found here (and in LICENSE.txt in the root folder of
# this distribution):
#
# https://github.com/sequencing/licenses/blob/master/Simplified-BSD-License.txt
import tempfile
import itertools
import subprocess
import logging
import os
import json
class CallerInfo(object):
""" Class for collecting caller info and version
"""
def __init__(self):
# callers and aligners are stored in tuples of three:
# (caller/aligner, version, parameters)
self.callers = []
self.aligners = []
def __repr__(self):
return "aligners=[" + ",".join(["/".join(xx) for xx in self.aligners]) + "] " + \
"callers=[" + ",".join(["/".join(xx) for xx in self.callers]) + "]"
def asDict(self):
kvd = ["name", "version", "parameters"]
return {"aligners": [dict(y for y in zip(kvd, x)) for x in self.aligners],
"callers": [dict(y for y in zip(kvd, x)) for x in self.callers]}
def addVCF(self, vcfname):
""" Add caller versions from a VCF
:param vcfname: VCF file name
"""
tf = tempfile.NamedTemporaryFile(delete=False)
tf.close()
vfh = {}
try:
sp = subprocess.Popen("vcfhdr2json '%s' '%s'" % (vcfname, tf.name),
shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
o, e = sp.communicate()
if sp.returncode != 0:
raise Exception("vcfhdr2json call failed: %s / %s" % (o, e))
vfh = json.load(open(tf.name))
finally:
try:
os.unlink(tf.name)
except:
pass
cp = ['unknown', 'unknown', '']
gatk_callers = ["haplotypecaller", "unifiedgenotyper", "mutect"]
sent_callers = ["haplotyper"]
source_found = False
for hf in vfh["fields"]:
try:
k = hf["key"]
if k == "source":
try:
cp[0] = str(hf["values"])
except:
cp[0] = hf["value"]
if cp[0].startswith("Platypus_Version_"):
cp[1] = cp[0][len("Platypus_Version_"):]
cp[0] = "Platypus"
source_found = True
elif k == "source_version":
try:
cp[1] = str(hf["values"])
except:
cp[1] = hf["value"]
source_found = True
elif k == "cmdline":
try:
cp[2] = str(hf["values"])
except:
cp[2] = hf["value"]
source_found = True
elif k == "platypusOptions":
try:
cp[2] = str(hf["values"])
except:
cp[2] = hf["value"]
source_found = True
elif k == "octopus":
# octopus doesn't add a version
self.callers.append(["octopus", "unknown", str(hf["values"])])
elif k.startswith("GATKCommandLine"):
caller = "GATK"
try:
caller += "-" + hf["values"]["ID"]
except:
pass
version = "unknown"
try:
version = hf["values"]["Version"]
except:
pass
options = ""
try:
options = hf["values"]["CommandLineOptions"]
except:
pass
if any(g in caller.lower() for g in gatk_callers):
self.callers.append([caller, version, options])
elif k.startswith("SentieonCommandLine"):
caller = "Sentieon"
try:
caller += "-" + hf["values"]["ID"]
except:
pass
version = "unknown"
try:
version = hf["values"]["Version"]
except:
pass
options = ""
if any(s in caller.lower() for s in sent_callers):
                    self.callers.append([caller, version, options])
except:
pass
if source_found:
self.callers.append(cp)
def addBAM(self, bamfile):
""" Extract aligner information from a BAM file
:param bamfile: name of BAM file
"""
sp = subprocess.Popen("samtools view -H '%s'" % bamfile,
shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
o, e = sp.communicate()
if sp.returncode != 0:
raise Exception("Samtools call failed: %s / %s" % (o, e))
for line in o.split("\n"):
if not line.startswith("@PG"):
continue
try:
# noinspection PyTypeChecker
x = dict(y.split(":", 1) for y in line.split("\t")[1:])
except:
logging.warn("Unable to parse SAM/BAM header line: %s" % line)
continue
cp = ['unknown', 'unknown', '']
try:
cp[0] = x['PN']
except:
try:
cp[0] = x['ID']
if "-" in cp[0]:
cp[0] = cp[0].split("-")[0]
except:
pass
try:
cp[1] = x['VN']
except:
pass
try:
cp[2] = x['CL']
except:
pass
self.aligners.append(cp)
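# Hedged usage sketch, not part of the original module. The file names below are placeholders,
# and the external tools "vcfhdr2json" and "samtools" must be on PATH for these calls to work.
if __name__ == "__main__":
    ci = CallerInfo()
    ci.addVCF("calls.vcf.gz")       # collect caller name/version/options from the VCF header
    ci.addBAM("alignments.bam")     # collect aligner @PG records from the BAM header
    print(repr(ci))
    print(json.dumps(ci.asDict(), indent=4))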
| 2.1875
| 2
|
examples/reconstruct.py
|
stefanv/lulu
| 3
|
12778136
|
<gh_stars>1-10
import sys
sys.path.insert(0, '..')
from demo import load_image
import numpy as np
import matplotlib.pyplot as plt
import os
import time
import lulu
import lulu.connected_region_handler as crh
img = load_image()
print("Decomposing a %s matrix." % str(img.shape))
tic = time.time()
regions = lulu.decompose(img.copy())
toc = time.time()
print("Execution time: %.2fs" % (toc - tic))
print("-"*78)
print("Reconstructing image...", end=None)
out, areas, area_count = lulu.reconstruct(regions, img.shape)
print("done.")
print("Reconstructed from %d pulses." % sum(area_count))
print("-"*78)
plt.subplot(2, 2, 1)
plt.imshow(img, interpolation='nearest', cmap=plt.cm.gray)
plt.title('Original')
plt.subplot(2, 2, 2)
plt.imshow(out, interpolation='nearest', cmap=plt.cm.gray)
plt.title('Reconstruction (%d pulses)' % sum(area_count))
plt.subplot(2, 2, 4)
s = np.cumsum(area_count)
midpt = (s[-1] + s[0])/2.
ind = np.argmin(np.abs(s - midpt))
plt.plot([areas[ind]], [area_count[ind]], 'r.', markersize=10)
areas = areas[:ind*3]
area_count = area_count[:ind*3]
#plt.fill_between(areas[ind:], area_count[ind:], alpha=0.3)
plt.plot(areas, area_count)
plt.xlabel('Pulse Area')
plt.ylabel('Number of Pulses')
plt.title('Histogram of Pulse Areas (up to area %d)' % (ind*3))
print("-"*78)
print("Thresholded reconstruction...", end=None)
out, areas, area_count = \
lulu.reconstruct(regions, img.shape, min_area=areas[ind])
print("done.")
print("Reconstructed from %d pulses." % sum(area_count))
for area in regions:
if area < areas[ind]:
regions[area] = []
plt.subplot(2, 2, 3)
plt.imshow(out, interpolation='nearest', cmap=plt.cm.gray)
plt.title('Reconstruction with areas >= %d (%d pulses)' % \
(areas[ind], sum(area_count)))
plt.suptitle('2D LULU Reconstruction')
plt.show()
| 2.109375
| 2
|
game_data.py
|
Ammarpad/OutreachyProject
| 5
|
12778137
|
#!/usr/local/bin/python3
import common
import pywikibot
import wikitextparser as parser
from pywikibot import pagegenerators
GAME_MODE_PROP_ID = 'P404'
TEMPLATE = 'Infobox video game'
def main():
site = pywikibot.Site('en', 'wikipedia')
repo = site.data_repository()
temp = pywikibot.Page(site, TEMPLATE, ns=10)
summary = '([[Wikidata:Requests for permissions/Bot/AmmarBot $|Add maximum capacity]])'
all_pages = temp.getReferences(
follow_redirects = False,
only_template_inclusion=False,
namespaces = [0],
total = 100
)
processPages(all_pages, temp)
def processPages(pages, temp):
def getRedirects(p):
backlinks = p.backlinks(filter_redirects=True)
redirects = list()
for link in backlinks:
redirects.append(link.title(with_ns=False).lower())
return redirects
redirects = getRedirects(temp)
for page in pages:
extractMode(page, redirects)
def extractMode(page, redirects):
templates = page.raw_extracted_templates
for (template, values) in templates:
if template.title() == TEMPLATE or template.title() in redirects:
print(values.get('game_mode'))
if __name__ == '__main__':
main()
| 2.828125
| 3
|
pyston/pyston_lite/setup.py
|
sthagen/pyston-pyston
| 0
|
12778138
|
<filename>pyston/pyston_lite/setup.py
from distutils.core import setup, Extension, Distribution
from distutils.command.build_ext import build_ext
from distutils import sysconfig
import os
import subprocess
import sys
class pyston_build_ext(build_ext):
def run(self):
subprocess.check_call(["../../pyston/tools/dynasm_preprocess.py", "aot_ceval_jit.c", "aot_ceval_jit.prep.c"])
subprocess.check_call(["luajit", "../../pyston/LuaJIT/dynasm/dynasm.lua", "-o", "aot_ceval_jit.gen.c", "aot_ceval_jit.prep.c"])
return super(pyston_build_ext, self).run()
ext = Extension(
"pyston_lite",
sources=["aot_ceval.c", "aot_ceval_jit.gen.c", "aot_ceval_jit_helper.c", "lib.c"],
include_dirs=["../../pyston/LuaJIT", os.path.join(sysconfig.get_python_inc(), "internal")],
define_macros=[("PYSTON_LITE", None), ("PYSTON_SPEEDUPS", "1"), ("Py_BUILD_CORE", None), ("ENABLE_AOT", None)],
extra_compile_args=["-std=gnu99"],
)
setup(name="pyston_lite",
cmdclass={"build_ext":pyston_build_ext},
version="2.3.3.1",
description="A JIT for Python",
author="<NAME>",
url="https://www.github.com/pyston/pyston",
ext_modules=[ext],
)
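# Hedged usage note, not part of the original file: with this custom build_ext (which runs the
# DynASM preprocessing steps above), an in-place build would typically be invoked as
#   python setup.py build_ext --inplace
# assuming luajit and the referenced pyston/LuaJIT sources are available at the relative paths used here.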
| 1.984375
| 2
|
src/fridayUI/kitchen_gui_func.py
|
ThiefOfTime/KitchenOrganisator
| 0
|
12778139
|
<reponame>ThiefOfTime/KitchenOrganisator<gh_stars>0
# -*- coding: utf-8 -*-
"""
Created on May 11, 2018
@author: ThiefOfTime
"""
import re
import cv2
import queue
import threading
import multiprocessing
# import kitchen gui
import fridayUI.kitchen_gui as kitchen
# import Hive modules
from connections.HiveIO import RecipeReader
from fridayUI.weekly_add_input_dialog_func import MealInputDialog
# import PySide modules
try:
    from PySide.QtGui import QHeaderView, QListWidgetItem, QTableWidgetItem, QImage, QPixmap, QGraphicsPixmapItem, \
        QGraphicsScene, QWidget, QHBoxLayout, QToolButton, QLCDNumber, QInputDialog, QLineEdit, QIcon, QColor, QMainWindow
from PySide.QtCore import QTime, QTimer, QSize
pyside_import = True
except ModuleNotFoundError:
from PySide2.QtWidgets import QHeaderView, QListWidgetItem, QTableWidgetItem, QGraphicsPixmapItem, QGraphicsScene, \
QWidget, QHBoxLayout, QToolButton, QLCDNumber, QInputDialog, QLineEdit, QMainWindow
from PySide2.QtGui import QImage, QPixmap, QIcon, QColor
from PySide2.QtCore import QTime, QTimer, QSize
pyside_import = False
class Kitchen(QMainWindow, kitchen.Ui_Kitchen):
def __init__(self, start, hive_connection):
super(Kitchen, self).__init__()
self.setupUi(self)
# Hive database
self.database_connector = hive_connection
self.recipe_reader = RecipeReader(self.database_connector)
self.recipe_reader.load_recipes()
# Recipe List
self.recipes = self.recipe_reader.get_recipes()
item = QListWidgetItem('< None >')
self.recipe_list.addItem(item)
for recipe in self.recipes:
item = QListWidgetItem(recipe)
self.recipe_list.addItem(item)
self.recipe_list.itemClicked.connect(self.set_ingredients)
self.recipe_list.currentItemChanged.connect(self.clear_table)
self.regex = re.compile('([0-9,.]+)([a-zA-Z]*)')
self.start = start
self.cancel_bt.clicked.connect(self.show_start)
# name widget chooser
self.mid = MealInputDialog('Title', 'Message', 'Name', 'Links', ('Cancel', 'Accept'), lambda x, y: print(x))
# language tag
self.lang_tag = 'de'
# language dicts
self.stuff_hor_header_labels = {'en': ['Amount', 'Unit', 'Name'], 'de': ['Menge', 'Einheit', 'Name']}
self.weekly_hor_header_labels = {'en': 'Meal', 'de': 'Gericht'}
        self.weekly_vert_header_labels = {'en': ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday',
                                                 'Sunday'], 'de': ['Montag', 'Dienstag', 'Mittwoch', 'Donnerstag',
                                                                   'Freitag', 'Samstag', 'Sonntag']}
self.set_language_label_text = {'en': 'set Language:', 'de': 'Sprache wechseln:'}
self.rad_buttons_text = {'en': ['Calories Summation', 'Show stock (empty things)',
'Show stock (existing things)', 'Bought goods'], 'de': ['Kalorienrechner',
'Leere Nahrungsmittel', 'Vorhandene Nahrungsmittel', 'Einkaufsinventur']}
self.tab_text = {'en': ['Recipes', 'Weekly Rota', 'Playlist'], 'de': ['Rezepte', 'Wochenplan', 'Playlist']}
self.buttons_text = {'en': [('Calculate calories', 'Push to Database'), 'Cancel'],
'de': [('Kalorien berechnen', 'Übernehmen'), 'Zurück']}
self.button_bought_calc_val = True
# Tables
self.stuff_table.setColumnCount(3)
if not pyside_import:
self.stuff_table.horizontalHeader().setSectionResizeMode(QHeaderView.Stretch)
else:
self.stuff_table.horizontalHeader().setResizeMode(QHeaderView.Stretch)
self.weekly_tw.setColumnCount(1)
self.weekly_tw.setRowCount(7)
self.toggle_language()
if not pyside_import:
self.weekly_tw.verticalHeader().setSectionResizeMode(QHeaderView.Stretch)
self.weekly_tw.horizontalHeader().setSectionResizeMode(QHeaderView.Stretch)
else:
self.weekly_tw.verticalHeader().setResizeMode(QHeaderView.Stretch)
self.weekly_tw.horizontalHeader().setResizeMode(QHeaderView.Stretch)
# Time
self.time_lcd.setDigitCount(8)
timer = QTimer(self)
timer.timeout.connect(self.show_time)
timer.start(1000)
self.show_time()
# Timer
self.timer_lcd.setDigitCount(8)
self.timer_dial.setMaximum(3600)
if pyside_import:
self.timer_dial.dialMoved.connect(self.set_timer)
else:
self.timer_dial.valueChanged.connect(self.set_timer)
self.timer_timer = QTimer(self)
self.timer_timer.timeout.connect(self.count_downwards)
self.timer_bt.clicked.connect(self.timer_start)
self.timer_time = 0
self.timer_running = False
bl_pixmap = QPixmap('icons/aperture.png')
red_pixmap = QPixmap('icons/aperture_red.png')
self.icon_bl = QIcon()
self.icon_bl.addPixmap(bl_pixmap)
self.icon_red = QIcon()
self.icon_red.addPixmap(red_pixmap)
# Music buttons
# prev
prev_icon = QIcon()
prev_pixmap = QPixmap('icons/prev.png')
prev_icon.addPixmap(prev_pixmap)
self.prev_bt.setIcon(prev_icon)
# next
next_icon = QIcon()
next_pixmap = QPixmap('icons/next.png')
next_icon.addPixmap(next_pixmap)
self.next_bt.setIcon(next_icon)
# play
self.play_icon = QIcon()
play_pixmap = QPixmap('icons/play.png')
self.play_icon.addPixmap(play_pixmap)
self.pause_icon = QIcon()
pause_pixmap = QPixmap('icons/pause.png')
self.pause_icon.addPixmap(pause_pixmap)
self.play_pause_bt.setIcon(self.play_icon)
# stop
stop_icon = QIcon()
stop_pixmap = QPixmap('icons/stop.png')
stop_icon.addPixmap(stop_pixmap)
self.stop_bt.setIcon(stop_icon)
# search
search_icon = QIcon()
search_pixmap = QPixmap('icons/search2.png')
search_icon.addPixmap(search_pixmap)
self.go_bt.setIcon(search_icon)
# table
self.weekly_meals = {}
self.stuff_table.cellClicked.connect(self.item_clicked)
self.weekly_tw.cellClicked.connect(self.weekly_cell_clicked)
# button calc
self.calculate_bt.clicked.connect(self.calculate_cal)
self.lcd_palette = self.calorie_lcd.palette()
self.lcd_palette.setColor(self.lcd_palette.WindowText, QColor(102, 255, 102))
self.calorie_lcd.setPalette(self.lcd_palette)
# radio buttons
self.show_emp_rad.clicked.connect(lambda: self.radio_button_clicked(True, True))
self.show_rad.clicked.connect(lambda: self.radio_button_clicked(True, False))
self.cal_rad.clicked.connect(lambda: self.radio_button_clicked(True, None))
self.bought_rb.clicked.connect(lambda: self.radio_button_clicked(False, None))
# video
stand_by_image = cv2.imread('src/default.png')
self.update_frame(stand_by_image)
#self.queue = queue.Queue()
#self.capture_thread = threading.Thread(target=self.grab, args=(0, self.queue, stand_by_image))
#self.timer = QTimer(self)
#self.timer.timeout.connect(self.update_frame)
#self.timer.start(1)
#global running
#running = True
#self.capture_thread.start()
# language button
self.language_bt.clicked.connect(self.toggle_language)
# Barcode queue and thread
# TODO: use different way, not input. maybe get serial to work or make an input window, or start a new program
#self.bar_thread = multiprocessing.Process(target=Kitchen.read_barcodes,
# args=(self.update_invent_table_barcode, ))
#self.bar_thread.start()
def update_invent_table_barcode(self, bar):
'''
update the inventory table via barcode
        :param bar: received barcode
:return:
'''
row_count = self.stuff_table.rowCount()
self.stuff_table.insertRow(row_count - 1)
pass
def radio_button_clicked(self, up_cal, empty_full):
        '''
        change button behavior if radio buttons are clicked
        :param up_cal: True if a calorie-related view (calculator or stock lists) should be shown
        :param empty_full: True to list empty stock items, False to list existing ones, None for the calorie calculator
        :return:
        '''
self.calorie_lcd.display('0')
if up_cal:
if empty_full is None:
self.show_calorie_calc()
else:
self.show_empty_full_items(empty=empty_full)
if self.button_bought_calc_val:
self.calculate_bt.setText(self.buttons_text[self.lang_tag][0][0])
self.toggle_signal_event(self.calculate_bt.clicked, self.calculate_cal, self.update_database)
self.button_bought_calc_val = False
else:
self.show_calorie_calc(row_count=2)
if not self.button_bought_calc_val:
self.calculate_bt.setText(self.buttons_text[self.lang_tag][0][1])
self.toggle_signal_event(self.calculate_bt.clicked, self.update_database, self.calculate_cal)
self.button_bought_calc_val = True
def update_database(self):
'''
update the database
:return:
'''
for i in range(self.stuff_table.rowCount()):
vol = ''
unit = ''
name = ''
if self.stuff_table.item(i, 0) is not None and self.stuff_table.item(i, 1) is not None and \
self.stuff_table.item(i, 2) is not None:
vol = self.stuff_table.item(i, 0).text()
unit = self.stuff_table.item(i, 1).text()
name = self.stuff_table.item(i, 2).text()
else:
return
self.recipe_reader.get_hive_connection().set_volume_of_item(name, '%s%s' % (vol, unit))
def toggle_language(self):
'''
set the language of the gui
:return:
'''
self.lang_tag = 'de' if self.lang_tag == 'en' else 'en'
self.language_bt.setText(self.lang_tag)
self.label_2.setText(self.set_language_label_text[self.lang_tag])
# weekly table
self.weekly_tw.setHorizontalHeaderLabels([self.weekly_hor_header_labels[self.lang_tag]])
self.weekly_tw.setVerticalHeaderLabels(self.weekly_vert_header_labels[self.lang_tag])
# stuff table
self.stuff_table.setHorizontalHeaderLabels(self.stuff_hor_header_labels[self.lang_tag])
# buttons
button_text_i = 0 if self.button_bought_calc_val else 1
self.cancel_bt.setText(self.buttons_text[self.lang_tag][1])
self.calculate_bt.setText(self.buttons_text[self.lang_tag][0][button_text_i])
self.button_bought_calc_val = False
# radio buttons
self.cal_rad.setText(self.rad_buttons_text[self.lang_tag][0])
self.show_emp_rad.setText(self.rad_buttons_text[self.lang_tag][1])
self.show_rad.setText(self.rad_buttons_text[self.lang_tag][2])
self.bought_rb.setText(self.rad_buttons_text[self.lang_tag][3])
# tabs
self.gloabal_tab.setTabText(0, self.tab_text[self.lang_tag][0])
self.gloabal_tab.setTabText(1, self.tab_text[self.lang_tag][1])
self.gloabal_tab.setTabText(2, self.tab_text[self.lang_tag][2])
def weekly_cell_clicked(self, row, col):
'''
weekly play cell clicked
:param row:
:param col:
:return:
'''
item = self.weekly_tw.item(row, col)
if item is None or item.text() == '':
day = self.weekly_vert_header_labels[self.lang_tag][row]
title = {'en': 'Enter new Meal', 'de': 'Neue Mahlzeit hinzufügen'}
message = {'en': 'Please enter the meal and link for %s' % day,
'de': 'Bitte geben sie ein Gericht und entsprechenden Link für den %s ein' % day}
buttons = {'en': ('Cancel', 'Accept'), 'de': ('Abbrechen', 'Annehmen')}
# TODO: handle links
self.mid.change_settings(title[self.lang_tag], message[self.lang_tag], "Name",
"Links", (buttons[self.lang_tag][0], buttons[self.lang_tag][1]),
lambda x, y: self.weekly_tw.setItem(row, col, QTableWidgetItem(x)))
self.mid.show()
def closeEvent(self, event):
'''
override closing event
:param event:
:return:
'''
        global running
        running = False
        # the barcode reader process is currently disabled (see the commented-out code in __init__),
        # so only terminate it if it actually exists
        if hasattr(self, 'bar_thread'):
            self.bar_thread.terminate()
            self.bar_thread.join()
def update_frame(self, img):
'''
update the image
:return:
'''
#if not self.queue.empty():
# frame = self.queue.get()
#img = frame['img']
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
size = self.video_view.size()
img = cv2.resize(img, (size.width()-10, size.height()-10))
height, width, bpc = img.shape
bpl = bpc * width
image = QImage(img.data, width, height, bpl, QImage.Format_RGB888)
pitem = QGraphicsPixmapItem(QPixmap.fromImage(image))
scene = QGraphicsScene()
scene.addItem(pitem)
self.video_view.setScene(scene)
def calculate_cal(self):
'''
calculate calories out of table
:return:
'''
all_there = True
names = []
for i in range(self.stuff_table.rowCount() - 1):
if self.stuff_table.item(i, 2) is None:
# if item is empty continue
continue
names.append(self.stuff_table.item(i, 2).text())
if not self.recipe_reader.get_hive_connection().check_if_in_stock(self.stuff_table.item(i, 2).text()):
# if the item is not in the stock mark it as red
self.stuff_table.item(i, 0).setBackground(QColor(255, 128, 128))
self.stuff_table.item(i, 1).setBackground(QColor(255, 128, 128))
self.stuff_table.item(i, 2).setBackground(QColor(255, 128, 128))
all_there = False
elif self.stuff_table.item(i, 0).background() == QColor(255, 128, 128):
all_there = False
if all_there:
# if all cals are there calculate everything and post it
cals = 0
for name in names:
if not name == '':
cals += int(self.recipe_reader.get_hive_connection().get_cal_for_name(name))
self.calorie_lcd.display(cals)
self.calorie_lcd.setSegmentStyle(QLCDNumber.Flat)
def show_empty_full_items(self, empty):
'''
show empty items
:return:
'''
self.clear_table(None, None)
self.stuff_table.setColumnCount(3)
self.stuff_table.setHorizontalHeaderLabels(['Volume', 'State', 'Name'])
items = self.recipe_reader.get_hive_connection().get_empty_stock_items(empty)
self.stuff_table.setRowCount(len(items))
for i, item in enumerate(items):
self.stuff_table.setItem(i, 0, QTableWidgetItem(str(item.vol)))
self.stuff_table.setItem(i, 1, QTableWidgetItem(item.state))
self.stuff_table.setItem(i, 2, QTableWidgetItem(item.name))
if empty:
self.stuff_table.item(i, 0).setBackground(QColor(255, 128, 128))
def show_calorie_calc(self, row_count=0):
'''
show calorie calculator
:return:
'''
if row_count > 0:
row_count += 1
self.clear_table(None, None)
self.stuff_table.clearContents()
self.stuff_table.setColumnCount(3)
self.stuff_table.setRowCount(row_count)
self.stuff_table.setHorizontalHeaderLabels(['Amount', 'Unit', 'Name'])
if row_count > 0:
stuff_widget = QWidget()
stuff_pixmap = QPixmap('icons/add.png')
stuff_icon = QIcon()
stuff_add_bt = QToolButton()
stuff_icon.addPixmap(stuff_pixmap)
stuff_add_bt.setIcon(stuff_icon)
stuff_add_bt.setIconSize(QSize(8, 8))
stuff_add_bt.clicked.connect(lambda: self.stuff_table.insertRow(row_count-1))
stuff_layout = QHBoxLayout()
stuff_layout.addWidget(stuff_add_bt)
stuff_widget.setLayout(stuff_layout)
self.stuff_table.setCellWidget(row_count-1, 2, stuff_widget)
def item_clicked(self, row, col):
'''
action to perform when one item is clicked
:param row: item row
:param col: item col
:return:
'''
item = self.stuff_table.item(row, col)
if item is None:
# if Item is none ignore
return
bg = item.background()
if bg == QColor(255, 128, 128):
# if item is colorful ask the user to add cals
name = self.stuff_table.item(row, 2).text()
text, ok = QInputDialog.getText(self, 'Calorie Input Dialog',
'Enter the calories per 100g for %s:' % name)
reg = re.compile('([0-9,.]+)')
cif = reg.match(text)
if cif is None or len(cif.group(1)) == 0:
return
# if cal is added reprint the item
self.recipe_reader.get_hive_connection().set_calorie_for_name(name, int(cif.group(1)))
self.stuff_table.item(row, 0).setBackground(QColor(0, 0, 0))
self.stuff_table.item(row, 1).setBackground(QColor(0, 0, 0))
self.stuff_table.item(row, 2).setBackground(QColor(0, 0, 0))
def clear_table(self, old, new):
'''
clear the table
:param old: not used
:param new: not used
:return:
'''
        # remove rows from the end, otherwise the shifting indices would skip every other row
        for i in reversed(range(self.stuff_table.rowCount())):
            self.stuff_table.removeRow(i)
def set_ingredients(self, item):
'''
        action for a chosen recipe
        :param item: chosen item
:return:
'''
if item.text() == '< None >':
# if None is choosen set up empty table
self.show_calorie_calc(20)
else:
ing = self.recipe_reader.get_ingredients(item.text())
ing_dic = {}
self.stuff_table.setRowCount(len(ing))
# set Row count to current list length
for i, (vol, name, cal) in enumerate(ing):
vol_gr = self.regex.match(str(vol))
# regex the amount
volume = self.recipe_reader.get_hive_connection().get_volume_of_item(name)
volume = self.regex.match(volume)
if vol_gr is not None:
vol = float(vol_gr.group(1))
unit = vol_gr.group(2)
else:
vol = 1.0
                unit = 'Prise'
if name in ing_dic.keys():
i, vol_o, unit_o = ing_dic[name]
vol += vol_o
else:
self.stuff_table.setItem(i, 2, QTableWidgetItem(name))
# fill table
ing_dic[name] = (i, vol, unit)
self.stuff_table.setItem(i, 0, QTableWidgetItem(str(vol)))
self.stuff_table.setItem(i, 1, QTableWidgetItem(unit))
volume = volume.group(1)
if int(volume) == 0:
self.stuff_table.item(i, 0).setBackground(QColor(247, 188, 7))
if cal is None:
self.stuff_table.item(i, 2).setBackground(QColor(255, 128, 128))
def set_timer(self, value):
'''
set the timer
:param value: value to set to
:return:
'''
self.timer_time = int(value)
self.timer_lcd.display(Kitchen.format_time(value))
def timer_start(self):
'''
start/stop the timer
:return:
'''
if self.timer_running:
self.timer_running = not self.timer_running
self.timer_bt.setIcon(self.icon_bl)
self.set_timer(0)
self.timer_dial.setValue(0)
self.timer_time = 0
self.timer_dial.setDisabled(False)
self.timer_timer.stop()
else:
if not self.timer_time == 0:
self.timer_dial.setDisabled(True)
self.timer_timer.start(1000)
self.timer_running = not self.timer_running
self.timer_bt.setIcon(self.icon_red)
def count_downwards(self):
'''
timer count down
:return:
'''
value = self.timer_time - 1
self.timer_time = value
if value == 0:
self.timer_dial.setDisabled(False)
self.timer_timer.stop()
self.timer_lcd.display(Kitchen.format_time(value))
def show_time(self):
'''
clock
:return:
'''
time = QTime.currentTime()
text = time.toString('hh:mm:ss')
self.time_lcd.display(text)
def show_start(self):
'''
show start window
:return:
'''
self.hide()
self.start.show()
@staticmethod
def format_time(number):
'''
format a number to time
:param number: number to format
:return:
'''
sek = number % 60
minutes = int(number / 60) % 60
hours = int(number / 3600)
if sek < 10:
sek = '0%i' % sek
else:
sek = str(sek)
if minutes < 10:
minutes = '0%i' % minutes
else:
minutes = str(minutes)
if hours < 10:
hours = '0%i' % hours
else:
hours = str(hours)
return '%s:%s:%s' % (hours, minutes, sek)
# currently ignore this method
@staticmethod
def grab(cam, grab_queue, standby_img):
'''
grabbing the next image and check if the camera is still connected
:param cam: camera number
:param grab_queue: queue
:param standby_img: standby_img that has to be shown if no camera is found
:return:
'''
global running
capture = cv2.VideoCapture(cam)
while running:
frame = {}
if capture.isOpened():
retval, img = capture.read()
if not retval:
frame['img'] = standby_img
#capture.release()
#capture = cv2.VideoCapture(cam)
else:
frame['img'] = img
else:
frame['img'] = standby_img
                capture.open(cam)
if grab_queue.qsize() < 10:
grab_queue.put(frame)
@staticmethod
def toggle_signal_event(signal, new_event, old_event):
'''
change the event of an signal slot
:param signal: signal of an item
:param new_event: new event method
:param old_event: old event method
:return:
'''
if new_event is None:
return
signal.disconnect()
signal.connect(new_event)
@staticmethod
def read_barcodes(update):
        '''
        barcode reader
        :param update: callback invoked with each scanned barcode
        :return:
        '''
while True:
bar = input()
if len(bar) > 0:
if bar == '000000000':
break
update(bar)
return
| 1.726563
| 2
|
nes/video/video_out.py
|
Hexadorsimal/pynes
| 1
|
12778140
|
<reponame>Hexadorsimal/pynes
class VideoOut:
def power_up(self):
pass
def power_down(self):
pass
def pixel(self, x, y, color):
raise NotImplementedError
def hsync(self):
raise NotImplementedError
def vsync(self):
raise NotImplementedError
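# Hedged sketch, not part of the original module: one possible concrete VideoOut that collects
# pixels into an in-memory numpy framebuffer. The resolution and the (r, g, b) colour format
# are assumptions for illustration.
import numpy as np
class ArrayVideoOut(VideoOut):
    def __init__(self, width=256, height=240):
        self.frame = np.zeros((height, width, 3), dtype=np.uint8)
    def pixel(self, x, y, color):
        # color is assumed to be an (r, g, b) tuple of 0..255 values
        self.frame[y, x] = color
    def hsync(self):
        pass  # nothing to do at the end of a scanline for an in-memory framebuffer
    def vsync(self):
        pass  # a real implementation might hand self.frame to a display or encoder here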
| 2.421875
| 2
|
upload_file.py
|
smartdan/dsrace
| 2
|
12778141
|
<reponame>smartdan/dsrace<filename>upload_file.py
#!/usr/bin/env python
'''
Copyright 2017 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import ftplib
import sys
import os
import base64
server = 'ftp.xxxxxxxx.it'
username = 'xxxxxxxxx.it'
password = '<PASSWORD>'
current_path = os.path.dirname(__file__) + '/'
ftp_connection = ftplib.FTP(server, username, password)
remote_path = "/htdocs/dsrace"
ftp_connection.cwd(remote_path)
try:
name = sys.argv[1]
fh1 = open(current_path + name + ".txt", 'rb')
ftp_connection.storbinary('STOR ' + name + '.txt', fh1)
fh1.close()
finally:
    print("Sent File")
| 2.265625
| 2
|
robosuite/scripts/Final_Copy/train.py
|
spatric5/robosuite
| 0
|
12778142
|
<gh_stars>0
import numpy as np
import gym
import os, sys
import robosuite
from arguments import get_args
from mpi4py import MPI
from subprocess import CalledProcessError
from ddpg_agent import ddpg_agent
import robosuite as suite
from robosuite.wrappers import GymWrapper
from robosuite import load_controller_config
"""
train the agent; the MPI part of the code is copied from OpenAI baselines (https://github.com/openai/baselines/blob/master/baselines/her)
"""
def get_env_params(env,args):
obs = env.reset()
if args.env_name == 'robosuite':
joint_c = obs['robot0_joint_pos_cos']
joint_s = obs['robot0_joint_pos_sin']
joint_angs = np.arctan2(joint_s,joint_c)
state_space_size = joint_angs.shape[0]+obs['robot0_joint_vel'].shape[0]+obs['robot0_eef_pos'].shape[0]+obs['robot0_eef_quat'].shape[0]
action_space_size = 7
goal_space_size = 3
if args.robosuite_string == 'TwoArmLift':
joint_c = obs['robot1_joint_pos_cos']
joint_s = obs['robot1_joint_pos_sin']
joint_angs = np.arctan2(joint_s,joint_c)
state_space_size = state_space_size+joint_angs.shape[0]+obs['robot1_joint_vel'].shape[0]+obs['robot1_eef_pos'].shape[0]+obs['robot1_eef_quat'].shape[0]
action_space_size = action_space_size+7
goal_space_size = goal_space_size+3
params = {'obs': state_space_size,
'goal': goal_space_size,
'action': action_space_size,
'action_max': 1.0,
}
params['max_timesteps'] = args.MAX_STEPS
else:
# close the environment
params = {'obs': obs['observation'].shape[0],
'goal': obs['desired_goal'].shape[0],
'action': env.action_space.shape[0],
'action_max': env.action_space.high[0],
}
params['max_timesteps'] = env._max_episode_steps
return params
def launch(args):
# create the ddpg_agent
if args.env_name == 'robosuite':
config = load_controller_config(default_controller=args.CTRL_STRING)
if args.robosuite_string == 'Lift':
            env = suite.make(
args.robosuite_string,
robots="Sawyer", # use Sawyer robot
use_camera_obs=False, # do not use pixel observations
has_offscreen_renderer=False, # not needed since not using pixel obs
has_renderer=args.RENDER_ENV, # make sure we can render to the screen
reward_shaping=True, # use dense rewards
control_freq=20, # control should happen fast enough so that simulation looks smooth
controller_configs = config, # Controller config
)
else:
            env = suite.make(
args.robosuite_string,
robots=["Sawyer","Sawyer"], # use Sawyer robot
use_camera_obs=False, # do not use pixel observations
has_offscreen_renderer=False, # not needed since not using pixel obs
has_renderer=args.RENDER_ENV, # make sure we can render to the screen
reward_shaping=True, # use dense rewards
control_freq=20, # control should happen fast enough so that simulation looks smooth
controller_configs = config, # Controller config
)
else:
env = gym.make(args.env_name)
# get the environment parameters
env_params = get_env_params(env,args)
# create the ddpg agent to interact with the environment
ddpg_trainer = ddpg_agent(args, env, env_params)
ddpg_trainer.learn()
if __name__ == '__main__':
# take the configuration for the HER
os.environ['OMP_NUM_THREADS'] = '1'
os.environ['MKL_NUM_THREADS'] = '1'
os.environ['IN_MPI'] = '1'
# get the params
args = get_args()
launch(args)
| 2.25
| 2
|
src/project/models/user.py
|
farzadghanei/flask-skel
| 0
|
12778143
|
'''
project.models.user
-------------------
Defines a user model class
'''
from datetime import datetime
from werkzeug.security import generate_password_hash, check_password_hash
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from itsdangerous import SignatureExpired, BadSignature
from flask_login import UserMixin
from ..extensions import db
from . import ModelMixin
class User(UserMixin, ModelMixin, db.Model):
__tablename__ = 'user'
email = db.Column(db.String(64), unique=True, index=True)
password_hash = db.Column(db.String(128))
confirmed = db.Column(db.Boolean, default=False)
name = db.Column(db.String(64))
surname = db.Column(db.String(64))
member_since = db.Column(db.DateTime(), default=datetime.utcnow)
last_seen = db.Column(db.DateTime(), default=datetime.utcnow)
def __init__(self, **kwargs):
super(User, self).__init__(**kwargs)
def __repr__(self):
return '<User {} {} <{}>>'.format(self.name, self.surname, self.email)
@property
def password(self):
raise AttributeError('password is not a readable attribute')
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
def generate_confirmation_token(self, secret_key, expiration=3600):
return Serializer(secret_key, expiration).dumps({'confirm': self.id})
def confirm(self, secret_key, token):
serializer = Serializer(secret_key)
try:
data = serializer.loads(token)
except (ValueError, SignatureExpired, BadSignature):
return False
if data.get('confirm') != self.id:
return False
self.confirmed = True
return True
def generate_reset_token(self, secret_key, expiration=3600):
return Serializer(secret_key, expiration).dumps({'reset': self.id})
    def reset_password(self, secret_key, token, new_password):
serializer = Serializer(secret_key)
try:
data = serializer.loads(token)
except (ValueError, SignatureExpired, BadSignature):
return False
if data.get('reset') != self.id:
return False
        self.password = new_password
return True
def generate_auth_token(self, secret_key, expires_in):
return Serializer(secret_key, expires_in=expires_in).dumps({'id': self.id}).decode('ascii')
@staticmethod
def verify_auth_token(secret_key, token):
serializer = Serializer(secret_key)
try:
data = serializer.loads(token)
except (ValueError, SignatureExpired, BadSignature):
return None
return User.query.get(data['id'])
__all__ = ('User',)
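# Hedged sketch, not part of the original module: the token round trip behind
# generate_confirmation_token()/confirm() reduces to the following, shown with a standalone
# payload instead of a User instance (the secret key below is a placeholder).
def _token_roundtrip_demo(secret_key='not-a-real-secret'):
    token = Serializer(secret_key, 3600).dumps({'confirm': 42})
    try:
        data = Serializer(secret_key).loads(token)
    except (ValueError, SignatureExpired, BadSignature):
        return False
    return data.get('confirm') == 42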
| 2.8125
| 3
|
test/server/test_mailbox.py
|
BoniLindsley/pymap
| 18
|
12778144
|
<reponame>BoniLindsley/pymap
from textwrap import dedent
import pytest
from .base import TestBase
pytestmark = pytest.mark.asyncio
class TestMailbox(TestBase):
async def test_list_sep(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_readline(
b'list1 LIST "" ""\r\n')
transport.push_write(
b'* LIST (\\Noselect) "/" ""\r\n'
b'list1 OK LIST completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_list(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_readline(
b'list1 LIST "" *\r\n')
transport.push_write(
b'* LIST (\\HasNoChildren) "/" INBOX\r\n'
b'* LIST (\\HasNoChildren) "/" Sent\r\n'
b'* LIST (\\HasNoChildren) "/" Trash\r\n'
b'list1 OK LIST completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_create(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_readline(
b'create1 CREATE "test mailbox"\r\n')
transport.push_write(
b'create1 OK [MAILBOXID (', (br'F[a-f0-9]+', ), b')]'
b' CREATE completed.\r\n')
transport.push_readline(
b'list1 LIST "" *\r\n')
transport.push_write(
b'* LIST (\\HasNoChildren) "/" INBOX\r\n'
b'* LIST (\\HasNoChildren) "/" Sent\r\n'
b'* LIST (\\HasNoChildren) "/" Trash\r\n'
b'* LIST (\\HasNoChildren) "/" "test mailbox"\r\n'
b'list1 OK LIST completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_create_inferior(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_readline(
b'create1 CREATE "Trash/test mailbox"\r\n')
transport.push_write(
b'create1 OK [MAILBOXID (', (br'F[a-f0-9]+', ), b')]'
b' CREATE completed.\r\n')
transport.push_readline(
b'list1 LIST "Trash" *\r\n')
transport.push_write(
b'* LIST (\\HasChildren) "/" Trash\r\n'
b'* LIST (\\HasNoChildren) "/" "Trash/test mailbox"\r\n'
b'list1 OK LIST completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_delete(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_readline(
b'delete1 DELETE Sent\r\n')
transport.push_write(
b'delete1 OK DELETE completed.\r\n')
transport.push_readline(
b'list2 LIST "" *\r\n')
transport.push_write(
b'* LIST (\\HasNoChildren) "/" INBOX\r\n'
b'* LIST (\\HasNoChildren) "/" Trash\r\n'
b'list2 OK LIST completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_delete_superior(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_readline(
b'create1 CREATE "Trash/test mailbox"\r\n')
transport.push_write(
b'create1 OK [MAILBOXID (', (br'F[a-f0-9]+', ), b')]'
b' CREATE completed.\r\n')
transport.push_readline(
b'delete1 DELETE Trash\r\n')
transport.push_write(
b'delete1 OK DELETE completed.\r\n')
transport.push_readline(
b'list1 LIST "Trash" *\r\n')
transport.push_write(
b'* LIST (\\Noselect \\HasChildren) "/" Trash\r\n'
b'* LIST (\\HasNoChildren) "/" "Trash/test mailbox"\r\n'
b'list1 OK LIST completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_delete_selected(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_select(b'Sent')
transport.push_readline(
b'delete1 DELETE Sent\r\n')
transport.push_write(
b'* BYE Selected mailbox no longer exists.\r\n'
b'delete1 OK DELETE completed.\r\n')
await self.run(transport)
async def test_lsub(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_readline(
b'lsub1 LSUB "" *\r\n')
transport.push_write(
b'* LSUB (\\HasNoChildren) "/" INBOX\r\n'
b'lsub1 OK LSUB completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_subscribe_unsubscribe(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_readline(
b'subscribe1 SUBSCRIBE "Sent"\r\n')
transport.push_write(
b'subscribe1 OK SUBSCRIBE completed.\r\n')
transport.push_readline(
b'subscribe2 SUBSCRIBE "Trash"\r\n')
transport.push_write(
b'subscribe2 OK SUBSCRIBE completed.\r\n')
transport.push_readline(
b'unsubscribe1 UNSUBSCRIBE "Trash"\r\n')
transport.push_write(
b'unsubscribe1 OK UNSUBSCRIBE completed.\r\n')
transport.push_readline(
b'lsub1 LSUB "" *\r\n')
transport.push_write(
b'* LSUB (\\HasNoChildren) "/" INBOX\r\n'
b'* LSUB (\\HasNoChildren) "/" Sent\r\n'
b'lsub1 OK LSUB completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_status(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_readline(
b'status1 STATUS INBOX '
b'(MESSAGES RECENT UIDNEXT UIDVALIDITY UNSEEN MAILBOXID)\r\n')
transport.push_write(
b'* STATUS INBOX (MESSAGES 4 RECENT 1 UIDNEXT 105 '
b'UIDVALIDITY ', (br'\d+', b'uidval1'), b' UNSEEN 2 '
b'MAILBOXID (', (br'F[a-f0-9]+', b'mbxid'), b'))\r\n'
b'status1 OK STATUS completed.\r\n')
transport.push_select(b'INBOX', 4, 1, 105, 3)
transport.push_readline(
b'status2 STATUS INBOX '
b'(MESSAGES RECENT UIDNEXT UIDVALIDITY UNSEEN)\r\n')
transport.push_write(
b'* STATUS INBOX (MESSAGES 4 RECENT 1 UIDNEXT 105 '
b'UIDVALIDITY ', (br'\d+', b'uidval2'), b' UNSEEN 2)\r\n'
b'status2 OK STATUS completed.\r\n')
transport.push_readline(
b'close1 CLOSE\r\n')
transport.push_write(
b'close1 OK CLOSE completed.\r\n')
transport.push_readline(
b'status3 STATUS INBOX '
b'(MESSAGES RECENT UIDNEXT UIDVALIDITY UNSEEN)\r\n')
transport.push_write(
b'* STATUS INBOX (MESSAGES 4 RECENT 0 UIDNEXT 105 '
b'UIDVALIDITY ', (br'\d+', b'uidval2'), b' UNSEEN 2)\r\n'
b'status3 OK STATUS completed.\r\n')
transport.push_logout()
await self.run(transport)
assert self.matches['uidval1'] == self.matches['uidval2']
assert self.matches['mbxid1'] == self.matches['mbxid']
async def test_append(self, imap_server):
transport = self.new_transport(imap_server)
message = b'test message\r\n'
transport.push_login()
transport.push_readline(
b'append1 APPEND INBOX (\\Seen) {%i}\r\n' % len(message))
transport.push_write(
b'+ Literal string\r\n')
transport.push_readexactly(message)
transport.push_readline(
b'\r\n')
transport.push_write(
b'append1 OK [APPENDUID ', (br'\d+', ), b' 105]'
b' APPEND completed.\r\n')
transport.push_select(b'INBOX', 5, 2, 106, 3)
transport.push_logout()
await self.run(transport)
async def test_append_empty(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_readline(
b'append1 APPEND INBOX {0}\r\n')
transport.push_write(
b'+ Literal string\r\n')
transport.push_readexactly(
b'')
transport.push_readline(
b'\r\n')
transport.push_write(
b'append1 NO APPEND cancelled.\r\n')
transport.push_select(b'INBOX', 4, 1, 105, 3)
transport.push_logout()
await self.run(transport)
async def test_append_multi(self, imap_server):
transport = self.new_transport(imap_server)
message_1 = b'test message\r\n'
message_2 = b'other test message\r\n'
transport.push_login()
transport.push_readline(
b'append1 APPEND INBOX (\\Seen) {%i}\r\n' % len(message_1))
transport.push_write(
b'+ Literal string\r\n')
transport.push_readexactly(message_1)
transport.push_readline(
b' {%i}\r\n' % len(message_2))
transport.push_write(
b'+ Literal string\r\n')
transport.push_readexactly(message_2)
transport.push_readline(
b'\r\n')
transport.push_write(
b'append1 OK [APPENDUID ', (br'\d+', ), b' 105:106]'
b' APPEND completed.\r\n')
transport.push_select(b'INBOX', 6, 3, 107, 3)
transport.push_logout()
await self.run(transport)
async def test_append_selected(self, imap_server):
transport = self.new_transport(imap_server)
message = b'test message\r\n'
transport.push_login()
transport.push_select(b'INBOX', 4, 1, 105, 3)
transport.push_readline(
b'append1 APPEND INBOX (\\Seen) {%i}\r\n' % len(message))
transport.push_write(
b'+ Literal string\r\n')
transport.push_readexactly(message)
transport.push_readline(
b'\r\n')
transport.push_write(
b'* 5 EXISTS\r\n'
b'* 2 RECENT\r\n'
b'* 5 FETCH (FLAGS (\\Recent \\Seen))\r\n'
b'append1 OK [APPENDUID ', (br'\d+', ), b' 105]'
b' APPEND completed.\r\n')
transport.push_readline(
b'status1 STATUS INBOX (RECENT)\r\n')
transport.push_write(
b'* STATUS INBOX (RECENT 2)\r\n'
b'status1 OK STATUS completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_append_email_id(self, imap_server):
transport = self.new_transport(imap_server)
message_1 = b'test message\r\n'
message_2 = b'other test message\r\n'
message_3 = message_1
transport.push_login()
transport.push_readline(
b'append1 APPEND INBOX {%i+}\r\n' % len(message_1))
transport.push_readexactly(message_1)
transport.push_readline(
b'\r\n')
transport.push_write(
b'append1 OK [APPENDUID ', (br'\d+', ), b' 105]'
b' APPEND completed.\r\n')
transport.push_readline(
b'append2 APPEND INBOX {%i+}\r\n' % len(message_2))
transport.push_readexactly(message_2)
transport.push_readline(
b'\r\n')
transport.push_write(
b'append2 OK [APPENDUID ', (br'\d+', ), b' 106]'
b' APPEND completed.\r\n')
transport.push_readline(
b'append3 APPEND INBOX {%i+}\r\n' % len(message_3))
transport.push_readexactly(message_3)
transport.push_readline(
b'\r\n')
transport.push_write(
b'append3 OK [APPENDUID ', (br'\d+', ), b' 107]'
b' APPEND completed.\r\n')
transport.push_select(b'INBOX')
transport.push_readline(
b'fetch1 UID FETCH 105:107 (EMAILID)\r\n')
transport.push_write(
b'* 5 FETCH (EMAILID (', (br'M[a-f0-9]+', b'id1'), b') '
b'UID 105)\r\n'
b'* 6 FETCH (EMAILID (', (br'M[a-f0-9]+', b'id2'), b') '
b'UID 106)\r\n'
b'* 7 FETCH (EMAILID (', (br'M[a-f0-9]+', b'id3'), b') '
b'UID 107)\r\n'
b'fetch1 OK UID FETCH completed.\r\n')
transport.push_logout()
await self.run(transport)
assert self.matches['id1'] != self.matches['id2']
assert self.matches['id1'] == self.matches['id3']
async def test_append_thread_id(self, imap_server):
messages = [dedent("""\
Message-Id: <one>
Subject: thread one
""").encode('ascii'),
dedent("""\
Message-Id: <one>
Subject: Fwd: thread one
""").encode('ascii'),
dedent("""\
Message-Id: <one>
Subject: unrelated to thread one
""").encode('ascii'),
dedent("""\
Message-Id: <two>
Subject: thread two
""").encode('ascii'),
dedent("""\
Message-Id: <three>
In-Reply-To: <two>
Subject: Re: thread two
""").encode('ascii'),
dedent("""\
Message-Id: <four>
References: <two> <five>
Subject: [a list] thread two
""").encode('ascii'),
dedent("""\
Message-Id: <five>
Subject: thread two
""").encode('ascii')]
transport = self.new_transport(imap_server)
transport.push_login()
for i, message in enumerate(messages):
transport.push_readline(
b'append1 APPEND INBOX {%i+}\r\n' % len(message))
transport.push_readexactly(message)
transport.push_readline(
b'\r\n')
transport.push_write(
b'append1 OK [APPENDUID ', (br'\d+ \d+', ), b']'
b' APPEND completed.\r\n')
transport.push_select(b'INBOX')
transport.push_readline(
b'fetch1 UID FETCH 105:* (THREADID)\r\n')
transport.push_write(
b'* 5 FETCH (THREADID (', (br'T[a-f0-9]+', b'id1'), b') '
b'UID 105)\r\n'
b'* 6 FETCH (THREADID (', (br'T[a-f0-9]+', b'id2'), b') '
b'UID 106)\r\n'
b'* 7 FETCH (THREADID (', (br'T[a-f0-9]+', b'id3'), b') '
b'UID 107)\r\n'
b'* 8 FETCH (THREADID (', (br'T[a-f0-9]+', b'id4'), b') '
b'UID 108)\r\n'
b'* 9 FETCH (THREADID (', (br'T[a-f0-9]+', b'id5'), b') '
b'UID 109)\r\n'
b'* 10 FETCH (THREADID (', (br'T[a-f0-9]+', b'id6'), b') '
b'UID 110)\r\n'
b'* 11 FETCH (THREADID (', (br'T[a-f0-9]+', b'id7'), b') '
b'UID 111)\r\n'
b'fetch1 OK UID FETCH completed.\r\n')
transport.push_logout()
await self.run(transport)
assert self.matches['id1'] == self.matches['id2']
assert self.matches['id1'] != self.matches['id3']
assert self.matches['id1'] != self.matches['id4']
assert self.matches['id4'] == self.matches['id5']
assert self.matches['id4'] == self.matches['id6']
assert self.matches['id4'] == self.matches['id7']
| 2.3125
| 2
|
mprotect.py
|
anakrish/mystikos-debug-utils
| 1
|
12778145
|
import gdb
import math
import tempfile
class myst_mprotect_tracker(gdb.Breakpoint):
def __init__(self):
#super(myst_mprotect_tracker, self).__init__('myst_mprotect_ocall', internal=True)
#self.bp = gdb.Breakpoint.__init__(self,'exec.c:637', internal=True)
#self.bp = gdb.Breakpoint.__init__(self,'_mprotect', internal=True)
super(myst_mprotect_tracker, self).__init__('_mprotect', internal=False)
self.calls = []
self.bt_spec = []
self.breaks = []
def stop(self):
addr = int(gdb.parse_and_eval('(uint64_t)addr'))
length = int(gdb.parse_and_eval('(uint64_t)len'))
prot = int(gdb.parse_and_eval('(int)prot'))
thread = int(gdb.parse_and_eval('$_thread'))
bt = None
index = len(self.calls) + 1
if self.bt_spec:
frames = self.bt_spec[0]
start_index = self.bt_spec[1]
end_index = self.bt_spec[2]
if index >= start_index and index <= end_index:
bt = gdb.execute('bt %d' % frames, False, True)
self.calls.append((addr, length, prot, bt, thread))
if index in self.breaks:
print("myst-prot: breaking at call %d" % index)
return True
return False
def do_command(self, arg0, *args):
if arg0 == "-bt":
self.set_bt_spec(*args)
elif arg0 == "-b":
self.add_breaks(*args)
else:
self.get_prot(arg0, *args)
def set_bt_spec(self, frames=1000, start_index=1, end_index=pow(2,32)):
self.bt_spec = (frames, start_index, end_index)
def add_breaks(self, *args):
for a in args:
self.breaks.append(int(a))
def get_prot(self, addr_str, get_all=None):
addr = int(gdb.parse_and_eval(addr_str))
print('address %s = 0x%x' % (addr_str, addr))
index = len(self.calls) + 1
for c in reversed(self.calls):
index -= 1
start = c[0]
length = c[1]
end = start + length
end = math.ceil(end/4096) * 4096
prot = c[2]
bt = c[3]
thread = c[4]
if addr >= start and addr < end:
print('matching mprotect call %d : thread %d, start=0x%x, adjusted end=0x%x, prot=%d, length = %d' %
(index, thread, start, end, prot, length))
if bt:
print(bt)
if not get_all:
break
mprotect_tracker = None
command = """
define myst-prot
if $argc == 4
python mprotect_tracker.do_command("$arg0", $arg1, $arg2, $arg3)
end
if $argc == 3
python mprotect_tracker.do_command("$arg0", $arg1, $arg2)
end
if $argc == 2
python mprotect_tracker.do_command("$arg0", $arg1)
end
if $argc == 1
python mprotect_tracker.do_command("$arg0")
end
end
"""
if __name__ == "__main__":
gdb.events.exited.connect(exit_handler)
mprotect_tracker = myst_mprotect_tracker()
with tempfile.NamedTemporaryFile('w') as f:
f.write(command)
f.flush()
gdb.execute('source %s' % f.name)
def exit_handler(event):
global mprotect_tracker
mprotect_tracker = None
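# Hedged usage sketch, not part of the original script: inside a Mystikos debug session one
# might source this file and then use the generated "myst-prot" command, for example:
#   (gdb) source mprotect.py
#   (gdb) myst-prot -bt 20 1      # record 20-frame backtraces starting at mprotect call 1
#   (gdb) myst-prot -b 42         # break when mprotect call number 42 is hit
#   (gdb) myst-prot $rip          # show the most recent mprotect call covering an address
# The numbers and the address expression are illustrative only.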
| 2.296875
| 2
|
setup.py
|
azafred/skeletor
| 0
|
12778146
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from sample.version import __version__
with open('README.rst') as f:
readme = f.read()
with open('LICENSE') as f:
license = f.read()
with open('requirements.txt') as f:
required = f.read().splitlines()
setup(
name='sample',
version=__version__,
description='Sample package',
long_description=readme,
author='<NAME>',
author_email='<EMAIL>',
url='https://github.com/azafred/sample',
license=license,
packages=find_packages(exclude=('tests', 'docs')),
install_requires=required,
tests_require=['nose', 'testfixtures', 'mock'],
test_suite="nose.collector",
entry_points={
'console_scripts': [
'sample = sample.main:main'
]
},
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Operating System :: MacOS'
]
)
| 1.203125
| 1
|
ElevatorBot_old/slashCommands/externalWebsites.py
|
LukasSchmid97/destinyBloodoakStats
| 3
|
12778147
|
<filename>ElevatorBot_old/slashCommands/externalWebsites.py<gh_stars>1-10
from discord.ext import commands
from discord_slash import cog_ext, SlashContext, ButtonStyle
from discord_slash.utils import manage_components
from discord_slash.utils.manage_commands import create_option, create_choice
from ElevatorBot.backendNetworking.destinyPlayer import DestinyPlayer
from ElevatorBot.backendNetworking.slashCommandFunctions import get_user_obj
from ElevatorBot.static.slashCommandOptions import options_user
class ExternalWebsitesCommands(commands.Cog):
def __init__(self, client):
self.client = client
self.rrsystem = {1: "xb", 2: "ps", 3: "pc"}
# @cog_ext.cog_slash(
# name="website",
# description="Gets your personalised link to a bunch of Destiny 2 related websites",
# options=[
# create_option(
# name="website",
# description="The name of the website you want a personalised link for",
# option_type=3,
# required=True,
# choices=[
# create_choice(name="Braytech.org", value="Braytech.org"),
# create_choice(name="D2 Checklist", value="D2 Checklist"),
# create_choice(name="Destiny Tracker", value="Destiny Tracker"),
# create_choice(name="Dungeon Report", value="Dungeon Report"),
# create_choice(name="Grandmaster Report", value="Grandmaster Report"),
# create_choice(name="Nightfall Report", value="Nightfall Report"),
# create_choice(name="Raid Report", value="Raid Report"),
# create_choice(name="Solo Report", value="Solo Report"),
# create_choice(name="Expunge Report", value="Expunge Report"),
# create_choice(name="Trials Report", value="Trials Report"),
# create_choice(name="Triumph Report", value="Triumph Report"),
# create_choice(name="Wasted on Destiny", value="Wasted on Destiny"),
# ],
# ),
# options_user(),
# ],
# )
# async def _website(self, ctx: SlashContext, website, **kwargs):
# user = await get_user_obj(ctx, kwargs)
# destiny_player = await DestinyPlayer.from_discord_id(user.id, ctx=ctx)
# if not destiny_player:
# return
#
# # get the text
# text = ""
# if website == "Solo Report":
# text = f"https://elevatorbot.ch/soloreport/{destiny_player.system}/{destiny_player.destiny_id}"
# elif website == "Expunge Report":
# text = f"https://elevatorbot.ch/expungereport/{self.rrsystem[destiny_player.system]}/{destiny_player.destiny_id}"
# elif website == "Raid Report":
# text = f"https://raid.report/{self.rrsystem[destiny_player.system]}/{destiny_player.destiny_id}"
# elif website == "Dungeon Report":
# text = f"https://dungeon.report/{self.rrsystem[destiny_player.system]}/{destiny_player.destiny_id}"
# elif website == "Grandmaster Report":
# text = f"https://grandmaster.report/user/{destiny_player.system}/{destiny_player.destiny_id}"
# elif website == "Nightfall Report":
# text = f"https://nightfall.report/guardian/{destiny_player.system}/{destiny_player.destiny_id}"
# elif website == "Trials Report":
# text = f"https://destinytrialsreport.com/report/{destiny_player.system}/{destiny_player.destiny_id}"
# elif website == "Triumph Report":
# text = f"https://triumph.report/{destiny_player.system}/{destiny_player.destiny_id}"
# elif website == "Braytech.org":
# text = f"https://braytech.org/{destiny_player.system}/{destiny_player.destiny_id}"
# elif website == "D2 Checklist":
# text = f"https://www.d2checklist.com/{destiny_player.system}/{destiny_player.destiny_id}"
# elif website == "Destiny Tracker":
# text = f"https://destinytracker.com/destiny-2/profile/{destiny_player.system}/{destiny_player.destiny_id}"
# elif website == "Wasted on Destiny":
# text = f"https://wastedondestiny.com/{destiny_player.system}_{destiny_player.destiny_id}"
#
# components = [
# manage_components.create_actionrow(
# manage_components.create_button(
# style=ButtonStyle.URL,
# label=f"{user.display_name} - {website}",
# url=text,
# ),
# ),
# ]
# await ctx.send(content="", components=components)
def setup(client):
client.add_cog(ExternalWebsitesCommands(client))
| 2.34375
| 2
|
python/fileSearch/fileSearch.py
|
ped998/scripts
| 0
|
12778148
|
<gh_stars>0
#!/usr/bin/env python
"""search for files using python"""
# version 2021.02.23
# usage: ./fileSearch.py -v mycluster \
#                        -u myuser \
#                        -d mydomain.net \
#                        -p /home/myuser/myfile.txt \
#                        -s server1.mydomain.net \
#                        -j myjob
# import pyhesity wrapper module
from pyhesity import *
# from datetime import datetime
# import codecs
import sys
import argparse
if sys.version_info.major >= 3 and sys.version_info.minor >= 5:
from urllib.parse import quote_plus
else:
from urllib import quote_plus
# command line arguments
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--vip', type=str, required=True) # cluster to connect to
parser.add_argument('-u', '--username', type=str, default='helios') # username
parser.add_argument('-d', '--domain', type=str, default='local') # domain - defaults to local
parser.add_argument('-i', '--useApiKey', action='store_true') # use API key authentication
parser.add_argument('-pwd', '--password', type=str, default=None) # optional password
parser.add_argument('-p', '--filepath', type=str, required=True)  # path of the file to search for
parser.add_argument('-s', '--sourceserver', type=str, default=None) # name of source server
parser.add_argument('-j', '--jobname', type=str, default=None) # narrow search by job name
parser.add_argument('-t', '--jobtype', type=str, choices=['VMware', 'Physical', None], default=None)
parser.add_argument('-x', '--showversions', type=int, default=None)  # show backup versions for the Nth search result
args = parser.parse_args()
vip = args.vip
username = args.username
domain = args.domain
password = args.password
useApiKey = args.useApiKey
filepath = args.filepath
sourceserver = args.sourceserver
jobname = args.jobname
jobtype = args.jobtype
showversions = args.showversions
# authenticate
apiauth(vip=vip, username=username, domain=domain, password=password, useApiKey=useApiKey)
jobs = api('get', 'protectionJobs')
encodedFile = quote_plus(filepath)
searchUrl = '/searchfiles?filename=%s' % encodedFile
if jobname is not None:
job = [j for j in jobs if j['name'].lower() == jobname.lower()]
if len(job) == 0:
print('Job %s not found' % jobname)
exit()
else:
searchUrl = '%s&jobIds=%s' % (searchUrl, job[0]['id'])
if jobtype is not None:
searchUrl = '%s&entityTypes=k%s' % (searchUrl, jobtype)
search = api('get', searchUrl)
print('')
x = 0
if search is not None and 'files' in search and len(search['files']) > 0:
for file in search['files']:
job = [j for j in jobs if j['id'] == file['fileDocument']['objectId']['jobId']]
if len(job) > 0:
if sourceserver is None or file['fileDocument']['objectId']['entity']['displayName'].lower() == sourceserver.lower():
x += 1
print('%s: %s / %s -> %s' % (x, job[0]['name'], file['fileDocument']['objectId']['entity']['displayName'], file['fileDocument']['filename']))
if showversions == x:
clusterId = file['fileDocument']['objectId']['jobUid']['clusterId']
clusterIncarnationId = file['fileDocument']['objectId']['jobUid']['clusterIncarnationId']
entityId = file['fileDocument']['objectId']['entity']['id']
jobId = file['fileDocument']['objectId']['jobId']
versions = api('get', '/file/versions?clusterId=%s&clusterIncarnationId=%s&entityId=%s&filename=%s&fromObjectSnapshotsOnly=false&jobId=%s' % (clusterId, clusterIncarnationId, entityId, encodedFile, jobId))
if versions is not None and 'versions' in versions and len(versions['versions']) > 0:
print('\n%10s %s' % ('runId', 'runDate'))
print('%10s %s' % ('-----', '-------'))
for version in versions['versions']:
print('%10d %s' % (version['instanceId']['jobInstanceId'], usecsToDate(version['instanceId']['jobStartTimeUsecs'])))
if showversions is None:
print('\n%s files found' % x)
else:
print('')
| 2.1875
| 2
|
relaxrender/rasterization.py
|
hefangwuwu/relaxrender
| 4
|
12778149
|
<reponame>hefangwuwu/relaxrender
import numpy as np
from .points import Point, Vector, Points
from .triangle import Triangle, Triangles
class Raster:
# for OpenGL alike forward rendering.
def __init__(self, context):
self.context = context
def rasterize(self, triangles):
pass
class SimpleRaster(Raster):
def __init__(self, context):
super().__init__(context)
def rasterize(self, triangles):
pass
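# Hedged sketch, not part of the original module: a self-contained illustration of the kind of
# edge-function coverage test a rasterize() implementation might perform per pixel. It deliberately
# avoids the relaxrender Point/Triangle classes, whose exact API is not assumed here.
def _covers(v0, v1, v2, px, py):
    """Return True if the pixel centre (px, py) lies inside the 2D triangle (v0, v1, v2)."""
    def edge(a, b, p):
        # signed area of the parallelogram spanned by (b - a) and (p - a)
        return (b[0] - a[0]) * (p[1] - a[1]) - (b[1] - a[1]) * (p[0] - a[0])
    w0 = edge(v1, v2, (px, py))
    w1 = edge(v2, v0, (px, py))
    w2 = edge(v0, v1, (px, py))
    # all edge functions sharing a sign (or being zero) means the point is inside
    return (w0 >= 0 and w1 >= 0 and w2 >= 0) or (w0 <= 0 and w1 <= 0 and w2 <= 0)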
| 2.78125
| 3
|
DataWorkflow/file_deletion/migrations/0010_remove_batch_number.py
|
Swiss-Polar-Institute/data-workflow
| 0
|
12778150
|
<reponame>Swiss-Polar-Institute/data-workflow
# Generated by Django 2.2.6 on 2019-10-30 11:34
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('file_deletion', '0009_filetobedeleted_batch'),
]
operations = [
migrations.RemoveField(
model_name='batch',
name='number',
),
migrations.RemoveField(
model_name='filetobedeleted',
name='batch_old',
),
]
| 1.453125
| 1
|