| repo_name (stringlengths 5–100) | path (stringlengths 4–231) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (stringlengths 0–8.16k) | middle (stringlengths 3–512) | suffix (stringlengths 0–8.17k) |
|---|---|---|---|---|---|---|---|---|
openmb/openblackhole-enigma2
|
lib/python/Components/About.py
|
Python
|
gpl-2.0
| 2,999
| 0.037012
|
# -*- coding: utf-8 -*-
import sys, os, time
from Tools.HardwareInfo import HardwareInfo
def getVersionString():
return getImageVersionString()
def getImageVersionString():
try:
if os.path.isfile('/var/lib/opkg/status'):
st = os.stat('/var/lib/opkg/status')
else:
st = os.stat('/usr/lib/ipkg/status')
tm = time.localtime(st.st_mtime)
if tm.tm_year >= 2011:
return time.strftime("%Y-%m-%d %H:%M:%S", tm)
except:
pass
return _("unavailable")
def getFlashDateString():
try:
return time.strftime(_("%Y-%m-%d %H:%M"), time.localtime(os.stat("/boot").st_ctime))
except:
return _("unknown")
def getEnigmaVersionString():
import enigma
enigma_version = enigma.getEnigmaVersionString()
if '-(no branch)' in enigma_version:
enigma_version = enigma_version [:-12]
return enigma_version
def getGStreamerVersionString():
import enigma
return enigma.getGStreamerVersionString()
def getKernelVersionString():
try:
return open("/proc/version","r").read().split(' ', 4)[2].split('-',2)[0]
except:
return _("unknown")
def getHardwareTypeString():
return HardwareInfo().get_device_string()
def getImageTypeString():
try:
return open("/etc/issue").readlines()[-2].capitalize().strip()[:-6]
except:
return _("undefined")
def getCPUInfoString():
try:
cpu_count = 0
cpu_speed = 0
for line in open("/proc/cpuinfo").readlines():
line = [x.s
|
trip() for x in line.strip().split(":")]
if line[0] in ("system type", "model name"):
processor = line[1].split()[0]
elif line[0] == "cpu MHz":
cpu_speed = "%1.0f" % float(line[1])
elif line[0] == "processor":
cpu_count +=
|
1
if not cpu_speed:
try:
cpu_speed = int(open("/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_max_freq").read()) / 1000
except:
cpu_speed = "-"
if os.path.isfile('/proc/stb/fp/temp_sensor_avs'):
temperature = open("/proc/stb/fp/temp_sensor_avs").readline().replace('\n','')
return "%s %s MHz (%s) %s°C" % (processor, cpu_speed, ngettext("%d core", "%d cores", cpu_count) % cpu_count, temperature)
return "%s %s MHz (%s)" % (processor, cpu_speed, ngettext("%d core", "%d cores", cpu_count) % cpu_count)
except:
return _("undefined")
def getDriverInstalledDate():
try:
from glob import glob
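# extracts what looks like a YYYYMMDD date from the Version: line of the installed dvb-modules control file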
driver = [x.split("-")[-2:-1][0][-8:] for x in open(glob("/var/lib/opkg/info/*-dvb-modules-*.control")[0], "r") if x.startswith("Version:")][0]
return "%s-%s-%s" % (driver[:4], driver[4:6], driver[6:])
except:
return _("unknown")
def getPythonVersionString():
try:
import commands
status, output = commands.getstatusoutput("python -V")
return output.split(' ')[1]
except:
return _("unknown")
def getDriverVuInstalledDate():
try:
driver = os.popen("opkg list-installed | grep vuplus-dvb-").read().strip()
driver = driver.split("-")
#return driver[:4] + "-" + driver[4:6] + "-" + driver[6:]
return driver[5]
except:
return "unknown"
# For modules that do "from About import about"
about = sys.modules[__name__]
|
bburns/PyVoyager
|
src/vgComposite.py
|
Python
|
mit
| 10,684
| 0.007675
|
"""
vg composite command
Build composite images from centered images, based on records in composites.csv.
See also vgInitComposites.py, which builds initial pass at composites.csv.
Note: even single channel images get a composite image (bw).
Uses centered image if available, otherwise uses the plain adjusted image.
"""
import os
import csv
import cv2
import config
import lib
import libimg
import vgCenter
import vgInpaint
def printStatus(channelRows,volume,nfile,startId):
"print status message"
nchannels = len(channelRows)
print 'Volume %s compositing %d: %s (%d channels) \r' % \
(volume,nfile,startId,nchannels),
def processChannels(channelRows, optionAlign):
"""
Combine channel images into new file, attempting to align them if optionAlign is True.
channelRows is an array of rows corresponding to rows in the composites.csv file.
should have [compositeId,centerId,volnum,filter,weight,x,y]
eg [
['C434823','C434823','5101','Orange']
['C434823','C434825','5101','Blue','0.8','42','18']
['C434823','C434827','5101','Green','1','-50','83']
]
they are combined and written to a file in the composites folder, step05_composites.
Can have single channel groups.
If optionAlign is True, will attempt to align the channels, and will return updated
x,y values in channelRows.
"""
#. could also have zoom factor, warp info, rotate
# for row in channelRows: print row
centered = False
weightXYFilledOut = False
if len(channelRows) > 0:
volume = ''
compositeId = ''
channels = []
for row in channelRows:
compositeId = row[config.colCompositesCompositeId]
fileId = row[config.colCompositesFileId]
volume = row[config.colCompositesVolume]
filter = row[config.colCompositesFilter]
weight = float(row[config.colCompositesWeight]) \
if len(row)>config.colCompositesWeight else 1.0
x = int(row[config.colCompositesX]) if len(row)>config.colCompositesX else 0
y = int(row[config.colCompositesY]) if len(row)>config.colCompositesY else 0
if len(row)>config.colCompositesWeight: weightXYFilledOut = True
# if don't have an inpaint or centered file, use the adjusted file
channelfilepath = lib.getFilepath('inpaint', volume, fileId)
if os.path.isfile(channelfilepath):
centered = True
else:
channelfilepath = lib.getFilepath('center', volume, fileId, filter)
if os.path.isfile(channelfilepath):
centered = True
else:
channelfilepath = lib.getFilepath('adjust', volume, fileId, filter)
if os.path.isfile(channelfilepath):
channel = [fileId,filter,channelfilepath,weight,x,y]
channels.append(channel)
if len(channels)>0:
outfilepath = lib.getFilepath('composite', volume, compositeId)
if centered: optionAlign = False # don't try to align images if already centered
if weightXYFilledOut: optionAlign = False # don't align if already have values
# combine the channel images
im, channels = libimg.combineChannels(channels, optionAlign)
libimg.imw
|
rite(outfilepath, im)
# if -align: update channels x
|
,y etc
if optionAlign:
# make sure all the rows have all their columns
for row in channelRows:
while len(row)<=config.colCompositesY:
row.append('')
# find each row in channelRows and update weights and x,y translation
for row in channels:
for row2 in channelRows:
if row2[config.colCompositesFileId]==row[config.colChannelFileId]:
row2[config.colCompositesWeight]=row[config.colChannelWeight]
row2[config.colCompositesX]=row[config.colChannelX]
row2[config.colCompositesY]=row[config.colChannelY]
# print [ch[:-1] for ch in channels if ch]
# return channels
# caller needs to know if x,y values were changed
xyChanged = not centered
return xyChanged
def writeUpdates(csvNew, channelRows):
""
for row in channelRows:
# row = [compositeId, fileId, volume, filter, weight, x, y]
csvNew.writerow(row)
# print row
def vgComposite(filterVolume=None, filterCompositeId=None, filterTargetPath=None,
optionOverwrite=False, optionAlign=False, directCall=True):
"""
Build composite images by combining channel images.
Walks over records in composites.csv, merges channel images, writes to composites folder.
eg
composites.csv:
compositeId,centerId,volume,filter,weight,x,y
C1537728,C1537728,5103,Blue
C1537728,C1537730,5103,Orange,0.8
C1537728,C1537732,5103,Green,1,10,3
=>
step05_composites/VGISS_5103/C1537728_composite.jpg
Note: weight,x,y are optional - default to 1,0,0
"""
if filterCompositeId: filterCompositeId = filterCompositeId.upper() # always capital C
# note: targetPathParts = [system, craft, target, camera]
targetPathParts = lib.parseTargetPath(filterTargetPath)
# build volume for previous step
if filterVolume:
filterVolume = str(filterVolume)
outputSubfolder = lib.getSubfolder('composite', filterVolume)
# quit if volume folder exists
if os.path.isdir(outputSubfolder) and optionOverwrite==False:
if directCall: print "Folder exists: " + outputSubfolder
return
# build the previous step, if not already there
vgCenter.vgCenter(filterVolume, '', optionOverwrite=False, directCall=False)
# vgInpaint.vgInpaint(filterVolume, '', optionOverwrite=False, directCall=False)
# make folder
lib.mkdir(outputSubfolder)
# read small dbs into memory
compositingInfo = lib.readCsv(config.dbCompositing) # when to turn centering on/off
retargetingInfo = lib.readCsv(config.dbRetargeting) # remapping listed targets
# open files.csv so can join to it
csvFiles, fFiles = lib.openCsvReader(config.dbFiles)
# open compositesNew.csv for writing
if optionAlign:
lib.rm(config.dbCompositesNew)
csvNew, fNew = lib.openCsvWriter(config.dbCompositesNew)
# iterate over composites.csv records
csvComposites, fComposites = lib.openCsvReader(config.dbComposites)
startId = ''
startVol = ''
channelRows = []
nfile = 0
for row in csvComposites:
# get composite info
compositeId = row[config.colCompositesCompositeId]
fileId = row[config.colCompositesFileId]
volume = row[config.colCompositesVolume]
# join on files.csv to get more image properties
# (note: since compositeId repeats, we might have already advanced to the next record,
# in which case rowFiles will be None. But the target properties will remain the same.)
rowFiles = lib.getJoinRow(csvFiles, config.colFilesFileId, compositeId)
if rowFiles:
# get file info
filter = rowFiles[config.colFilesFilter]
system = rowFiles[config.colFilesSystem]
craft = rowFiles[config.colFilesCraft]
target = rowFiles[config.colFilesTarget]
camera = rowFiles[config.colFilesCamera]
# relabel target field if necessary - see db/targets.csv for more info
target = lib.retarget(retargetingInfo, compositeId, target)
# filter on volume, composite id and targetpath
volumeOk = (volume==filterVolume if filterVolume else True)
compositeOk = (compositeId==filterCompositeId if filterCompositeId else True)
targetPathOk = (lib.targetMatches(targetPathParts, system, craft, target, camera) \
if filterTargetPath else True)
doComposite = (volumeOk and compositeOk and targetPathOk)
|
ConnectBox/wifi-test-framework
|
ansible/plugins/mitogen-0.2.3/tests/local_test.py
|
Python
|
mit
| 1,663
| 0
|
import os
import sys
import unittest2
import mitogen
import mitogen.
|
ssh
import mitogen.utils
import testlib
import plain_old_module
def get_sys_executable():
return sys.executable
def get_os_environ():
return dict(os.environ)
class LocalTest(testlib.RouterMixin, unittest2.TestCase):
stream_class = mitogen.ssh.Stream
def test_stream_name(self):
context = self.router.local()
pid = context.call(os.getpid)
self.assertEquals('local.%d' % (pid,), context.name)
class PythonPathTest(testlib.RouterMixin, unit
|
test2.TestCase):
stream_class = mitogen.ssh.Stream
def test_inherited(self):
context = self.router.local()
self.assertEquals(sys.executable, context.call(get_sys_executable))
def test_string(self):
os.environ['PYTHON'] = sys.executable
context = self.router.local(
python_path=testlib.data_path('env_wrapper.sh'),
)
self.assertEquals(sys.executable, context.call(get_sys_executable))
env = context.call(get_os_environ)
self.assertEquals('1', env['EXECUTED_VIA_ENV_WRAPPER'])
def test_list(self):
context = self.router.local(
python_path=[
testlib.data_path('env_wrapper.sh'),
"magic_first_arg",
sys.executable
]
)
self.assertEquals(sys.executable, context.call(get_sys_executable))
env = context.call(get_os_environ)
self.assertEquals('magic_first_arg', env['ENV_WRAPPER_FIRST_ARG'])
self.assertEquals('1', env['EXECUTED_VIA_ENV_WRAPPER'])
if __name__ == '__main__':
unittest2.main()
|
nsnam/ns-3-dev-git
|
src/wifi/examples/reference/bianchi11ax.py
|
Python
|
gpl-2.0
| 6,874
| 0.010038
|
#
# Copyright 2020 University of Washington
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation;
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Authors: Hao Yin and Sebastien Deronne
#
import numpy as np
import math
def bianchi_ax(data_rate, ack_rate, k, difs):
# Parameters for 11ax
nA = np.linspace(5, 50, 10)
CWmin = 15
CWmax = 1023
L_DATA = 1500 * 8 # data size in bits
L_ACK = 14 * 8 # ACK size in bits
#B = 1/(CWmin+1)
B=0
EP = L_DATA/(1-B)
T_GI = 800e-9 # guard interval in seconds
T_SYMBOL_ACK = 4e-6 # symbol duration in seconds (for ACK)
T_SYMBOL_DATA = 12.8e-6 + T_GI # symbol duration in seconds (for DATA)
T_PHY_ACK = 20e-6 # PHY preamble & header duration in seconds (for ACK)
T_PHY_DATA = 44e-6 # PHY preamble & header duration in seconds (for DATA)
L_SERVICE = 16 # service field length in bits
L_TAIL = 6 # tail length in bits
L_MAC = (30) * 8 # MAC header size in bits
L_APP_HDR = 8 * 8 # bits added by the upper layer(s)
T_SIFS = 16e-6
T_DIFS = 34e-6
T_SLOT = 9e-6
delta = 1e-7
Aggregation_Type = 'A_MPDU' #A_MPDU or A_MSDU (HYBRID not fully supported)
K_MSDU = 1
K_MPDU = k
L_MPDU_HEADER = 4
L_MSDU_HEADER = 14 * 8
if (k <= 1):
Aggregation_Type = 'NONE'
N_DBPS = data_rate * T_SYMBOL_DATA # number of data bits per OFDM symbol
if (Aggregation_Type == 'NONE'):
N_SYMBOLS = math.ceil((L_SERVICE + (L_MAC + L_DATA + L_APP_HDR) + L_TAIL)/N_DBPS)
T_DATA = T_PHY_DATA + (T_SYMBOL_DATA * N_SYMBOLS)
K_MPDU = 1
K_MSDU = 1
if (Aggregation_Type == 'A_MSDU'):
N_SYMBOLS = math.ceil((L_SERVICE + K_MPDU*(L_MAC + L_MPDU_HEADER + K_MSDU*(L_MSDU_HEADER + L_DATA + L_APP_HDR)) + L_TAIL)/N_DBPS)
T_DATA = T_PHY_DATA + (T_SYMBOL_DATA * N_SYMBOLS)
if (Aggregation_Type == 'A_MPDU'):
N_SYMBOLS = math.ceil((L_SERVICE + K_MPDU*(L_MAC + L_MPDU_HEADER + L_DATA + L_APP_HDR) + L_TAIL)/N_DBPS)
T_DATA = T_PHY_DATA + (T_SYMBOL_DATA * N_SYMBOLS)
#Calculate ACK Duration
N_DBPS = ack_rate * T_SYMBOL_ACK # number of data bits per OFDM symbol
N_SYMBOLS = math.ceil((L_SERVICE + L_ACK + L_TAIL)/N_DBPS)
T_ACK = T_PHY_ACK + (T_SYMBOL_ACK * N_SYMBOLS)
T_s = T_DATA + T_SIFS + T_ACK + T_DIFS
if difs == 1: #DIFS
T_C = T_DATA + T_DIFS
else:
T_s = T_DATA + T_SIFS + T_ACK + T_DIFS + delta
T_C = T_DATA + T_DIFS + T_SIFS + T_ACK + delta
T_S = T_s/(1-B) + T_SLOT
S_bianchi = np.zeros(len(nA))
for j in range(len(nA)):
n = nA[j]*1
W = CWmin + 1
m = math.log2((CWmax + 1)/(CWmin + 1))
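# Solve Bianchi's fixed point tau = tau(p) numerically: sweep candidate tau values,
# compute the conditional collision probability p for each candidate, and pick the
# point where the implied transmission probability taup matches the candidate grid.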
tau1 = np.linspace(0, 0.1, 100000)
p = 1 - np.power((1 - tau1),(n - 1))
ps = p*0
for i in range(int(m)):
ps = ps + np.power(2*p, i)
taup = 2./(1 + W + p*W*ps)
b = np.argmin(np.abs(tau1 - taup))
tau = taup[b]
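# Bianchi saturation throughput: S = Ps*Ptr*E[P] / ((1-Ptr)*T_SLOT + Ptr*Ps*T_S + Ptr*(1-Ps)*T_C),
# scaled by the aggregation factors and converted to Mbit/s. Ptr is the probability that
# at least one station transmits in a slot, Ps that such a transmission succeeds.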
Ptr = 1 - math.pow((1 - tau), int(n))
Ps = n*tau*math.pow((1 - tau), int(n-1))/Ptr
S_bianchi[j] = K_MSDU*K_MPDU*Ps*Ptr*EP/((1-Ptr)*T_SLOT+Ptr*Ps*T_S+Ptr*(1-Ps)*T_C)/1e6
bianchi_result = S_bianchi
return bianchi_result
def str_result(bianchi_result, mcs, bw):
str_bianchi = ' {' + '\"HeMcs{:d}'.format(mcs) + '_{:d}MHz\"'.format(bw) + ', {\n'
for i in range (len(bianchi_result)):
str_tmp = ' {' + '{:d}, {:.4f}'.format(5*(i+1), bianchi_result[i]) +'},\n'
str_bianchi = str_bianchi + str_tmp
str_bianchi = str_bianchi + " }},\n"
print(str_bianchi)
return str_bianchi
# Settings for different MCS and mode
data_rates_20MHz = [8.603e6, 17.206e6, 25.8e6, 34.4e6, 51.5e6, 68.8e6, 77.4e6, 86e6, 103.2e6, 114.7e6, 129e6, 143.4e6]
ack_rates_20MHz = [6e6, 12e6, 12e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6]
data_rates_40MHz = [17.2e6, 34.4e6, 51.5e6, 68.8e6, 103.2e6, 137.6e6, 154.9e6, 172.1e6, 206.5e6, 229.4e6, 258.1e6, 286.8e6]
ack_rates_40MHz = [6e6, 12e6, 12e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6]
data_rates_80MHz = [36e6, 72.1e6, 108.1e6, 144.1e6, 216.2e6, 288.2e6, 324.3e6, 360.3e6, 432.4e6, 480.4e6, 540.4e6, 600.5e6]
ack_rates_80MHz = [6e6, 12e6, 12e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6]
data_rates_160MHz = [72.1e6, 144.1e6, 216.2e6, 288.2e6, 432.4e6, 576.5e6, 648.5e6, 720.6e6, 864.7e6, 960.8e6, 1080.9e6, 1201e6]
ack_rates_160MHz = [6e6, 12e6, 12e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6, 24e6]
# Generate results with frame aggregation disabled
k = 1
difs = 1
fo = open("bianchi_11ax_difs.txt", "w")
for i in range(len(data_rates_20MHz)):
bianchi_result = bianchi_ax(data_rates_20MHz[i], ack_rates_20MHz[i], k, difs)
str_s = str_result(bianchi_result, i, 20)
fo.write(str_s)
for i in range(len(data_rates_40MHz)):
bianchi_result = bianchi_ax(data_rates_40MHz[i], ack_rates_40MHz[i], k, difs)
str_s = str_result(bianchi_
|
result, i, 40)
fo.write(str_s)
for i in range(len(data_rates_80MHz)):
bianchi_result = bianchi_ax(data_rates_80MHz[i], ack_rates_80MHz[i], k, difs)
str_s = str_result(bianchi_result, i, 80)
fo.write(str_s)
for i in range(len(data_rates_160MHz)):
bianchi_result = bianchi_ax(data_rates_160MHz[i], ack_rates_160MHz[i], k, difs)
str_s = str_result(bianchi_result, i, 160)
fo.write(str_s)
fo.close()
difs = 0
fo = open("bianchi_11ax_eifs.txt", "w")
for i in range(len(data_ra
|
tes_20MHz)):
bianchi_result = bianchi_ax(data_rates_20MHz[i], ack_rates_20MHz[i], k, difs)
str_s = str_result(bianchi_result, i, 20)
fo.write(str_s)
for i in range(len(data_rates_40MHz)):
bianchi_result = bianchi_ax(data_rates_40MHz[i], ack_rates_40MHz[i], k, difs)
str_s = str_result(bianchi_result, i, 40)
fo.write(str_s)
for i in range(len(data_rates_80MHz)):
bianchi_result = bianchi_ax(data_rates_80MHz[i], ack_rates_80MHz[i], k, difs)
str_s = str_result(bianchi_result, i, 80)
fo.write(str_s)
for i in range(len(data_rates_160MHz)):
bianchi_result = bianchi_ax(data_rates_160MHz[i], ack_rates_160MHz[i], k, difs)
str_s = str_result(bianchi_result, i, 160)
fo.write(str_s)
fo.close()
|
pisskidney/dota
|
dota/urls.py
|
Python
|
mit
| 222
| 0.004505
|
from django.conf.urls import
|
patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^', include('ebets.urls')),
url(
|
r'^admin/', include(admin.site.urls)),
)
|
derNarr/synchronicity
|
experiment/sessions/ses_vp48.py
|
Python
|
mit
| 5,332
| 0.001313
|
# vpnr 48
run_trial(hori2, duration=4.000, speed=300)
run_trial(rws[2], duration=8.0)
run_trial(rbs[12], duration=8.0)
run_trial(rws[6], duration=8.0)
run_trial(rbs[22], duration=8.0)
run_trial(cm200, duration=8.0, speed=150)
run_trial(cm200, duration=8.0, speed=800)
run_trial(rbs[6], duration=8.0)
run_trial(msm0, duration=4.000, speed=400)
run_trial(rbs[9], duration=8.0)
run_trial(mem2, duration=3.000, speed=600)
run_trial(mem0, duration=7.000, speed=200)
run_trial(rws[16], duration=8.0)
run_trial(rws[18], duration=8.0)
run_trial(rbs[1], duration=8.0)
run_trial(rbs[10], duration=8.0)
run_trial(rws[15], duration=8.0)
run_trial(rws[21], duration=8.0)
run_trial(rbs[0], duration=8.0)
run_trial(rws[1], duration=8.0)
run_trial(mem2, duration=5.000, speed=300)
show(u'Machen Sie eine kurze Pause.\n\nWeiter mit Leertaste.', wait_keys=('space',))
run_trial(rws[12], duration=8.0)
run_trial(cm400, duration=8.0, speed=400)
run_trial(rbs[4], duration=8.0)
run_trial(rbs[19], duration=8.0)
run_trial(mem0, duration=4.000, speed=400)
run_trial(rbs[8], duration=8.0)
run_trial(rbs[11], duration=8.0)
run_trial(rws[13], duration=8.0)
run_trial(rws[8], duration=8.0)
run_trial(cm400, duration=8.0, speed=200)
run_trial(mem1, duration=5.000, speed=300)
run_trial(cm400, duration=8.0, speed=300)
run_trial(hori1, duration=6.000, speed=200)
run_trial(rbs[15], duration=8.0)
run_trial(hori0, duration=3.000, speed=400)
run_trial(msm0, duration=7.000, speed=200)
run_trial(rws[0], duration=8.0)
run_trial(mem0, duration=2.500, speed=800)
run_trial(rws[17], duration=8.0)
run_trial(cm100, duration=8.0, speed=200)
run_trial(mem0, duration=3.000, speed=600)
show(u'Machen Sie eine kurze Pause.\n\nWeiter mit Leertaste.', wait_keys=('space',))
run_trial(msm1, duration=4.000, speed=400)
run_trial(rbs[18], duration=8.0)
run_trial(mem1, duration=4.000, speed=400)
run_trial(msm2, duration=4.000, speed=400)
run_trial(mem1, duration=7.000, speed=200)
run_trial(msm2, duration=3.000, speed=600)
run_trial(mem1, duration=2.500, speed=800)
run_trial(hori0, duration=2.000, speed=600)
run_trial(mem1, duration=9.000, speed=150)
run_trial(rbs[23], duration=8.0)
run_trial(cm100, duration=8.0, speed=150)
run_trial(cm200, duration=8.0, speed=200)
run_trial(rws[5], duration=8.0)
run_trial(hori2, duration=2.000, speed=600)
run_trial(msm1, duration=2.500, speed=800)
run_trial(rws[9], duration=8.0)
run_trial(cm100, duration=8.0, speed=400)
run_trial(rbs[2], duration=8.0)
run_trial(rbs[14], duration=8.0)
run_trial(cm200, duration=8.0, speed=400)
run_trial(rbs[5], duration=8.0)
show(u'Machen Sie eine kurze Pause.\n\nWeiter mit Leertaste.', wait_keys=('space',))
run_trial(hori1, duration=8.000, speed=150)
run_trial(rws[10], duration=8.0)
run_trial(rws[19], duration=8.0)
run_trial(rws[20], duration=8.0)
run_trial(rbs[21], duration=8.0)
run_trial(hori0, duration=6.000, speed=200)
run_trial(msm0, duration=3.000, speed=600)
run_trial(rbs[13], duration=8.0)
run_trial(cm200, duration=8.0, speed=300)
run_trial(msm1, duration=3.000, speed=600)
run_trial(cm400, duration=8.0, speed=600)
run_trial(rbs[7], duration=8.0)
run_trial(rws[7], duration=8.0)
run_trial(rbs[3], duration=8.0)
run_trial(hori0, duration=8.000, speed=150)
run_trial(mem2, duration=9.000, speed=150)
run_trial(rws[4], duration=8.0)
run_trial(hori2, duration=1.500, speed=800)
run_trial(cm400, duration=8.0, speed=150)
run_trial(hori0, duration=4.000, speed=300)
run_trial(cm400, duration=8.0, speed=800)
show(u'Machen Sie eine kurze Pause.\n\nWeiter mit Leertaste.', wait_keys=('space',))
run_trial(msm1, duration=5.000, speed=300)
run_trial(msm2, duration=5.000, speed=300)
run_trial(msm0, duration=2.500, speed=800)
run_trial(mem2, duration=4.000, speed=400)
run_trial(cm200, duration=8.0, speed=600)
run_trial(hori1, duration=1.500, speed=800)
run_trial(msm0, duration=9.000, speed=150)
run_trial(hori0, duration=1.500, speed=800)
run_trial(mem2, duration=2.500, speed=800)
run_trial(rbs[24], duration=8.0)
run_trial(msm2, duration=9.000, speed=150)
run_trial(hori1, duration=4.000, speed=300)
run_trial(rbs[16], duration=8.0)
run_trial(rbs[17], duration=8.0)
run_trial(msm2, duration=2.500, speed=800)
run_trial(mem1, duration=3.000, speed=600)
run_trial(msm1, duration=9.000, speed=150)
run_trial(rws[11], duration=8.0)
run_trial(hori2, duration=8.000, speed=150)
run_trial(hori1, duration=2.000, speed=600)
run_trial(msm2, duration=7.000, speed=200)
show
|
(u'Machen Sie eine kurze Pause.\n\nWeiter mit Leertaste.', wait_keys=('space',))
run_trial(mem0, duration=5.000, speed=300)
run_trial(hori2, duration=6.000, speed=200)
run_trial(msm0, duration=5.000, speed=300)
run_trial(rws[22], duration=8.0)
run_trial(cm100, duration=8.0, speed=300)
run_trial(mem0, duration=9.000, speed=150)
run
|
_trial(rws[23], duration=8.0)
run_trial(rws[14], duration=8.0)
run_trial(rws[24], duration=8.0)
run_trial(msm1, duration=7.000, speed=200)
run_trial(rws[3], duration=8.0)
run_trial(cm100, duration=8.0, speed=800)
run_trial(hori2, duration=3.000, speed=400)
run_trial(rbs[20], duration=8.0)
run_trial(hori1, duration=3.000, speed=400)
run_trial(mem2, duration=7.000, speed=200)
run_trial(cm100, duration=8.0, speed=600)
run_movie(movie1audio, 'Jetzt folgt ein Video mit Ton.\n\nWeiter mit Leertaste')
run_movie(movie2noaudio, 'Jetzt folgt ein Video OHNE Ton.\n\nWeiter mit Leertaste')
|
frostblooded/kanq
|
api/tests/test_user_service.py
|
Python
|
mit
| 1,237
| 0.000808
|
from django.test import TestCase
from api.helpers import user_service
from api.factories import UserFactory, PostFac
|
tory
class UserServiceTest(TestCase):
POSTS_PER_USER = 10
def setUp(self):
self.main_user = UserFactory()
self.follower = UserFactory()
self.test_user = UserFactory()
self.main_user.followers.add(self.follower)
self.follower.following.add(se
|
lf.main_user)
for i in range(0, self.POSTS_PER_USER):
PostFactory(creator=self.main_user)
PostFactory(creator=self.test_user)
PostFactory(creator=self.follower)
def test_user_feed_returns_posts_from_correct_users(self):
posts = user_service.get_user_feed(self.follower.id, 0, 20)
self.assertEqual(len(posts), self.POSTS_PER_USER * 2)
for post in posts:
self.assertIn(post.creator_id, [self.main_user.id, self.follower.id])
def test_user_feed_returns_posts_ordered_correctly(self):
posts = user_service.get_user_feed(self.follower.id, 0, 20)
for i in range(0, len(posts) - 1):
self.assertGreater(posts[i].created_at, posts[i + 1].created_at)
def test_user_feed_returns_correct_pages(self):
pass
|
MrNuggelz/sklearn-glvq
|
sklearn_lvq/lmrslvq.py
|
Python
|
bsd-3-clause
| 15,649
| 0.000511
|
# -*- coding: utf-8 -*-
# Author: Joris Jensen <jjensen@techfak.uni-bielefeld.de>
#
# License: BSD 3 clause
from __future__ import division
import numpy as np
from scipy.optimize import minimize
from sklearn.utils import validation
from .rslvq import RslvqModel
class LmrslvqModel(RslvqModel):
"""Localized Matrix Robust Soft Learning Vector Quantization
Parameters
----------
prototypes_per_class : int or list of int, optional (default=1)
Number of prototypes per class. Use list to specify different
|
numbers per class.
initial_prototypes : array-like, shape = [n_prototypes, n_features + 1],
optional
Prototypes to start with. If not given initialization near the class
means. Class label must be placed as last entry of each prototype.
initial_matrices : list of array-like, optional
|
Matrices to start with. If not given random initialization
regularization : float or array-like, shape = [n_classes/n_prototypes],
optional (default=0.0)
Values between 0 and 1. Regularization is done by the log determinant
of the relevance matrix. Without regularization relevances may
degenerate to zero.
dim : int, optional
Maximum rank or projection dimensions
classwise : boolean, optional
If true, each class has one relevance matrix.
If false, each prototype has one relevance matrix.
sigma : float, optional (default=0.5)
Variance for the distribution.
max_iter : int, optional (default=2500)
The maximum number of iterations.
gtol : float, optional (default=1e-5)
Gradient norm must be less than gtol before successful termination
of bfgs.
display : boolean, optional (default=False)
Print information about the bfgs steps.
random_state : int, RandomState instance or None, optional
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
Attributes
----------
w_ : array-like, shape = [n_prototypes, n_features]
Prototype vector, where n_prototypes is the number of prototypes and
n_features is the number of features
c_w_ : array-like, shape = [n_prototypes]
Prototype classes
classes_ : array-like, shape = [n_classes]
Array containing labels.
omegas_ : list of array-like
Relevance Matrices
dim_ : list of int
Maximum rank of projection
regularization_ : array-like, shape = [n_classes/n_prototypes]
Values between 0 and 1
See also
--------
RslvqModel, MrslvqModel
"""
def __init__(self, prototypes_per_class=1, initial_prototypes=None,
initial_matrices=None, regularization=0.0, dim=None,
classwise=False, sigma=1, max_iter=2500, gtol=1e-5, display=False,
random_state=None):
super(LmrslvqModel, self).__init__(sigma=sigma,
random_state=random_state,
prototypes_per_class=prototypes_per_class,
initial_prototypes=initial_prototypes,
gtol=gtol, display=display, max_iter=max_iter)
self.regularization = regularization
self.initial_matrices = initial_matrices
self.classwise = classwise
self.initialdim = dim
def _optgrad(self, variables, training_data, label_equals_prototype,
random_state, lr_relevances=0, lr_prototypes=1):
n_data, n_dim = training_data.shape
nb_prototypes = self.c_w_.size
variables = variables.reshape(variables.size // n_dim, n_dim)
prototypes = variables[:nb_prototypes]
# dim to indices
indices = []
for i in range(len(self.dim_)):
indices.append(sum(self.dim_[:i + 1]))
omegas = np.split(variables[nb_prototypes:], indices[:-1]) # .conj().T
g = np.zeros(variables.shape)
if lr_relevances > 0:
gw = []
for i in range(len(omegas)):
gw.append(np.zeros(omegas[i].shape))
c = 1 / self.sigma
for i in range(n_data):
xi = training_data[i]
c_xi = label_equals_prototype[i]
for j in range(prototypes.shape[0]):
if len(omegas) == nb_prototypes:
omega_index = j
else:
omega_index = np.where(self.classes_ == self.c_w_[j])[0][0]
oo = omegas[omega_index].T.dot(omegas[omega_index])
d = (xi - prototypes[j])[np.newaxis].T
p = self._p(j, xi, prototypes=prototypes, omega=omegas[omega_index])
if self.c_w_[j] == c_xi:
pj = self._p(j, xi, prototypes=prototypes, y=c_xi,
omega=omegas[omega_index])
if lr_prototypes > 0:
if self.c_w_[j] == c_xi:
g[j] += (c * (pj - p) * oo.dot(d)).ravel()
else:
g[j] -= (c * p * oo.dot(d)).ravel()
if lr_relevances > 0:
if self.c_w_[j] == c_xi:
gw -= (pj - p) / self.sigma * (
omegas[omega_index].dot(d).dot(d.T))
else:
gw += p / self.sigma * (omegas[omega_index].dot(d).dot(d.T))
if lr_relevances > 0:
if sum(self.regularization_) > 0:
regmatrices = np.zeros([sum(self.dim_), n_dim])
for i in range(len(omegas)):
regmatrices[sum(self.dim_[:i + 1]) - self.dim_[i]:sum(
self.dim_[:i + 1])] = \
self.regularization_[i] * np.linalg.pinv(omegas[i])
g[nb_prototypes:] = 2 / n_data * lr_relevances * \
np.concatenate(gw) - regmatrices
else:
g[nb_prototypes:] = 2 / n_data * lr_relevances * \
np.concatenate(gw)
if lr_prototypes > 0:
g[:nb_prototypes] = 1 / n_data * \
lr_prototypes * g[:nb_prototypes]
g *= -(1 + 0.0001 * random_state.rand(*g.shape) - 0.5)
return g.ravel()
def _optfun(self, variables, training_data, label_equals_prototype):
n_data, n_dim = training_data.shape
nb_prototypes = self.c_w_.size
variables = variables.reshape(variables.size // n_dim, n_dim)
prototypes = variables[:nb_prototypes]
indices = []
for i in range(len(self.dim_)):
indices.append(sum(self.dim_[:i + 1]))
omegas = np.split(variables[nb_prototypes:], indices[:-1])
out = 0
for i in range(n_data):
xi = training_data[i]
y = label_equals_prototype[i]
if len(omegas) == nb_prototypes:
fs = [self._costf(xi, prototypes[j], omega=omegas[j])
for j in range(nb_prototypes)]
else:
fs = [self._costf(xi, prototypes[j], omega=omegas[np.where(self.classes_ == self.c_w_[j])[0][0]])
for j in range(nb_prototypes)]
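# log-sum-exp trick: subtract fs_max before exponentiating for numerical stability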
fs_max = max(fs)
s1 = sum([np.math.exp(fs[i] - fs_max) for i in range(len(fs))
if self.c_w_[i] == y])
s2 = sum([np.math.exp(f - fs_max) for f in fs])
s1 += 0.0000001
s2 += 0.0000001
out += np.math.log(s1 / s2)
return -out
def _optimize(self, x, y, random_state):
nb_prototypes, nb_features = self.w_.shape
nb_classes = len(self.classes_)
if not isinstance(self.classwise, bool):
raise ValueError("classwise must be a boolean")
if self.initialdim is None:
if self.classwise:
self.dim_ = nb_features * np.ones(nb_classes, dtype=np.i
|
alexei-matveev/ccp1gui
|
jobmanager/winprocess.py
|
Python
|
gpl-2.0
| 7,039
| 0.001421
|
"""
Windows Process Control
winprocess.run launches a child process and returns the exit code.
Optionally, it can:
redirect stdin, stdout & stderr to files
run the command as another user
limit the process's running time
control the process window (location, size, window state, desktop)
Works on Windows NT, 2000 & XP. Requires Mark Hammond's win32
extensions.
This code is free for any purpose, with no warranty of any kind.
-- John B. Dell'Aquila <jbd@alum.mit.edu>
"""
import win32api, win32process, win32security
import win32event, win32con, msvcrt, win32gui
def logonUser(loginString):
"""
Login as specified user and return handle.
loginString: 'Domain\nUser\nPassword'; for local
login use . or empty string as domain
e.g. '.\nadministrator\nsecret_password'
"""
domain, user, passwd = loginString.split('\n')
return win32security.LogonUser(
user,
domain,
passwd,
win32con.LOGON32_LOGON_INTERACTIVE,
win32con.LOGON32_PROVIDER_DEFAULT
)
class Process:
"""
A Windows process.
"""
def __init__(self, cmd, login=None,
hStdin=None, hStdout=None, hStderr=None,
show=1, xy=None, xySize=None,
desktop=None):
"""
Create a Windows process.
cmd: command to run
login: run as user 'Domain\nUser\nPassword'
hStdin, hStdout, hStderr:
handles for process I/O; default is caller's stdin,
stdout & stderr
show: wShowWindow (0=SW_HIDE, 1=SW_NORMAL, ...)
xy: window offset (x, y) of upper left corner in pixels
xySize: window size (width, height) in pixels
desktop: lpDesktop - name of desktop e.g. 'winsta0\\default'
None = inherit current desktop
'' = create new desktop if necessary
User calling login requires additional privileges:
Act as part of the operating system [not needed on Windows XP]
Increase quotas
Replace a process level token
Login string must EITHER be an administrator's account
(ordinary user can't access current desktop - see Microsoft
Q165194) OR use desktop='' to run another desktop invisibly
(may be very slow to startup & finalize).
"""
si = win32process.STARTUPINFO()
si.dwFlags = (win32con.STARTF_USESTDHANDLES ^
win32con.STARTF_USESHOWWINDOW)
if hStdin is None:
si.hStdInput = win32api.GetStdHandle(win32api.STD_INPUT_HANDLE)
else:
si.hStdInput = hStdin
if hStdout is None:
si.hStdOutput = win32api.GetStdHandle(win32api.STD_OUTPUT_HANDLE)
else:
si.hStdOutput = hStdout
if hStderr is None:
si.hStdError = win32api.GetStdHandle(win32api.STD_ERROR_HANDLE)
else:
si.hStdError = hStderr
si.wShowWindow = show
if xy is not None:
si.dwX, si.dwY = xy
si.dwFlags ^= win32con.STARTF_USEPOSITION
if xySize is not None:
si.dwXSize, si.dwYSize = xySize
si.dwFlags ^= win32con.STARTF_USESIZE
if desktop is not None:
si.lpDesktop = desktop
procArgs = (None, # appName
cmd, # commandLine
None, # processAttributes
None, # threadAttributes
1, # bInheritHandles
win32process.CREATE_NEW_CONSOLE, # dwCreationFlags
None, # newEnvironment
None, # currentDirectory
si) # startupinfo
if login is not None:
hUser = logonUser(login)
win32security.ImpersonateLoggedOnUser(hUser)
procHandles = win32process.CreateProcessAsUser(hUser, *procArgs)
win32security.RevertToSelf()
else:
procHandles = win32process.CreateProcess(*procArgs)
self.hProcess, self.hThread, self.
|
PId, self.TId = procHandles
def wait(self, mSec=None):
"""
|
Wait for process to finish or for specified number of
milliseconds to elapse.
"""
if mSec is None:
mSec = win32event.INFINITE
return win32event.WaitForSingleObject(self.hProcess, mSec)
def kill(self, gracePeriod=5000):
"""
Kill process. Try for an orderly shutdown via WM_CLOSE. If
still running after gracePeriod (5 sec. default), terminate.
"""
win32gui.EnumWindows(self.__close__, 0)
if self.wait(gracePeriod) != win32event.WAIT_OBJECT_0:
win32process.TerminateProcess(self.hProcess, 0)
win32api.Sleep(100) # wait for resources to be released
def __close__(self, hwnd, dummy):
"""
EnumWindows callback - sends WM_CLOSE to any window
owned by this process.
"""
TId, PId = win32process.GetWindowThreadProcessId(hwnd)
if PId == self.PId:
win32gui.PostMessage(hwnd, win32con.WM_CLOSE, 0, 0)
def exitCode(self):
"""
Return process exit code.
"""
return win32process.GetExitCodeProcess(self.hProcess)
def run(cmd, mSec=None, stdin=None, stdout=None, stderr=None, **kw):
"""
Run cmd as a child process and return exit code.
mSec: terminate cmd after specified number of milliseconds
stdin, stdout, stderr:
file objects for child I/O (use hStdin etc. to attach
handles instead of files); default is caller's stdin,
stdout & stderr;
kw: see Process.__init__ for more keyword options
"""
if stdin is not None:
kw['hStdin'] = msvcrt.get_osfhandle(stdin.fileno())
if stdout is not None:
kw['hStdout'] = msvcrt.get_osfhandle(stdout.fileno())
if stderr is not None:
kw['hStderr'] = msvcrt.get_osfhandle(stderr.fileno())
child = Process(cmd, **kw)
if child.wait(mSec) != win32event.WAIT_OBJECT_0:
child.kill()
raise WindowsError, 'process timeout exceeded'
return child.exitCode()
if __name__ == '__main__':
# Pipe commands to a shell and display the output in notepad
print 'Testing winprocess.py...'
import tempfile
timeoutSeconds = 15
cmdString = """\
REM Test of winprocess.py piping commands to a shell.\r
REM This window will close in %d seconds.\r
vol\r
net user\r
_this_is_a_test_of_stderr_\r
""" % timeoutSeconds
cmd, out = tempfile.TemporaryFile(), tempfile.TemporaryFile()
cmd.write(cmdString)
cmd.seek(0)
print 'CMD.EXE exit code:', run('cmd.exe', show=0, stdin=cmd,
stdout=out, stderr=out)
cmd.close()
print 'NOTEPAD exit code:', run('notepad.exe %s' % out.file.name,
show=win32con.SW_MAXIMIZE,
mSec=timeoutSeconds*1000)
out.close()
|
thedeadparrot/ficbot
|
twitterbot.py
|
Python
|
mit
| 555
| 0.003604
|
from __future__ import pri
|
nt_function
from twython import Twython
import util
class TwitterBot(util.SocialMediaBot):
""" Social Media Bot for posting updat
|
es to Tumblr """
NAME = "twitter"
def __init__(self, **kwargs):
super(TwitterBot, self).__init__(**kwargs)
self.client = Twython(*self.oauth_config)
def post_update(self):
text = self.generate_text(limit_characters=140)
self.client.update_status(status=text)
if __name__ == "__main__":
twitterbot = TwitterBot()
twitterbot.post_update()
|
d9w/6858-android-intents
|
analyzer/androguard/core/bytecodes/dvm.py
|
Python
|
mit
| 232,587
| 0.025272
|
# This file is part of Androguard.
#
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
# All rights reserved.
#
# Androguard is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Androguard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
|
#
# You should have received a copy of the GNU Lesser General Public License
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
from androguard.core import bytecode
from androguard.core.androconf import CONF, debug
import sys, re
from struct import pack, unpack, calcsize
DEX_FILE_MAGIC = 'dex\n035\x00'
ODEX_FILE_MAGIC_35 = 'dey\n035\x00'
ODEX_FILE_MAGIC_36 = 'dey\n036\x00'
TYPE_MAP_ITEM = {
0x0 : "TYPE_HEADER_ITEM",
|
0x1 : "TYPE_STRING_ID_ITEM",
0x2 : "TYPE_TYPE_ID_ITEM",
0x3 : "TYPE_PROTO_ID_ITEM",
0x4 : "TYPE_FIELD_ID_ITEM",
0x5 : "TYPE_METHOD_ID_ITEM",
0x6 : "TYPE_CLASS_DEF_ITEM",
0x1000 : "TYPE_MAP_LIST",
0x1001 : "TYPE_TYPE_LIST",
0x1002 : "TYPE_ANNOTATION_SET_REF_LIST",
0x1003 : "TYPE_ANNOTATION_SET_ITEM",
0x2000 : "TYPE_CLASS_DATA_ITEM",
0x2001 : "TYPE_CODE_ITEM",
0x2002 : "TYPE_STRING_DATA_ITEM",
0x2003 : "TYPE_DEBUG_INFO_ITEM",
0x2004 : "TYPE_ANNOTATION_ITEM",
0x2005 : "TYPE_ENCODED_ARRAY_ITEM",
0x2006 : "TYPE_ANNOTATIONS_DIRECTORY_ITEM",
}
ACCESS_FLAGS = [
(0x1 , 'public'),
(0x2 , 'private'),
(0x4 , 'protected'),
(0x8 , 'static'),
(0x10 , 'final'),
(0x20 , 'synchronized'),
(0x40 , 'bridge'),
(0x80 , 'varargs'),
(0x100 , 'native'),
(0x200 , 'interface'),
(0x400 , 'abstract'),
(0x800 , 'strict'),
(0x1000 , 'synthetic'),
(0x4000 , 'enum'),
(0x8000 , 'unused'),
(0x10000, 'constructor'),
(0x20000, 'synchronized'),
]
TYPE_DESCRIPTOR = {
'V': 'void',
'Z': 'boolean',
'B': 'byte',
'S': 'short',
'C': 'char',
'I': 'int',
'J': 'long',
'F': 'float',
'D': 'double',
'STR': 'String',
'StringBuilder': 'String'
}
def get_access_flags_string(value) :
"""
Transform an access-flags value into the corresponding string
:param value: the value of the access flags
:type value: int
:rtype: string
"""
buff = ""
for i in ACCESS_FLAGS :
if (i[0] & value) == i[0] :
buff += i[1] + " "
if buff != "" :
return buff[:-1]
return buff
def get_type(atype, size=None):
"""
Retrieve the type of a descriptor (e.g : I)
"""
if atype.startswith('java.lang'):
atype = atype.replace('java.lang.', '')
res = TYPE_DESCRIPTOR.get(atype.lstrip('java.lang'))
if res is None:
if atype[0] == 'L':
res = atype[1:-1].replace('/', '.')
elif atype[0] == '[':
if size is None:
res = '%s[]' % get_type(atype[1:])
else:
res = '%s[%s]' % (get_type(atype[1:]), size)
else:
res = atype
return res
MATH_DVM_OPCODES = { "add." : '+',
"div." : '/',
"mul." : '*',
"or." : '|',
"sub." : '-',
"and." : '&',
"xor." : '^',
"shl." : "<<",
"shr." : ">>",
}
FIELD_READ_DVM_OPCODES = [ ".get" ]
FIELD_WRITE_DVM_OPCODES = [ ".put" ]
BREAK_DVM_OPCODES = [ "invoke.", "move.", ".put", "if." ]
BRANCH_DVM_OPCODES = [ "throw", "throw.", "if.", "goto", "goto.", "return", "return.", "packed-switch$", "sparse-switch$" ]
def clean_name_instruction( instruction ) :
op_value = instruction.get_op_value()
# goto range
if op_value >= 0x28 and op_value <= 0x2a :
return "goto"
return instruction.get_name()
def static_operand_instruction( instruction ) :
buff = ""
if isinstance(instruction, Instruction) :
# get instructions without registers
for val in instruction.get_literals() :
buff += "%s" % val
op_value = instruction.get_op_value()
if op_value == 0x1a or op_value == 0x1b :
buff += instruction.get_string()
return buff
html_escape_table = {
"&": "&",
'"': """,
"'": "'",
">": ">",
"<": "<",
}
def dot_buff(ins, idx) :
op_value = ins.get_op_value()
if op_value == 0x300 :
return ins.get_name() + " " + ins.get_output(idx).replace("\"", "")
elif op_value == 0x1a :
return ins.get_name() + " " + ins.get_output(idx).replace("\"", "") #"".join(html_escape_table.get(c,c) for c in ins.get_output())
return ins.get_name() + " " + ins.get_output(idx)
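# ULEB128: unsigned little-endian base-128, with the high bit of each byte used as a
# continuation flag. Worked example: the bytes e5 8e 26 decode to
# 0x65 | (0x0e << 7) | (0x26 << 14) = 624485.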
def readuleb128(buff) :
result = ord( buff.read(1) )
if result > 0x7f :
cur = ord( buff.read(1) )
result = (result & 0x7f) | ((cur & 0x7f) << 7)
if cur > 0x7f :
cur = ord( buff.read(1) )
result |= (cur & 0x7f) << 14
if cur > 0x7f :
cur = ord( buff.read(1) )
result |= (cur & 0x7f) << 21
if cur > 0x7f :
cur = ord( buff.read(1) )
if cur > 0x0f :
raise("prout")
result |= cur << 28
return result
def readusleb128(buff) :
result = ord( buff.read(1) )
if result > 0x7f :
cur = ord( buff.read(1) )
result = (result & 0x7f) | ((cur & 0x7f) << 7)
if cur > 0x7f :
cur = ord( buff.read(1) )
result |= (cur & 0x7f) << 14
if cur > 0x7f :
cur = ord( buff.read(1) )
result |= (cur & 0x7f) << 21
if cur > 0x7f :
cur = ord( buff.read(1) )
result |= cur << 28
return result
def readuleb128p1(buff) :
return readuleb128( buff ) - 1
def readsleb128(buff) :
result = unpack( '=b', buff.read(1) )[0]
if result <= 0x7f :
result = (result << 25)
if result > 0x7fffffff :
result = (0x7fffffff & result) - 0x80000000
result = result >> 25
else :
cur = unpack( '=b', buff.read(1) )[0]
result = (result & 0x7f) | ((cur & 0x7f) << 7)
if cur <= 0x7f :
result = (result << 18) >> 18
else :
cur = unpack( '=b', buff.read(1) )[0]
result |= (cur & 0x7f) << 14
if cur <= 0x7f :
result = (result << 11) >> 11
else :
cur = unpack( '=b', buff.read(1) )[0]
result |= (cur & 0x7f) << 21
if cur <= 0x7f :
result = (result << 4) >> 4
else :
cur = unpack( '=b', buff.read(1) )[0]
result |= cur << 28
return result
def get_sbyte(buff) :
return unpack( '=b', buff.read(1) )[0]
def readsleb128_2(buff) :
result = get_sbyte(buff)
if result <= 0x7f :
result = (result << 25) >> 25
else :
cur = get_sbyte(buff)
result = (result & 0x7f) | ((cur & 0x7f) << 7)
if cur <= 0x7f :
result = (result << 18) >> 18
else :
cur = get_sbyte(buff)
result |= (cur & 0x7f) << 14
if cur <= 0x7f :
result = (result << 11) >> 11
else :
cur = get_sbyte(buff)
result |= (cur & 0x7f) << 21
|
SimpleTax/merchant
|
billing/__init__.py
|
Python
|
bsd-3-clause
| 135
| 0
|
from gatew
|
ay import Gateway, get_gateway
from integration import Integration, get_integration
from
|
utils.credit_card import CreditCard
|
btenaglia/hpc-historias-clinicas
|
hpc-historias-clinicas/diagnosticos/models.py
|
Python
|
bsd-3-clause
| 945
| 0.003181
|
# -*- coding: utf-8 -*-
from datetime import datetime
from django.db import models
from django.core.urlresolvers import reverse
from ..core.models import TimeS
|
tampedModel
class TipoDiagnosticos(TimeStampedModel):
nombre = models.CharField(max_length=150, blank=False, null=False, verbose_name=u'Diagnóstico')
def get_absolute_url(self):
return reverse('diagnosticos:list')
def __unicode__(self):
return self.nombre
class Diagnosticos(TimeStampedModel):
tipo_diagnostico = models.ForeignKey(TipoDiagnosticos, blank=True, null=True,
verbose_name=u'Diagnóstico')
fe
|
cha = models.DateField(blank=False, null=False,
help_text=u'Formato: dd/mm/yyyy',
default=datetime.now())
hora = models.TimeField(blank=False, null=False,
help_text=u'Formato: hh:mm', default=datetime.now())
|
midhun3112/restaurant_locator
|
Restaurant_Finder_App/restaurant_finder_app/restaurant_finder_app/restaurant/migrations/0015_auto_20170213_1116.py
|
Python
|
apache-2.0
| 766
| 0.002611
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-13 11:16
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('restau
|
rant', '0014_remove_menuimage_menu_name'),
]
operations = [
migrations.AlterModelOptions(
name='menuimage',
options={'verbose_name': 'MenuImage', 'verbose_name_plural': 'MenuImages'},
),
migrations.AlterField(
model_name='menuimage',
name='restaurant',
field=models.ForeignKey(null=True, on_delete=django.db
|
.models.deletion.CASCADE, related_name='menu_image', to='restaurant.Restaurant'),
),
]
|
onelab-eu/myslice
|
forge/script/PlcApi/showKeys.py
|
Python
|
gpl-3.0
| 224
| 0.026786
|
#!/us
|
r/bin/env python
from Auth import *
keyId = plc_api.GetKeys(auth, {'person_id': 249241}, ['key_id', 'key'])
for key in keyId:
print "A new key:"
|
print "Key value ->", key['key']
print "Key id ->",key['key_id']
|
kg-bot/SupyBot
|
plugins/TwitterStream/__init__.py
|
Python
|
gpl-3.0
| 2,725
| 0.000734
|
###
# Copyright (c) 2012, Valentin Lorentz
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
"""
Add a description of the plugin (to be presen
|
ted to the user inside the wizard)
here. This should describe *what* the plugin does.
"""
import supybot
import supybot.world as world
# Use this for the version of this plugin. You may wish to put a CVS keyword
# in here if you're keeping the plugin in CVS or some similar system.
__version__ = ""
# XXX Replace this with an appropriate author or supybot.Author instance.
__author__ = supybot.authors.unknown
# This is a dictionary mapping supyb
|
ot.Author instances to lists of
# contributions.
__contributors__ = {}
# This is a url where the most recent plugin package can be downloaded.
__url__ = '' # 'http://supybot.com/Members/yourname/TwitterStream/download'
from . import config
from . import plugin
from imp import reload
reload(plugin) # In case we're being reloaded.
# Add more reloads here if you add third-party modules and want them to be
# reloaded when this plugin is reloaded. Don't forget to import them as well!
if world.testing:
from . import test
Class = plugin.Class
configure = config.configure
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
|
UltraNurd/tweetworks-py
|
tweetworks/User.py
|
Python
|
gpl-3.0
| 3,623
| 0.002762
|
# -*- coding: utf-8 -*-
"""
Read Tweetworks API users from XML responses.
Nicolas Ward
@ultranurd
ultranurd@yahoo.com
http://www.ultranurd.net/code/tweetworks/
2009.06.19
"""
"""
This file is part of the Tweetworks Python API.
Copyright © 2009 Nicolas Ward
Tweetworks Python API is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later vers
|
ion.
Tweetworks Python API is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with the Tweetworks Python API. If not, see http://www.gnu.org/licenses/
The term "Tweetworks" is Copyr
|
ight © 2009 Tweetworks, LLC and is used
under license. See http://www.tweetworks.com/pages/terms
The use of this software requires a unique Tweetworks API key. You must be a
registered Tweetworks user, and have received an API key after requesting one
via http://www.tweetworks.com/pages/contact.
The term "Twitter" is Copyright © 2009 Twitter, Inc.
"""
# System includes
import lxml.etree
from lxml.builder import E
class User:
"""
Represents the data fields of a single Tweetworks user.
"""
def __init__(self, xml = None):
"""
Reads user fields from the XML, or create an empty user.
id - int - Tweetworks numeric user ID
username - string - Tweetworks/Twitter username
avatar_url - string - Twitter avatar URL
twitter_id - int - Twitter numeric user ID
"""
# Initialize an empty user if no XML was provided
if xml == None:
self.id = None
self.username = ""
self.avatar_url = ""
self.twitter_id = None
return
# User ID
self.id = int(xml.xpath("/user/id/text()")[0])
# User's Twitter username
self.username = unicode(xml.xpath("/user/username/text()")[0])
# User avatar URL (loaded from Amazon S3, obtained from Twitter)
self.avatar_url = unicode(xml.xpath("/user/avatar_url/text()")[0])
# User's "real" name
self.name = unicode(xml.xpath("/user/name/text()")[0])
# Twitter ID of the user; this should always be present but isn't always
twitter_id = xml.xpath("/user/twitter_id/text()")
if len(twitter_id) == 1:
self.twitter_id = int(twitter_id[0])
else:
self.twitter_id = None
def __str__(self):
"""
Returns this User as an XML string.
"""
# Get the XML tree and stringify
return lxml.etree.tostring(self.xml())
def __repr__(self):
"""
Returns an eval-ready string for this User's constructor.
"""
return "tweetworks.User(lxml.etree.parsestring(%s))" % repr(str(self))
def xml(self):
"""
Generates an XML element tree for this User.
"""
# Construct the XML tree representing this User
xml = E("user",
E("id", str(self.id)),
E("username", self.username),
E("avatar_url", self.avatar_url),
E("name", self.name),
E("twitter_id",
("", str(self.twitter_id))[self.twitter_id != None]),
)
# Return the XML tree (NOT a string)
return xml
|
Azure/azure-sdk-for-python
|
sdk/storagepool/azure-mgmt-storagepool/setup.py
|
Python
|
mit
| 2,679
| 0.001493
|
#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import re
import os.path
from io import open
from setuptools import find_packages, setup
# Change the PACKAGE_NAME only to change folder and
|
different name
PACKAGE_NAME = "azure-mgmt-storagepool"
PACKAGE_PPRINT_NAME = "Storage Pool Management"
# a-b-c => a/b/c
package_folder_path = PACKAGE_NAME.replace('-', '/')
# a-b-c => a.b.c
namespace_name = PACKAGE_NAME.replace('-', '.')
# Version extraction inspired from 'requests'
with open(os.path.join(package_folder_path, 'version.py')
if os.path.exists(os.path.join(package_folder_path, 'version.py'))
else os.path.join(package_folder_p
|
ath, '_version.py'), 'r') as fd:
version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
with open('README.md', encoding='utf-8') as f:
readme = f.read()
with open('CHANGELOG.md', encoding='utf-8') as f:
changelog = f.read()
setup(
name=PACKAGE_NAME,
version=version,
description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
long_description=readme + '\n\n' + changelog,
long_description_content_type='text/markdown',
license='MIT License',
author='Microsoft Corporation',
author_email='azpysdkhelp@microsoft.com',
url='https://github.com/Azure/azure-sdk-for-python',
keywords="azure, azure sdk", # update with search keywords relevant to the azure service / product
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'License :: OSI Approved :: MIT License',
],
zip_safe=False,
packages=find_packages(exclude=[
'tests',
# Exclude packages that will be covered by PEP420 or nspkg
'azure',
'azure.mgmt',
]),
install_requires=[
'msrest>=0.6.21',
'azure-common~=1.1',
'azure-mgmt-core>=1.3.0,<2.0.0',
],
python_requires=">=3.6"
)
|
steakunderscore/Bandwidth-Monitoring
|
src/user.py
|
Python
|
gpl-3.0
| 4,271
| 0.012175
|
'''
Created on 5/02/2010
@author: henry@henryjenkins.name
'''
import datetime
class user(object):
'''
classdocs
'''
dataUp = None
dataDown = None
macAddress = ""
name = ""
def __init__(self, mac="", name=""):
'''
Constructor
'''
self.name = name
self.dataUp = {}
self.dataDown = {}
self.macAddress = mac
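# dataUp/dataDown map a date to per-peak-period counters:
#   {date: {'on': {'data': 0, 'pkts': 0}, 'off': {'data': 0, 'pkts': 0}}}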
def getData(self, type, date=None, peak='other'):
'''
Method to retrieve data for either a set date, or the total data used by user
Return int, data used by this user
'''
data = 0
if date == None:
data = user.__getTotalData(self, type, peak)
else:
data = self.getDownData(type = type, date = date, peak = peak)
data += self.getUpData(type = type, date = date, peak = peak)
return data
def __getTotalData(self, type, peak='other'):
totalData = self.__getTotalUpData(type, peak)
totalData = totalData + self.__getTotalDownData(peak = peak, type = type)
return totalData
def getUpData(self, type, date=None, peak='other'):
data = 0
if date == None:
data = self.__getTotalUpData(type = type, peak = peak)
elif date in self.dataUp:
if type == 'on' or type == 'off':
data = self.dataUp[date][peak][type]
else:
data = self.dataUp[date]['on'][type] + self.dataUp[date]['off'][type]
return data
def __getTotalUpData(self, type, peak='other'):
dataTotal = 0
for date, data in self.dataUp.items():
if peak == 'on' or peak == 'off':
dataTotal += data[peak][type]
else:
dataTotal += data['on'][type]
dataTotal += data['off'][type]
return dataTotal
def getDownData(self, type, date=None, peak='other'):
data = 0
if date == None:
data = self.__getTotalDownData(type = type, peak = peak)
elif date in self.dataDown:
if type == 'on' or type == 'off':
data = self.dataDown[date][peak][type]
else:
data = self.dataDown[date]['on'][type] + self.dataDown[date]['off'][type]
return data
def __getTotalDownData(self, type, peak='other'):
dataTotal = 0
for date, data in self.dataDown.items():
if peak == 'on' or peak == 'off':
dataTotal += data[peak][type]
else:
|
dataTotal += data['on'][type]
dataTotal += data['off'][type]
return dataTotal
def addData(self, date=None, data=0, pkts=0,
|
peak='on', direction='up'):
if direction == 'up':
self.addUpData(date, data, pkts, peak)
elif direction == 'down':
self.addDownData(date, data, pkts, peak)
def addUpData(self, date=None, data=0, pkts=0, peak='on'): #TODO store packets
date = self.__checkDate(date)
if date not in self.dataUp:# Check if data for date already
self.dataUp[date] = {
'on': {'data': 0, 'pkts': 0},
'off': {'data': 0, 'pkts': 0}
}
self.dataUp[date][peak]['data'] += int(data)
self.dataUp[date][peak]['pkts'] += int(pkts)
def addDownData(self, date=None, data=0, pkts=0, peak='on'): #TODO store packets
date = self.__checkDate(date)
if date not in self.dataDown:  # check whether data for this date already exists
self.dataDown[date] = {
'on': {'data': 0, 'pkts': 0},
'off': {'data': 0, 'pkts': 0}
}
self.dataDown[date][peak]['data'] += int(data)
self.dataDown[date][peak]['pkts'] += int(pkts)
'''
Helper method
'''
def __checkDate(self, localDate=None):
if localDate == None:
localDate = datetime.date.today()
return localDate
def setMac(self, mac=None):
self.macAddress = mac
def setName(self, name=None):
self.name = name
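# Illustrative usage sketch (editor addition, not part of the original module);
# the MAC address and byte counts below are made-up values.
if __name__ == '__main__':
    u = user(mac='00:11:22:33:44:55', name='henry')
    u.addData(data=1024, pkts=3, peak='on', direction='down')
    u.addData(data=512, pkts=2, peak='off', direction='up')
    print(u.getData(type='data'))      # 1536: up + down, all dates
    print(u.getDownData(type='pkts'))  # 3: downstream packets only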
|
ntt-sic/cinder
|
cinder/volume/volume_types.py
|
Python
|
apache-2.0
| 5,726
| 0.000175
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 Zadara Storage Inc.
# Copyright (c) 2011 OpenStack Foundation
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright (c) 2010 Citrix Systems, Inc.
# Copyright 2011 Ken Pepple
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Built-in volume type properties."""
from oslo.config import cfg
from cinder import context
from cinder import db
from cinder import exception
from cinder.openstack.common.db import exception as db_exc
from cinder.openstack.common import log as logging
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
def create(context, name, extra_specs={}):
    """Creates volume types."""
try:
type_ref = db.volume_type_create(context,
dict(name=name,
extra_specs=extra_specs))
except db_exc.DBError as e:
LOG.exception(_('DB error: %s') % e)
raise exception.VolumeTypeCreateFailed(name=name,
extra_specs=extra_specs)
return type_ref
def destroy(context, id):
"""Marks volume types as deleted."""
if id is None:
msg = _("id cannot be None")
raise exception.InvalidVolumeType(reason=msg)
else:
db.volume_type_destroy(context, id)
def get_all_types(context, inactive=0, search_opts={}):
"""Get all non-deleted volume_types.
Pass true as argument if you want deleted volume types returned also.
"""
vol_types = db.volume_type_get_all(context, inactive)
if search_opts:
LOG.debug(_("Searching by: %s") % str(search_opts))
def _check_extra_specs_match(vol_type, searchdict):
for k, v in searchdict.iteritems():
if (k not in vol_type['extra_specs'].keys()
or vol_type['extra_specs'][k] != v):
return False
return True
# search_option to filter_name mapping.
filter_mapping = {'extra_specs': _check_extra_specs_match}
result = {}
for type_name, type_args in vol_types.iteritems():
# go over all filters in the list
for opt, values in search_opts.iteritems():
try:
filter_func = filter_mapping[opt]
except KeyError:
# no such filter - ignore it, go to next filter
continue
else:
if filter_func(type_args, values):
result[type_name] = type_args
break
vol_types = result
return vol_types
def get_volume_type(ctxt, id):
"""Retrieves single volume type by id."""
if id is None:
msg = _("id cannot be None")
raise exception.InvalidVolumeType(reason=msg)
if ctxt is None:
ctxt = context.get_admin_context()
return db.volume_type_get(ctxt, id)
def get_volume_type_by_name(context, name):
"""Retrieves single volume type by name."""
if name is None:
msg = _("name cannot be None")
raise exception.InvalidVolumeType(reason=msg)
return db.volume_type_get_by_name(context, name)
def get_default_volume_type():
"""Get the default volume type."""
name = CONF.default_volume_type
vol_type = {}
if name is not None:
ctxt = context.get_admin_context()
try:
vol_type = get_volume_type_by_name(ctxt, name)
except exception.VolumeTypeNotFoundByName as e:
# Couldn't find volume type with the name in default_volume_type
# flag, record this issue and move on
#TODO(zhiteng) consider add notification to warn admin
LOG.exception(_('Default volume type is not found, '
'please check default_volume_type config: %s'), e)
return vol_type
def is_key_value_present(volume_type_id, key, value, volume_type=None):
if volume_type_id is None:
return False
if volume_type is None:
volume_type = get_volume_type(context.get_admin_context(),
volume_type_id)
if (volume_type.get('extra_specs') is None or
volume_type['extra_specs'].get(key) != value):
return False
else:
return True
def get_volume_type_extra_specs(volume_type_id, key=False):
volume_type = get_volume_type(context.get_admin_context(),
volume_type_id)
extra_specs = volume_type['extra_specs']
if key:
if extra_specs.get(key):
return extra_specs.get(key)
else:
return False
else:
return extra_specs
def is_encrypted(context, volume_type_id):
if volume_type_id is None:
return False
encryption = db.volume_type_encryption_get(context, volume_type_id)
return encryption is not None
def get_volume_type_qos_specs(volume_type_id):
ctxt = context.get_admin_context()
res = db.volume_type_qos_specs_get(ctxt,
volume_type_id)
return res
|
zzeleznick/zDjango
|
venv/lib/python2.7/site-packages/pystache/tests/test_simple.py
|
Python
|
mit
| 2,785
| 0.002154
|
import unittest
import pystache
from pystache import Renderer
from examples.nested_context import NestedContext
from examples.complex import Complex
from examples.lambdas import Lambdas
from examples.template_partial import TemplatePartial
from examples.simple import Simple
from pystache.tests.common import EXAMPLES_DIR
from pystache.tests.common import AssertStringMixin
class TestSimple(unittest.TestCase, AssertStringMixin):
def test_nested_context(self):
renderer = Renderer()
view = NestedContext(renderer)
view.template = '{{#foo}}{{thing1}} and {{thing2}} and {{outer_thing}}{{/foo}}{{^foo}}Not foo!{{/foo}}'
actual = renderer.render(view)
self.assertString(actual, u"one and foo and two")
def test_looping_and_negation_context(self):
template = '{{#item}}{{header}}: {{name}} {{/item}}{{^item}} Shouldnt see me{{/item}}'
context = Complex()
renderer = Renderer()
actual = renderer.render(template, context)
self.assertEqual(actual, "Colors: red Colors: green Colors: blue ")
def test_empty_context(self):
template = '{{#empty_list}}Shouldnt see me {{/empty_list}}{{^empty_list}}Should see me{{/empty_list}}'
self.assertEqual(pystache.Renderer().render(template), "Should see me")
def test_callables(self):
view = Lambdas()
view.template = '{{#replace_foo_with_bar}}foo != bar. oh, it does!{{/replace_foo_with_bar}}'
renderer = Renderer()
actual = renderer.render(view)
self.assertString(actual, u'bar != bar. oh, it does!')
def test_rendering_partial(self):
renderer = Renderer(search_dirs=EXAMPLES_DIR)
view = TemplatePartial(renderer=renderer)
view.template = '{{>inner_partial}}'
actual = renderer.render(view)
self.assertString(actual, u'Again, Welcome!')
view.template = '{{#looping}}{{>inner_partial}} {{/looping}}'
actual = renderer.render(view)
self.assertString(actual, u"Again, Welcome! Again, Welcome! Again, Welcome! ")
def test_non_existent_value_renders_blank(self):
view = Simple()
template = '{{not_set}} {{blank}}'
self.assertEqual(pystache.Renderer().render(template), ' ')
def test_template_partial_extension(self):
"""
Side note:
From the spec--
Partial tags SHOULD be treated as standalone when appropriate.
In particular, this means that trailing newlines should be removed.
"""
renderer = Renderer(search_dirs=EXAMPLES_DIR, file_extension='txt')
view = TemplatePartial(renderer=renderer)
actual = renderer.render(view)
self.assertString(actual, u"""Welcome
-------
## Again, Welcome! ##""")
|
JaneliaSciComp/Neuroptikon
|
Source/lib/CrossPlatform/networkx/readwrite/leda.py
|
Python
|
bsd-3-clause
| 2,097
| 0.023367
|
"""
Read graphs in LEDA format.
See http://www.algorithmic-solutions.info/leda_guide/graphs/leda_native_graph_fileformat.html
"""
# Original author: D. Eppstein, UC Irvine, August 12, 2003.
# The original code at http://www.ics.uci.edu/~eppstein/PADS/ is public domain.
__author__ = """Aric Hagberg (hagberg@lanl.gov)"""
# Copyright (C) 2004-2009 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
__all__ = ['read_leda', 'parse_leda']
import networkx
from networkx.exception import NetworkXException, NetworkXError
from networkx.utils import _get_fh, is_string_like
def read_leda(path):
"""Read graph in GraphML format from path.
Returns an XGraph or XDiGraph."""
fh=_get_fh(path,mode='r')
G=parse_leda(fh)
return G
def parse_leda(lines):
"""Parse LEDA.GRAPH format from string or iterable.
Returns an Graph or DiGraph."""
if is_string_like(lines): lines=iter(lines.split('\n'))
lines = iter([line.rstrip('\n') for line in lines \
if not (line.startswith('#') or line.startswith('\n') or line=='')])
for i in range(3):
lines.next()
# Graph
du = int(lines.next()) # -1 directed, -2 undirected
if du==-1:
G = networkx.DiGraph()
else:
G = networkx.Graph()
# Nodes
n =int(lines.next()) # number of vertices
node={}
for i in range(1,n+1): # LEDA counts from 1 to n
symbol=lines.next().rstrip().strip('|{}| ')
if symbol=="": symbol=str(i) # use int if no label - could be trouble
node[i]=symbol
G.add_nodes_from([s for i,s in node.items()])
# Edges
m = int(lines.next()) # number of edges
for i in range(m):
try:
s,t,reversal,label=lines.next().split()
except:
raise NetworkXError,\
'Too few fields in LEDA.GRAPH edge %d' % (i+1)
# BEWARE: no handling of reversal edges
G.add_edge(node[int(s)],node[int(t)],label=label[2:-2])
return G
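# Illustrative example (editor addition, not part of the original module).
# The literal below is a guess at a minimal LEDA.GRAPH file: header lines,
# node/edge types, direction flag (-2 = undirected), 2 nodes and 1 labelled edge.
if __name__ == '__main__':
    example = ("LEDA.GRAPH\nstring\nint\n-2\n"
               "2\n|{a}|\n|{b}|\n"
               "1\n1 2 0 |{x}|\n")
    G = parse_leda(example)
    print G.nodes(), G.edges(data=True)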
|
Lana-Pa/Python-training
|
test/test_delete_contact_from_group.py
|
Python
|
apache-2.0
| 1,311
| 0.006865
|
from model.contact import Contact
from model.group import Group
from fixture.orm import ORMFixture
import random
def test_del_contact_from_group(app):
orm = ORMFixture(host="127.0.0.1", name="addressbook", user="root", password="")
# check for existing any group
if len(orm.get_group_list()) == 0:
app.group.create(Group(name="test"))
group = random.choice(orm.get_group_list()) # choose random group from list
if len(orm.get_contacts_in_group(Group(id=group.id))) == 0:
if len(orm.get_contacts_not_in_group(Group(id=group.id))) == 0:
app.contact.create(Contact(firstname="Ivan"))
contact_not_in_group = random.choice(orm.get_contacts_not_in_group(Group(id=group.id)))
app.contact.add_contact_to_group_by_id(contact_not_in_group.id, group.id)
old_contacts_in_group = orm.get_contacts_in_group(Group(id=group.id))
contact_in_group = random.choice(old_contacts_in_group) # choose random contact from list
app.contact.delete_contact_from_group_by_id(contact_in_group.id, group.id)
new_contacts_in_group = orm.get_contacts_in_group(Group(id=group.id))
old_contacts_in_group.remove(contact_in_group)
assert sorted(old_contacts_in_group, key=Contact.id_or_max) == sorted(new_contacts_in_group, key=Contact.id_or_max)
|
freevo/freevo2
|
src/plugins/jsonrpc/__init__.py
|
Python
|
gpl-2.0
| 8,340
| 0.003357
|
# -*- coding: iso-8859-1 -*-
# -----------------------------------------------------------------------
# jsonrpc - jsonrpc interface for XBMC-compatible remotes
# -----------------------------------------------------------------------
# $Id$
#
# JSONRPC and XBMC eventserver to be used for XBMC-compatible
# remotes. Only tested with Yatse so far. If something is not working,
# do not blame the remote, blame this plugin.
#
# Not all API calls are implemented yet.
#
# -----------------------------------------------------------------------
# Freevo - A Home Theater PC framework
# Copyright (C) 2014 Dirk Meyer, et al.
#
# First Edition: Dirk Meyer <https://github.com/Dischi>
# Maintainer: Dirk Meyer <https://github.com/Dischi>
#
# Please see the file AUTHORS for a complete list of authors.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MER-
# CHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# ----------------------------------------------------------------------- */
# python imports
import os
import logging
import socket
import urllib
# kaa imports
import kaa
import kaa.beacon
# freevo imports
from ... import core as freevo
# get logging object
log = logging.getLogger('freevo')
# generic functions
import utils
import eventserver
# jsonrpc callbacks
import videolibrary as VideoLibrary
import player as Player
import playlist as Playlist
class PluginInterface( freevo.Plugin ):
"""
JSONRPC and XBMC eventserver to be used for XBMC-compatible remotes
"""
@kaa.coroutine()
def plugin_activate(self, level):
"""
Activate the plugin
"""
super(PluginInterface, self).plugin_activate(level)
self.httpserver = freevo.get_plugin('httpserver')
if not self.httpserver:
raise RuntimeError('httpserver plugin not running')
self.httpserver.server.add_json_handler('/jsonrpc', self.jsonrpc)
self.httpserver.server.add_handler('/image/', self.provide_image)
self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self._sock.bind(('', freevo.config.plugin.jsonrpc.eventserver))
udp = kaa.Socket()
udp.wrap(self._sock, kaa.IO_READ | kaa.IO_WRITE)
udp.signals['read'].connect(eventserver.handle)
utils.imagedir = (yield kaa.beacon.get_db_info())['directory']
utils.cachedir = os.path.join(os.environ['HOME'], '.thumbnails')
self.api = {}
for module in ('VideoLibrary', 'Player', 'Playlist'):
for name in dir(eval(module)):
method = getattr(eval(module), name)
if callable(method) and not name.startswith('_'):
self.api[module + '.' + name] = method
@kaa.coroutine()
def provide_image(self, path, **attributes):
"""
HTTP callback for images
"""
filename = ''
path = urllib.unquote(path)
if path.startswith('beacon'):
filename = os.path.join(utils.imagedir, path[7:])
if path.startswith('cache'):
filename = os.path.join(utils.cachedir, path[6:])
if path.startswith('thumbnail'):
item = yield kaa.beacon.query(id=int(path.split('/')[2]), type=path.split('/')[1])
if len(item) != 1:
log.error('beacon returned wrong results')
yield None
thumbnail = item[0].get('thumbnail')
if thumbnail.needs_update or 1:
yield kaa.inprogress(thumbnail.create(priority=kaa.beacon.Thumbnail.PRIORITY_HIGH))
filename = thumbnail.large
if filename:
if os.path.isfile(filename):
yield open(filename).read(), None, None
log.error('no file: %s' % filename)
yield None
else:
yield None
def Application_GetProperties(self, properties):
"""
JsonRPC Callback Application.GetProperties
"""
result = {}
for prop in properties:
if prop == 'version':
result[prop] = {"major": 16,"minor": 0,"revision": "a5f3a99", "tag": "stable"}
elif prop == 'volume':
result[prop] = 100
elif prop == 'muted':
result[prop] = eventserver.muted
else:
raise AttributeError('unsupported property: %s' % prop)
return result
def Settings_GetSettingValue(self, setting):
"""
JsonRPC Settings.GetSettingValue (MISSING)
"""
return {}
def XBMC_GetInfoBooleans(self, booleans):
"""
JsonRPC Callback XBMC.GetInfoBooleans
"""
result = {}
for b in booleans:
if b == 'System.Platform.Linux':
result[b] = True
else:
result[b] = False
return result
def XBMC_GetInfoLabels(self, labels):
"""
JsonRPC Callback XBMC.GetInfoLabels
"""
result = {}
for l in labels:
# FIXME: use correct values for all these labels
if l == 'System.BuildVersion':
result[l] = "13.1"
elif l == 'System.KernelVersion':
result[l] = "Linux 3.11.0"
elif l == 'MusicPlayer.Codec':
result[l] = ""
elif l == 'MusicPlayer.SampleRate':
result[l] = ""
elif l == 'MusicPlayer.BitRate':
result[l] = ""
else:
raise AttributeError('unsupported label: %s' % l)
return result
def XBMC_Ping(self):
"""
JsonRPC Ping
"""
return ''
def JSONRPC_Ping(self):
"""
JsonRPC Ping
"""
return ''
def GUI_ActivateWindow(self, window, parameters=None):
"""
Switch Menu Type
"""
window = window.lower()
if window == 'pictures':
freevo.Event(freevo.MENU_GOTO_MEDIA).post('image', event_source='user')
elif window == 'musiclibrary':
freevo.Event(freevo.MENU_GOTO_MEDIA).post('audio', event_source='user')
elif window == 'videos':
if parameters and parameters[0] == 'MovieTitles':
freevo.Event(freevo.MENU_GOTO_MEDIA).post('video', 'movie', event_source='user')
if parameters and parameters[0] == 'TvShowTitles':
freevo.Event(freevo.MENU_GOTO_MEDIA).post('video', 'tv', event_source='user')
elif window == 'home':
freevo.Event(freevo.MENU_GOTO_MAINMENU).post(event_source='user')
else:
log.error('ActivateWindow: unsupported window: %s' % window)
@kaa.coroutine()
def jsonrpc(self, path, **attributes):
"""
HTTP callback for /jsonrpc
"""
if not attributes:
# supported XBMC API version
yield {"major": 6,"minor": 14,"patch": 3}
method = attributes.get('method')
params = attributes.get('params')
result = None
if method.startswith('Input'):
callback = eventserver.input(method[6:].lower(), params)
yield {'jsonrpc': '2.0', 'result': 'OK', 'id': attributes.get('id')}
callback = self.api.get(method, None) or getattr(self, method.replace('.', '_'), None)
if callback:
# log.info('%s(%s)' % (method, params))
if params is None:
result = callback()
else:
result = callback(**params)
if isinstance(result, kaa.InProgress):
result = yield result
|
JohannesFeldmann/pism
|
examples/storglaciaren/sg_create_3d.py
|
Python
|
gpl-2.0
| 7,325
| 0.006416
|
#!/usr/bin/env python
#
# Copyright (C) 2011 Andy Aschwanden
#
# This file is part of PISM.
#
# PISM is free software; you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# PISM is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License
# along with PISM; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import sys
import time
import numpy as np
from pyproj import Proj
from sys import stderr
write = stderr.write
# try different netCDF modules
try:
from netCDF4 import Dataset as CDF
except:
from netCDF3 import Dataset as CDF
from optparse import OptionParser
__author__ = "Andy Aschwanden"
# Create PISM-readable input file from Storglaciaren DEM
parser = OptionParser()
parser.usage = "usage: %prog [options]"
parser.description = "Preprocess Storglaciaren files."
(options, args) = parser.parse_args()
# Create PISM-readable input file from Storglaciaren DEM
write('------------------------------\n')
write('PISM-Storglaciaren example\n')
write('------------------------------\n')
# data dir
data_dir = './'
# Bed and Surface DEMs for Storglaciaren
XFile = data_dir + 'X.txt.gz'
YFile = data_dir + 'Y.txt.gz'
zBaseFile = data_dir + 'zBase.txt.gz'
zSurfFile = data_dir + 'zSurf.txt.gz'
# load coordinate information. Note: Swedish grid (RT90) uses inverse notation
# X -> northing, Y -> easting
try:
write('Reading northing coordinate infos from %s: ' % XFile)
X = np.loadtxt(XFile)
write('Done.\n')
write('Reading easting coordinate infos from %s: ' % YFile)
Y = np.loadtxt(YFile)
write('Done.\n')
except IOError:
write('ERROR: File %s or %s could not be found.\n' % (XFile, YFile))
exit(2)
# load Bed DEM
try:
write('Reading DEM from %s: ' % zBaseFile)
zBase = np.loadtxt(zBaseFile)
write('Done.\n')
except IOError:
write('ERROR: File %s could not be found.\n' % zBaseFile)
exit(2)
# load Surface DEM
try:
write('Reading DEM from %s: ' % zSurfFile)
zSurf = np.loadtxt(zSurfFile)
write('Done.\n')
except IOError:
write('ERROR: File %s could not be found.\n' % zSurfFile)
exit(2)
# Grid size. DEM has 10m spacing.
N = zBase.shape[1]
M = zBase.shape[0]
e0 = Y.min()
n0 = X.min()
de = 10 # m
dn = 10 # m
e1 = e0 + (N-1)*de
n1 = n0 + (M-1)*dn
easting = np.linspace(e0, e1, N)
northing = np.linspace(n0, n1, M)
# convert to lat/lon
# From http://lists.maptools.org/pipermail/proj/2008-December/004165.html:
#
# However, a simpler method, now recommended by the Swedish Land Survey
# instead of a 7-parameter shift, is to start from the WGS84 datum, and than
# tweak the projection parameters a little: just use a Transverse Mercator
# with
# central meridian: 15" 48' 22.624306" E
# scale factor: 1.00000561024
# false easting: 1500064.274 m
# false northing: -667.711 m
# ( http://www.lantmateriet.se/templates/LMV_Page.aspx?id=5197&lang=EN )
projRT90 = "+proj=tmerc +datum=WGS84 +lon_0=-15.806284 +x_0=1500064.274 +y_0=-667.711 +k=1.00000561024 +units=m"
ee, nn = np.meshgrid(easting, northing)
projection = Proj(projRT90)
longitude, latitude = projection(ee, nn, inverse=True)
write("Coordinates of the lower-left grid corner:\n"
" easting = %.0f\n"
" northing = %.0f\n"
"Grid size:\n"
" rows = %d\n"
" columns = %d\n" % (e0, n0, N, M))
# Fill value
fill_value = -9999
bed_valid_min = -5000.0
thk_valid_min = 0.0
bed = np.flipud(zBase)
dem = np.flipud(zSurf) # ignored by bootstrapping
thk = np.flipud(zSurf-zBase) # used for bootstrapping
# Replace NaNs with zeros
thk = np.nan_to_num(thk)
# There are some negative thickness values
# Quick and dirty: set to zero
# some inconsistencies in the original data still needs to be sorted out
# (filtering)
thk[thk<0] = 0
# Output filename
ncfile = 'pism_storglaciaren_3d.nc'
# Write the data:
nc = CDF(ncfile, "w",format='NETCDF3_CLASSIC') # for netCDF4 module
# Create dimensions x and y
nc.createDimension("x", size=easting.shape[0])
nc.createDimension("y", size=northing.shape[0])
x = nc.createVariable("x", 'f4', dimensions=("x",))
x.units = "m";
x.long_name = "easting"
x.standard_name = "projection_x_coordinate"
y = nc.createVariable("y", 'f4', dimensions=("y",))
y.units = "m";
y.long_name = "northing"
y.standard_name = "projection_y_coordinate"
x[:] = easting
y[:] = northing
def def_var(nc, name, units, fillvalue):
var = nc.createVariable(name, 'f', dimensions=("y", "x"),fill_value=fillvalue)
var.units = units
return var
lon_var = def_var(nc, "lon", "degrees_east", None)
lon_var.standard_name = "longitude"
lon_var[:] = longitude
lat_var = def_var(nc, "lat", "degrees_north", None)
lat_var.standard_name = "latitude"
lat_var[:] = latitude
bed_var = def_var(nc, "topg", "m", fill_value)
bed_var.valid_min = bed_valid_min
bed_var.standard_name = "bedrock_altitude"
bed_var.coordinates = "lat lon"
bed_var[:] = bed
thk_var = def_var(nc, "thk", "m", fill_value)
thk_var.valid_min = thk_valid_min
thk_var.standard_name = "land_ice_thickness"
thk_var.coordinates = "lat lon"
thk_var[:] = thk
dem_var = def_var(nc, "usurf_from_dem", "m", fill_value)
dem_var.standard_name = "surface_altitude"
dem_var.coordinates = "lat lon"
dem_var[:] = dem
# generate (somewhat) reasonable acab
acab_max = 2.5 # m/a
acab_min = -3.0 # m/a
acab_up = easting.min() + 200 # m; location of upstream end of linear acab
acab_down = easting.max() - 600 # m;location of downstream end of linear acab
acab = np.ones_like(dem)
acab[:] = acab_max - (acab_max-acab_min) * (easting - acab_up) / (acab_down - acab_up)
acab[thk<1] = acab_min
acab_var = def_var(nc, "climatic_mass_balance", "m year-1", fill_value)
acab_var.standard_name = "land_ice_surface_specific_mass_balance"
acab_var[:] = acab
# Set boundary conditions for Scandinavian-type polythermal glacier
# ------------------------------------------------------------------------------
#
# (A) Surface temperature for temperature equation bc
T0 = 273.15 # K
Tma = -6.0 # degC, mean annual air temperature at Tarfala
zcts = 1300 # m a.s.l.; altitude where CTS is at the surface, projected to topg
slope = 100 # m; range around which surface temp transition happens
# old abrupt jump:
#artm = np.zeros((M,N),float) + T0
#artm[bed<zcts] = T0 + Tma # Scandinavian-type polythermal glacier
# smoothed version; FIXME: can't we at least have it depend on initial DEM?
# additional lapse rate?
artm = T0 + Tma * (zcts + slope - bed) / (2.0 * slope)
artm[bed<zcts-slope] = T0 + Tma
artm[bed>zcts+slope] = T0
artm_var = def_var(nc, "ice_surface_temp", "K", fill_value)
artm_var[:] = artm
# set global attributes
nc.Conventions = "CF-1.4"
historysep = ' '
historystr = time.asctime() + ': ' + historysep.join(sys.argv) + '\n'
setattr(nc, 'history', historystr)
nc.projection = projRT90
nc.close()
write('Done writing NetCDF file %s!\n' % ncfile)
|
amolenaar/gaphor
|
examples/list_classes.py
|
Python
|
lgpl-2.1
| 1,239
| 0
|
#!/usr/bin/python
"""This script lists classes and optionally attributes from UML model created
with Gaphor."""
import optparse
import sys
from gaphor import UML
from gaphor.application import Session
# Setup command line options.
usage = "usage: %prog [options] file.gaphor"
def main():
parser = optparse.OptionParser(usage=usage)
parser.add_option(
"-a",
"--attributes",
dest="attrs",
action="store_true",
help="Print class attributes",
)
(options, args) = parser.parse_args()
if len(args) != 1:
parser.print_help()
sys.exit(1)
# The model file to load.
model = args[0]
# Create the Gaphor application object.
session = Session()
# Get services we need.
element_factory = session.get_service("element_factory")
file_manager = session.get_service("file_manager")
# Load model from file.
file_manager.load(model)
# Find all classes using factory select.
for cls in element_factory.select(UML.Class):
print(f"Found class {cls.name}")
if options.attrs:
for attr in cls.ownedAttribute:
print(f" Attribute: {attr.name}")
if __name__ == "__main__":
main()
|
auth0/auth0-python
|
auth0/v3/management/rules.py
|
Python
|
mit
| 4,742
| 0.002109
|
from .rest import RestClient
class Rules(object):
"""Rules endpoint implementation.
Args:
domain (str): Your Auth0 domain, e.g: 'username.auth0.com'
token (str): Management API v2 Token
telemetry (bool, optional): Enable or disable Telemetry
(defaults to True)
timeout (float or tuple, optional): Change the requests
connect and read timeout. Pass a tuple to specify
both values separately or a float to set both to it.
(defaults to 5.0 for both)
rest_options (RestClientOptions): Pass an instance of
RestClientOptions to configure additional RestClient
options, such as rate-limit retries.
(defaults to None)
"""
def __init__(self, domain, token, telemetry=True, timeout=5.0, protocol="https", rest_options=None):
self.domain = domain
self.protocol = protocol
self.client = RestClient(jwt=token, telemetry=telemetry, timeout=timeout, options=rest_options)
def _url(self, id=None):
url = '{}://{}/api/v2/rules'.format(self.protocol, self.domain)
if id is not None:
return '{}/{}'.format(url, id)
return url
def all(self, stage='login_success', enabled=True, fields=None,
include_fields=True, page=None, per_page=None, include_totals=False):
"""Retrieves a list of all rules.
Args:
stage (str, optional): Retrieves rules that match the execution stage.
Defaults to login_success.
enabled (bool, optional): If provided, retrieves rules that match
the value, otherwise all rules are retrieved.
fields (list, optional): A list of fields to include or exclude
(depending on include_fields) from the result. Leave empty to
retrieve all fields.
include_fields (bool, optional): True if the fields specified are
to be included in the result, False otherwise. Defaults to True.
page (int, optional): The result's page number (zero based). When not set,
the default value is up to the server.
per_page (int, optional): The amount of entries per page. When not set,
the default value is up to the server.
include_totals (bool, optional): True if the query summary is
to be included in the result, False otherwise. Defaults to False.
See: https://auth0.com/docs/api/management/v2#!/Rules/get_rules
"""
params = {
'stage': stage,
'fields': fields and ','.join(fields) or None,
'include_fields': str(include_fields).lower(),
'page': page,
'per_page': per_page,
'include_totals': str(include_totals).lower()
}
# since the default is True, this is here to disable the filter
if enabled is not None:
params['enabled'] = str(enabled).lower()
return self.client.get(self._url(), params=params)
def create(self, body):
"""Creates a new rule.
Args:
body (dict): Attributes for the newly created rule.
See: https://auth0.com/docs/api/v2#!/Rules/post_rules
"""
return self.client.post(self._url(), data=body)
def get(self, id, fields=None, include_fields=True):
"""Retrieves a rule by its ID.
Args:
id (str): The id of the rule to retrieve.
fields (list, optional): A list of fields to include or exclude
(depending on include_fields) from the result. Leave empty to
retrieve all fields.
include_fields (bool, optional): True if the fields specified are
to be included in the result, False otherwise. Defaults to True.
See: https://auth0.com/docs/api/management/v2#!/Rules/get_rules_by_id
"""
params = {'fields': fields and ','.join(fields) or None,
'include_fields': str(include_fields).lower()}
return self.client.get(self._url(id), params=params)
def delete(self, id):
"""Delete a rule.
Args:
id (str): The id of the rule to delete.
See: https://auth0.com/docs/api/management/v2#!/Rules/delete_rules_by_id
"""
return self.client.delete(self._url(id))
def update(self, id, body):
"""Update an existing rule
Args:
id (str): The id of the rule to modify.
body (dict): Attributes to modify.
See: https://auth0.com/docs/api/v2#!/Rules/patch_rules_by_id
"""
return self.client.patch(self._url(id), data=body)
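# Illustrative usage sketch (editor addition, not part of the original module);
# the domain, token and rule id below are placeholders, not real credentials.
if __name__ == '__main__':
    rules = Rules(domain='username.auth0.com', token='MGMT_API_V2_TOKEN')
    print(rules.all(enabled=True, fields=['id', 'name']))
    rules.update('rul_example_id', {'enabled': False})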
|
iNecas/katello
|
cli/test/katello/tests/core/template/template_delete_test.py
|
Python
|
gpl-2.0
| 1,936
| 0.002583
|
import unittest
import os
from katello.tests.core.action_test_utils import CLIOptionTestCase, CLIActionTestCase
from katello.tests.core.organization import organization_data
from katello.tests.core.template import template_data
import katello.client.core.template
from katello.client.core.template import Delete
from katello.client.api.utils import ApiDataError
class RequiredCLIOptionsTests(CLIOptionTestCase):
#requires: organization, name
#optional: environment (defaults to Library)
action = Delete()
disallowed_options = [
('--environment=dev', '--name=template_1'),
('--environment=dev', '--org=ACME'),
]
allowed_options = [
('--org=ACME', '--name=template_1'),
('--org=ACME', '--environment=dev', '--name=template_1'),
]
class TemplateInfoTest(CLIActionTestCase):
ORG = organization_data.ORGS[0]
ENV = organization_data.ENVS[0]
TPL = template_data.TEMPLATES[0]
OPTIONS = {
'org': ORG['name'],
'environment': ENV['name'],
'name': TPL['name'],
}
def setUp(self):
self.set_action(Delete())
self.set_module(katello.client.core.template)
self.mock_printer()
self.mock_options(self.OPTIONS)
self.mock(self.module, 'get_template', self.TPL)
self.mock(self.action.api, 'delete')
def test_it_finds_the_template(self):
self.run_action()
self.module.get_template.assert_called_once_with(self.ORG['name'], self.ENV['name'], self.TPL['name'])
def test_it_returns_error_when_template_not_found(self):
self.mock(self.module, 'get_template').side_effect = ApiDataError
self.run_action(os.EX_DATAERR)
def test_it_returns_success_when_template_found(self):
self.run_action(os.EX_OK)
def test_it_calls_delete_api(self):
self.run_action()
self.action.api.delete.assert_called_once_with(self.TPL['id'])
|
alphagov/notifications-api
|
app/performance_dashboard/rest.py
|
Python
|
mit
| 3,531
| 0.003398
|
from datetime import datetime
from flask import Blueprint, jsonify, request
from app.dao.fact_notification_status_dao import (
get_total_notifications_for_date_range,
)
from app.dao.fact_processing_time_dao import (
get_processing_time_percentage_for_date_range,
)
from app.dao.services_dao import get_live_services_with_organisation
from app.errors import register_errors
from app.performance_dashboard.performance_dashboard_schema import (
performance_dashboard_request,
)
from app.schema_validation import validate
performance_dashboard_blueprint = Blueprint('performance_dashboard', __name__, url_prefix='/performance-dashboard')
register_errors(performance_dashboard_blueprint)
@performance_dashboard_blueprint.route('')
def get_performance_dashboard():
# All statistics are as of last night; this matches the existing performance platform
# and avoids the need to query notifications.
if request.args:
# Is it ok to reuse this? - should probably create a new one
validate(request.args, performance_dashboard_request)
# If start and end date are not set, we are expecting today's stats.
today = str(datetime.utcnow().date())
start_date = datetime.strptime(request.args.get('start_date', today), '%Y-%m-%d').date()
end_date = datetime.strptime(request.args.get('end_date', today), '%Y-%m-%d').date()
total_for_all_time = get_total_notifications_for_date_range(start_date=None, end_date=None)
total_notifications, emails, sms, letters = transform_results_into_totals(total_for_all_time)
totals_for_date_range = get_total_notifications_for_date_range(start_date=start_date, end_date=end_date)
processing_time_results = get_processing_time_percentage_for_date_range(start_date=start_date, end_date=end_date)
services = get_live_services_with_organisation()
stats = {
"total_notifications": total_notifications,
"email_notifications": emails,
"sms_notifications": sms,
"letter_notifications": letters,
"notifications_by_type": transform_into_notification_by_type_json(totals_for_date_range),
"processing_time": transform_processing_time_results_to_json(processing_time_results),
"live_service_count": len(services),
"services_using_notify": transform_services_to_json(services)
}
return jsonify(stats)
def transform_results_into_totals(total_notifications_results):
total_notifications = 0
emails = 0
sms = 0
letters = 0
for x in total_notifications_results:
total_notifications += x.emails
total_notifications += x.sms
total_notifications += x.letters
emails += x.emails
sms += x.sms
letters += x.letters
return total_notifications, emails, sms, letters
def transform_into_notification_by_type_json(total_notifications):
j = []
for x in total_notifications:
j.append({"date": x.bst_date, "emails": x.emails, "sms": x.sms, "letters": x.letters})
return j
def transform_processing_time_results_to_json(processing_time_results):
j = []
for x in processing_time_results:
j.append({"date": x.date, "percentage_under_10_seconds": x.percentage})
return j
def transform_services_to_json(services_results):
j = []
for x in services_results:
j.append({"service_id": x.service_id, "service_name": x.service_name,
"organisation_id": x.organisation_id, "organisation_name": x.organisation_name}
)
return j
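# Illustrative sketch (editor addition, not part of the original module): the
# transform helpers above only need rows exposing .bst_date/.emails/.sms/.letters,
# so a namedtuple stands in for the DAO results here.
if __name__ == '__main__':
    from collections import namedtuple
    Row = namedtuple('Row', ['bst_date', 'emails', 'sms', 'letters'])
    rows = [Row('2021-03-01', 10, 5, 2), Row('2021-03-02', 7, 3, 1)]
    print(transform_results_into_totals(rows))             # (28, 17, 8, 3)
    print(transform_into_notification_by_type_json(rows))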
|
dpogue/korman
|
korman/properties/modifiers/logic.py
|
Python
|
gpl-3.0
| 5,501
| 0.003272
|
# This file is part of Korman.
#
# Korman is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Korman is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Korman. If not, see <http://www.gnu.org/licenses/>.
import bpy
from bpy.props import *
from PyHSPlasma import *
from .base import PlasmaModifierProperties
from ..prop_world import game_versions
from ...exporter import ExportError
from ... import idprops
class PlasmaVersionedNodeTree(idprops.IDPropMixin, bpy.types.PropertyGroup):
name = StringProperty(name="Name")
version = EnumProperty(name="Version",
description="Plasma versions this node tree exports under",
items=game_versions,
options={"ENUM_FLAG"},
default=set(list(zip(*game_versions))[0]))
node_tree = PointerProperty(name="Node Tree",
description="Node Tree to export",
type=bpy.types.NodeTree)
node_name = StringProperty(name="Node Ref",
description="Attach a reference to this node")
@classmethod
def _idprop_mapping(cls):
return {"node_tree": "node_tree_name"}
def _idprop_sources(self):
return {"node_tree_name": bpy.data.node_groups}
class PlasmaAdvancedLogic(PlasmaModifierProperties):
pl_id = "advanced_logic"
bl_category = "Logic"
bl_label = "Advanced"
bl_description = "Plasma Logic Nodes"
bl_icon = "NODETREE"
logic_groups = CollectionProperty(type=PlasmaVersionedNodeTree)
active_group_index = IntProperty(options={"HIDDEN"})
def export(self, exporter, bo, so):
version = exporter.mgr.getVer()
for i in self.logic_groups:
our_versions = [globals()[j] for j in i.version]
if version in our_versions:
if i.node_tree is None:
raise ExportError("'{}': Advanced Logic is missing a node tree for '{}'".format(bo.name, i.version))
# If node_name is defined, then we're only adding a reference. We will make sure that
# the entire node tree is exported once before the post_export step, however.
if i.node_name:
exporter.want_node_trees[i.node_tree.name] = (bo, so)
node = i.node_tree.nodes.get(i.node_name, None)
if node is None:
raise ExportError("Node '{}' does not exist in '{}'".format(i.node_name, i.node_tree.name))
# We are going to assume get_key will do the adding correctly. Single modifiers
# should fetch the appropriate SceneObject before doing anything, so this will
# be a no-op in that case. Multi modifiers should accept any SceneObject, however
node.get_key(exporter, so)
else:
exporter.node_trees_exported.add(i.node_tree.name)
i.node_tree.export(exporter, bo, so)
def harvest_actors(self):
actors = set()
for i in self.logic_groups:
actors.update(i.node_tree.harvest_actors())
return actors
class PlasmaSpawnPoint(PlasmaModifierProperties):
pl_id = "spawnpoint"
bl_category = "Logic"
bl_label = "Spawn Point"
bl_description = "Point at which avatars link into the Age"
def export(self, exporter, bo, so):
# Not much to this modifier... It's basically a flag that tells the engine, "hey, this is a
# place the avatar can show up." Nice to have a simple one to get started with.
spawn = exporter.mgr.add_object(pl=plSpawnModifier, so=so, name=self.key_name)
@property
def requires_actor(self):
return True
class PlasmaMaintainersMarker(PlasmaModifierProperties):
pl_id = "maintainersmarker"
bl_category = "Logic"
bl_label = "Maintainer's Marker"
bl_description = "Designates an object as the D'ni coordinate origin point of the Age."
bl_icon = "OUTLINER_DATA_EMPTY"
calibration = EnumProperty(name="Calibration",
description="State of repair for the Marker",
items=[
("kBroken", "Broken",
"A marker which reports scrambled coordinates to the KI."),
("kRepaired", "Repaired",
"A marker which reports blank coordinates to the KI."),
("kCalibrated", "Calibrated",
"A marker which reports accurate coordinates to the KI.")
])
def export(self, exporter, bo, so):
maintmark = exporter.mgr.add_object(pl=plMaintainersMarkerModifier, so=so, name=self.key_name)
maintmark.calibration = getattr(plMaintainersMarkerModifier, self.calibration)
@property
def requires_actor(self):
return True
|
ganeshchand/python3
|
advanced/regular_expression/regular_expresion_anchors.py
|
Python
|
apache-2.0
| 1,306
| 0.011485
|
__author__ = 'ganeshchand'
import re
def regex_search(pattern_string, string_source):
if re.search(pattern_string,string_source):
print("%s matched %s" % (pattern_string, string_source))
else:
print("%s did not match %s" % (pattern_string, string_source))
# matching a pattern in one string
mystring_anchors = 'aaaaa!@#$!@#$aaaaaadefg'
pattern_withoutanchors = r'@#\$!' # the $ sign needs escaping when it is not meant to carry its special meaning.
# It is an anchor reserved character - it marks the end of the string
# that means, if you say aab$ , you are looking for a string that ends with pattern aab
# there should be absolutely nothing beyond aab
regex_search(pattern_withoutanchors, mystring_anchors)
pattern_withanchors = r'defg$'
regex_search(pattern_withanchors, mystring_anchors)
# patterns to be matched
patterns = ["defg$", "^d", "^a", "^a*!"]
# defg$ : string must end with defg
# ^d: must begin with d
# ^a: must begin with a
# ^a*!: must begin with any number of a's (possibly none) followed by !
for patterntobematched in patterns:
regex_search(patterntobematched, mystring_anchors)
# matching a pattern in an array of string
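# Illustrative continuation (editor addition): applying each pattern to every
# string in a list, reusing the regex_search helper and patterns defined above.
strings_to_check = ['aaaaa!', 'defg', 'not a match']
for patterntobematched in patterns:
    for candidate in strings_to_check:
        regex_search(patterntobematched, candidate)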
|
souravsingh/khmer
|
sandbox/sweep-reads2.py
|
Python
|
bsd-3-clause
| 3,734
| 0.000268
|
#!/usr/bin/env python
# This file is part of khmer, https://github.com/dib-lab/khmer/, and is
# Copyright (C) 2012-2015, Michigan State University.
# Copyright (C) 2015, The Regents of the University of California.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the name of the Michigan State University nor the names
# of its contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Contact: khmer-project@idyll.org
"""
Use a set of query reads to sweep out overlapping reads from another file.
% python scripts/sweep-reads2.py <query reads> <search reads>
Results end up in <search reads>.sweep2.
Use '-h' for parameter help.
"""
import sys
import khmer
import os.path
import screed
from khmer import khmer_args
from khmer.khmer_args import (build_nodegraph_args, DEFAULT_MAX_TABLESIZE)
from khmer.utils import broken_paired_reader, write_record, write_record_pair
def main():
parser = build_nodegraph_args()
parser.add_argument('-o', '--outfile',
help='output file; default is "infile".sweep2')
parser.add_argument('-q', '--quiet')
parser.add_argument('input_filename')
parser.add_argument('read_filename')
args = parser.parse_args()
inp = args.input_filename
readsfile = args.read_filename
outfile = os.path.basename(readsfile) + '.sweep2'
if args.outfile:
outfile = args.outfile
outfp = open(outfile, 'w')
# create a countgraph data structure
ht = khmer_args.create_countgraph(args)
# load contigs, connect into N partitions
print('loading input reads from', inp)
ht.consume_seqfile(inp)
print('starting sweep.')
m = 0
K = ht.ksize()
instream = screed.open(readsfile)
for n, is_pair, read1, read2 in broken_paired_reader(instream):
if n % 10000 == 0:
print('...', n, m)
if is_pair:
count1 = ht.get_median_count(read1.sequence)[0]
count2 = ht.get_median_count(read2.sequence)[0]
if count1 or count2:
m += 1
write_record_pair(read1, read2, outfp)
else:
count = ht.get_median_count(read1.sequence)[0]
if count:
m += 1
write_record(read1, outfp)
if __name__ == '__main__':
main()
# vim: set filetype=python tabstop=4 softtabstop=4 shiftwidth=4 expandtab:
# vim: set textwidth=79:
|
iw518/fernando
|
app/main/hr/views.py
|
Python
|
gpl-3.0
| 859
| 0.001164
|
#!/usr/bin/env python
# encoding: utf-8
# -------------------------------------------------------------------------------
# version: ??
# author: fernando
# license: MIT License
# contact: iw518@163.com
# purpose: views
# date: 2016-12-14
# copyright: copyright 2016 Xu, Aiwu
# -------------------------------------------------------------------------------
from flask import redirect, url_for, render_template
from app.model.models import Team
from . import hr
from .forms import RegisterForm
@hr.route('/team_manage', methods=['POST', 'GET'])
def team_manage():
form = RegisterForm()
if form.validate_on_submit():
Team(job_id=form.job_selections.data, user_id=form.user_selections.data)
return redirect(url_for('order.employee'))
return render_template('hr/team_manage.html', form=form)
|
zonble/lyg0vtw_client.py
|
lyg0vtw_client/lyg0vtw_client.py
|
Python
|
gpl-2.0
| 5,779
| 0.029071
|
#!/usr/bin/env python
# encoding: utf-8
'A simple client for accessing api.ly.g0v.tw.'
import json
import unittest
try:
import urllib.request as request
import urllib.parse as urlparse
except:
import urllib2 as request
import urllib as urlparse
def assert_args(func, *args):
def inner(*args):
required_arg = args[1]
assert(len(required_arg) > 0)
return func(*args)
return inner
class LY_G0V_Client:
BASE_URL = 'http://api-beta.ly.g0v.tw/v0/'
# BASE_URL = 'http://api.ly.g0v.tw/v0/'
def _fetch_data(self, url_path):
URL = LY_G0V_Client.BASE_URL + url_path
try:
f = request.urlopen(URL)
r = f.read()
r = r.decode('utf-8')
return json.loads(r)
except Exception as e:
print("Failed to call " + URL)
raise e
def fetch_all_bills(self):
'Fetch all bills.'
return self._fetch_data('collections/bills')
def fetch_all_motions(self):
'Fetch all motions.'
return self._fetch_data('collections/motions')
def fetch_all_sittings(self):
'Fetch all sittings.'
return self._fetch_data('collections/sittings')
@assert_args
def fetch_bill(self, bill_id):
'Fetch metadata of a specific bill.'
return self._fetch_data('collections/bills/' + str(bill_id))
@assert_args
def fetch_bill_data(self, bill_id):
'Fetch data of a specific bill.'
assert(len(bill_id) > 0)
return self._fetch_data('collections/bills/' + str(bill_id) + '/data')
@assert_args
def fetch_motions_related_with_bill(self, bill_id):
'Fetch motions related with a specific bill.'
query = json.dumps({'bill_ref': bill_id})
query = urlparse.quote(query)
return self._fetch_data('collections/motions/?q='+query)
@assert_args
def fetch_sitting(self, sitting_id):
'Fetch metadata of a specific sitting.'
return self._fetch_data('collections/sittings/' + str(sitting_id))
class TestClient(unittest.TestCase):
def setUp(self):
import time
time.sleep(1)
self.client = LY_G0V_Client()
def _test_bill(self, bill):
self.assertTrue(isinstance(bill, dict), str(type(bill)))
keys = ('proposed_by', 'doc', 'abstract', 'sponsors',
'summary', 'bill_ref', 'motions', 'cosponsors',
'bill_id');
for key in keys:
self.assertTrue(key in bill)
if isinstance(bill['doc'], dict):
self.assertTrue('pdf' in bill['doc'])
self.assertTrue('doc' in bill['doc'])
def _test_bills(self, bills):
for key in ('entries', 'paging'):
self.assertTrue(key in bills)
for key in ('l', 'sk', 'count'):
self.assertTrue(key in bills['paging'])
for bill in bills['entries']:
self._test_bill(bill)
def _test_motion(self, motion):
self.assertTrue(isinstance(motion, dict), str(type(motion)))
keys = ('result', 'resolution', 'motion_class', 'bill_id',
'agenda_item', 'bill_ref', 'tts_id',
'subitem', 'status', 'sitting_id', 'item',
'summary', 'tts_seq', 'proposed_by', 'doc')
for key in keys:
self.assertTrue(key in motion, key)
if isinstance(motion['doc'], dict):
self.assertTrue('pdf' in motion['doc'])
self.assertTrue('doc' in motion['doc'])
def _test_motions(self, motions):
self.assertTrue(isinstance(motions, dict), str(type(motions)))
for key in ('entries', 'paging'):
self.assertTrue(key in motions)
for key in ('l', 'sk', 'count'):
self.assertTrue(key in motions['paging'])
for motion in motions['entries']:
self._test_motion(motion)
def _test_data(self, data):
for key in ('related', 'content'):
self.assertTrue(key in data)
self.assertTrue(isinstance(data['related'], list))
self.assertTrue(isinstance(data['content'], list))
for item in data['content']:
content_keys = ('name', 'type', 'content', 'header')
for content_key in content_keys:
self.assertTrue(content_key in item)
self.assertTrue(len(item['name']) > 0)
self.assertTrue(isinstance(item['name'], str) or \
isinstance(item['name'], unicode))
self.assertTrue(len(item['type']) > 0)
self.assertTrue(isinstance(item['type'], str) or \
isinstance(item['type'], unicode))
self.assertTrue(len(item['content']) > 0)
self.assertTrue(isinstance(item['content'], list))
for content in item['content']:
self.assertTrue(isinstance(content, list))
for line in content:
self.assertTrue(isinstance(line, str))
self.assertTrue(len(item['header']) > 0)
self.assertTrue(isinstance(item['header'], list))
for header in item['header']:
self.assertTrue(isinstance(header, str) or \
isinstance(header, unicode))
def _test_sitting(self, sitting):
self.assertTrue(isinstance(sitting, dict), str(type(sitting)))
keys = ('dates', 'ad', 'videos', 'extra', 'motions',
'sitting', 'summary', 'session', 'committee', 'id',
'name')
for key in keys:
self.assertTrue(key in sitting, key)
def _test_sittings(self, sittings):
self.assertTrue(isinstance(sittings, dict), str(type(sittings)))
for key in ('entries', 'paging'):
self.assertTrue(key in sittings)
for key in ('l', 'sk', 'count'):
self.assertTrue(key in sittings['paging'])
for sitting in sittings['entries']:
self._test_sitting(sitting)
def test_all_bills(self):
bills = self.client.fetch_all_bills()
self._test_bills(bills)
def test_all_motions(self):
motions = self.client.fetch_all_motions()
self._test_motions(motions)
def test_all_sittings(self):
sittings = self.client.fetch_all_sittings()
self._test_sittings(sittings)
def test_fetch_bill(self):
bill = self.client.fetch_bill('1021021071000400')
self._test_bill(bill)
def test_fetch_bill_data(self):
data = self.client.fetch_bill_data('1021021071000400')
self._test_data(data)
def test_fetch_motions_related_with_bill(self):
motions = self.client.fetch_motions_related_with_bill('1021021071000400')
self._test_motions(motions)
if __name__ == '__main__':
unittest.main()
|
gdelt-analysis/worker
|
src/HeatMap.py
|
Python
|
gpl-3.0
| 1,473
| 0.002716
|
import redis
class BetaRedis(redis.StrictRedis):
def georadius(self, name, *values):
return self.execute_command('GEORADIUS', name, *values)
def geoadd(self, name, *values):
return self.execute_command('GEOADD', name, *values)
def geopos(self, name, *values):
return self.execute_command('GEOPOS', name, *values)
class RedisHeatMap:
REDIS_KEY = 'heatmap'
REDIS_KEY_GEO = REDIS_KEY + '_GEO'
REDIS_KEY_HASH = REDIS_KEY + '_HASH'
def __init__(self, host='localhost', port=6379, db=0):
self.r = BetaRedis(host=host, port=port, db=db)
self.r.flushdb()
def gen(self, data, distance=200000, min_sum=1):
for point in data:
try:
res = self.r.georadius(self.REDIS_KEY_GEO, point['lng'], point['lat'], distance, 'm')
if not res:
self.r.geoadd(self.REDIS_KEY_GEO, point['lng'], point['lat'], point['key'])
self.r.hset(self.REDIS_KEY_HASH, point['key'], 1)
else:
self.r.hincrby(self.REDIS_KEY_HASH, res[0])
except redis.exceptions.ResponseError as e:
pass
for key in self.r.hscan_iter(self.REDIS_KEY_HASH):
lng, lat = map(lambda x: x.decode(), self.r.geopos(self.REDIS_KEY_GEO, key[0].decode())[0])
if int(key[1]) >= min_sum:
yield {'key': key[0].decode(), 'lat': lat, 'lng': lng, 'sum': int(key[1])}
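# Illustrative usage sketch (editor addition, not part of the original module);
# assumes a local Redis server new enough to support the GEO commands.
if __name__ == '__main__':
    heatmap = RedisHeatMap(host='localhost', port=6379, db=0)
    points = [
        {'key': 'berlin', 'lat': 52.52, 'lng': 13.405},
        {'key': 'potsdam', 'lat': 52.39, 'lng': 13.06},
    ]
    for cluster in heatmap.gen(points, distance=200000, min_sum=1):
        print(cluster)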
|
puttarajubr/commcare-hq
|
corehq/pillows/reportcase.py
|
Python
|
bsd-3-clause
| 1,031
| 0.00582
|
import copy
from corehq.pillows.case import CasePillow
from corehq.pillows.mappings.reportcase_mapping import REPORT_CASE_MAPPING, REPORT_CASE_INDEX
from django.conf import settings
from .base import convert_property_dict
class ReportCasePillow(CasePillow):
"""
Simple/Common Case properties Indexer
an extension to CasePillow that provides for indexing of custom case properties
"""
es_alias = "report_cases"
es_type = "report_case"
es_index = REPORT_CASE_INDEX
default_mapping = REPORT_CASE_MAPPING
def get_unique_id(self):
return self.calc_meta()
def change_transform(self, doc_dict):
if self.get_domain(doc_dict) not in getattr(settings, 'ES_CASE_FULL_INDEX_DOMAINS', []):
#full indexing is only enabled for select domains on an opt-in basis
return None
doc_ret = copy.deepcopy(doc_dict)
convert_property_dict(doc_ret, self.default_mapping, override_root_keys=['_id', 'doc_type', '_rev', '#export_tag'])
return doc_ret
|
rg3915/orcamentos
|
orcamentos/urls.py
|
Python
|
mit
| 341
| 0.002933
|
from django.urls import include, path
from django.contrib import admin
urlpatterns = [
path('', include('orcamentos.core.urls', namespace='core')),
path('crm/', include('orcamentos.crm.urls', namespace='crm')),
path('proposal/', include('orcamentos.proposal.urls', namespace='proposal')),
path('admin/', admin.site.urls),
]
|
wagtail/wagtail
|
wagtail/images/views/serve.py
|
Python
|
bsd-3-clause
| 2,857
| 0.0014
|
import imghdr
from wsgiref.util import FileWrapper
from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.http import (
HttpResponse,
HttpResponsePermanentRedirect,
StreamingHttpResponse,
)
from django.shortcuts import get_object_or_404
from django.urls import reverse
from django.utils.decorators import classonlymethod
from django.views.generic import View
from wagtail.images import get_image_model
from wagtail.images.exceptions import InvalidFilterSpecError
from wagtail.images.models import SourceImageIOError
from wagtail.images.utils import generate_signature, verify_signature
from wagtail.utils.sendfile import sendfile
def generate_image_url(image, filter_spec, viewname="wagtailimages_serve", key=None):
signature = generate_signature(image.id, filter_spec, key)
url = reverse(viewname, args=(signature, image.id, filter_spec))
url += image.file.name[len("original_images/") :]
return url
class ServeView(View):
model = get_image_model()
action = "serve"
key = None
@classonlymethod
def as_view(cls, **initkwargs):
if "action" in initkwargs:
if initkwargs["action"] not in ["serve", "redirect"]:
raise ImproperlyConfigured(
"ServeView action must be either 'serve' or 'redirect'"
)
return super(ServeView, cls).as_view(**initkwargs)
def get(self, request, signature, image_id, filter_spec, filename=None):
if not verify_signature(
signature.encode(), image_id, filter_spec, key=self.key
):
raise PermissionDenied
image = get_object_or_404(self.model, id=image_id)
# Get/generate the rendition
try:
rendition = image.get_rendition(filter_spec)
except SourceImageIOError:
return HttpResponse(
"Source image file not found", content_type="text/plain", status=410
)
except InvalidFilterSpecError:
return HttpResponse(
"Invalid filter spec: " + filter_spec,
content_type="text/plain",
status=400,
)
return getattr(self, self.action)(rendition)
def serve(self, rendition):
# Open and serve the file
rendition.file.open("rb")
image_format = imghdr.what(rendition.file)
return StreamingHttpResponse(
FileWrapper(rendition.file), content_type="image/" + image_format
)
def redirect(self, rendition):
# Redirect to the file's public location
return HttpResponsePermanentRedirect(rendition.url)
serve = ServeView.as_view()
class SendFileView(ServeView):
backend = None
def serve(self, rendition):
return sendfile(self.request, rendition.file.path, backend=self.backend)
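# Illustrative wiring sketch (editor addition, not part of the original module):
# a URLconf entry of roughly this shape is what generate_image_url() above
# reverses; the exact route regex is an assumption and would live in a
# project's urls.py rather than in this views module.
#
#   from django.urls import re_path
#   from wagtail.images.views.serve import ServeView
#
#   urlpatterns = [
#       re_path(r'^images/([^/]*)/(\d*)/([^/]*)/[^/]*$',
#               ServeView.as_view(), name='wagtailimages_serve'),
#   ]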
|
grubbcode/minplot
|
svgdatashapes_dt.py
|
Python
|
mit
| 9,956
| 0.028726
|
#
# SVGdatashapes_dt 0.3.6 SVGdatashapes.com github.com/pepprseed/svgdatashapes
# Copyright 2016-8 Stephen C. Grubb stevegrubb@gmail.com MIT License
#
# This module provides date / time support for svgdatashapes
#
import svgdatashapes
from svgdatashapes import p_dtformat
import collections
import datetime as d
import time
import calendar
class AppDt_Error(Exception): pass
def dateformat( format=None ):
# set the format string to be used for parsing datetimes found in the input data
# format codes explained here: https://docs.python.org/2/library/datetime.html#strftime-and-strptime-behavior
# Note that when they say zero-padded this refers to output only; parsing can handle eg. 3/4/2015
global p_dtformat
if format == None: raise AppDt_Error( "dateformat() expecting 'format' arg" )
p_dtformat = format
return True
def toint( dateval=None ):
# for the given date/time string in whatever format, return the int utime value
# toint( "1970-01-01.00:00" ) == 0
if dateval == None: return None
try:
tt = d.datetime.strptime( dateval, p_dtformat ).timetuple() # parse out the components
utime = calendar.timegm( tt )
except: raise AppDt_Error( "toint() got bad datetime value: " + str(dateval) + " (expecting format of " + p_dtformat + ")" )
return utime
def make( utime, fmt=None ):
# format the given dt value as per fmt...
if utime == None: return None
if fmt == None: fmt = p_dtformat
try:
# tt = time.gmtime( utime )
outstr = d.datetime.utcfromtimestamp(utime).strftime( fmt )
except: raise AppDt_Error( "nicedt error on utime: " + str(utime) + " and format: " + p_dtformat )
return outstr
def datediff( val1, val2, result="days" ):
# return integer number of days difference (dt1 - dt2)
try: dt1 = d.datetime.strptime( val1, p_dtformat )
except: raise AppDt_Error( "datediff() invalid val1 arg: " + str(val1) )
try: dt2 = d.datetime.strptime( val2, p_dtformat )
except: raise AppDt_Error( "datediff() invalid val2 arg: " + str(val2) )
if result != "seconds":
dt1 = dt1.replace( second=0, microsecond=0 )
dt2 = dt2.replace( second=0, microsecond=0 )
if result == "days":
dt1 = dt1.replace( hour=0, minute=0 )
dt2 = dt2.replace( hour=0, minute=0 )
div = 86400
elif result == "hours":
dt1 = dt1.replace( minute=0 )
dt2 = dt2.replace( minute=0 )
div = 3600
elif result == "minutes": div = 60
elif result == "seconds": div = 1
return int(calendar.timegm( dt1.timetuple() ) - calendar.timegm( dt2.timetuple() ) ) / div
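# --- Editor's illustration (not part of the upstream module): a small sketch tying
# --- dateformat(), toint() and datediff() together; the sample values are assumptions
# --- chosen so the arithmetic is easy to verify by hand.
if __name__ == "__main__":
    dateformat("%Y-%m-%d")                                       # parse dates like 2015-03-04
    print(toint("1970-01-02"))                                   # 86400 seconds after the epoch
    print(datediff("2015-03-04", "2015-03-01", result="days"))   # 3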
def daterange( column=None, datarows=None, nearest=None, inc=None, stubformat=None,
inc2=None, stub2format=None, stub2place="append", stub2first=True ):
dfindex = svgdatashapes._getdfindex( column, datarows )
if nearest == None: raise AppDt_Error( "findrange() requires a nearest= arg " )
if inc == None: inc = nearest
# if inc != nearest:
# if nearest == "year" and inc == "month": pass
# elif nearest == "month" and inc == "day": pass
# elif nearest == "day" and inc == "hour": pass
# else: raise AppDt_Error( "findrange() invalid nearest= and inc= combination" )
if stubformat == None: stubformat = p_dtformat
# find raw min and max
dmin = 999999999999999999999999999; dmax = -999999999999999999999999999;
for row in datarows:
if dfindex == -1: strval = row[column] # dict rows
else: strval = row[dfindex]
utime = toint( strval )
if utime < dmin: dmin = utime
if utime > dmax: dmax = utime
dtmin = d.datetime.utcfromtimestamp( dmin ).replace( second=0, microsecond=0 ) # always zero out seconds and ms
dtmax = d.datetime.utcfromtimestamp( dmax ).replace( second=0, microsecond=0 )
if nearest[-6:] != "minute": dtmin.replace( minute=0 ); dtmax.replace( minute=0 ) # usually zero out minutes
if nearest == "year":
dtmin = dtmin.replace( month=1, day=1, hour=0 )
yr = dtmax.year;
dtmax = dtmax.replace( year=yr+1, month=1, day=1, hour=0 )
elif nearest == "3month":
newmon = ((dtmin.month / 4) * 3) + 1
dtmin = dtmin.replace( month=newmon, day=1, hour=0 )
newmon = (((dtmax.month / 4)+1) * 3) + 1
yr = dtmax.year
if newmon >= 12: newmon = 1; yr += 1;
dtmax = dtmax.replace( year=yr, month=newmon, day=1, hour=0 )
elif nearest == "month":
dtmin = dtmin.replace( day=1, hour=0 )
mon = dtmax.month; yr = dtmax.year;
if mon == 12: dtmax = dtmax.replace( year=yr+1, month=1, day=1, hour=0 )
else: dtmax = dtmax.replace( month=mon+1, day=1, hour=0 )
elif nearest == "week" or nearest[:8] == "week_day": # week = Monday-based week; or week_dayN where N=1 for Tues; N=6 for Sun, etc
wday = time.gmtime( dmin ).tm_wday # struct_time tm_wday convention is that 0 = monday
dmin -= (wday*86400) # move timestamp back by necessary no. of days to reach opening week boundary (86400 sec per day)
if nearest[:8] == "week_day": dmin -= ((7 - int(nearest[-1:])) * 86400)
dtmin = d.datetime.utcfromtimestamp( dmin ).replace( hour=0 )
wday = 7 - time.gmtime( dmax ).tm_wday
dmax += (wday*86400) # move timestamp fwd by necessary no. of days to reach the next week boundary
if nearest[:8] == "week_day": dmax += ((7 - int(nearest[-1:])) * 86400)
dtmax = d.datetime.utcfromtimestamp( dmax ).replace( hour=0 )
elif nearest == "day":
dtmin = dtmin.replace( hour=0 )
dmax += 86400 # jump forward one day
dtmax = d.datetime.utcfromtimestamp( dmax ).replace( hour=0 )
elif nearest in ["12hour", "6hour", "4hour", "3hour"]:
nhr = int(nearest[:-4])
newhr = (dtmin.hour / nhr) * nhr
dtmin = dtmin.replace( hour=newhr )
newhr = ((dtmax.hour / nhr)+1) * nhr
day = dtmax.day
if newhr >= 24: newhr = 0; day += 1
dtmax = dtmax.replace( day=day, hour=newhr )
elif nearest == "hour":
dtmin = dtmin.replace( minute=0 )
hr = dtmax.hour
if hr == 23:
dmax += 3600 # jump forward one hour (there are 3600 sec per hour)
dtmax = d.datetime.utcfromtimestamp( dmax ) # no replace necessary
else: dtmax = dtmax.replace( hour=hr+1, minute=0 )
elif nearest in [ "30minute", "10minute" ]:
nmin = int(nearest[:-6])
newmin = (dtmin.minute / nmin ) * nmin
dtmin = dtmin.replace( minute=newmin )
newmin = ((dtmax.minute / nmin)+1) * nmin
hr = dtmax.hour
if newmin >= 60: newmin = 0; hr += 1 # date rollover not imp.
dtmax = dtmax.replace( hour=hr, minute=newmin )
elif nearest == "minute":
# dtmin is all set, just compute dtmax...
newmin = dtmax.minute + 1
hr = dtmax.hour
if newmin >= 60: newmin = 0; hr += 1
dtmax = dtmax.replace( hour=hr, minute=newmin )
else: raise AppDt_Error( "findrange got unrecognized nearest= arg: " + str(nearest) )
axmin = calendar.timegm( dtmin.timetuple() )
axmax = calendar.timegm( dtmax.timetuple() )
# at this point, dtmin and dtmax are the axis min and max as datetime type
# and axmin and axmax are the axis min and max as int timestamps
# now build a list of ready-to-render stubs with int positions...
# will eventually add options for month rollover, year rollover, day rollover, etc.
stublist = []
iloop = 0
dtcur = dtmin
utime = axmin
stub = dtcur.strftime( stubformat ) # do the first stub
if inc2 != None and stub2first == True:
        stub2 = dtcur.strftime( stub2format )
        if stub2place == "prepend": stub = stub2 + stub
elif stub2place == "replace": stub = stub2
else: stub = stub + stub2
stublist.append( [utime, stub] )
while iloop < 500: # sanity backstop
yr = dtcur.year
mon = dtcur.month
day = dtcur.day
if inc == "mont

jsonbrazeal/tictactoe | tictactoe/urls.py | Python | mit | 1,037 | 0
"""tictactoe URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
    1. Import the include() function: from django.urls import include, path
    2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf.urls.static import static
from tictactoe import settings
urlpatterns = [
path('admin/', admin.site.urls),
path('tictactoe/', include('tictactoe.game.urls'), name='game'),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
# serving static files like this should not be done in production

wildchildyn/autism-website | yanni_env/lib/python3.6/site-packages/sqlalchemy/testing/schema.py | Python | gpl-3.0 | 3,556 | 0
# testing/schema.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from . import exclusions
from .. import schema, event
from . import config
__all__ = 'Table', 'Column',
table_options = {}
def Table(*args, **kw):
"""A schema.Table wrapper/hook for dialect-specific tweaks."""
test_opts = dict([(k, kw.pop(k)) for k in list(kw)
if k.startswith('test_')])
kw.update(table_options)
if exclusions.against(config._current, 'mysql'):
if 'mysql_engine' not in kw and 'mysql_type' not in kw:
if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts:
kw['mysql_engine'] = 'InnoDB'
else:
kw['mysql_engine'] = 'MyISAM'
# Apply some default cascading rules for self-referential foreign keys.
    # MySQL InnoDB has some issues around selecting self-refs too.
if exclusions.against(config._current, 'firebird'):
table_name = args[0]
        unpack = (config.db.dialect.
identifier_preparer.unformat_identifiers)
# Only going after ForeignKeys in Columns. May need to
# expand to ForeignKeyConstraint too.
fks = [fk
for col in args if isinstance(col, schema.Column)
for fk in col.foreign_keys]
for fk in fks:
# root around in raw spec
ref = fk._colspec
if isinstance(ref, schema.Column):
name = ref.table.name
else:
# take just the table name: on FB there cannot be
# a schema, so the first element is always the
# table name, possibly followed by the field name
name = unpack(ref)[0]
if name == table_name:
if fk.ondelete is None:
fk.ondelete = 'CASCADE'
if fk.onupdate is None:
fk.onupdate = 'CASCADE'
return schema.Table(*args, **kw)
def Column(*args, **kw):
"""A schema.Column wrapper/hook for dialect-specific tweaks."""
test_opts = dict([(k, kw.pop(k)) for k in list(kw)
if k.startswith('test_')])
if not config.requirements.foreign_key_ddl.enabled_for_config(config):
args = [arg for arg in args if not isinstance(arg, schema.ForeignKey)]
col = schema.Column(*args, **kw)
if test_opts.get('test_needs_autoincrement', False) and \
kw.get('primary_key', False):
if col.default is None and col.server_default is None:
col.autoincrement = True
# allow any test suite to pick up on this
col.info['test_needs_autoincrement'] = True
# hardcoded rule for firebird, oracle; this should
# be moved out
if exclusions.against(config._current, 'firebird', 'oracle'):
def add_seq(c, tbl):
c._init_items(
schema.Sequence(_truncate_name(
config.db.dialect, tbl.name + '_' + c.name + '_seq'),
optional=True)
)
event.listen(col, 'after_parent_attach', add_seq, propagate=True)
return col
def _truncate_name(dialect, name):
if len(name) > dialect.max_identifier_length:
return name[0:max(dialect.max_identifier_length - 6, 0)] + \
"_" + hex(hash(name) % 64)[2:]
else:
return name
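# --- Editor's illustration (not part of the upstream file): inside a configured SQLAlchemy
# --- test run, the wrappers above are used like schema.Table/Column plus test_* hints,
# --- which this module turns into dialect-specific tweaks (InnoDB on MySQL, Sequences on
# --- Firebird/Oracle). The table definition below is a made-up example.
def _example_users_table(metadata):
    from sqlalchemy import Integer, String
    return Table(
        'users', metadata,
        Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
        Column('name', String(50)),
        test_needs_fk=True,
    )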

Som-Energia/invoice-janitor | admin/Baixa_Socis/unsubscribe_members.py | Python | agpl-3.0 | 4,279 | 0.002104
# -*- encoding: utf-8 -*-
import argparse
import sys
import traceback
from hashlib import md5
import mailchimp_marketing as MailchimpMarketing
from mailchimp_marketing.api_client import ApiClientError  # used in archive_members_from_list()
import requests
from consolemsg import step, error, success
from erppeek import Client
import time
import configdb
ERP_CLIENT = Client(**configdb.erppeek)
MAILCHIMP_CLIENT = MailchimpMarketing.Client(
dict(api_key=configdb.MAILCHIMP_APIKEY, server=configdb.MAILCHIMP_SERVER_PREFIX)
)
def get_member_category_id():
module = 'som_partner_account'
semantic_id = 'res_partner_category_soci'
IrModelData = ERP_CLIENT.model('ir.model.data')
member_category_relation = IrModelData.get_object_reference(
module, semantic_id
)
if member_category_relation:
return member_category_relation[-1]
def get_not_members_email_list():
Soci = ERP_CLIENT.model('somenergia.soci')
ResPartnerAddress = ERP_CLIENT.model('res.partner.address')
category_id = get_member_category_id()
not_members = Soci.search([
('category_id', 'not in', [category_id]),
('ref', 'like', 'S%')
])
not_members_partner_ids = [
soci['partner_id'][0] for soci in Soci.read(not_members, ['partner_id'])
]
    address_list = ResPartnerAddress.search(
[('partner_id', 'in', not_members_partner_ids)]
)
    emails_list = [
address.get('email', 'not found')
for address in ResPartnerAddress.read(address_list, ['email'])
]
return emails_list
def get_mailchimp_list_id(list_name):
all_lists = MAILCHIMP_CLIENT.lists.get_all_lists(
fields=['lists.id,lists.name'],
count=100
)['lists']
for l in all_lists:
if l['name'] == list_name:
return l['id']
raise Exception("List: <{}> not found".format(list_name))
def get_subscriber_hash(email):
subscriber_hash = md5(email.lower()).hexdigest()
return subscriber_hash
def archive_members_from_list(list_name, email_list):
list_id = get_mailchimp_list_id(list_name)
operations = []
for email in email_list:
operation = {
"method": "DELETE",
"path": "/lists/{list_id}/members/{subscriber_hash}".format(
list_id=list_id,
subscriber_hash=get_subscriber_hash(email)
),
"operation_id": email,
}
operations.append(operation)
payload = {
"operations": operations
}
try:
response = MAILCHIMP_CLIENT.batches.start(payload)
    except ApiClientError as exc:
        msg = "An error occurred in the archiving batch request, reason: {}"
        error(msg.format(exc.text))
else:
batch_id = response['id']
while response['status'] != 'finished':
time.sleep(2)
response = MAILCHIMP_CLIENT.batches.status(batch_id)
step("Archived operation finished!!")
step("Total operations: {}, finished operations: {}, errored operations: {}".format(
response['total_operations'],
response['finished_operations'],
response['errored_operations']
))
result_summary = requests.get(response['response_body_url'])
result_summary.raise_for_status()
return result_summary.content
def archieve_members_in_list(list_name):
email_list = get_not_members_email_list()
result = archive_members_from_list(list_name, email_list)
return result
def main(list_name, output):
result = archieve_members_in_list(list_name.strip())
with open(output, 'w') as f:
f.write(result)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Archivieren Sie E-Mails in großen Mengen'
)
parser.add_argument(
'--list',
dest='list_name',
required=True,
help="nom de la llista de mailchimp"
)
parser.add_argument(
'--output',
dest='output',
required=True,
help="Fitxer de sortida amb els resultats"
)
args = parser.parse_args()
try:
main(args.list_name, args.output)
except Exception as e:
traceback.print_exc(file=sys.stdout)
error("El proceso no ha finalizado correctamente: {}", str(e))
else:
success("Script finalizado")

yordan-desta/QgisIns | python/plugins/processing/gui/ScriptEditorDialog.py | Python | gpl-2.0 | 7,417 | 0.000404
# -*- coding: utf-8 -*-
"""
***************************************************************************
EditScriptDialog.py
---------------------
Date : December 2012
Copyright : (C) 2012 by Alexander Bruy
Email : alexander dot bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
from processing.modeler.ModelerUtils import ModelerUtils
__author__ = 'Alexander Bruy'
__date__ = 'December 2012'
__copyright__ = '(C) 2012, Alexander Bruy'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import codecs
import sys
import json
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.Qsci import *
from qgis.core import *
from qgis.utils import iface
from processing.gui.ParametersDialog import ParametersDialog
from processing.gui.HelpEditionDialog import HelpEditionDialog
from processing.algs.r.RAlgorithm import RAlgorithm
from processing.algs.r.RUtils import RUtils
from processing.script.ScriptAlgorithm import ScriptAlgorithm
from processing.script.ScriptUtils import ScriptUtils
from processing.ui.ui_DlgScriptEditor import Ui_DlgScriptEditor
import processing.resources_rc
class ScriptEditorDialog(QDialog, Ui_DlgScriptEditor):
SCRIPT_PYTHON = 0
SCRIPT_R = 1
hasChanged = False
def __init__(self, algType, alg):
QDialog.__init__(self)
self.setupUi(self)
self.setWindowFlags(Qt.WindowMinimizeButtonHint |
Qt.WindowMaximizeButtonHint |
Qt.WindowCloseButtonHint)
# Set icons
self.btnSave.setIcon(
QgsApplication.getThemeIcon('/mActionFileSave.svg'))
self.btnSaveAs.setIcon(
QgsApplication.getThemeIcon('/mActionFileSaveAs.svg'))
self.btnEditHelp.setIcon(QIcon(':/processing/images/edithelp.png'))
self.btnRun.setIcon(QIcon(':/processing/images/runalgorithm.png'))
self.btnCut.setIcon(QgsApplication.getThemeIcon('/mActionEditCut.png'))
self.btnCopy.setIcon(
QgsApplication.getThemeIcon('/mActionEditCopy.png'))
self.btnPaste.setIcon(
QgsApplication.getThemeIcon('/mActionEditPaste.png'))
self.btnUndo.setIcon(QgsApplication.getThemeIcon('/mActionUndo.png'))
self.btnRedo.setIcon(QgsApplication.getThemeIcon('/mActionRedo.png'))
# Connect signals and slots
self.btnSave.clicked.connect(self.save)
self.btnSaveAs.clicked.connect(self.saveAs)
self.btnEditHelp.clicked.connect(self.editHelp)
self.btnRun.clicked.connect(self.runAlgorithm)
self.btnCut.clicked.connect(self.editor.cut)
self.btnCopy.clicked.connect(self.editor.copy)
self.btnPaste.clicked.connect(self.editor.paste)
self.btnUndo.clicked.connect(self.editor.undo)
self.btnRedo.clicked.connect(self.editor.redo)
self.editor.textChanged.connect(lambda: self.setHasChanged(True))
self.alg = alg
self.algType = algType
if self.alg is not None:
self.filename = self.alg.descriptionFile
self.editor.setText(self.alg.script)
else:
self.filename = None
self.update = False
self.help = None
self.setHasChanged(False)
self.editor.setLexerType(self.algType)
def editHelp(self):
if self.alg is None:
if self.algType == self.SCRIPT_PYTHON:
alg = ScriptAlgorithm(None, unicode(self.editor.text()))
elif self.algType == self.SCRIPT_R:
alg = RAlgorithm(None, unicode(self.editor.text()))
else:
alg = self.alg
dlg = HelpEditionDialog(alg)
dlg.exec_()
# We store the description string in case there were not saved
# because there was no filename defined yet
if self.alg is None and dlg.descriptions:
self.help = dlg.descriptions
def save(self):
self.saveScript(False)
def saveAs(self):
self.saveScript(True)
def saveScript(self, saveAs):
if self.filename is None or saveAs:
if self.algType == self.SCRIPT_PYTHON:
scriptDir = ScriptUtils.scriptsFolder()
filterName = self.tr('Python scripts (*.py)')
elif self.algType == self.SCRIPT_R:
scriptDir = RUtils.RScriptsFolder()
filterName = self.tr('Processing R script (*.rsx)')
            self.filename = unicode(QFileDialog.getSaveFileName(self,
self.tr('Save script'), scriptDir,
filterName))
if self.filename:
if self.algType == self.SCRIPT_PYTHON \
and not self.filename.lower().endswith('.py'):
self.filename += '.py'
if self.algType == self.SCRIPT_R \
and not self.filename.lower().endswith('.rsx'):
self.filename += '.rsx'
text = unicode(self.editor.text())
if self.alg is not None:
self.alg.script = text
try:
with codecs.open(self.filename, 'w', encoding='utf-8') as fout:
fout.write(text)
except IOError:
QMessageBox.warning(self, self.tr('I/O error'),
self.tr('Unable to save edits. Reason:\n %s')
% unicode(sys.exc_info()[1]))
return
self.update = True
# If help strings were defined before saving the script for
# the first time, we do it here
if self.help:
with open(self.filename + '.help', 'w') as f:
json.dump(self.help, f)
self.help = None
self.setHasChanged(False)
else:
self.filename = None
def setHasChanged(self, hasChanged):
self.hasChanged = hasChanged
self.btnSave.setEnabled(hasChanged)
def runAlgorithm(self):
if self.algType == self.SCRIPT_PYTHON:
alg = ScriptAlgorithm(None, unicode(self.editor.text()))
alg.provider = ModelerUtils.providers['script']
if self.algType == self.SCRIPT_R:
alg = RAlgorithm(None, unicode(self.editor.text()))
alg.provider = ModelerUtils.providers['r']
dlg = alg.getCustomParametersDialog()
if not dlg:
dlg = ParametersDialog(alg)
canvas = iface.mapCanvas()
prevMapTool = canvas.mapTool()
dlg.show()
dlg.exec_()
if canvas.mapTool() != prevMapTool:
try:
canvas.mapTool().reset()
except:
pass
canvas.setMapTool(prevMapTool)

piger/managesieve-cli | managesieve/sieveshell.py | Python | gpl-3.0 | 13,053 | 0.003601
#!/usr/bin/python
"""
sieveshell - remotely manipulate sieve scripts
SYNOPSIS
sieveshell [--user=user] [--authname=authname] [--realm=realm]
[--exec=script] [--auth-mech=mechanism] server
sieveshell --help
sieveshell allows users to manipulate their scripts on a remote server.
It works via MANAGESIEVE, a work in progress protocol.
Use --help to get a list of the currently supported authentication
mechanisms.
The following commands are recognized:
list - list scripts on server
put <filename> [<target name>]
- upload script to server
get <name> [<filename>]
- get script. if no filename display to stdout
edit <name> - edit a script, if not existant, create on save
delete <name> - delete script.
activate <name> - set a script as the active script
deactivate - deactivate all scripts
quit - quit
"""
__version__ = "0.4"
__author__ = "Hartmut Goebel <h.goebel@crazy-compilers.com>"
__copyright__ = "Copyright (C) 2003-2011 by Hartmut Goebel <h.goebel@crazy-compilers.com>"
__license__ = "GPL"
import sys
import getpass
import inspect
import managesieve
import os
from .utils import read_config_defaults, exec_command
sieve = None
SUPPRESS = '--suppress--' # token for suppressing 'OK' after cmd execution
### the order of functions determines the order for 'help' ###
def cmd_help(cmd=None):
"""help - this screen (shortcut '?')
help <command> - help on command"""
## output order is the same as the sourcecode order
if cmd:
if __command_map.has_key(cmd):
cmd = __command_map[cmd]
if __commands.has_key('cmd_%s' % cmd):
print __commands['cmd_%s' % cmd].__doc__
else:
print 'Unknown command', repr(cmd)
print "Type 'help' for list of commands"
else:
cmds = __commands.values()
cmds.sort(lambda a,b: cmp(a.func_code.co_firstlineno,
b.func_code.co_firstlineno))
for c in cmds:
print c.__doc__
return SUPPRESS
def cmd_list():
"""list - list scripts on server"""
res, scripts = sieve.listscripts()
if res == 'OK':
for scriptname, active in scripts:
if active: print scriptname, '\t<<-- active'
else: print scriptname
res = SUPPRESS
return res
def cmd_put(filename, scriptname=None):
"""put <filename> [<target name>]
- upload script to server"""
if not scriptname: scriptname = filename
try:
scriptdata = open(filename).read()
except IOError, e:
print "Can't read local file %s:" % filename, e.args[1]
return SUPPRESS
return sieve.putscript(scriptname, scriptdata)
def cmd_get(scriptname, filename=None):
"""get <name> [<filename>]
- get script. if no filename display to stdout"""
res, scriptdata = sieve.getscript(scriptname)
if res == 'OK':
if filename:
try:
open(filename, 'w').write(scriptdata)
except IOError, e:
print "Can't write local file %s:" % filename, e.args[1]
return SUPPRESS
else:
print scriptdata
res = SUPPRESS
return res
def cmd_edit(scriptname):
"""edit <name> - edit a script, not existant, create on save"""
def Choice(msg, choices):
while 1:
sys.stdout.writelines((msg, ' '))
answer = sys.stdin.readline().strip()[:1].lower()
i = choices.find(answer)
if i >= 0:
# valid answer
return i
# else: continue loop
def YesNoQuestion(msg):
# Order 'ny' will return boolen values (y=1)
return Choice(msg + ' (y/n)', 'ny')
def SaveToFile(msg, scriptname, tmpname):
if not YesNoQuestion('%s Save script to file?' % msg):
return
scriptname = os.path.join(os.getcwd(), scriptname)
        sys.stdout.write('Enter filename (default %s):' % scriptname)
filename = sys.stdin.readline().strip()
if filename == '':
filename = scriptname
scriptdata = open(tmpname).read()
open(filename, 'w').write(scriptdata)
res, scripts = sieve.listscripts()
if res != 'OK': return res
for name, active in scripts:
        if name == scriptname:
res, scriptdata = sieve.getscript(scriptname)
if res != 'OK': return res
break
else:
if not YesNoQuestion('Script not on server. Create new?'):
return 'OK'
# else: script will be created when saving
scriptdata = ''
import tempfile
filename = tempfile.mktemp('.siv')
open(filename, 'w').write(scriptdata)
editor = os.environ.get('EDITOR', 'vi')
while 1:
res = os.system('%s %s' % (editor, filename))
if res: # error editing
            if not YesNoQuestion('Editor returned failure. Continue?'):
os.remove(filename)
return SUPPRESS
else:
continue # re-edit
# else: editing okay
while 1:
scriptdata = open(filename).read()
res = sieve.putscript(scriptname, scriptdata)
if res == 'OK':
return res
# res is NO, BYE
print res, sieve.response_text or sieve.response_code
if res == 'NO':
res = Choice('Upload failed. (E)dit/(R)etry/(A)bort?', 'era')
if res == 0: break # finish inner loop, return to 'edit'
elif res == 1: # retry upload
continue
SaveToFile('', scriptname, filename)
else: # BYE
SaveToFile('Server closed connection.', scriptname, filename)
print 'Deleting tempfile.'
os.remove(filename)
return SUPPRESS
    raise RuntimeError("Should not come here.")
if os.name != 'posix':
del cmd_edit
def cmd_delete(scriptname):
"""delete <name> - delete script."""
return sieve.deletescript(scriptname)
def cmd_activate(scriptname):
"""activate <name> - set a script as the active script"""
return sieve.setactive(scriptname)
def cmd_deactivate():
"""deactivate - deactivate all scripts"""
return sieve.setactive('')
def cmd_quit(*args):
"""quit - quit"""
print 'quitting.'
if sieve:
try:
# this mysteriously fails at times
sieve.logout()
except:
pass
raise SystemExit()
# find all commands (using introspection)
# NB: edit is only available when running on a posix system
__commands = dict([c
for c in inspect.getmembers(sys.modules[__name__],
inspect.isfunction)
if c[0].startswith('cmd_')
])
# command aliases/shortcuts
__command_map = {
'?': 'help',
'h': 'help',
'q': 'quit',
'l': 'list',
'del': 'delete',
}
def shell(auth, user=None, passwd=None, realm=None,
authmech='', server='', use_tls=0, port=managesieve.SIEVE_PORT):
"""Main part"""
def cmd_loop():
"""Command loop: read and execute lines from stdin."""
global sieve
while 1:
sys.stdout.write('> ')
line = sys.stdin.readline()
if not line:
# EOF/control-d
cmd_quit()
break
line = line.strip()
if not line: continue
# todo: parse command line correctly
line = line.split()
cmd = __command_map.get(line[0], line[0])
cmdfunc = __commands.get('cmd_%s' % cmd)
if not cmdfunc:
print 'Unknown command', repr(cmd)
else:
if __debug__: result = None
try:
result = cmdfunc(*line[1:])
except TypeError, e:
if str(e).startswith('%s() takes' % cmdfunc.__name__):

COSMOGRAIL/PyCS | pycs/gen/spl.py | Python | gpl-3.0 | 40,478 | 0.046766
"""
Module defining the Spline class, something easy to wrap around SciPy splines.
Includes BOK algorithms (Mollinari et al)
Some rules of splrep (k = 3)
- do not put more than 2 knots between data points.
- splrep wants inner knots only, do not give extremal knots, even only "once".
"""
import numpy as np
import sys
import pycs.gen.util
import copy as pythoncopy
import matplotlib.pyplot as plt
import scipy.optimize as spopt
import scipy.interpolate as si
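# --- Editor's illustration (not part of the upstream module): the splrep rules from the
# --- module docstring, shown on made-up data. Only *inner* knots go into t=; splrep adds
# --- the boundary knots itself, and knots stay sparse relative to the data points.
def _splrep_knot_rules_demo():
    x = np.linspace(0.0, 10.0, 50)
    y = np.sin(x)
    innerknots = np.linspace(1.0, 9.0, 5)   # strictly inside (x[0], x[-1]), never the extrema
    tck = si.splrep(x, y, k=3, t=innerknots)
    return si.splev(x, tck)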
class DataPoints():
"""
An ultralight version of a lightcurve, made for fast computations.
Can be "merged" from a list of lightcurves, see factory function below.
A Spline object has such a DataPoints object as attribute.
ATTENTION
Datapoints are expected to be ALWAYS SORTED BY JDS, and no two datapoints have the same jd !
See the splitup option of the constructor.
Note that this is not the case for lightcurves ! Hence the existence of datapoints.
Should be enforced in every function that builds datapoints.
ABOUT STAB POINTS
With scipy splines, we always get the last knots at the extrema of data points.
So to get knots "outside" of the real datapoints, we have to insert fake points.
And while we are at it, these fake points can also be used to stabilize the spline in
gaps.
The mask is used to differentiate between actual data points and "stabilization points"
that are inserted to make the spline behave well at the extrema and in season gaps.
It is modified by the two addgappts and addextpts.
The info about stabpoints is written into the object,
so that they can be reconstrucuted from any new jds and mags.
"""
def __init__(self, jds, mags, magerrs, splitup=True, deltat=0.000001, sort=True, stab=False,
stabext=300.0, stabgap = 30.0, stabstep = 5.0, stabmagerr = -2.0, stabrampsize = 0, stabrampfact = 1.0):
"""
Constructor
Always leave splitup and sort on True ! Only if you know that you are already
        sorted you can skip them.
        You cannot specify a mask, I do this myself. (could be done in principle).
stab : do you want stabilization points ?
Don't forget to run splitup, sort, and addstab again if you change the data !
"""
self.jds = jds
self.mags = mags
self.magerrs = magerrs
self.stab = stab
self.stabext = stabext
self.stabgap = stabgap
self.stabstep = stabstep
self.stabmagerr = stabmagerr
self.stabrampsize = stabrampsize
self.stabrampfact = stabrampfact
self.mask = np.ones(len(self.jds), dtype=np.bool) # an array of True
self.deltat = deltat
if splitup:
self.splitup()
elif sort: # If we do the splitup, we sort anyway.
self.sort()
self.putstab()
# def update(self, jds, mags, magerrs):
# """
# NOT NEEDED ANYMORE, JUST CALL MERGE AND GIVE AN OLDDP. SAFER.
#
# Give me some new datapoints (no stabs) (already splitup and sorted, by definition), I'll update myself.
# In fact everything might move !
# """
# if newdatapoints.stab = True:
# raise RuntimeError("Give me points without stab !")
# self.jds = newdatapoints.jds
# self.mags = newdatapoints.mags
# self.magerrs = newdatapoints.magerrs
# self.mask = np.ones(len(self.jds), dtype=np.bool)
# self.addstab() # runs only if stab = True
def splitup(self):
"""
TO WRITE !!!
We avoid that two points get the same jds...
Note that this might change the order of the jds,
but only of very close ones, so one day it would be ok to leave the mags as they are.
"""
self.jds += self.deltat * np.random.randn(len(self.jds))
self.sort()
def sort(self):
"""
Absolutely mandatory, called in the constructor.
"""
sortedindices = np.argsort(self.jds)
self.jds = self.jds[sortedindices]
self.mags = self.mags[sortedindices]
self.magerrs = self.magerrs[sortedindices]
self.mask = self.mask[sortedindices]
self.validate()
def validate(self):
"""
We check that the datapoint jds are increasing strictly :
"""
first = self.jds[:-1]
second = self.jds[1:]
if not np.alltrue(np.less(first,second)): # Not less_equal ! Strictly increasing !
            raise RuntimeError, "These datapoints don't have strictly increasing jds !"
def rmstab(self):
"""
Deletes all stabilization points
"""
self.jds = self.jds[self.mask]
self.mags = self.mags[self.mask]
self.magerrs = self.magerrs[self.mask]
self.mask = np.ones(len(self.jds), dtype=np.bool)
def putstab(self):
"""
Runs only if stab is True.
I will :
add datapoints (new jds, new mags, new magerrs)
modify the mask = False for all those new datapoints.
"""
if self.stab == True:
# We start by deleting any previous stab stuff :
self.rmstab()
self.addgappts()
self.addextpts()
else:
pass
def calcstabmagerr(self):
"""
Computes the mag err of the stabilisation points.
"""
if self.stabmagerr >= 0.0:
return self.stabmagerr
else:
return - self.stabmagerr * np.median(self.magerrs)
def addgappts(self):
"""
We add stabilization points with low weights into the season gaps
to avoid those big excursions of the splines.
This is done by a linear interpolation across the gaps.
"""
absstabmagerr = self.calcstabmagerr()
gaps = self.jds[1:] - self.jds[:-1] # has a length of len(self.jds) - 1
gapindices = np.arange(len(self.jds) - 1)[gaps > self.stabgap] # indices of those gaps that are larger than stabgap
for n in range(len(gapindices)):
i = gapindices[n]
a = self.jds[i]
b = self.jds[i+1]
newgapjds = np.linspace(a, b, float(b-a)/float(self.stabstep))[1:-1]
newgapindices = i + 1 + np.zeros(len(newgapjds))
newgapmags = np.interp(newgapjds, [a, b], [self.mags[i], self.mags[i+1]])
newgapmagerrs = absstabmagerr * np.ones(newgapmags.shape)
newgapmask = np.zeros(len(newgapjds), dtype=np.bool)
self.jds = np.insert(self.jds, newgapindices, newgapjds)
self.mags = np.insert(self.mags, newgapindices, newgapmags)
self.magerrs = np.insert(self.magerrs, newgapindices, newgapmagerrs)
self.mask = np.insert(self.mask, newgapindices, newgapmask)
gapindices += newgapjds.size # yes, as we inserted some points the indices change.
# If you change this structure, be sure to check SplineML.settargetmags as well !
self.validate()
def addextpts(self):
"""
We add stabilization points at both extrema of the lightcurves
This is done by "repeating" the extremal points, and a ramp in the magerrs
"""
absstabmagerr = self.calcstabmagerr()
extjds = np.arange(self.jds[0], self.jds[0] - self.stabext, -1*self.stabstep)[::-1][:-1]
extmags = self.mags[0] * np.ones(extjds.shape)
extmagerrs = absstabmagerr * np.ones(extjds.shape)
for i in range(1, self.stabrampsize+1):
extmagerrs[-i] += (self.stabrampsize +1 -i) * absstabmagerr * self.stabrampfact
extindices = np.zeros(extjds.shape)
mask = np.zeros(len(extjds), dtype=np.bool)
self.jds = np.insert(self.jds, extindices, extjds)
self.mags = np.insert(self.mags, extindices, extmags)
self.magerrs = np.insert(self.magerrs, extindices, extmagerrs)
self.mask = np.insert(self.mask, extindices, mask)
# And the same at the other end :
extjds = np.arange(self.jds[-1], self.jds[-1] + self.stabext, self.stabstep)[1:]
extmags = self.mags[-1] * np.ones(extjds.shape)
extmagerrs = absstabmagerr * np.ones(extjds.shape)
for i in range(0, self.stabrampsize):
extmagerrs[i] += (self.stabrampsize -i) * absstabmagerr * self.stabrampfact
extindices = len(self.jds) + np.zeros(extjds.shape)
mask = np.zeros(len(extjds), dtype=np.bool)
self.jds = np.insert(self.jds, extindices, extjds)
self.mags = np.insert(self.mags, extindices, extmags)
self.magerrs = np.insert(self.magerrs, extindices, extmagerrs)
self.mask = np.insert(self.mask, extindices, mask)
self.validate()
def getmaskbounds(self):
"""
Returns the upper and lower bounds of the regions containing stabilization points.
This is used when placing knots, so to put fewer knots in these regions.
Crazy stuff...
"""
maskindices = np.where(self.mask == False)[0]
#print maskindices
if len(maskindices) < 3:
print "Hmm, not much masked here ..."
r

luofei98/qgis | python/plugins/processing/algs/qgis/ftools/FixedDistanceBuffer.py | Python | gpl-2.0 | 3,213 | 0
# -*- coding: utf-8 -*-
"""
***************************************************************************
FixedDistanceBuffer.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from PyQt4.QtCore import *
from qgis.core import *
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.parameters import ParameterVector
from processing.core.parameters import ParameterBoolean
from processing.core.parameters import ParameterNumber
from processing.core.outputs import OutputVector
import Buffer as buff
from processing.tools import dataobjects
class FixedDistanceBuffer(GeoAlgorithm):
INPUT = 'INPUT'
OUTPUT = 'OUTPUT'
FIELD = 'FIELD'
DISTANCE = 'DISTANCE'
SEGMENTS = 'SEGMENTS'
DISSOLVE = 'DISSOLVE'
# =========================================================================
# def getIcon(self):
# return QtGui.QIcon(os.path.dirname(__file__) + "/icons/buffer.png")
# =========================================================================
def defineCharacteristics(self):
self.name = 'Fixed distance buffer'
self.group = 'Vector geometry tools'
self.addParameter(ParameterVector(self.INPUT, 'Input layer',
[ParameterVector.VECTOR_TYPE_ANY]))
self.addParameter(ParameterNumber(self.DISTANCE, 'Distance',
default=10.0))
self.addParameter(ParameterNumber(self.SEGMENTS, 'Segments', 1,
default=5))
self.addParameter(ParameterBoolean(self.DISSOLVE, 'Dissolve result',
False))
self.addOutput(OutputVector(self.OUTPUT, 'Buffer'))
def processAlgorithm(self, progress):
layer = dataobjects.getObjectFromUri(
self.getParameterValue(self.INPUT))
distance = self.getParameterValue(self.DISTANCE)
dissolve = self.getParameterValue(self.DISSOLVE)
        segments = int(self.getParameterValue(self.SEGMENTS))
writer = self.getOutputFromName(
self.OUTPUT).getVectorWriter(layer.pendingFields().toList(),
QGis.WKBPolygon, layer.crs())
buff.buffering(progress, writer, distance, None, False, layer,
dissolve, segments)

phil-lopreiato/the-blue-alliance | models/event.py | Python | mit | 20,440 | 0.002544
import datetime, json, re
from consts.playoff_type import PlayoffType
from consts.district_type import DistrictType
from consts.event_type import EventType
from google.appengine.ext import ndb
from google.appengine.ext.ndb.tasklets import Future
from models.district import District
from models.event_details import EventDetails
from models.location import Location
class Event(ndb.Model):
"""
Events represent FIRST Robotics Competition events, both official and unofficial.
key_name is like '2010ct'
"""
name = ndb.StringProperty()
event_type_enum = ndb.IntegerProperty(required=True)
short_name = ndb.StringProperty(indexed=False) # Should not contain "Regional" or "Division", like "Hartford"
event_short = ndb.StringProperty(required=True, indexed=False) # Smaller abbreviation like "CT"
first_code = ndb.StringProperty() # Event code used in FIRST's API, if different from event_short
year = ndb.IntegerProperty(required=True)
event_district_enum = ndb.IntegerProperty(default=DistrictType.NO_DISTRICT) # Deprecated, use district_key instead
district_key = ndb.KeyProperty(kind=District)
start_date = ndb.DateTimeProperty()
end_date = ndb.DateTimeProperty()
playoff_type = ndb.IntegerProperty()
# venue, venue_addresss, city, state_prov, country, and postalcode are from FIRST
venue = ndb.StringProperty(indexed=False) # Name of the event venue
venue_address = ndb.StringProperty(indexed=False) # Most detailed venue address (includes venue, street, and location separated by \n)
city = ndb.StringProperty() # Equivalent to locality. From FRCAPI
state_prov = ndb.StringProperty() # Equivalent to region. From FRCAPI
country = ndb.StringProperty() # From FRCAPI
postalcode = ndb.StringProperty() # From ElasticSearch only. String because it can be like "95126-1215"
# Normalized address from the Google Maps API, constructed using the above
normalized_location = ndb.StructuredProperty(Location)
timezone_id = ndb.StringProperty() # such as 'America/Los_Angeles' or 'Asia/Jerusalem'
official = ndb.BooleanProperty(default=False) # Is the event FIRST-official?
first_eid = ndb.StringProperty() # from USFIRST
parent_event = ndb.KeyProperty() # This is the division -> event champs relationship
divisions = ndb.KeyProperty(repeated=True) # event champs -> all divisions
facebook_eid = ndb.StringProperty(indexed=False) # from Facebook
custom_hashtag = ndb.StringProperty(indexed=False) # Custom HashTag
website = ndb.StringProperty(indexed=False)
webcast_json = ndb.TextProperty(indexed=False) # list of dicts, valid keys include 'type' and 'channel'
enable_predictions = ndb.BooleanProperty(default=False)
remap_teams = ndb.JsonProperty() # Map of temporary team numbers to pre-rookie and B teams
created = ndb.DateTimeProperty(auto_now_add=True, indexed=False)
updated = ndb.DateTimeProperty(auto_now=True, indexed=False)
def __init__(self, *args, **kw):
# store set of affected references referenced keys for cache clearing
# keys must be model properties
self._affected_references = {
'key': set(),
'year': set(),
'district_key': set()
}
self._awards = None
self._details = None
self._location = None
self._city_state_country = None
self._matches = None
self._teams = None
self._venue_address_safe = None
self._webcast = None
self._updated_attrs = [] # Used in EventManipulator to track what changed
self._week = None
super(Event, self).__init__(*args, **kw)
@ndb.tasklet
def get_awards_async(self):
from database import award_query
self._awards = yield award_query.EventAwardsQuery(self.key_name).fetch_async()
@property
def alliance_selections(self):
if self.details is None:
return None
else:
return self.details.alliance_selections
@property
def alliance_teams(self):
"""
Load a list of team keys playing in elims
"""
alliances = self.alliance_selections
if alliances is None:
return []
teams = []
for alliance in alliances:
for pick in alliance['picks']:
teams.append(pick)
return teams
@property
def awards(self):
if self._awards is None:
self.get_awards_async().wait()
return self._awards
@property
def details(self):
if self._details is None:
self._details = EventDetails.get_by_id(self.key.id())
elif type(self._details) == Future:
self._details = self._details.get_result()
return self._details
def prep_details(self):
if self._details is None:
self._details = ndb.Key(EventDetails, self.key.id()).get_async()
@property
def district_points(self):
if self.details is None:
return None
else:
return self.details.district_points
@property
def playoff_advancement(self):
if self.details is None:
return None
else:
return self.details.playoff_advancement.get(
"advancement") if self.details.playoff_advancement else None
@property
def playoff_bracket(self):
if self.details is None:
return None
else:
return self.details.playoff_advancement.get(
"bracket") if self.details.playoff_advancement else None
@ndb.tasklet
def get_matches_async(self):
if self._matches is None:
from database import match_query
self._matches = yield match_query.EventMatchesQuery(self.key_name).fetch_async()
def prep_matches(self):
if self._matches is None:
from database import match_query
self._matches = match_query.EventMatchesQuery(self.key_name).fetch_async()
@property
def matches(self):
if self._matches is None:
self.get_matches_async().wait()
elif type(self._matches) == Future:
self._matches = self._matches.get_result()
return self._matches
def time_as_utc(self, time):
import pytz
if self.timezone_id is not None:
tz = pytz.timezone(self.timezone_id)
try:
time = time - tz.utcoffset(time)
except (pytz.NonExistentTimeError, pytz.AmbiguousTimeError): # may happen during DST
time = time - tz.utcoffset(time + datetime.timedelta(hours=1)) # add offset to get out of non-existant time
return time
def local_time(self):
import pytz
now = datetime.datetime.now()
if self.timezone_id is not None:
tz = pytz.timezone(self.timezone_id)
try:
now = now + tz.utcoffset(now)
except (pytz.NonExistentTimeError, pytz.AmbiguousTimeError): # may happen during DST
now = now + tz.utcoffset(now + datetime.timedelta(hours=1)) # add offset to get out of non-existant time
return now
def withinDays(self, negative_days_before, days_after):
if not self.start_date or not self.end_date:
return False
now = self.local_time()
after_start = self.start_date.date() + datetime.timedelta(days=negative_days_before) <= now.date()
before_end = self.end_date.date() + datetime.timedelta(days=days_after) >= now.date()
return (after_start and before_end)
@property
def now(self):
if self.timezone_id is not None:
return self.withinDays(0, 0)
else:
return self.within_a_day # overestimate what is "now" if no timezone
@property
def within_a_day(self):
return self.withinDays(-1, 1)
@property
def past(self):
return self.end_date.date() < self.local_time().date() and not self.now
@property
def future(self):
return self.start_date.date() > self.local_time().date() and not self.now
@p

openstack/python-designateclient | designateclient/v2/utils.py | Python | apache-2.0 | 2,334 | 0
# Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Author: Endre Karlson <endre.karlson@hp.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import uuidutils
from urllib.parse import parse_qs
from urllib.parse import urlparse
from designateclient import exceptions
def resolve_by_name(func, name, *args):
"""
Helper to resolve a "name" a'la foo.com to it's ID by using REST api's
query support and filtering on name.
"""
if uuidutils.is_uuid_like(name):
return name
results = func(criterion={"name": "%s" % name}, *args)
length = len(results)
if length == 1:
return results[0]["id"]
elif length == 0:
raise exceptions.NotFound("Name %s didn't resolve" % name)
else:
msg = "Multiple matches found for %s, please use ID instead." % name
raise exceptions.NoUniqueMatch(msg)
def parse_query_from_url(url):
"""
Helper to get key bits of data from the "next" url returned
from the API on collections
:param url:
:return: dict
"""
values = parse_qs(urlparse(url)[4])
    return {k: values[k][0] for k in values.keys()}
def get_all(function, criterion=None, args=None):
"""
:param function: Function to be called to get data
:param criterion: dict of filters to be applied
:param args: arguments to be given to the function
:return: DesignateList()
"""
criterion = criterion or {}
args = args or []
data = function(*args, criterion=criterion)
returned_data = data
while True:
if data.next_page:
for k, v in data.next_link_criterion.items():
criterion[k] = v
data = function(*args, criterion=criterion)
returned_data.extend(data)
else:
break
return returned_data
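# --- Editor's illustration (not part of the upstream file): a hedged sketch combining the
# --- helpers above -- resolve a zone name to its ID, then follow pagination with get_all().
# --- The keystoneauth session and the v2 Client wiring are assumptions.
def _example_list_recordsets(session, zone_name):
    from designateclient.v2.client import Client
    client = Client(session=session)
    zone_id = resolve_by_name(client.zones.list, zone_name)
    # get_all() keeps following the 'next' links until the collection is exhausted.
    return get_all(client.recordsets.list, args=[zone_id])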

IsCoolEntertainment/debpkg_python-wheel | wheel/test/test_tool.py | Python | mit | 746 | 0.013405
from .. import tool
def test_keygen():
def get_keyring():
WheelKeys, keyring = tool.get_keyring()
class WheelKeysTest(WheelKeys):
def save(self):
pass
        class keyringTest:
backend = keyring.backend
@classmethod
def get_keyring(cls):
class keyringTest2:
pw = None
                    def set_password(self, a, b, c):
self.pw = c
def get_password(self, a, b):
return self.pw
return keyringTest2()
return WheelKeysTest, keyringTest
tool.keygen(get_keyring=get_keyring)

pexip/os-foolscap | foolscap/crypto.py | Python | mit | 4,123 | 0.003638
# -*- test-case-name: foolscap.test.test_crypto -*-
available = False # hack to deal with half-broken imports in python <2.4
from OpenSSL import SSL
# we try to use ssl support classes from Twisted, if it is new enough. If
# not, we pull them from a local copy of sslverify. The funny '_ssl' import
# stuff is used to appease pyflakes, which otherwise complains that we're
# redefining an imported name.
from twisted.internet import ssl
if hasattr(ssl, "DistinguishedName"):
# Twisted-2.5 will contain these names
_ssl = ssl
CertificateOptions = ssl.CertificateOptions
else:
# but it hasn't been released yet (as of 16-Sep-2006). Without them, we
# cannot use any encrypted Tubs. We fall back to using a private copy of
# sslverify.py, copied from the Divmod tree.
import sslverify
_ssl = sslverify
from sslverify import OpenSSLCertificateOptions as CertificateOptions
DistinguishedName = _ssl.DistinguishedName
KeyPair = _ssl.KeyPair
Certificate = _ssl.Certificate
PrivateCertificate = _ssl.PrivateCertificate
from twisted.internet import error
if hasattr(error, "CertificateError"):
# Twisted-2.4 contains this, and it is used by twisted.internet.ssl
CertificateError = error.CertificateError
else:
class CertificateError(Exception):
"""
We did not find a certificate where we expected to find one.
"""
from foolscap import base32
peerFromTransport = Certificate.peerFromTransport
class MyOptions(CertificateOptions):
def _makeContext(self):
ctx = CertificateOptions._makeContext(self)
def alwaysValidate(conn, cert, errno, depth, preverify_ok):
# This function is called to validate the certificate received by
# the other end. OpenSSL calls it multiple times, each time it
            # sees something funny, to ask if it should proceed.
# We do not care about certificate authorities or revocation
# lists, we just want to know that the certificate has a valid
# signature and follow the chain back to one which is
# self-signed. The TubID will be the digest of one of these
# certificates. We need to protect against forged signatures, but
# not the usual SSL concerns about invalid CAs or revoked
# certificates.
# these constants are from openssl-0.9.7g/crypto/x509/x509_vfy.h
# and do not appear to be exposed by pyopenssl. Ick. TODO. We
# could just always return '1' here (ignoring all errors), but I
# think that would ignore forged signatures too, which would
# obviously be a security hole.
things_are_ok = (0, # X509_V_OK
9, # X509_V_ERR_CERT_NOT_YET_VALID
10, # X509_V_ERR_CERT_HAS_EXPIRED
                             18, # X509_V_ERR_DEPTH_ZERO_SELF_SIGNED_CERT
19, # X509_V_ERR_SELF_SIGNED_CERT_IN_CHAIN
)
            if errno in things_are_ok:
return 1
# TODO: log the details of the error, because otherwise they get
# lost in the PyOpenSSL exception that will eventually be raised
# (possibly OpenSSL.SSL.Error: certificate verify failed)
# I think that X509_V_ERR_CERT_SIGNATURE_FAILURE is the most
# obvious sign of hostile attack.
return 0
        # VERIFY_PEER means we ask the other end for their certificate.
# not adding VERIFY_FAIL_IF_NO_PEER_CERT means it's ok if they don't
# give us one (i.e. if an anonymous client connects to an
# authenticated server). I don't know what VERIFY_CLIENT_ONCE does.
ctx.set_verify(SSL.VERIFY_PEER |
#SSL.VERIFY_FAIL_IF_NO_PEER_CERT |
SSL.VERIFY_CLIENT_ONCE,
alwaysValidate)
return ctx
def digest32(colondigest):
digest = "".join([chr(int(c,16)) for c in colondigest.split(":")])
digest = base32.encode(digest)
return digest
available = True

redhat-openstack/python-neutronclient | quantumclient/tests/unit/test_casual_args.py | Python | apache-2.0 | 2,739 | 0
# Copyright 2012 OpenStack LLC.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import unittest
from quantumclient.common import exceptions
from quantumclient.quantum import v2_0 as quantumV20
class CLITestArgs(unittest.TestCase):
def test_empty(self):
_mydict = quantumV20.parse_args_to_dict([])
self.assertEqual({}, _mydict)
def test_default_bool(self):
_specs = ['--my_bool', '--arg1', 'value1']
_mydict = quantumV20.parse_args_to_dict(_specs)
self.assertTrue(_mydict['my_bool'])
def test_bool_true(self):
_specs = ['--my-bool', 'type=bool', 'true', '--arg1', 'value1']
        _mydict = quantumV20.parse_args_to_dict(_specs)
self.assertTrue(_mydict['my_bool'])
def test_bool_false(self):
_specs = ['--my_bool', 'type=bool', 'false', '--arg1', 'value1']
_mydict = quantumV20.parse_args_to_dict(_specs)
self.assertFalse(_mydict['my_bool'])
def test_nargs(self):
_specs = ['--tag', 'x', 'y', '--arg1', 'value1']
_mydict = quantumV20.parse_args_to_dict(_specs)
self.assertTrue('x' in _mydict['tag'])
self.assertTrue('y' in _mydict['tag'])
def test_badarg(self):
_specs = ['--tag=t', 'x', 'y', '--arg1', 'value1']
self.assertRaises(exceptions.CommandError,
quantumV20.parse_args_to_dict, _specs)
def test_arg(self):
_specs = ['--tag=t', '--arg1', 'value1']
self.assertEqual('value1',
quantumV20.parse_args_to_dict(_specs)['arg1'])
def test_dict_arg(self):
_specs = ['--tag=t', '--arg1', 'type=dict', 'key1=value1,key2=value2']
arg1 = quantumV20.parse_args_to_dict(_specs)['arg1']
self.assertEqual('value1', arg1['key1'])
self.assertEqual('value2', arg1['key2'])
def test_list_of_dict_arg(self):
_specs = ['--tag=t', '--arg1', 'type=dict',
'list=true', 'key1=value1,key2=value2']
arg1 = quantumV20.parse_args_to_dict(_specs)['arg1']
self.assertEqual('value1', arg1[0]['key1'])
self.assertEqual('value2', arg1[0]['key2'])

live4thee/zstack-utility | kvmagent/kvmagent/plugins/prometheus.py | Python | apache-2.0 | 4,603 | 0.008473
from kvmagent import kvmagent
from zstacklib.utils import jsonobject
from zstacklib.utils import http
from zstacklib.utils import log
from zstacklib.utils.bash import *
from zstacklib.utils import linux
from zstacklib.utils import thread
from jinja2 import Template
import os.path
import re
import time
import traceback
from prometheus_client.core import GaugeMetricFamily,REGISTRY
from prometheus_client import start_http_server
logger = log.get_logger(__name__)
class PrometheusPlugin(kvmagent.KvmAgent):
COLLECTD_PATH = "/prometheus/collectdexporter/start"
@kvmagent.replyerror
@in_bash
def start_collectd_exporter(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
rsp = kvmagent.AgentResponse()
eths = bash_o("ls /sys/class/net").split()
interfaces = []
for eth in eths:
eth = eth.strip(' \t\n\r')
if eth == 'lo': continue
elif eth.startswith('vnic'): continue
            elif eth.startswith('outer'): continue
elif eth.startswith('br_'): continue
elif not eth: continue
else:
interfaces.append(eth)
conf_path = os.path.join(os.path.dirname(cmd.binaryPath), 'collectd.conf')
        conf = '''Interval {{INTERVAL}}
FQDNLookup false
LoadPlugin syslog
LoadPlugin aggregation
LoadPlugin cpu
LoadPlugin disk
LoadPlugin interface
LoadPlugin memory
LoadPlugin network
LoadPlugin virt
<Plugin aggregation>
<Aggregation>
#Host "unspecified"
Plugin "cpu"
#PluginInstance "unspecified"
Type "cpu"
#TypeInstance "unspecified"
GroupBy "Host"
GroupBy "TypeInstance"
CalculateNum false
CalculateSum false
CalculateAverage true
CalculateMinimum false
CalculateMaximum false
CalculateStddev false
</Aggregation>
</Plugin>
<Plugin cpu>
ReportByCpu true
ReportByState true
ValuesPercentage true
</Plugin>
<Plugin disk>
Disk "/^sd/"
Disk "/^hd/"
Disk "/^vd/"
IgnoreSelected false
</Plugin>
<Plugin "interface">
{% for i in INTERFACES -%}
Interface "{{i}}"
{% endfor -%}
IgnoreSelected false
</Plugin>
<Plugin memory>
ValuesAbsolute true
ValuesPercentage false
</Plugin>
<Plugin virt>
Connection "qemu:///system"
RefreshInterval {{INTERVAL}}
HostnameFormat name
PluginInstanceFormat name
</Plugin>
<Plugin network>
Server "localhost" "25826"
</Plugin>
'''
tmpt = Template(conf)
conf = tmpt.render({
'INTERVAL': cmd.interval,
'INTERFACES': interfaces,
})
need_restart_collectd = False
if os.path.exists(conf_path):
with open(conf_path, 'r') as fd:
old_conf = fd.read()
if old_conf != conf:
with open(conf_path, 'w') as fd:
fd.write(conf)
need_restart_collectd = True
else:
with open(conf_path, 'w') as fd:
fd.write(conf)
need_restart_collectd = True
pid = linux.find_process_by_cmdline(['collectd', conf_path])
if not pid:
bash_errorout('collectd -C %s' % conf_path)
else:
if need_restart_collectd:
bash_errorout('kill -9 %s' % pid)
bash_errorout('collectd -C %s' % conf_path)
pid = linux.find_process_by_cmdline([cmd.binaryPath])
if not pid:
EXPORTER_PATH = cmd.binaryPath
LOG_FILE = os.path.join(os.path.dirname(EXPORTER_PATH), cmd.binaryPath + '.log')
ARGUMENTS = cmd.startupArguments
if not ARGUMENTS:
ARGUMENTS = ""
bash_errorout('chmod +x {{EXPORTER_PATH}}')
bash_errorout("nohup {{EXPORTER_PATH}} {{ARGUMENTS}} >{{LOG_FILE}} 2>&1 < /dev/null &\ndisown")
return jsonobject.dumps(rsp)
def install_colletor(self):
class Collector(object):
def collect(self):
try:
ret = []
for c in kvmagent.metric_collectors:
ret.extend(c())
return ret
except Exception as e:
content = traceback.format_exc()
err = '%s\n%s\n' % (str(e), content)
logger.warn(err)
return []
REGISTRY.register(Collector())
def start(self):
http_server = kvmagent.get_http_server()
http_server.register_async_uri(self.COLLECTD_PATH, self.start_collectd_exporter)
self.install_colletor()
start_http_server(7069)
def stop(self):
pass

paulrouget/servo | tests/wpt/web-platform-tests/webdriver/tests/perform_actions/validity.py | Python | mpl-2.0 | 2,188 | 0
import pytest
from tests.support.asserts import assert_error, assert_success
def perform_actions(session, actions):
return session.transport.send(
"POST",
"/session/{session_id}/actions".format(session_id=session.session_id),
{"actions": actions})
@pytest.mark.parametrize("action_type", ["none", "key", "pointer"])
def test_pause_positive_integer(session, action_type):
for valid_duration in [0, 1]:
actions = [{
"type": action_type,
"id": "foobar",
"actions": [{
"type": "pause",
"duration": valid_duration
}]
}]
response = perform_actions(session, actions)
assert_success(response)
actions = [{
"type": action_type,
"id": "foobar",
"actions": [{
"type": "pause",
"duration": -1
}]
}]
response = perform_actions(session, actions)
    assert_error(response, "invalid argument")
@pytest.mark.parametrize("action_type", ["none", "key", "pointer"])
def test_pause_invalid_types(session, action_type):
for invalid_type in [0.0, None, "foo", True, [], {}]:
actions = [{
"type": action_type,
"id": "foobar",
"actions": [{
"type": "pause",
"duration": invalid_type
}]
}]
response = perform_actions(session, actions)
assert_error(response, "invalid argument")
@pytest.mark.parametrize("action_type", ["none", "key", "pointer"])
def test_pause_without_duration(session, action_type):
actions = [{
"type": action_type,
"id": "foobar",
"actions": [{
"type": "pause",
}]
}]
response = perform_actions(session, actions)
assert_success(response)
@pytest.mark.parametrize("action_type", ["none", "key", "pointer"])
def test_action_without_id(session, action_type):
actions = [{
"type": action_type,
"actions": [{
"type": "pause",
"duration": 1
}]
}]
response = perform_actions(session, actions)
assert_error(response, "invalid argument")
|
cmezh/PokemonGo-Bot
|
pokemongo_bot/cell_workers/pokemon_hunter.py
|
Python
|
mit
| 6,128
| 0.003427
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import time
from geopy.distance import great_circle
from s2sphere import Cell, CellId, LatLng
from pokemongo_bot import inventory
from pokemongo_bot.base_task import BaseTask
from pokemongo_bot.item_list import Item
from pokemongo_bot.walkers.polyline_walker import PolylineWalker
from pokemongo_bot.walkers.step_walker import StepWalker
from pokemongo_bot.worker_result import WorkerResult
class PokemonHunter(BaseTask):
SUPPORTED_TASK_API_VERSION = 1
def __init__(self, bot, config):
super(PokemonHunter, self).__init__(bot, config)
def initialize(self):
self.destination = None
self.walker = None
self.search_cell_id = None
self.search_points = []
self.lost_counter = 0
self.no_log_until = 0
self.config_max_distance = self.config.get("max_distance", 2000)
self.config_hunt_all = self.config.get("hunt_all", False)
self.config_hunt_vip = self.config.get("hunt_vip", True)
self.config_hunt_pokedex = self.config.get("hunt_pokedex", True)
def work(self):
if not self.enabled:
return WorkerResult.SUCCESS
if self.get_pokeball_count() <= 0:
self.destination = None
self.last_cell_id = None
return WorkerResult.SUCCESS
now = time.time()
pokemons = self.get_nearby_pokemons()
if self.destination is None:
worth_pokemons = self.get_worth_pokemons(pokemons)
if len(worth_pokemons) > 0:
self.destination = worth_pokemons[0]
self.lost_counter = 0
self.logger.info("New destination at %(distance).2f meters: %(name)s", self.destination)
self.no_log_until = now + 60
if self.destination["s2_cell_id"] != self.search_cell_id:
self.search_points = self.get_search_points(self.destination["s2_cell_id"])
self.walker = PolylineWalker(self.bot, self.search_points[0][0], self.search_points[0][1])
self.search_cell_id = self.destination["s2_cell_id"]
self.search_points = self.search_points[1:] + self.search_points[:1]
else:
if self.no_log_until < now:
self.logger.info("There is no nearby pokemon worth hunting down [%s]", ", ".join(p["name"] for p in pokemons))
self.no_log_until = now + 120
self.last_cell_id = None
return WorkerResult.SUCCESS
if any(self.destination["encounter_id"] == p["encounter_id"] for p in self.bot.cell["catchable_pokemons"] + self.bot.cell["wild_pokemons"]):
self.destination = None
elif self.walker.step():
if not any(self.destination["encounter_id"] == p["encounter_id"] for p in pokemons):
self.lost_counter += 1
else:
self.lost_counter = 0
if self.lost_counter >= 3:
                self.destination = None
else:
self.logger.info("Now searching for %(name)s", self.destination)
self.walker = StepWalker(self.bot, self.search_points[0][0], self.search_points[0][1])
                self.search_points = self.search_points[1:] + self.search_points[:1]
elif self.no_log_until < now:
distance = great_circle(self.bot.position, (self.walker.dest_lat, self.walker.dest_lng)).meters
self.logger.info("Moving to destination at %s meters: %s", round(distance, 2), self.destination["name"])
self.no_log_until = now + 30
return WorkerResult.RUNNING
def get_pokeball_count(self):
return sum([inventory.items().get(ball.value).count for ball in [Item.ITEM_POKE_BALL, Item.ITEM_GREAT_BALL, Item.ITEM_ULTRA_BALL]])
def get_nearby_pokemons(self):
radius = self.config_max_distance
pokemons = [p for p in self.bot.cell["nearby_pokemons"] if self.get_distance(self.bot.start_position, p) <= radius]
for pokemon in pokemons:
pokemon["distance"] = self.get_distance(self.bot.position, p)
pokemon["name"] = inventory.pokemons().name_for(pokemon["pokemon_id"])
pokemons.sort(key=lambda p: p["distance"])
return pokemons
def get_worth_pokemons(self, pokemons):
if self.config_hunt_all:
worth_pokemons = pokemons
else:
worth_pokemons = []
if self.config_hunt_vip:
worth_pokemons += [p for p in pokemons if p["name"] in self.bot.config.vips]
if self.config_hunt_pokedex:
worth_pokemons += [p for p in pokemons if (p not in worth_pokemons) and any(not inventory.pokedex().seen(fid) for fid in self.get_family_ids(p))]
worth_pokemons.sort(key=lambda p: inventory.candies().get(p["pokemon_id"]).quantity)
return worth_pokemons
def get_family_ids(self, pokemon):
family_id = inventory.pokemons().data_for(pokemon["pokemon_id"]).first_evolution_id
ids = [family_id]
ids += inventory.pokemons().data_for(family_id).next_evolutions_all[:]
return ids
def get_distance(self, location, pokemon):
return great_circle(location, (pokemon["latitude"], pokemon["longitude"])).meters
def get_search_points(self, cell_id):
points = []
# For cell level 15
for c in Cell(CellId(cell_id)).subdivide():
for cc in c.subdivide():
latlng = LatLng.from_point(cc.get_center())
point = (latlng.lat().degrees, latlng.lng().degrees)
points.append(point)
points[0], points[1] = points[1], points[0]
points[14], points[15] = points[15], points[14]
point = points.pop(2)
points.insert(7, point)
point = points.pop(13)
points.insert(8, point)
closest = min(points, key=lambda p: great_circle(self.bot.position, p).meters)
index = points.index(closest)
return points[index:] + points[:index]
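        # The swaps and pop/insert calls above reorder the 16 sub-cell centres
        # into a walking order (apparently a serpentine-style sweep of the cell),
        # and the final rotation starts that sweep at the point currently closest
        # to the bot's position.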
|
xbmcmegapack/plugin.video.megapack.dev
|
resources/lib/menus/home_languages_persian.py
|
Python
|
gpl-3.0
| 1,111
| 0.002705
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This file is part of XBMC Mega Pack Addon.
Copyright (C) 2014 Wolverine (xbmcmegapack@gmail.com)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
    You should have received a copy of the GNU General Public License along
with this program. If not, see http://www.gnu.org/licenses/gpl-3.0.html
"""
class Languages_Persian():
'''Class that manages this specific menu context.'''
def open(self, plugin, menu):
menu.add_xplugins(plugin.get_xplugins(dictionaries=["Channels",
"Events", "Live", "Movies", "Sports", "TVShows"],
languages=["Persian"]))
|
henry-ngo/VIP
|
vip_hci/negfc/simplex_optim.py
|
Python
|
mit
| 16,676
| 0.010794
|
#! /usr/bin/env python
"""
Module with simplex (Nelder-Mead) optimization for defining the flux and
position of a companion using the Negative Fake Companion.
"""
from __future__ import print_function
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import minimize
from .simplex_fmerit import chisquare
from ..var import frame_center
from ..conf import time_ini, timing, sep
__all__ = ['firstguess_from_coord',
'firstguess_simplex',
'firstguess']
def firstguess_from_coord(planet, center, cube, angs, PLSC, psf,
fwhm, annulus_width, aperture_radius, ncomp,
cube_ref=None, svd_mode='lapack', scaling=None,
fmerit='sum', collapse='median', f_range=None,
display=False, verbose=True, save=False, **kwargs):
"""
Determine a first guess for the flux of a companion at a given position
in the cube by doing a simple grid search evaluating the reduced chi2.
Parameters
----------
planet: numpy.array
The (x,y) position of the planet in the pca processed cube.
center: numpy.array
The (x,y) position of the cube center.
cube: numpy.array
The cube of fits images expressed as a numpy.array.
angs: numpy.array
The parallactic angle fits image expressed as a numpy.array.
PLSC: float
The platescale, in arcsec per pixel.
psf: numpy.array
The scaled psf expressed as a numpy.array.
fwhm : float
The FHWM in pixels.
annulus_width: int, optional
The width in terms of the FWHM of the annulus on which the PCA is done.
aperture_radius: int, optional
The radius of the circular aperture in terms of the FWHM.
ncomp: int
The number of principal components.
cube_ref : array_like, 3d, optional
Reference library cube. For Reference Star Differential Imaging.
svd_mode : {'lapack', 'randsvd', 'eigen', 'arpack'}, str optional
Switch for different ways of computing the SVD and selected PCs.
scaling : {'temp-mean', 'temp-standard'} or None, optional
With None, no scaling is performed on the input data before SVD. With
"temp-mean" then temporal px-wise mean subtraction is done and with
"temp-standard" temporal mean centering plus scaling to unit variance
is done.
fmerit : {'sum', 'stddev'}, string optional
Chooses the figure of merit to be used. stddev works better for close in
companions sitting on top of speckle noise.
collapse : {'median', 'mean', 'sum', 'trimmean', None}, str or None, optional
Sets the way of collapsing the frames for producing a final image. If
None then the cube of residuals is used when measuring the function of
merit (instead of a single final frame).
f_range: numpy.array, optional
The range of flux tested values. If None, 20 values between 0 and 5000
are tested.
display: boolean, optional
If True, the figure chi2 vs. flux is displayed.
verbose: boolean
If True, display intermediate info in the shell.
save: boolean, optional
If True, the figure chi2 vs. flux is saved.
kwargs: dict, optional
Additional parameters are passed to the matplotlib plot method.
Returns
-------
out : numpy.array
The radial coordinates and the flux of the companion.
"""
xy = planet-center
r0= np.sqrt(xy[0]**2+xy[1]**2)
theta0 = np.mod(np.arctan2(xy[1],xy[0])/np.pi*180,360)
if f_range is not None:
n = f_range.shape[0]
else:
n = 20
f_range = np.linspace(0,5000,n)
chi2r = []
if verbose:
print('Step | flux | chi2r')
counter = 0
for j, f_guess in enumerate(f_range):
chi2r.append(chisquare((r0,theta0,f_guess), cube, angs, PLSC, psf,
fwhm, annulus_width, aperture_radius,(r0,theta0),
ncomp, cube_ref, svd_mode, scaling, fmerit,
collapse))
if chi2r[j] > chi2r[j-1]: counter+=1
if counter == 4: break
if verbose:
print('{}/{} {:.3f} {:.3f}'.format(j+1,n,f_guess,chi2r[j]))
chi2r = np.array(chi2r)
f0 = f_range[chi2r.argmin()]
if display:
plt.figure(figsize=kwargs.pop('figsize',(8,4)))
plt.title(kwargs.pop('title',''))
plt.xlim(f_range[0], f_range[:chi2r.shape[0]].max())
plt.ylim(chi2r.min()*0.9, chi2r.max()*1.1)
plt.plot(f_range[:chi2r.shape[0]],chi2r,
linestyle = kwargs.pop('linestyle','-'),
color = kwargs.pop('color','gray'),
marker = kwargs.pop('marker','.'),
markerfacecolor='r', markeredgecolor='r', **kwargs)
plt.xlabel('flux')
plt.ylabel(r'$\chi^2_{r}$')
plt.grid('on')
if save:
plt.savefig('chi2rVSflux.pdf')
if display:
plt.show()
return (r0,theta0,f0)
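# A minimal usage sketch for the grid search above (hypothetical values; in a
# real VIP workflow the cube, angles and psf come from calibrated FITS files):
#
#     import numpy as np
#     cube = np.random.randn(30, 101, 101)       # (n_frames, ny, nx) ADI cube
#     angs = np.linspace(0, 60, 30)               # parallactic angles in degrees
#     psf = np.ones((11, 11))                     # scaled, centered PSF stamp
#     r0, theta0, f0 = firstguess_from_coord(planet=np.array([70, 55]),
#                                            center=np.array([50, 50]),
#                                            cube=cube, angs=angs, PLSC=0.0272,
#                                            psf=psf, fwhm=4.0, annulus_width=3,
#                                            aperture_radius=2, ncomp=10,
#                                            f_range=np.linspace(0, 500, 10))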
def firstguess_simplex(p, cube, angs, psf, plsc, ncomp, fwhm, annulus_width,
aperture_radius, cube_ref=None, svd_mode='lapack',
scaling=None, fmerit='sum', collapse='median', p_ini=None,
options=None, verbose=False, **kwargs):
"""
Determine the position of a companion using the negative fake companion
technique and a standard minimization algorithm (Default=Nelder-Mead) .
Parameters
----------
p : np.array
Estimate of the candidate position.
cube: numpy.array
The cube of fits images expressed as a numpy.array.
angs: numpy.array
The parallactic angle fits image expressed as a numpy.array.
psf: numpy.array
The scaled psf expressed as a numpy.array.
plsc: float
The platescale, in arcsec per pixel.
ncomp: int
The number of principal components.
fwhm : float
The FHWM in pixels.
annulus_width: int, optional
The width in terms of the FWHM of the annulus on which the PCA is done.
aperture_radius: int, optional
The radius of the circular aperture in terms of the FWHM.
cube_ref : array_like, 3d, optional
Reference library cube. For Reference Star Differential Imaging.
svd_mode : {'lapack', 'randsvd', 'eigen', 'arpack'}, str optional
Switch for different ways of computing the SVD and selected PCs.
scaling : {'temp-mean', 'temp-standard'} or None, optional
With None, no scaling is performed on the input data before SVD. With
"temp-mean" then temporal px-wise mean subtraction is done and with
"temp-standard" temporal mean centering plus scaling to unit variance
is done.
fmerit : {'sum', 'stddev'}, string optional
Chooses the figure of merit to be used. stddev works better for close in
companions sitting on top of speckle noise.
collapse : {'median', 'mean', 'sum', 'trimmean', None}, str or None, optional
Sets the way of collapsing the frames for producing a final image. If
None then the cube of residuals is used when measuring the function of
merit (instead of a single final frame).
p_ini : np.array
Position (r, theta) of the circular aperture center.
options: dict, optional
The scipy.optimize.minimize options.
verbose : boolean, optional
If True, informations are displayed in the shell.
Returns
-------
    out : scipy.optimize.minimize solution object
        The solution of the minimization algorithm.
"""
if verbose:
print('')
print('{} minimization is running...'.format(options.get('method','Nelder-Mead')))
if p_ini is None:
p_ini = p
solu = minimize(chisquare, p, args=(cube, angs, plsc, psf, fwhm, annulus_width,
aperture_radius, p_ini, ncomp, cube_ref,
|
jzitelli/yawvrb.js
|
test/tornado_server.py
|
Python
|
mit
| 1,909
| 0.00681
|
import os.path
import logging
_logger = logging.getLogger(__name__)
from operator import itemgetter
from tornado.web import Application, RequestHandler, StaticFileHandler
from tornado.ioloop import IOLoop
config = {
'DEBUG': True,
'PORT' : 5000
}
HANDLERS = []
ROOT_DIR = os.path.abspath(os.path.join(os.path.split(__file__)[0], os.path.pardir))
GFXTABLET_DIR = os.path.join(ROOT_DIR, "node_modules", "gfxtablet")
if os.path.exists(GFXTABLET_DIR):
import sys
sys.path.insert(0, GFXTABLET_DIR)
from GfxTablet import GfxTabletHandler
HANDLERS.append((r'/gfxtablet', GfxTabletHandler))
class MainHandler(RequestHandler):
def get(self):
self.render("index.html")
def main():
global HANDLERS
HANDLERS += [(r'/(.+)', StaticFileHandler, {'path': ROOT_DIR}),
(r'/', MainHandler)]
app = Application(HANDLERS,
debug=config.get('DEBUG', False), static_path=ROOT_DIR)
_logger.info("app.settings:\n%s" % '\n'.join(['%s: %s' % (k, str(v))
for k, v in sorted(app.settings.items(),
key=itemgetter(0))]))
port = config.get('PORT', 5000)
app.listen(port)
_logger.info("""
listening on port %d
press CTRL-c to terminate the server
-----------
Y A W V R B
*************************
*********************************
STARTING TORNADO APP!!!!!!!!!!!!!
*********************************
*************************
Y A W V R B
-----------
""" % port)
IOLoop.instance().start()
if __name__ == "__main__":
logging.basicConfig(level=(logging.DEBUG if config.get('DEBUG') else logging.INFO),
format="%(asctime)s: %(levelname)s %(name)s %(funcName)s %(lineno)d: %(message)s")
main()
|
zeroonegit/python
|
python_programming/password1.py
|
Python
|
mit
| 786
| 0.005089
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
###############################################################################
# Author: Quincey Sun
# Mail: zeroonegit@gmail.com
# Created Time: 2016-06-21 23:14:26
###############################################################################
## This program asks a user for a name and a password.
# It then checks them to make sure that the user is allowed in .
# Note that this is a simple and insecure example,
# real password code should never be implemented this way.
name = input("What is your name? ")
password = input("What is the password? ")
if name == "Josh" and password == "Friday":
print ("Welcome Josh")
elif name == "Fred" and password == "Rock":
print ("Welcome Fred")
else:
print ("I don't know you.")
|
fernandoe/the-comics
|
tests/marvel/iterables/test_baseIterable.py
|
Python
|
gpl-3.0
| 816
| 0
|
import os
from unittest import TestCase
import mock
from marvel.iterables import BaseIterable
class FooIterable(BaseIterable):
    def __init__(self):
self.total_pages = 20
super(FooIterable, self).__init__()
def get_items(self):
if self.total_pages == 0:
raise StopIteration
else:
self.total_pages = self.total_pages - 1
return [self.total_pages]
class TestBaseIterable(TestCase):
def test_limit_pages_not_defined(self):
count = 0
for _ in FooIterable():
count = count + 1
assert count == 20
@mock.patch.dict(os.environ, {'TC_LIMIT_PAGES': '3'})
def test_limit_pages_with_3(self):
count = 0
for _ in FooIterable():
count = count + 1
assert count == 3
|
rdiaz82/mqttSqlLite
|
mqttsqlite/core/topics_controller.py
|
Python
|
mit
| 2,034
| 0.00295
|
from mqttsqlite.orm.models import Topic
import json
from mqttsqlite.settings.private_settings import MANAGEMENT_PASSWORD, QUERY_PASSWORD
from .utils import Payload, Utils
class TopicsController (object):
def add_topic(self, msg):
received_data = json.loads(msg.payload)
payload = Utils().validate_data(received_data, MANAGEMENT_PASSWORD, ['password', 'client'])
if payload.result == 'OK':
new_topic, created = Topic.get_or_create(name=str(received_data['topic']))
saved_topics = []
for topic in Topic.select():
saved_topics.append(topic.name)
payload.topics = saved_topics
return payload.get_json()
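    # Illustrative payload expected by add_topic (field names taken from the
    # validate_data() call above; the values themselves are made up):
    #   msg.payload = '{"client": "client-1",
    #                   "password": "<MANAGEMENT_PASSWORD>",
    #                   "topic": "home/livingroom/temperature"}'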
def remove_topic(self, msg):
received_data = json.loads(msg.payload)
payload = Utils().validate_data(received_data, MANAGEMENT_PASSWORD, ['password', 'client'])
if payload.result == 'OK':
topic = Topic.select().where(Topic.name == str(received_data['topic']))
if topic.count() > 0:
                topic[0].delete_instance()
else:
payload.result = 'KO'
payload.error = 'Topic not found'
saved_topics = []
for topic in Topic.select():
saved_topics.append(topic.name)
payload.topics = saved_topics
return payload.get_json()
def list_topics(self, msg):
received_data = json.loads(msg.payload)
        payload = Utils().validate_data(received_data, QUERY_PASSWORD, ['password', 'client'], topic=False)
if payload.result == 'OK':
saved_topics = []
for topic in Topic.select():
saved_topics.append(topic.name)
payload.topics = saved_topics
return payload.get_json()
def get_storaged_topics(self):
return Topic.select()
def is_topic_subscribed(self, topic):
if Topic.select().where(Topic.name == topic).count():
return True
else:
return False
|
ZhangXinNan/tensorflow
|
tensorflow/python/kernel_tests/control_flow_ops_py_test.py
|
Python
|
apache-2.0
| 121,396
| 0.014259
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=g-long-lambda
"""Tests for tensorflow.ops.control_flow_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import math
import time
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import device_lib
from tensorflow.python.client import session
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import function
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import functional_ops
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import gen_control_flow_ops
from tensorflow.python.ops import gen_data_flow_ops
from tensorflow.python.ops import gen_logging_ops
from tensorflow.python.ops import gen_state_ops
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import logging_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_grad # pylint: disable=unused-import
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import script_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
# pylint: disable=unused-import
import tensorflow.python.ops.tensor_array_grad
# pylint: enable=unused-import
from tensorflow.python.platform import test
from tensorflow.python.training import adam
from tensorflow.python.training import gradient_descent
from tensorflow.python.util import nest
def check_consumers(graph):
"""Sanity check on the consumer list of the tensors."""
consumer_count = {}
for op in graph.get_operations():
for v in op.inputs:
cnt = consumer_count.get(v, 0)
consumer_count[v] = cnt + 1
for k, v in consumer_count.items():
if len(k.consumers()) != v:
return False
return True
def all_fetchables():
tensor_names = []
graph = ops.get_default_graph()
for op in graph.get_operations():
for t in op.outputs:
if graph.is_fetchable(t):
tensor_names.append(t.name)
return tensor_names
def all_feedables():
feedable_tensors = []
graph = ops.get_default_graph()
for op in graph.get_operations():
for t in op.inputs:
if graph.is_feedable(t):
feedable_tensors.append(t)
return feedable_tensors
def opt_cfg():
return config_pb2.ConfigProto(
allow_soft_placement=True,
graph_options=config_pb2.GraphOptions(
optimizer_options=config_pb2.OptimizerOptions(
opt_level=config_pb2.OptimizerOptions.L1,
do_function_inlining=True,
do_constant_folding=True)))
def isum(s, maximum_iterations=None):
i = constant_op.constant(0, name="i")
c = lambda i, s: math_ops.less(i, 10)
b = lambda i, s: [math_ops.add(i, 1), math_ops.add(i, s)]
_, r_s = control_flow_ops.while_loop(
c, b, [i, s], maximum_iterations=maximum_iterations)
return r_s
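# Quick worked example of the helper above: isum(constant_op.constant(0)) builds
# a loop that runs for i = 0..9 and adds each i to s, so evaluating it yields 45.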
class ControlFlowTest(test.TestCase):
def testRefIdentity(self):
with self.test_session():
v = variables.Variable(7)
v = control_flow_ops._Identity(v)
op = state_ops.assign(v, 9)
v2 = control_flow_ops.with_dependencies([op], v)
self.assertTrue(isinstance(v2, ops.Tensor))
variables.global_variables_initializer().run()
self.assertEqual(9, v2.eval())
def testRefEnter(self):
with self.test_session():
v = variables.Variable(7)
enter_v = control_flow_ops._Enter(v, "foo_1", is_constant=True)
nine = constant_op.constant(9)
enter_nine = gen_control_flow_ops.enter(nine, "foo_1")
op = state_ops.assign(enter_v, enter_nine)
v2 = control_flow_ops.with_dependencies([op], enter_v)
v3 = control_flow_ops.exit(v2)
variables.global_variables_initializer().run()
self.assertEqual(9, v3.eval())
def testRefSwitch(self):
with self.test_session():
v = variables.Variable(7)
p = constant_op.constant(True)
v1 = control_flow_ops._SwitchRefOrTensor(v._ref(), p) # pylint: disable=protected-access
v2 = state_ops.assign(v1[1], 9)
variables.global_variables_initializer().run()
self.assertEqual(9, v2.eval())
def testEnterMulExit(self):
with self.test_session():
data = constant_op.constant([1, 2, 3, 4, 5, 6], name="data")
enter_data = gen_control_flow_ops.enter(data, "foo_1", False)
five = constant_op.constant(5)
enter_five = gen_control_flow_ops.enter(five, "foo_1", False)
mul_op = math_ops.multiply(enter_data, enter_five)
exit_op = control_flow_ops.exit(mul_op)
result = exit_op.eval()
self.assertAllEqual(np.array([x * 5 for x in [1, 2, 3, 4, 5, 6]]), result)
def testEnterShapePropagation(self):
with self.test_session():
v = variables.Variable([0.0, 0.0], dtype=dtypes.float32)
# If is_constant=True, the shape information should be propagated.
enter_v_constant = gen_control_flow_ops.enter(
v, "frame1", is_constant=True)
self.assertEqual(enter_v_constant.shape, [2])
# Otherwise, the shape should be unknown.
enter_v_non_constant = gen_control_flow_ops.enter(
v, "frame2", is_constant=False)
self.assertEqual(enter_v_non_constant.shape, None)
def testSwitchMergeIndexedSlices(self):
with self.test_session():
values = constant_op.constant([1, 2, 3, 4, 5, 6])
indices = constant_op.constant([0, 2, 4, 6, 8, 10])
data = ops.IndexedSlices(values, indices)
pred = ops.convert_to_tensor(True)
switch_op = control_flow_ops.switch(data, pred)
merge_op = control_flow_ops.merge(switch_op)[0]
val = merge_op.values.eval()
ind = merge_op.indices.eval()
self.assertAllEqual(np.arange(1, 7), val)
self.assertAllEqual(np.arange(0, 12, 2), ind)
def testSwitchDeadBranch(self):
with self.test_session():
data = constant_op.constant([1, 2, 3, 4, 5, 6], name="data")
ports = ops.convert_to_tensor(True, name="ports")
switch_op = control_flow_ops.switch(data, ports)
dead_branch = array_ops.identity(switch_op[0])
with self.assertRaisesWithPredicateMatch(
errors_impl.InvalidArgumentError,
lambda e: "Retval[0] does not have value" in str(e)):
dead_branch.eval()
def testSwitchMergeLess(self):
with self.test_session():
data = constant_op.constant([1, 2, 3, 4, 5, 6], name="data")
zero = ops.convert_to_tensor(0)
one = ops.convert_to_tensor(1)
less_op = math_ops.less(zero, one)
switch_op = control_flow_ops.switch(data, less_op)
merge_op = control_flow_ops.merge(switch_op)[0]
result = merge_op.eval()
self.assertAllEqual(np.arange(1, 7), result)
def testSwitchMergeAddIdentity(self):
with s
|
AutorestCI/azure-sdk-for-python
|
azure-batch/azure/batch/models/job_get_options.py
|
Python
|
mit
| 3,278
| 0.000305
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class JobGetOptions(Model):
"""Additional parameters for get operation.
:param select: An OData $select clause.
:type select: str
:param expand: An OData $expand clause.
:type expand: str
:param timeout: The maximum time that the server can spend processing the
request, in seconds. The default is 30 seconds. Default value: 30 .
:type timeout: int
:param client_request_id: The caller-generated request identity, in the
form of a GUID with no decoration such as curly braces, e.g.
9C4D50EE-2D56-4CD3-8152-34347DC9F2B0.
:type client_request_id: str
:param return_client_request_id: Whether the server should return the
client-request-id in the response. Default value: False .
:type return_client_request_id: bool
:param ocp_date: The time the request was issued. Client libraries
typically set this to the current system clock time; set it explicitly if
you are calling the REST API directly.
:type ocp_date: datetime
:param if_match: An ETag value associated with the version of the resource
known to the client. The operation will be performed only if the
resource's current ETag on the service exactly matches the value specified
by the client.
:type if_match: str
:param if_none_match: An ETag value associated with the version of the
resource known to the client. The operation will be performed only if the
resource's current ETag on the service does not match the value specified
by the client.
:type if_none_match: str
:param if_modified_since: A timestamp indicating the last modified time of
the resource known to the client. The operation will be performed only if
the resource on the service has been modified since the specified time.
:type if_modified_since: datetime
:param if_unmodified_since: A timestamp indicating the last modified time
of the resource known to the client. The operation will be performed only
if the resource on the service has not been modified since the specified
time.
:type if_unmodified_since: datetime
"""
def __init__(self, select=None, expand=None, timeout=30, client_request_id=None, return_client_request_id=False, ocp_date=None, if_match=None, if_none_match=None, if_modified_since=None, if_unmodified_since=None):
self.select = select
self.expand = expand
self.timeout = timeout
self.client_request_id = client_request_id
self.return_client_request_id = return_client_request_id
self.ocp_date = ocp_date
self.if_match = if_match
self.if_none_match = if_none_match
self.if_modified_since = if_modified_since
self.if_unmodified_since = if_unmodified_since
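# Illustrative construction (hypothetical values):
#   opts = JobGetOptions(select='id,state', timeout=60)
# The resulting object is typically handed to the corresponding job get()
# operation, which maps the populated fields onto OData query parameters and
# request headers.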
|
BD2KGenomics/cactus
|
src/progressiveCactus.py
|
Python
|
gpl-3.0
| 16,648
| 0.003844
|
#!/usr/bin/env python
# Progressive Cactus Package
# Copyright (C) 2009-2012 by Glenn Hickey (hickey@soe.ucsc.edu)
# and Benedict Paten (benedictpaten@gmail.com)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import sys
import xml.etree.ElementTree as ET
import math
import time
import random
import copy
from optparse import OptionParser
from optparse import OptionGroup
import imp
import socket
import signal
import traceback
import datetime
from sonLib.bioio import logger
from sonLib.bioio import setLoggingFromOptions
from sonLib.bioio import getTempDirectory
from sonLib.bioio import system
from sonLib.bioio import popenCatch
from jobTree.scriptTree.target import Target
from jobTree.scriptTree.stack import Stack
from jobTree.src.master import getJobFileDirName, getConfigFileName
from jobTree.src.jobTreeStatus import parseJobFiles
from cactus.progressive.multiCactusProject import MultiCactusProject
from cactus.shared.experimentWrapper import ExperimentWrapper
from cactus.shared.configWrapper import ConfigWrapper
from seqFile import SeqFile
from projectWrapper import ProjectWrapper
from jobStatusMonitor import JobStatusMonitor
def initParser():
usage = "usage: runProgressiveCactus.sh [options] <seqFile> <workDir> <outputHalFile>\n\n"\
"Required Arguments:\n"\
" <seqFile>\t\tFile containing newick tree and seqeunce paths"\
" paths.\n"\
"\t\t\t(see documetation or examples for format).\n"\
" <workDir>\t\tWorking directory (which can grow "\
"exteremely large)\n"\
" <outputHalFile>\tPath of output alignment in .hal format."
parser = OptionParser(usage=usage)
#JobTree Options (method below now adds an option group)
Stack.addJobTreeOptions(parser)
#Progressive Cactus will handle where the jobtree path is
parser.remove_option("--jobTree")
#Progressive Cactus Options
parser.add_option("--optionsFile", dest="optionsFile",
help="Text file containing command line options to use as"\
" defaults", default=None)
parser.add_option("--database", dest="database",
help="Database type: tokyo_cabinet or kyoto_tycoon"
" [default: %default]",
default="kyoto_tycoon")
parser.add_option("--outputMaf", dest="outputMaf",
help="[DEPRECATED use hal2maf on the ouput file instead] Path of output alignment in .maf format. This option should be avoided and will soon be removed. It may cause sequence names to be mangled, and use a tremendous amount of memory. ",
default=None)
parser.add_option("--configFile", dest="configFile",
help="Specify cactus configuration file",
default=None)
parser.add_option("--legacy", dest="legacy", action="store_true", help=
"Run cactus directly on all input sequences "
"without any progressive decomposition (ie how it "
"was originally published in 2011)",
default=False)
parser.add_option("--autoAbortOnDeadlock", dest="autoAbortOnDeadlock",
action="store_true",
help="Abort automatically when jobTree monitor" +
" suspects a deadlock by deleting the jobTree folder." +
" Will guarantee no trailing ktservers but still " +
" dangerous to use until we can more robustly detect " +
" deadlocks.",
default=False)
parser.add_option("--overwrite", dest="overwrite", action="store_true",
help="Re-align nodes in the tree that have already" +
" been successfully aligned.",
default=False)
parser.add_option("--rootOutgroupDists", dest="rootOutgroupDists",
help="root outgroup distance (--rootOutgroupPaths must " +
"be given as well)", default=None)
parser.add_option("--rootOutgroupPaths", dest="rootOutgroupPaths", type=str,
help="root outgroup path (--rootOutgroup must be given " +
"as well)", default=None)
parser.add_option("--root", dest="root", help="Name of ancestral node (which"
" must appear in NEWICK tree in <seqfile>) to use as a "
"root for the alignment. Any genomes not below this node "
"in the tree may be used as outgroups but will never appear"
" in the output. If no root is specifed then the root"
" of the tree is used. ", default=None)
#Kyoto Tycoon Options
ktGroup = OptionGroup(parser, "kyoto_tycoon Options",
"Kyoto tycoon provides a client/server framework "
"for large in-memory hash tables and is available "
"via the --database option.")
ktGroup.add_option("--ktPort", dest="ktPort",
help="starting port (lower bound of range) of ktservers"
" [default: %default]",
default=1978)
ktGroup.add_option("--ktHost", dest="ktHost",
help="The hostname to use for connections to the "
"ktserver (this just specifies where nodes will attempt"
" to find the server, *not* where the ktserver will be"
" run)",
default=None)
ktGroup.add_option("--ktType", dest="ktType",
help="Kyoto Tycoon server type "
"(memory, snapshot, or disk)"
" [default: %default]",
default='memory')
# sonlib doesn't allow for spaces in attributes in the db conf
# which renders this options useless
#ktGroup.add_option("--ktOpts", dest="ktOpts",
# help="Command line ktserver options",
# default=None)
ktGroup.add_option("--ktCreateTuning", dest="ktCreateTuning",
help="ktserver options when creating db "\
"(ex #bnum=30m#msiz=50g)",
default=None)
ktGroup.add_option("--ktOpenTuning", dest="ktOpenTuning",
help="ktserver options when opening existing db "\
"(ex #opts=ls#ktopts=p)",
default=None)
parser.add_option_group(ktGroup)
return parser
# Try to weed out errors early by checking options and paths
def validateInput(workDir, outputHalFile, options):
try:
if workDir.find(' ') >= 0:
raise RuntimeError("Cactus does not support spaces in pathnames: %s"
% workDir)
if not os.path.isdir(workDir):
os.makedirs(workDir)
if not os.path.isdir(workDir) or not os.access(workDir, os.W_OK):
raise
except:
raise RuntimeError("Can't write to workDir: %s" % workDir)
try:
open(outputHalFile, "w")
except:
raise RuntimeError("Unable to write to hal: %s" % outputHalFile)
if options.database != "tokyo_cabinet" and\
options.database != "kyoto_tycoon":
raise RuntimeError("Invalid database type: %s" % options.database)
if options.outputMaf is not None:
try:
open(optio
|
rcocetta/kano-toolset
|
kano/profiling_late.py
|
Python
|
gpl-2.0
| 3,436
| 0.00291
|
# profiling_late.py
#
# Copyright (C) 2015 Kano Computing Ltd.
# License: http://www.gnu.org/licenses/gpl-2.0.txt GNU General Public License v2
#
#
'''
Module to enable profiling timepoints. This module is loaded
only if the configuration file exists, see profiling.py for more information
'''
import os
import sys
import yaml
import cProfile
from kano.logging import logger
from kano.profiling import CONF_FILE
# load the configuration file
with open(CONF_FILE, 'r') as inp_conf:
conf = yaml.load(inp_conf)
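# Illustrative CONF_FILE layout (inferred from the keys accessed in
# declare_timepoint below; the app path, statfile and commands are made up):
#
#   /usr/bin/my-kano-app:
#     startup:
#       python:
#         statfile: /tmp/my-kano-app.pstats
#       start_exec: "echo start >> /tmp/timepoints.log"
#       end_exec: "echo end >> /tmp/timepoints.log"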
myProfile = cProfile.Profile()
app_name = sys.argv[0]
point_current = ""
def has_key(d, k):
return type(d) is dict and k in d
def declare_timepoint(name, isStart):
global myProfile
global point_current
cmd = None
pythonProfile = False
# Check if the app is contained in the profiling conf file
if has_key(conf, app_name):
# Check if the timepoint name is contained in the profiling conf file
if has_key(conf[app_name], name):
ct = conf[app_name][name]
# Check if python profiler should be started for this timepoint
if has_key(ct, 'python'):
pythonProfile = True
if isStart:
if point_current:
logger.error('Stop profiling for point "{0}" and do "{1}" instead'.format(point_current, name))
myProfile.disable()
myProfile.clear()
point_current = name
myProfile.enable()
else:
if point_current != name:
logger.error('Can\'t stop point "{0}" since a profiling session for "{1}" is being run'.format(name, point_current))
else:
myProfile.disable()
# Check if the statfile location in specified
if ct['python']['statfile']:
try:
myProfile.dump_stats(ct['python']['statfile'])
except IOError as e:
if e.errno == 2:
logger.error('Path to "{}" probably does not exist'.format(ct['python']['statfile']))
else:
logger.error('dump_stats IOError: errno:{0}: {1} '.format(e.errno, e.strerror))
else:
logger.error('No statfile entry in profiling conf file "{}"'.format(CONF_FILE))
myProfile.clear()
point_current = ""
else:
logger.info('Profiling conf file doesnt enable the Python profiler for point {} at app {}'.format(name, app_name))
# Check if we want to run some other command at this timepoint
if isStart and has_key(ct, 'start_exec'):
cmd = ct['start_exec']
os.system(cmd)
if not isStart and has_key(ct, 'end_exec'):
cmd = ct['end_exec']
os.system(cmd)
else:
            logger.info('Profiling conf file doesnt include point:{} for app {}'.format(name, app_name))
else:
logger.info('Profiling conf file doesnt include app:{}'.format(app_name))
logger.debug('timepoint '+name, transition=name, isStart=isStart, cmd=cmd, pythonProfile=pythonProfile)
|
jradavenport/cubehelix
|
setup.py
|
Python
|
bsd-2-clause
| 605
| 0
|
#!/usr/bin/env python
# encoding: utf-8
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="cubehelix",
version="0.1.0",
author="James Davenport",
# author_email="",
description="Cubehelix colormaps for matplotlib",
long_description=read('README.md'),
    # license="BSD",
py_modules=['cubehelix'],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Scientific/Engineering :: Visualization",
# "Li
|
cense :: OSI Approved :: BSD License",
]
)
|
Azure/azure-sdk-for-python
|
sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/aio/operations/_data_masking_rules_operations.py
|
Python
|
mit
| 12,027
| 0.004074
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._data_masking_rules_operations import build_create_or_update_request, build_get_request, build_list_by_sql_pool_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class DataMaskingRulesOperations:
"""DataMaskingRulesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.synapse.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace_async
async def create_or_update(
self,
resource_group_name: str,
workspace_name: str,
sql_pool_name: str,
data_masking_rule_name: str,
parameters: "_models.DataMas
|
kingRule",
**kwargs: Any
) -> "_models.DataMaskingRule":
"""Creates or upd
|
ates a Sql pool data masking rule.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param sql_pool_name: SQL pool name.
:type sql_pool_name: str
:param data_masking_rule_name: The name of the data masking rule.
:type data_masking_rule_name: str
:param parameters: The required parameters for creating or updating a data masking rule.
:type parameters: ~azure.mgmt.synapse.models.DataMaskingRule
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DataMaskingRule, or the result of cls(response)
:rtype: ~azure.mgmt.synapse.models.DataMaskingRule
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DataMaskingRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'DataMaskingRule')
request = build_create_or_update_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
sql_pool_name=sql_pool_name,
data_masking_rule_name=data_masking_rule_name,
content_type=content_type,
json=_json,
template_url=self.create_or_update.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('DataMaskingRule', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('DataMaskingRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/dataMaskingPolicies/{dataMaskingPolicyName}/rules/{dataMaskingRuleName}'} # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
workspace_name: str,
sql_pool_name: str,
data_masking_rule_name: str,
**kwargs: Any
) -> "_models.DataMaskingRule":
"""Gets the specific Sql pool data masking rule.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param sql_pool_name: SQL pool name.
:type sql_pool_name: str
:param data_masking_rule_name: The name of the data masking rule.
:type data_masking_rule_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DataMaskingRule, or the result of cls(response)
:rtype: ~azure.mgmt.synapse.models.DataMaskingRule
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DataMaskingRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
sql_pool_name=sql_pool_name,
data_masking_rule_name=data_masking_rule_name,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('DataMaskingRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{w
|
lablup/sorna-agent
|
src/ai/backend/kernel/python/sitecustomize.py
|
Python
|
lgpl-3.0
| 1,693
| 0.000591
|
import os
import socket
import sys
input_host = '127.0.0.1'
input_port = 65000
batch_enabled = int(os.environ.get('_BACKEND_BATCH_MODE', '0'))
if batch_enabled:
    # Since latest Python 2 has `builtins` and `input`,
# we cannot detect Python 2 with the existence of them.
if sys.version_info.major > 2:
import builtins
def _input(prompt=''):
sys.stdout.write(prompt)
sys.stdout.flush()
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
try:
sock.connect((input_host, input_port))
                    userdata = sock.recv(1024)
except ConnectionRefusedError:
userdata = b'<user-input-unavailable>'
return userdata.decode()
builtins._input = input # type: ignore
builtins.input = _input
else:
# __builtins__ is an alias dict for __builtin__ in modules other than __main__.
# Thus, we have to explicitly import __builtin__ module in Python 2.
import __builtin__
builtins = __builtin__
def _raw_input(prompt=''):
sys.stdout.write(prompt)
sys.stdout.flush()
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((input_host, input_port))
userdata = sock.recv(1024)
except socket.error:
userdata = b'<user-input-unavailable>'
finally:
sock.close()
return userdata.decode()
builtins._raw_input = builtins.raw_input # type: ignore
builtins.raw_input = _raw_input # type: ignore
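# Note: CPython's site module imports a module named "sitecustomize" automatically
# at interpreter start-up (unless -S is passed), so placing this file on the
# kernel's sys.path is enough to install the input()/raw_input() override when
# batch mode is enabled.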
|
TshepangRas/tshilo-dikotla
|
td_maternal/admin/maternal_arv_post_admin.py
|
Python
|
gpl-2.0
| 4,570
| 0.004376
|
from collections import OrderedDict
from django.contrib import admin
from edc_export.actions import export_as_csv_action
from edc_base.modeladmin.admin import BaseTabularInline
from ..forms import MaternalArvPostForm, MaternalArvPostMedForm, MaternalArvPostAdhForm
from ..models import MaternalVisit, MaternalArvPost, MaternalArvPostMed, MaternalArvPostAdh
from .base_maternal_model_admin import BaseMaternalModelAdmin
class MaternalArvPostModInlineAdmin(BaseTabularInline):
model = MaternalArvPostMed
form = MaternalArvPostMedForm
extra = 1
class MaternalArvPostModAdmin(BaseMaternalModelAdmin):
form = MaternalArvPostMedForm
list_display = ('maternal_arv_post', 'arv_code', 'dose_status', 'modification_date', 'modification_code')
radio_fields = {
"arv_code": admin.VERTICAL,
"dose_status": admin.VERTICAL,
"modification_code": admin.VERTICAL,
}
actions = [
export_as_csv_action(
description="CSV Export of Maternal ARV Post with list",
fields=[],
delimiter=',',
exclude=['created', 'modified', 'user_created', 'user_modified', 'revision', 'id', 'hostname_created',
'hostname_modified'],
extra_fields=OrderedDict(
{'subject_identifier':
'maternal_arv_post__maternal_visit__appointment__registered_subject__subject_identifier',
'gender': 'maternal_arv_post__maternal_visit__appointment__registered_subject__gender',
'dob': 'maternal_arv_post__maternal_visit__appointment__registered_subject__dob',
                 'on_arv_since': 'maternal_arv_post__on_arv_since',
'on_arv_reason': 'maternal_arv_post__on_arv_reason',
'on_arv_reason_other': 'maternal_arv_post__on_arv_reason_other',
'arv_status': 'maternal_arv_post__arv_status',
}),
)]
admin.site.register(MaternalArvPostMed, MaternalArvPostModAdmin)
class MaternalArvPostAdmin(BaseMaternalModelAdmin):
form = MaternalArvPostForm
fields = (
"maternal_visit",
"on_arv_since",
"on_arv_reason",
"on_arv_reason_other",
"arv_status")
radio_fields = {
"on_arv_since": admin.VERTICAL,
"on_arv_reason": admin.VERTICAL,
"arv_status": admin.VERTICAL}
inlines = [MaternalArvPostModInlineAdmin, ]
actions = [
export_as_csv_action(
description="CSV Export of Maternal ARV Post",
fields=[],
delimiter=',',
exclude=['created', 'modified', 'user_created', 'user_modified', 'revision', 'id', 'hostname_created',
'hostname_modified'],
extra_fields=OrderedDict(
{'subject_identifier': 'maternal_visit__appointment__registered_subject__subject_identifier',
'gender': 'maternal_visit__appointment__registered_subject__gender',
'dob': 'maternal_visit__appointment__registered_subject__dob',
}),
)]
def formfield_for_foreignkey(self, db_field, request, **kwargs):
if db_field.name == "maternal_visit":
if request.GET.get('maternal_visit'):
kwargs["queryset"] = MaternalVisit.objects.filter(id=request.GET.get('maternal_visit'))
return super(MaternalArvPostAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs)
admin.site.register(MaternalArvPost, MaternalArvPostAdmin)
class MaternalArvPostAdhAdmin(BaseMaternalModelAdmin):
form = MaternalArvPostAdhForm
fields = (
"maternal_visit",
"missed_doses",
"missed_days",
"missed_days_discnt",
"comment")
actions = [
export_as_csv_action(
description="CSV Export of Maternal ARVs Post: Adherence",
fields=[],
delimiter=',',
exclude=['created', 'modified', 'user_created', 'user_modified', 'revision', 'id', 'hostname_created',
'hostname_modified'],
extra_fields=OrderedDict(
{'subject_identifier': 'maternal_visit__appointment__registered_subject__subject_identifier',
'gender': 'maternal_visit__appointment__registered_subject__gender',
'dob': 'maternal_visit__appointment__registered_subject__dob',
'registered': 'maternal_visit__appointment__registered_subject__registration_datetime'}),
)]
admin.site.register(MaternalArvPostAdh, MaternalArvPostAdhAdmin)
|
yujikato/DIRAC
|
src/DIRAC/Interfaces/scripts/dirac_admin_sync_users_from_file.py
|
Python
|
gpl-3.0
| 1,909
| 0.012572
|
#!/usr/bin/env python
########################################################################
# File : dirac-admin-sync-users-from-file
# Author : Adrian Casajus
########################################################################
"""
Sync users in Configuration with the cfg contents.
Usage:
dirac-admin-sync-users-from-file [options] ... UserCfg
Arguments:
UserCfg: Cfg FileName with Users as sections containing DN, Groups, and other properties as options
Example:
$ dirac-admin-sync-users-from-file file_users.cfg
"""
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from diraccfg import CFG
import DIRAC
from DIRAC.Core.Base import Script
from DIRAC.Core.Utilities.DIRACScript import DIRACScript
__RCSID__ = "$Id$"
@DIRACScript()
def main():
Script.registerSwitch("t", "test", "Only
|
test. Don't commit changes")
Script.parseCommandLine(ignoreErrors=True)
args = Script.getExtraCLICFGFiles()
if len(args) < 1:
Script.showHelp()
from DIRAC.Interfaces.API.DiracAdmin import DiracAdmin
diracAdmin = DiracAdmin()
exitCode = 0
testOnly = False
errorList = []
for unprocSw in Script.getUnprocessedSwitches():
if unprocSw[0] in ("t", "test"):
testOnly = True
try:
usersCFG = CFG().loadFromFile(args[0])
except Exception as e:
errorList.append("file open", "Can't parse file %s: %s" % (args[0], str(e)))
errorCode = 1
else:
if not diracAdmin.csSyncUsersWithCFG(usersCFG):
errorList.append(("modify users", "Cannot sync with %s" % args[0]))
exitCode = 255
if not exitCode and not testOnly:
result = diracAdmin.csCommitChanges()
if not result['OK']:
errorList.append(("commit", result['Message']))
exitCode = 255
for error in errorList:
print("ERROR %s: %s" % error)
DIRAC.exit(exitCode)
if __name__ == "__main__":
main()
|
Jinwithyoo/han
|
hangulize/langs/fin/__init__.py
|
Python
|
bsd-3-clause
| 3,370
| 0
|
# -*- coding: utf-8 -*-
from hangulize import *
class Finnish(Language):
"""For transcribing Finnish."""
__iso639__ = {1: 'fi', 2: 'fin', 3: 'fin'}
__tmp__ = ',;%'
vowels = 'aAeioOuy'
ob = 'bdfgkpstT'
notation = Notation([
# Convention: A = ä, O = ö
('å', 'o'),
('ä', 'A'),
('ö', 'O'),
('w', 'v'),
('xx', 'x'),
('x', 'ks'),
('z', 's'),
('ds', 'T'),
('ts', 'T'),
('c{e|i|y}', 's'),
('c', 'k'),
('q', 'k'),
('ng', 'N'),
('nk', 'Nk'),
('mn{@}', 'm,n'),
('mn', 'm'),
('th', 't'),
('^j{@}', 'J'),
('{@}j{@}', 'J'),
('{h|s|T}j', '%J'),
('j', 'i'),
('aa', 'a'),
('bb', 'b'),
('dd', 'd'),
('ee', 'e'),
('AA', 'A'),
('ff', 'f'),
('gg', 'g'),
('hh', 'h'),
('ii', 'i'),
('jj', 'j'),
('kk', 'k'),
('ll', 'l'),
('{@}mm{@}', 'm,m'),
('mm', 'm'),
('{@}nn{@}', 'n,n'),
('nn', 'n'),
('oo', 'o'),
('pp', 'p'),
('rr', 'r'),
('ss', 's'),
('tt', 't'),
('uu', 'u'),
('vv', 'v'),
('yy', 'y'),
('zz', 'z'),
('{@}b{<ob>}', 'p,'),
('{@}g{<ob>}', 'k,'),
('{@}k{<ob>}', 'k,'),
('{@}p{<ob>}', 'p,'),
('{@}t{<ob>}', 't,'),
('^l', 'l;'),
('^m', 'm;'),
('^n', 'n;'),
('l$', 'l,'),
('m$', 'm,'),
('n$', 'n,'),
('l{@|m,|n,|N}', 'l;'),
('{,}l', 'l;'),
('m{@}', 'm;'),
('n{@}', 'n;'),
('l', 'l,'),
('m', 'm,'),
('n', 'n,'),
('N', 'N,'),
(',,', ','),
(',;', None),
(',l,', 'l,'),
(',m,', 'm,'),
(',n,', 'n,'),
(',N,', 'N,'),
('l{m;|n;}', 'l,'),
(';', None),
('b', Choseong(B)),
('d', Choseong(D)),
('f', Choseong(P)),
('g', Choseong(G)),
('h', Choseong(H)),
('k,', Jongseong(G)),
('k', Choseong(K)),
('^l', Choseong(L)),
('{,|-}l', Choseong(L)),
('-', None),
('l,', Jongseong(L)),
        ('l', Jongseong(L), Choseong(L)),
('m,', Jongseong(M)),
('m', Choseong(M)),
('n,', Jongseong(N)),
('n', Choseong(N)),
('N', Jongseong(NG)),
('p,', Jongseong(B)),
('p', Choseong(P)),
('r', Choseong(L)),
('s', Choseong(S)),
('t,', Jongseong(S)),
        ('t', Choseong(T)),
('T', Choseong(C)),
('v', Choseong(B)),
('%', Choseong(NG)),
('Ja', Jungseong(YA)),
('JA', Jungseong(YAE)),
('Je', Jungseong(YE)),
('Ji', Jungseong(I)),
('Jo', Jungseong(YO)),
('JO', Jungseong(OE)),
('Ju', Jungseong(YU)),
('Jy', Jungseong(WI)),
('a', Jungseong(A)),
('A', Jungseong(AE)),
('e', Jungseong(E)),
('i', Jungseong(I)),
('o', Jungseong(O)),
('u', Jungseong(U)),
('y', Jungseong(WI)),
('O', Jungseong(OE)),
])
def normalize(self, string):
return normalize_roman(string, {
'Å': 'å', 'Ǻ': 'å', 'ǻ': 'å', 'Ä': 'ä', 'Ö': 'ö'
})
__lang__ = Finnish
|
wyqwyq/mit6858-lab
|
submit.py
|
Python
|
mit
| 1,330
| 0.003008
|
#!/usr/bin/python
import os.path
import subprocess
import sys
import urllib
KEY_FILE = "submit.token"
def main(filename):
# Prompt for key if missing
if not os.path.exists(KEY_FILE):
print "Please visit http://css.csail.mit.edu/6.858/2014/labs/handin.html"
print "and enter your API key."
key = raw_input("Key: ").strip()
with open(KEY_FILE, "w") as f:
f.write(key + "\n")
print "API key wri
|
tten to %s" % KEY_FILE
# Read the key.
with open(KEY_FILE) as f:
key = f.read().strip()
# Shell out to curl. urllib2 doesn't deal with multipart attachments. Throw
# away the output; you just get a random HTML page.
with open("/dev/null", "a") as null:
subprocess.check_call(["
|
curl", "-f",
"-F", "file=@%s" % filename,
"-F", "key=%s" % key,
"http://6858.scripts.mit.edu/submit/handin.py/upload"],
stdout=null, stderr=null)
print "Submitted %s." % filename
print "Please visit http://css.csail.mit.edu/6.858/2014/labs/handin.html"
print "to verify the upload."
if __name__ == "__main__":
if len(sys.argv) < 2:
print "Usage: %s TARBALL" % sys.argv[0]
sys.exit(1)
main(sys.argv[1])
|
ddurieux/alignak
|
test/test_service_description_inheritance.py
|
Python
|
agpl-3.0
| 2,454
| 0.00163
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2015: Alignak team, see AUTHORS.txt file for contributors
#
# This file is part of Alignak.
#
# Alignak is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Alignak is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Alignak. If not, see <http://www.gnu.org/licenses/>.
#
#
# This file incorporates work covered by the following copyright and
# permission notice:
#
# Copyright (C) 2009-2014:
# Jean Gabes, naparuba@gmail.com
# Sebastien Coavoux, s.coavoux@free.fr
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
#
# This file is used to test reading and processing of config files
#
from alignak_test import *
class TestServiceDescriptionInheritance(AlignakTest):
def setUp(self):
self.setup_with_file('etc/alignak_service_description_inheritance.cfg')
def test_service_description_inheritance(self):
self.print_header()
svc = self.sched.services.find_srv_by_name_and_hostname("MYHOST", "SSH")
self.assertIsNotNone(svc)
def test_service_description_inheritance_multihosts(self):
self.print_header()
for hname in ["MYHOST2", "MYHOST3"]:
svc = self.sched.services.find_srv_by_name_and_hostname(hname, "SSH")
self.assertIsNotNone(svc)
if __name__ == '__main__':
unittest.main()
|
kylepjohnson/cltk
|
src/cltk/morphology/utils.py
|
Python
|
mit
| 1,473
| 0.001358
|
"""Misc helper functions for extracting morphological
info from CLTK data structures.
"""
from typing import List, Optional, Tuple, Union
from cltk.core.data_types import Word
from cltk.core.exceptions import CLTKException
from cltk.morphology.universal_dependencies_features import (
NOMINAL_FEATURES,
VERBAL_FEATURES,
MorphosyntacticFeature,
)
ALL_POSSIBLE_FEATURES = NOMINAL_FEATURES + VERBAL_FEATURES
def get_pos(word: Optional[Word]) -> Union[str, None]:
"""Take word, return structured info."""
if not word:
return None
return word.pos.name
def get_features(
word: Optional[Word],
prepend_to_label: str = None,
) -> Tuple[List[str], List[Union[str, int, float, None]]]:
"""Take a word, return a list of feature labels."""
features_present = list() # type: List[Union[str, None]]
feature_variables = list() # type: List[str]
    for possible_feature in ALL_POSSIBLE_FEATURES:
        feature_variables.append(str(possible_feature).lower())
if not word:
features_present.append(None)
continue
try:
feat = word.__getattr__(possible_feature)[0] # type: MorphosyntacticFeature
features_present.append(str(feat.name))
except CLTKException:
features_present.append(None)
if prepend_to_label:
feature_variables = [prepend_to_label + name for name in feature_variables]
return feature_variables, features_present
|
xrotwang/pyimeji
|
pyimeji/api.py
|
Python
|
apache-2.0
| 4,739
| 0.002532
|
"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
|
bluecap-se/yarr.client
|
tests/app_tests.py
|
Python
|
mit
| 539
| 0.001855
|
# -*- coding: utf-8 -*-
import pytest
from flask import url_for
def test_config(app):
    assert app.debug, 'App is in debug mode'
assert not app.config.get('MINIFY_HTML'), 'App does minify html'
    assert app.config.get('ASSETS_DEBUG'), 'App does build assets'
assert app.config.get('YARR_URL'), 'App doesn\'t have Yarr! URL specified'
def test_routes(client):
assert client.get(url_for('index')).status_code == 200
assert client.get(url_for('search')).status_code == 302, 'Empty query should throw redirect'
|
alanjw/GreenOpenERP-Win-X86
|
python/Lib/test/test_urllibnet.py
|
Python
|
agpl-3.0
| 8,057
| 0.001614
|
#!/usr/bin/env python
import unittest
from test import test_support
import socket
import urllib
import sys
import os
import time
mimetools = test_support.import_module("mimetools", deprecated=True)
def _open_with_retry(func, host, *args, **kwargs):
# Connecting to remote hosts is flaky. Make it more robust
# by retrying the connection several times.
for i in range(3):
try:
return func(host, *args, **kwargs)
except IOError, last_exc:
continue
except:
raise
raise last_exc
class URLTimeoutTest(unittest.TestCase):
TIMEOUT = 10.0
def setUp(self):
socket.setdefaulttimeout(self.TIMEOUT)
def tearDown(self):
socket.setdefaulttimeout(None)
def testURLread(self):
f = _open_with_retry(urllib.urlopen, "http://www.python.org/")
x = f.read()
class urlopenNetworkTests(unittest.TestCase):
"""Tests urllib.urlopen using the network.
These tests are not exhaustive. Assuming that testing using files does a
good job overall of some of the basic interface features. There are no
tests exercising the optional 'data' and 'proxies' arguments. No tests
for transparent redirection have been written.
setUp is not used for always constructing a connection to
    http://www.python.org/ since there are a few tests that don't use that address
and making a connection is expensive enough to warrant minimizing unneeded
connections.
"""
def urlopen(self, *args):
return _open_with_retry(urllib.urlopen, *args)
def test_basic(self):
# Simple test expected to pass.
open_url = self.urlopen("http://www.python.org/")
for attr in ("read", "readline", "readlines", "fileno", "close",
"info", "geturl"):
self.assertTrue(hasattr(open_url, attr), "object returned from "
"urlopen lacks the %s attribute" % attr)
try:
self.assertTrue(open_url.read(), "calling 'read' failed")
finally:
open_url.close()
def test_readlines(self):
# Test both readline and readlines.
open_url = self.urlopen("http://www.python.org/")
try:
self.assertIsInstance(open_url.readline(), basestring,
"readline did not return a string")
self.assertIsInstance(open_url.readlines(), list,
"readlines did not return a list")
finally:
open_url.close()
def test_info(self):
# Test 'info'.
open_url = self.urlopen("http://www.python.org/")
try:
info_obj = open_url.info()
finally:
open_url.close()
self.assertIsInstance(info_obj, mimetools.Message,
"object returned by 'info' is not an "
"instance of mimetools.Message")
self.assertEqual(info_obj.getsubtype(), "html")
def test_geturl(self):
# Make sure same URL as opened is returned by geturl.
URL = "http://www.python.org/"
open_url = self.urlopen(URL)
try:
gotten_url = open_url.geturl()
finally:
open_url.close()
self.assertEqual(gotten_url, URL)
def test_getcode(self):
# test getcode() with the fancy opener to get 404 error codes
URL = "http://www.python.org/XXXinvalidXXX"
open_url = urllib.FancyURLopener().open(URL)
try:
code = open_url.getcode()
finally:
open_url.close()
self.assertEqual(code, 404)
def test_fileno(self):
if (sys.platform in ('win32',) or
not hasattr(os, 'fdopen')):
# On Windows, socket handles are not file descriptors; this
# test can't pass on Windows.
return
# Make sure fd returned by fileno is valid.
open_url = self.urlopen("http://www.python.org/")
fd = open_url.fileno()
FILE = os.fdopen(fd)
try:
self.assertTrue(FILE.read(), "reading from file created using fd "
"returned by fileno failed")
finally:
FILE.close()
def test_bad_address(self):
# Make sure proper exception is raised when connecting to a bogus
# address.
bogus_domain = "sadflkjsasf.i.nvali.d"
try:
socket.gethostbyname(bogus_domain)
except socket.gaierror:
pass
else:
# This happens with some overzealous DNS providers such as OpenDNS
self.skipTest("%r should not resolve for test to work" % bogus_domain)
self.assertRaises(IOError,
# SF patch 809915: In Sep 2003, VeriSign started
# highjacking invalid .com and .net addresses to
# boost traffic to their own site. This test
# started failing then. One hopes the .invalid
# domain will be spared to serve its defined
# purpose.
# urllib.urlopen, "http://www.sadflkjsasadf.com/")
urllib.urlopen, "http://sadflkjsasf.i.nvali.d/")
class urlretrieveNetworkTests(unittest.TestCase):
"""Tests urllib.urlretrieve using the network."""
def urlretrieve(self, *args):
return _open_with_retry(urllib.urlretrieve, *args)
def test_basic(self):
# Test basic functionality.
file_location,info = self.urlretrieve("http://www.python.org/")
self.assertTrue(os.path.exists(file_location), "file location returned by"
" urlretrieve is not a valid path")
FILE = file(file_location)
try:
self.assertTrue(FILE.read(), "reading from the file location returned"
" by urlretrieve failed")
finally:
FILE.close()
os.unlink(file_location)
def test_specified_path(self):
# Make sure that specifying the location of the file to write to works.
file_location,info = self.urlretrieve("http://www.python.org/",
test_support.TESTFN)
self.assertEqual(file_location, test_support.TESTFN)
self.assertTrue(os.path.exists(file_location))
FILE = file(file_location)
try:
self.assertTrue(FILE.read(), "reading from temporary file failed")
finally:
            FILE.close()
os.unlink(file_location)
def test_header(self):
# Make sure header returned as 2nd value from urlretrieve is good.
file_location, header = self.urlretrieve("http://www.python.org/")
os.unlink(file_location)
        self.assertIsInstance(header, mimetools.Message,
"header is not an instance of mimetools.Message")
def test_data_header(self):
logo = "http://www.python.org/community/logos/python-logo-master-v3-TM.png"
file_location, fileheaders = self.urlretrieve(logo)
os.unlink(file_location)
datevalue = fileheaders.getheader('Date')
dateformat = '%a, %d %b %Y %H:%M:%S GMT'
try:
time.strptime(datevalue, dateformat)
except ValueError:
self.fail('Date value not in %r format', dateformat)
def test_main():
test_support.requires('network')
with test_support.check_py3k_warnings(
("urllib.urlopen.. has been removed", DeprecationWarning)):
test_support.run_unittest(URLTimeoutTest,
urlopenNetworkTests,
urlretrieveNetworkTests)
if __name__ == "__main__":
test_main()
|
sjkingo/python-auspost-pac
|
setup.py
|
Python
|
bsd-2-clause
| 900
| 0.001111
|
from setuptools import find_packages, setup
from auspost_pac import __version__ as version
setup(
name='python-auspost-pac',
version=version,
license='BSD',
author='Sam Kingston',
author_email='sam@sjkwi.com.au',
description='Python API for Australia Post\'s Postage Assessment Calculator (pac).',
url='https://github.com/sjkingo/python-auspost-pac',
install_requires=[
        'cached_property',
'frozendict',
'requests',
],
packages=find_packages(),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
'Programming Language :: Python',
],
)
|
jiadaizhao/LeetCode
|
0801-0900/0865-Smallest Subtree with all the Deepest Nodes/0865-Smallest Subtree with all the Deepest Nodes.py
|
Python
|
mit
| 610
| 0
|
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def subtreeWithAllDeepest(self, root: TreeNode) -> TreeNode:
def dfs(root):
if root is None:
return None, 0
left, ld = dfs(root.left)
right, rd = dfs(root.right)
if ld < rd:
return right, rd + 1
elif ld > rd:
return left, ld + 1
else:
                return root, ld + 1
return dfs(root)[0]
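# Worked example, added for illustration (it assumes the commented-out TreeNode
# stub above is defined). For the tree 3 -> (5, 1), 5 -> (6, 2), 2 -> (7, 4),
# the deepest leaves are 7 and 4, so the smallest subtree containing them is
# rooted at 2:
#
#     root = TreeNode(3)
#     root.left, root.right = TreeNode(5), TreeNode(1)
#     root.left.left, root.left.right = TreeNode(6), TreeNode(2)
#     root.left.right.left, root.left.right.right = TreeNode(7), TreeNode(4)
#     Solution().subtreeWithAllDeepest(root).val  # -> 2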
|
j2a/django-simprest
|
simprest/emitters.py
|
Python
|
bsd-3-clause
| 1,874
| 0.001601
|
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from django.utils import simplejson
from django.core.serializers.json import DateTimeAwareJSONEncoder
from django.utils.xmlutils import SimplerXMLGenerator
from django.utils.encoding import smart_unicode
EMITTERS = {}
def get_emitter(format):
try:
return EMITTERS[format]
except KeyError:
raise ValueError('No emitter registered for type %s' % format)
def register_emitter(name=None, content_type='text/plain'):
'''Decorator to register an emitter.
Parameters::
- ``name``: name of emitter ('json', 'xml', ...)
- ``content_type``: content type to serve response as
'''
def inner(func):
EMITTERS[name or func.__name__] = (func, content_type)
return inner
@register_emitter(content_type='application/json; charset=utf-8')
def json(request, data):
cb = request.GET.get('callback')
data = simplejson.dumps(data, cls=DateTimeAwareJSONEncoder,
ensure_ascii=False, indent=4)
return cb and ('%s(%s)' % (cb, data)) or data
@register_emitter(content_type='text/xml; charset=utf-8')
def xml(request, data):
stream = StringIO()
xml = SimplerXMLGenerator(stream, 'utf-8')
xml.startDocument()
xml.startElement('response', {})
to_xml(xml, data)
xml.endElement('response')
xml.endDocument()
return stream.getvalue()
def to_xml(xml, data):
if isinstance(data, (list, tuple)):
for item in data:
xml.startElement('resource', {})
to_xml(xml, item)
xml.endElement('resource')
elif isinstance(data, dict):
for key, value in data.iteritems():
xml.startElement(key, {})
to_xml(xml, value)
xml.endElement(key)
else:
xml.characters(smart_unicode(data))
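# Added illustration (not part of the original module): any callable taking
# (request, data) can be registered the same way as json/xml above. Note that
# inner() does not return func, so the plain-call form below avoids rebinding
# the function name to None.
def text(request, data):
    # Minimal emitter: plain unicode rendering of the data structure.
    return smart_unicode(data)
register_emitter(name='txt', content_type='text/plain; charset=utf-8')(text)
# get_emitter('txt') now returns (text, 'text/plain; charset=utf-8').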
|
kevinconway/venvctrl
|
venvctrl/venv/relocate.py
|
Python
|
mit
| 3,455
| 0
|
"""Virtual environment relocatable mixin."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import os
import shutil
class RelocateMixin(object):
"""Mixin which adds the ability to relocate a virtual environment."""
def relocate(self, destination):
"""Configure the virtual environment for another path.
Args:
destination (str): The target path of the virtual environment.
Note:
            This does not actually move the virtual environment. It only
rewrites the metadata required to support a move.
"""
for activate in self.bin.activates:
activate.vpath = destination
for binfile in self.bin.files:
shebang = binfile.shebang
if shebang:
shebang = shebang.strip().split(os.linesep)
if len(shebang) == 1 and (
"python" in shebang[0] or "pypy" in shebang[0]
):
binfile.shebang = "#!{0}".format(
os.path.join(destination, "bin", "python")
)
elif len(shebang) == 3 and (
"python" in shebang[1] or "pypy" in shebang[1]
):
shebang[1] = "'''exec' {0} \"$0\" \"$@\"".format(
os.path.join(destination, "bin", "python")
)
binfile.shebang = os.linesep.join(shebang)
# Even though wheel is the official format, there are still several
# cases in the wild where eggs are being installed. Eggs come with the
# possibility of .pth files. Each .pth file contains the path to where
# a module can be found. To handle them we must recurse the entire
        # venv file tree since they can be either at the root of the
        # site-packages, bundled within an egg directory, or both.
original_path = self.path
original_abspath = self.abspath
dirs = [self]
while dirs:
current = dirs.pop()
dirs.extend(current.dirs)
for file_ in current.files:
if file_.abspath.endswith(".pth"):
content = ""
with open(file_.abspath, "r") as source:
# .pth files are almost always very small. Because of
# this we read the whole file as a convenience.
content = source.read()
# It's not certain whether the .pth will have a relative
# or absolute path so we replace both in order of most to
# least specific.
content = content.replace(original_abspath, destination)
content = content.replace(original_path, destination)
with open(file_.abspath, "w") as source:
source.write(content)
def move(self, destination):
"""Reconfigure and move the virtual environment to another path.
Args:
destination (str): The target path of the virtual environment.
Note:
            Unlike `relocate`, this method *will* move the virtual environment to the
given path.
"""
self.relocate(destination)
shutil.move(self.path, destination)
self._path = destination
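# Illustration, added as a sketch (the paths below are hypothetical): a .pth
# file under site-packages that contained
#     /old/venv/lib/python2.7/site-packages/foo-1.0-py2.7.egg
# would, after relocate('/new/venv'), be rewritten in place to
#     /new/venv/lib/python2.7/site-packages/foo-1.0-py2.7.egg
# because both the absolute and the relative form of the old prefix are replaced.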
|
linktlh/Toontown-journey
|
toontown/toon/DistributedNPCBanker.py
|
Python
|
apache-2.0
| 3,769
| 0.001592
|
from direct.distributed.ClockDelta import *
from direct.interval.IntervalGlobal import *
from pandac.PandaModules import *
from DistributedNPCToonBase import *
from toontown.chat.ChatGlobals import *
from toontown.estate import BankGUI, BankGlobals
from toontown.nametag.NametagGlobals import *
from toontown.toonbase import TTLocalizer
class DistributedNPCBanker(DistributedNPCToonBase):
def __init__(self, cr):
DistributedNPCToonBase.__init__(self, cr)
self.jellybeanJar = None
self.bankGUI = None
def disable(self):
self.ignoreAll()
taskMgr.remove(self.uniqueName('popupBankingGUI'))
taskMgr.remove(self.uniqueName('lerpCamera'))
if self.bankGUI:
self.bankGUI.destroy()
self.av = None
base.localAvatar.posCamera(0, 0)
DistributedNPCToonBase.disable(self)
def resetClerk(self):
self.ignoreAll()
taskMgr.remove(self.uniqueName('popupBankingGUI'))
taskMgr.remove(self.uniqueName('lerpCamera'))
if self.bankGUI:
self.bankGUI.destroy()
self.clearMat()
self.startLookAround()
self.detectAvatars()
def handleCollisionSphereEnter(self, collEntry):
self.sendAvatarEnter()
self.nametag3d.setDepthTest(0)
base.cr.playGame.getPlace().setState('purchase')
self.nametag3d.setBin('fixed', 0)
def sendAvatarEnter(self):
self.sendUpdate('avatarEnter')
def setMovie(self, mode, avId, timestamp):
isLocalToon = avId == base.localAvatar.doId
timeStamp = globalClockDelta.localElapsedTime(timestamp)
self.remain = 60 - timeStamp
self.resetClerk()
if mode == BankGlobals.BANK_MOVIE_CLEAR:
if not avId:
self.setChatAbsolute('', CFSpeech | CFTimeout)
if isLocalToon:
self.freeAvatar()
elif mode == BankGlobals.BANK_MOVIE_TIMEOUT:
if isLocalToon:
self.cleanupBankingGUI()
self.freeAvatar()
self.setChatAbsolute(TTLocalizer.STOREOWNER_TOOKTOOLONG,
CFSpeech | CFTimeout)
elif mode == BankGlobals.BANK_MOVIE_DEPOSIT:
if isLocalToon:
self.cleanupBankingGUI()
self.freeAvatar()
self.setChatAbsolute(TTLocalizer.STOREOWNER_GOODBYE,
CFSpeech | CFTimeout)
elif mode == BankGlobals.BANK_MOVIE_GUI:
av = base.cr.doId2do.get(avId)
if av:
self.setupAvatars(av)
if isLocalToon:
self.hideNametag2d()
base.camera.wrtReparentTo(render)
seq = Sequence((base.camera.posQuatInterval(1, Vec3(-5, 9, self.getHeight() - 0.5),
Vec3(-150, -2, 0), other=self, blendType='easeOut',
name=self.uniqueName('lerpCamera'))))
seq.start()
taskMgr.doMethodLater(2.0, self.popupBankingGUI,
self.uniqueName('popupBankingGUI'))
self.setChatAbsolute(TTLocalizer.STOREOWNER_BANKING,
CFSpeech | CFTimeout)
def __handleBankingDone(self, transactionAmount):
self.sendUpdate('transferMoney', [transactionAmount])
def popupBankingGUI(self, task):
self.accept('bankDone', self.__handleBankingDone)
self.bankGUI = BankGUI.BankGUI('bankDone')
return task.done
def cleanupBankingGUI(self):
if self.bankGUI:
self.bankGUI.destroy()
self.bankGUI = None
def freeAvatar(self):
base.localAvatar.posCamera(0, 0)
if base.cr.playGame.getPlace():
base.cr.playGame.getPlace().setState('walk')
self.showNametag2d()
|
beckjake/python3-hglib
|
tests/test-branches.py
|
Python
|
mit
| 674
| 0.001484
|
from . import common
import hglib
class test_branches(common.basetest):
def test_empty(self):
self.assertEquals(self.client.branches(), [])
def test_basic(self):
self.append('a', 'a')
rev0 = self.client.commit('first', addremove=True)
self.client.branch('foo')
self.append('a', 'a')
rev1 = self.client.commit('second')
branches = self.client.branches()
expected = []
for r, n in (rev1, rev0):
r = self.client.log(r)[0]
expected.append((r.branch, int(r.rev), r.node[:12]))
self.assertEquals(branches, expected)
    def test_active_closed(self):
pass
|
macs03/autoservicio
|
clientes/urls.py
|
Python
|
mit
| 293
| 0.006826
|
from django.conf.urls import patterns, include, url
urlpatterns = [
url(r'^$', 'clientes.views.clientes', name='clientes'),
url(r'^edit/(\d+)$', 'clientes.views.clientes_edit', name='editCliente'),
    url(r'^delete/(\d+)$', 'clientes.views.clientes_delete', name='deleteCliente'),
]
|
stephengroat/miasm
|
miasm2/arch/msp430/disasm.py
|
Python
|
gpl-2.0
| 242
| 0
|
from miasm2.core.asmblock import disasmEngine
from miasm2.arch.msp430.arch import mn_msp430
class dis_msp430(disasmEngine):
def __init__(self, bs=None, **kwargs):
        super(dis_msp430, self).__init__(mn_msp430, None, bs, **kwargs)
|
spl0k/supysonic
|
supysonic/api/radio.py
|
Python
|
agpl-3.0
| 1,836
| 0.000545
|
# This file is part of Supysonic.
# Supysonic is a Python implementation of the Subsonic server API.
#
# Copyright (C) 2020 Alban 'spl0k' Féron
#
# Distributed under terms of the GNU AGPLv3 license.
from flask import request
from ..db import RadioStation
from . import get_entity, api_routing
from .exceptions import Forbidden, MissingParameter
@api_routing("/getInternetRadioStations")
def get_radio_stations():
query = RadioStation.select().sort_by(RadioStation.name)
return request.formatter(
"internetRadioStations",
{"internetRadioStation": [p.as_subsonic_station
|
() for p in query]},
)
@api_routing("/createInternetRadioStation")
def create_radio_station():
if not request.user.admin:
raise Forbidden()
stream_url, name, homepage_url = map(
request.values.get, ("streamUrl", "name", "homepageUrl")
)
if stream_url and name:
RadioStation(stream_url=stream_url, name=name, homepage_url=homepage_url)
else:
raise MissingParameter("streamUrl or name")
return request.formatter.empty
@api_routing("/updateInternetRadioStation")
def update_radio_station():
if not request.user.admin:
raise Forbidden()
res = get_entity(RadioStation)
stream_url, name, homepage_url = map(
request.values.get, ("streamUrl", "name", "homepageUrl")
)
if stream_url and name:
res.stream_url = stream_url
res.name = name
if homepage_url:
res.homepage_url = homepage_url
else:
raise MissingParameter("streamUrl or name")
return request.formatter.empty
@api_routing("/deleteInternetRadioStation")
def delete_radio_station():
if not request.user.admin:
raise Forbidden()
res = get_entity(RadioStation)
res.delete()
return request.formatter.empty
|
editxt/editxt
|
resources/syntax/sml.syntax.py
|
Python
|
gpl-3.0
| 1,797
| 0.003339
|
# -*- coding: UTF-8 -*-
# Syntax definition automatically generated by hljs2xt.py
# source: sml.js
name = 'SML'
file_patterns = ['*.sml', '*.ml']
built_in = """
array bool char exn int list option order real ref string substring
vector unit word
""".split()
keyword = """
abstype and andalso as case datatype do else end eqtype exception fn
fun functor handle if in include infix infixr let local nonfix of op
open orelse raise rec sharing sig signature struct structure then
type val with withtype where while
""".split()
literal = ['true', 'false', 'NONE', 'SOME', 'LESS', 'EQUAL', 'GREATER', 'nil']
class comment:
default_text_color = DELIMITER
rules = [
# ignore {'begin': {'pattern': "\\b(a|an|the|are|I|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such|will|you|your|like)\\b", 'type': 'RegExp'}},
('doctag', [RE(r"(?:TODO|FIXME|NOTE|BUG|XXX):")]),
]
operator_escape = ('operator.escape', [RE(r"\\[\s\S]")])
class string:
default_text_color = DELIMITER
rules = [operator_escape]
number = [
    RE(r"\b(?:0[xX][a-fA-F0-9_]+[Lln]?|0[oO][0-7_]+[Lln]?|0[bB][01_]+[Lln]?|[0-9][0-9_]*(?:[Lln]|(?:\.[0-9_]*)?(?:[eE][-+]?[0-9_]+)?)?)"),
]
rules = [
('built_in', built_in),
('keyword', keyword),
('literal', literal),
('literal', [RE(r"\[(?:\|\|)?\]|\(\)")]),
    ('comment', RE(r"\(\*"), [RE(r"\*\)")], comment),
('symbol', [RE(r"'[A-Za-z_](?!')[\w']*")]),
('type', [RE(r"`[A-Z][\w']*")]),
('type', [RE(r"\b[A-Z][\w']*")]),
# ignore {'begin': "[a-z_]\\w*'[\\w']*"},
('string', RE(r"'"), [RE(r"'")], string),
('string', RE(r"\""), [RE(r"\"")], string),
('number', number),
# ignore {'begin': {'pattern': '[-=]>', 'type': 'RegExp'}},
]
|
drcoms/drcom-generic
|
custom/drcom_for_dlnu_520(x).py
|
Python
|
agpl-3.0
| 12,107
| 0.016519
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import socket, struct, time
from hashlib import md5
import sys
import os
import random
# CONFIG
server = '172.16.192.111'
username=''
password=''
CONTROLCHECKSTATUS = '\x00'
ADAPTERNUM = '\x00'
host_ip = '0.210.30.0'
IPDOG = '\x00'
host_name = 'DRCOMFUCKER'
PRIMARY_DNS = '0.0.0.0'
dhcp_server = '0.0.0.0'
AUTH_VERSION = '\x20\x1a'
mac = 0xb888e3051680
host_os = 'WINDIAOS'
KEEP_ALIVE_VERSION = '\xdc\x02'
# CONFIG_END
nic_name = ''                # Indicate your nic, e.g. 'eth0.2'.
bind_ip = '0.0.0.0'
class ChallengeException (Exception):
def __init__(self):
pass
class LoginException (Exception):
def __init__(self):
pass
def bind_nic():
try:
import fcntl
def get_ip_address(ifname):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
s.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', ifname[:15])
)[20:24])
return get_ip_address(nic_name)
except ImportError as e:
print('Indicate nic feature need to be run under Unix based system.')
return '0.0.0.0'
except IOError as e:
print(nic_name + 'is unacceptable !')
return '0.0.0.0'
finally:
return '0.0.0.0'
if nic_name != '':
bind_ip = bind_nic()
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind((bind_ip, 61440))
s.settimeout(3)
SALT = ''
IS_TEST = True
# specified fields based on version
CONF = "/etc/drcom.conf"
UNLIMITED_RETRY = True
EXCEPTION = False
DEBUG = False #log saves to file
LOG_PATH = '/var/log/drcom_client.log'
if IS_TEST:
DEBUG = True
LOG_PATH = 'drcom_client.log'
def log(*args, **kwargs):
s = ' '.join(args)
print s
if DEBUG:
with open(LOG_PATH,'a') as f:
f.write(s + '\n')
def challenge(svr,ran):
while True:
t = struct.pack("<H", int(ran)%(0xFFFF))
s.sendto("\x01\x02"+t+"\x09"+"\x00"*15, (svr, 61440))
try:
data, address = s.recvfrom(1024)
log('[challenge] recv',data.encode('hex'))
except:
log('[challenge] timeout, retrying...')
continue
if address == (svr, 61440):
break
else:
continue
log('[DEBUG] challenge:\n' + data.encode('hex'))
if data[0] != '\x02':
raise ChallengeException
log('[challenge] challenge packet sent.')
return data[4:8]
def md5sum(s):
m = md5()
m.update(s)
return m.digest()
def dump(n):
s = '%x' % n
if len(s) & 1:
s = '0' + s
return s.decode('hex')
# def ror(md5, pwd):
# ret = ''
# for i in range(len(pwd)):
# x = ord(md5[i]) ^ ord(pwd[i])
# ret += chr(((x<<3)&0xFF) + (x>>5))
# return ret
def keep_alive_package_builder(number,random,tail,type=1,first=False):
data = '\x07'+ chr(number) + '\x28\x00\x0b' + chr(type)
if first :
data += '\x0f\x27'
else:
data += KEEP_ALIVE_VERSION
data += '\x2f\x12' + '\x00' * 6
data += tail
data += '\x00' * 4
#data += struct.pack("!H",0xdc02)
if type == 3:
foo = ''.join([chr(int(i)) for i in host_ip.split('.')]) # host_ip
#CRC
# edited on 2014/5/12, filled zeros to checksum
# crc = packet_CRC(data+foo)
crc = '\x00' * 4
#data += struct.pack("!I",crc) + foo + '\x00' * 8
data += crc + foo + '\x00' * 8
else: #packet type = 1
data += '\x00' * 16
return data
# def packet_CRC(s):
# ret = 0
# for i in re.findall('..', s):
# ret ^= struct.unpack('>h', i)[0]
# ret &= 0xFFFF
# ret = ret * 0x2c7
# return ret
def keep_alive2(*args):
#first keep_alive:
#number = number (mod 7)
#status = 1: first packet user sended
# 2: first packet user recieved
# 3: 2nd packet user sended
# 4: 2nd packet user recieved
# Codes for test
tail = ''
packet = ''
svr = server
ran = random.randint(0,0xFFFF)
ran += random.randint(1,10)
# 2014/10/15 add by latyas, maybe svr sends back a file packet
svr_num = 0
packet = keep_alive_package_builder(svr_num,dump(ran),'\x00'*4,1,True)
while True:
log('[keep-alive2] send1',packet.encode('hex'))
s.sendto(packet, (svr, 61440))
data, address = s.recvfrom(1024)
log('[keep-alive2] recv1',data.encode('hex'))
if data.startswith('\x07\x00\x28\x00') or data.startswith('\x07' + chr(svr_num) + '\x28\x00'):
break
elif data[0] == '\x07' and data[2] == '\x10':
log('[keep-alive2] recv file, resending..')
svr_num = svr_num + 1
packet = keep_alive_package_builder(svr_num,dump(ran),'\x00'*4,1, False)
else:
log('[keep-alive2] recv1/unexpected',data.encode('hex'))
#log('[keep-alive2] recv1',data.encode('hex'))
ran += random.randint(1,10)
packet = keep_alive_package_builder(svr_num, dump(ran),'\x00'*4,1,False)
log('[keep-alive2] send2',packet.encode('hex'))
s.sendto(packet, (svr, 61440))
while True:
data, address = s.recvfrom(1024)
if data[0] == '\x07':
svr_num = svr_num + 1
break
else:
log('[keep-alive2] recv2/unexpected',data.encode('hex'))
log('[keep-alive2] recv2',data.encode('hex'))
tail = data[16:20]
ran += random.randint(1,10)
packet = keep_alive_package_builder(svr_num,dump(ran),tail,3,False)
log('[keep-alive2] send3',packet.encode('hex'))
s.sendto(packet, (svr, 61440))
while True:
data, address = s.recvfrom(1024)
if data[0] == '\x07':
svr_num = svr_num + 1
break
else:
log('[keep-alive2] recv3/unexpected',data.encode('hex'))
log('[keep-alive2] recv3',data.encode('hex'))
tail = data[16:20]
log("[keep-alive2] keep-alive2 loop was in daemon.")
i = svr_num
while True:
try:
ran += random.randint(1,10)
packet = keep_alive_package_builder(i,dump(ran),tail,1,False)
#log('DEBUG: keep_alive2,packet 4\n',packet.encode('hex'))
log('[keep_alive2] send',str(i),packet.encode('hex'))
s.sendto(packet, (svr, 61440))
data, address = s.recvfrom(1024)
log('[keep_alive2] recv',data.encode('hex'))
tail = data[16:20]
#log('DEBUG: keep_alive2,packet 4 return\n',data.encode('hex'))
ran += random.randint(1,10)
packet = keep_alive_package_builder(i+1,dump(ran),tail,3,False)
#log('DEBUG: keep_alive2,packet 5\n',packet.encode('hex'))
s.sendto(packet, (svr, 61440))
log('[keep_alive2] send',str(i+1),packet.encode('hex'))
data, address = s.recvfrom(1024)
log('[keep_alive2] recv',data.encode('hex'))
tail = data[16:20]
#log('DEBUG: keep_alive2,packet 5 return\n',data.encode('hex'))
i = (i+2) % 0xFF
time.sleep(20)
keep_alive1(*args)
except:
pass
import re
def checksum(s):
ret = 1234
for i in re.findall('....', s):
        ret ^= int(i[::-1].encode('hex'), 16)
ret = (1968 * ret) & 0xffffffff
return struct.pack('<I', ret)
def mkpkt(salt, usr, pwd, mac):
    data = '\x03\x01\x00'+chr(len(usr)+20)
data += md5sum('\x03\x01'+salt+pwd)
data += usr.ljust(36, '\x00')
data += CONTROLCHECKSTATUS
data += ADAPTERNUM
data += dump(int(data[4:10].encode('hex'),16)^mac).rjust(6,'\x00') #mac xor md51
data += md5sum("\x01" + pwd + salt + '\x00'*4) #md52
data += '\x01' # number of ip
#data += '\x0a\x1e\x16\x11' #your ip address1, 10.30.22.17
data += ''.join([chr(int(i)) for i in host_ip.split('.')]) #x.x.x.x ->
data += '\00'*4 #your ipaddress 2
data += '\00'*4 #your ipaddress 3
data += '\00'*4 #your ipaddress 4
data += md5sum(data + '\x14\x00\x07\x0b')[:8] #md53
|
jcsp/manila
|
manila/api/contrib/services.py
|
Python
|
apache-2.0
| 3,444
| 0
|
# Copyright 2012 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log
import webob.exc
from manila.api import extensions
from manila import db
from manila import exception
from manila import utils
LOG = log.getLogger(__name__)
authorize = extensions.extension_authorizer('share', 'services')
class ServiceController(object):
def index(self, req):
"""Return a list of all running services."""
context = req.environ['manila.context']
authorize(context)
all_services = db.service_get_all(context)
services = []
for service in all_services:
service = {
'id': service['id'],
'binary': service['binary'],
'host': service['host'],
'zone': service['availability_zone']['name'],
'status': 'disabled' if service['disabled'] else 'enabled',
'state': 'up' if utils.service_is_up(service) else 'down',
'updated_at': service['updated_at'],
}
services.append(service)
search_opts = [
'host',
            'binary',
'zone',
'state',
'status',
]
for search_opt in search_opts:
value = ''
if search_opt in req.GET:
value = req.GET[search_opt]
services = [s for s in services if s[search_opt] == value]
if len(services) == 0:
break
return {'services': services}
def update(self, req, id, body):
"""Enable/Disable scheduling for a service."""
context = req.environ['manila.context']
authorize(context)
if id == "enable":
disabled = False
elif id == "disable":
disabled = True
else:
raise webob.exc.HTTPNotFound("Unknown action")
try:
host = body['host']
binary = body['binary']
except (TypeError, KeyError):
raise webob.exc.HTTPBadRequest()
try:
svc = db.service_get_by_args(context, host, binary)
if not svc:
raise webob.exc.HTTPNotFound('Unknown service')
db.service_update(context, svc['id'], {'disabled': disabled})
except exception.ServiceNotFound:
raise webob.exc.HTTPNotFound("service not found")
return {'host': host, 'binary': binary, 'disabled': disabled}
class Services(extensions.ExtensionDescriptor):
"""Services support."""
name = "Services"
alias = "os-services"
updated = "2012-10-28T00:00:00-00:00"
def get_resources(self):
resources = []
resource = extensions.ResourceExtension('os-services',
ServiceController())
resources.append(resource)
return resources
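# Hypothetical request sketch (not part of the original module): with this
# extension loaded, an admin disables scheduling for a service by PUT-ing to
# /os-services/disable with a JSON body such as
#     {"host": "share-node-1", "binary": "manila-share"}
# which update() above maps onto db.service_update(..., {'disabled': True}).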
|
alexholcombe/spatiotopic-motion
|
dotLocalize.py
|
Python
|
mit
| 26,405
| 0.023859
|
from __future__ import print_function
from psychopy import sound, monitors, core, visual, event, data, gui, logging, info
import numpy as np
from copy import deepcopy
from math import atan, cos, sin, pi, sqrt, pow
import time, sys, platform, os, StringIO
from pandas import DataFrame
from calcUnderOvercorrect import calcOverCorrected
dirOrLocalize = True
autopilot = False
quitFinder = False
if quitFinder:
applescript="\'tell application \"Finder\" to quit\'" #quit Finder.
shellCmd = 'osascript -e '+applescript
os.system(shellCmd)
respClock = core.Clock(); myClock = core.Clock();
afterimageDurClock = core.Clock()
refreshRate = 75
ballStdDev = 0.8
autoLogging = False
participant = 'M'
fullscr=True
infoFirst = {'Participant':participant, 'Check refresh etc':True, 'Fullscreen (timing errors if not)': fullscr, 'Screen refresh rate':refreshRate }
OK = gui.DlgFromDict(dictionary=infoFirst,
title='Szinte & Cavanagh spatiotopic apparent motion',
order=[ 'Participant','Check refresh etc', 'Fullscreen (timing errors if not)'],
tip={'Check refresh etc': 'To confirm refresh rate and that can keep up, at least when drawing a grating'},
#fixed=['Check refresh etc'])#this attribute can't be changed by the user
)
if not OK.OK:
print('User cancelled from dialog box'); core.quit()
participant = infoFirst['Participant']
checkRefreshEtc = infoFirst['Check refresh etc']
fullscr = infoFirst['Fullscreen (timing errors if not)']
refreshRate = infoFirst['Screen refresh rate']
quitFinder = False
if checkRefreshEtc:
quitFinder = True
if quitFinder:
import os
applescript="\'tell application \"Finder\" to quit\'"
shellCmd = 'osascript -e '+applescript
os.system(shellCmd)
demo=False
respDeadline = 100
if autopilot:
respDeadline = 0.1
timeAndDateStr = time.strftime("%d%b%Y_%H-%M", time.localtime())
if os.path.isdir('.'+os.sep+'data'):
dataDir='data'
else:
print('"data" directory does not exist, so saving data in present working directory')
dataDir='.'
fileName = os.path.join(dataDir, participant+'_spatiotopicMotion_'+timeAndDateStr)
dataFile = open(fileName+'.txt', 'w') # sys.stdout #StringIO.StringIO()
saveCodeCmd = 'cp \'' + sys.argv[0] + '\' '+ fileName + '.py'
os.system(saveCodeCmd) #save a copy of the code as it was when that subject was run
logFname = fileName+'.log'
ppLogF = logging.LogFile(logFname,
filemode='w',#if you set this to 'a' it will append instead of overwriting
level=logging.INFO)#errors, data and warnings will be sent to this logfile
scrn=1 #1 means second screen
widthPix =1024#1024 #monitor width in pixels
heightPix =768#768 #monitor height in pixels
monitorwidth = 40. #28.5 #monitor width in centimeters
viewdist = 50.; #cm
pixelperdegree = widthPix/ (atan(monitorwidth/viewdist) / np.pi*180)
bgColor = [0,0,0] #"gray background"
allowGUI = False
waitBlank = False
windowAndMouseUnits = 'deg'
monitorname = 'mitsubishi' #in psychopy Monitors Center #Holcombe lab monitor
mon = monitors.Monitor(monitorname,width=monitorwidth, distance=viewdist)#fetch the most recent calib for this monitor
mon.setSizePix( (widthPix,heightPix) )
def openMyStimWindow(): #make it a function because have to do it several times, want to be sure is identical each time
myWin = visual.Window(monitor=mon,size=(widthPix,heightPix),allowGUI=allowGUI,units=windowAndMouseUnits,color=bgColor,colorSpace='rgb',fullscr=fullscr,
screen=scrn,waitBlanking=waitBlank) #Holcombe lab monitor
return myWin
myWin = openMyStimWindow()
myWin.recordFrameIntervals = True #required by RunTimeInfo?
refreshMsg2 = ''
refreshRateWrong = False
if not checkRefreshEtc:
refreshMsg1 = 'REFRESH RATE WAS NOT CHECKED'
else: #checkRefreshEtc
try:
runInfo = info.RunTimeInfo(
# if you specify author and version here, it overrides the automatic detection of __author__ and __version__ in your script
#author='<your name goes here, plus whatever you like, e.g., your lab or contact info>',
#version="<your experiment version info>",
win=myWin, ## a psychopy.visual.Window() instance; None = default temp window used; False = no win, no win.flips()
refreshTest='grating', ## None, True, or 'grating' (eye-candy to avoid a blank screen)
verbose=False, ## True means report on everything
userProcsDetailed=False ## if verbose and userProcsDetailed, return (command, process-ID) of the user's processes
#seems to require internet access, probably for process lookup
)
#print(runInfo)
logging.info(runInfo)
print('Finished runInfo- which assesses the refresh and processes of this computer')
runInfo_failed = False
except:
runInfo_failed = True
refreshMsg1 = ' runInfo call FAILED so dont know refresh rate'
if not runInfo_failed:
refreshSDwarningLevel_ms = 3 ##ms
if runInfo["windowRefreshTimeSD_ms"] > refreshSDwarningLevel_ms:
print("\nThe variability of the refresh rate is high (SD > %.2f ms)." % (refreshSDwarningLevel_ms))
## and here you could prompt the user with suggestions, possibly based on other info:
if runInfo["windowIsFullS
|
cr"]:
print("Your window is full-screen, which is good for timing.")
print('Possible issues: internet / wireless? bluetooth? recent startup (not finished)?')
#if len(runInfo['systemUserProcFlagged']): #doesnt work if no internet
# print('other programs running? (command, process-ID):',info['systemUserProcFlagged'])
medianHz = 1000./runInfo['windowRefreshTimeMedian_ms']
refreshMsg1= 'Median frames per second ~='+ str( np.round(medianHz,1) )
refreshRateTolerancePct = 3
pctOff = abs( (medianHz-refreshRate) / refreshRate )
refreshRateWrong = pctOff > (refreshRateTolerancePct/100.)
if refreshRateWrong:
refreshMsg1 += ' BUT'
refreshMsg1 += ' program assumes ' + str(refreshRate)
refreshMsg2 = 'which is off by more than' + str(round(refreshRateTolerancePct,0)) + '%!!'
else:
refreshMsg1 += ', which is close enough to desired val of ' + str( round(refreshRate,1) )
myWinRes = myWin.size
myWin.allowGUI =True
myWin.close() #have to close window to show dialog box
myDlg = gui.Dlg(title="Screen check", pos=(200,400))
myDlg.addText(refreshMsg1, color='Black')
if refreshRateWrong:
myDlg.addText(refreshMsg2, color='Red')
if refreshRateWrong:
logging.error(refreshMsg1+refreshMsg2)
else: logging.info(refreshMsg1+refreshMsg2)
if checkRefreshEtc and (not demo) and (myWinRes != [widthPix,heightPix]).any():
msgWrongResolution = 'Screen apparently NOT the desired resolution of '+ str(widthPix)+'x'+str(heightPix)+ ' pixels!!'
myDlg.addText(msgWrongResolution, color='Red')
logging.error(msgWrongResolution)
print(msgWrongResolution)
myDlg.addText('Note: to abort press ESC at a trials response screen', color=[-1.,1.,-1.]) # color='DimGrey') color names stopped working along the way, for unknown reason
myDlg.show()
if myDlg.OK: #unpack information from dialogue box
pass
else:
print('User cancelled from dialog box.')
logging.flush()
core.quit()
if not demo:
allowGUI = False
myWin = openMyStimWindow()
targetDot = visual.ImageStim(myWin,mask='circle',colorSpace='rgb', color = (-.5, .3, -.5), size=ballStdDev,autoLog=autoLogging, contrast=1, opacity = 1.0)
foilDot = visual.ImageStim(myWin,mask='circle',colorSpace='rgb', color = (0, 0, 0 ),size=ballStdDev,autoLog=autoLogging, contrast=1, opacity = 1.0)
blackDot = visual.ImageStim(myWin,mask='circle',colorSpace='rgb', color = (-1,-1,-1),size=ballStdDev,autoLog=autoLogging, contrast=0.5, opacity = 1.0)
mouseLocationMarker = visual.Circle(myWin,units=windowAndMouseUnits,radius=ballStdDev/2.)#,autoLog=auto
|
justyns/home-assistant
|
homeassistant/components/switch/rfxtrx.py
|
Python
|
mit
| 5,695
| 0
|
"""
Support for RFXtrx switches.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/switch.rfxtrx/
"""
import logging
import homeassistant.components.rfxtrx as rfxtrx
from homeassistant.components.rfxtrx import (
ATTR_FIREEVENT, ATTR_NAME, ATTR_PACKETID, ATTR_STATE, EVENT_BUTTON_PRESSED)
from homeassistant.components.switch import SwitchDevice
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.util import slugify
DEPENDENCIES = ['rfxtrx']
SIGNAL_REPETITIONS = 1
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
"""Setup the RFXtrx platform."""
import RFXtrx as rfxtrxmod
# Add switch from config file
switchs = []
signal_repetitions = config.get('signal_repetitions', SIGNAL_REPETITIONS)
for device_id, entity_info in config.get('devices', {}).items():
if device_id in rfxtrx.RFX_DEVICES:
continue
_LOGGER.info("Add %s rfxtrx.switch", entity_info[ATTR_NAME])
        # Check if it must fire event
fire_event = entity_info.get(ATTR_FIREEVENT, False)
datas = {ATTR_STATE: False, ATTR_FIREEVENT: fire_event}
rfxobject = rfxtrx.get_rfx_object(entity_info[ATTR_PACKETID])
newswitch = RfxtrxSwitch(
entity_info[ATTR_NAME], rfxobject, datas,
signal_repetitions)
rfxtrx.RFX_DEVICES[device_id] = newswitch
switchs.append(newswitch)
add_devices_callback(switchs)
def switch_update(event):
"""Callback for sensor updates from the RFXtrx gateway."""
if not isinstance(event.device, rfxtrxmod.LightingDevice) or \
event.device.known_to_be_dimmable:
return
# Add entity if not exist and the automatic_add is True
device_id = slugify(event.device.id_string.lower())
if device_id not in rfxtrx.RFX_DEVICES:
automatic_add = config.get('automatic_add', False)
if not automatic_add:
return
_LOGGER.info(
"Automatic add %s rfxtrx.switch (Class: %s Sub: %s)",
device_id,
event.device.__class__.__name__,
event.device.subtype
)
pkt_id = "".join("{0:02x}".format(x) for x in event.data)
entity_name = "%s : %s" % (device_id, pkt_id)
datas = {ATTR_STATE: False, ATTR_FIREEVENT: False}
signal_repetitions = config.get('signal_repetitions',
SIGNAL_REPETITIONS)
new_switch = RfxtrxSwitch(entity_name, event, datas,
                                      signal_repetitions)
rfxtrx.RFX_DEVICES[device_id] = new_switch
add_devices_callback([new_switch])
# Check if entity exists or previously added automatically
if device_id in rfxtrx.RFX_DEVICES:
_LOGGER.debug(
"EntityID: %s switch_update. Command: %s",
device_id,
event.values['Command']
)
if event.values['Command'] == 'On'\
or event.values['Command'] == 'Off':
# Update the rfxtrx device state
is_on = event.values['Command'] == 'On'
# pylint: disable=protected-access
rfxtrx.RFX_DEVICES[device_id]._state = is_on
rfxtrx.RFX_DEVICES[device_id].update_ha_state()
# Fire event
if rfxtrx.RFX_DEVICES[device_id].should_fire_event:
rfxtrx.RFX_DEVICES[device_id].hass.bus.fire(
EVENT_BUTTON_PRESSED, {
ATTR_ENTITY_ID:
rfxtrx.RFX_DEVICES[device_id].entity_id,
ATTR_STATE: event.values['Command'].lower()
}
)
# Subscribe to main rfxtrx events
if switch_update not in rfxtrx.RECEIVED_EVT_SUBSCRIBERS:
rfxtrx.RECEIVED_EVT_SUBSCRIBERS.append(switch_update)
class RfxtrxSwitch(SwitchDevice):
"""Representation of a RFXtrx switch."""
def __init__(self, name, event, datas, signal_repetitions):
"""Initialize the switch."""
self._name = name
self._event = event
self._state = datas[ATTR_STATE]
self._should_fire_event = datas[ATTR_FIREEVENT]
self.signal_repetitions = signal_repetitions
@property
def should_poll(self):
"""No polling needed for a RFXtrx switch."""
return False
@property
def name(self):
"""Return the name of the device if any."""
return self._name
@property
def should_fire_event(self):
"""Return is the device must fire event."""
return self._should_fire_event
@property
def is_on(self):
"""Return true if light is on."""
return self._state
@property
def assumed_state(self):
"""Return true if unable to access real state of entity."""
return True
def turn_on(self, **kwargs):
"""Turn the device on."""
if not self._event:
return
for _ in range(self.signal_repetitions):
self._event.device.send_on(rfxtrx.RFXOBJECT.transport)
self._state = True
self.update_ha_state()
def turn_off(self, **kwargs):
"""Turn the device off."""
if not self._event:
return
for _ in range(self.signal_repetitions):
self._event.device.send_off(rfxtrx.RFXOBJECT.transport)
self._state = False
self.update_ha_state()
|
lagadic/ViSP-images
|
Gaussian-filter/Gaussian_filter.py
|
Python
|
gpl-2.0
| 838
| 0.008353
|
from __future__ import print_function
from scipy.ndimage import gaussian_filter
import numpy as np
from PIL import Image
img = np.asarray(Image.open('../Klimt/Klimt.ppm'))
img_gray = np.asarray(Image.open('../Klimt/Klimt.pgm'))
print('img:', img.shape)
sigmas = [0.5, 2, 5, 7]
for sigma in sigmas:
print('sigma:', sigma)
    # # do not filter across channels
# https://github.com/scikit-image/scikit-image/blob/fca9f16da4bd7420245d05fa82ee51bb9677b039/skimage/filters/_gaussian.py#L12-L126
img_blur = Image.fromarray(gaussian_filter(img, sigma=(sigma, sigma, 0), mode = 'nearest'))
img_blur.save('Klimt_RGB_Gaussian_blur_sigma={:.1f}.png'.format(sigma))
img_blur = Image.fromarray(gaussian_filter(img_gray, sigma=sigma, mode = 'nearest'))
img_blur.save('Klimt_gray_Gaussian_blur_sigma={:.1f}.png'.format(sigma))
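# Sanity-check sketch, added for illustration (not in the original script):
# because the channel axis gets sigma 0, the RGB call above should match
# filtering each channel independently.
per_channel = np.stack(
    [gaussian_filter(img[..., c], sigma=sigmas[-1], mode='nearest') for c in range(3)],
    axis=-1)
print('per-channel equivalence:',
      np.array_equal(per_channel,
                     gaussian_filter(img, sigma=(sigmas[-1], sigmas[-1], 0), mode='nearest')))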
|
eddieruano/Sentinel
|
assist/Adafruit_Python_MPR121/examples/simpletest.py
|
Python
|
apache-2.0
| 3,622
| 0.002485
|
# Copyright (c) 2014 Adafruit Industries
# Author: Tony DiCola
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
import time
import MPR121
print('Adafruit MPR121 Capacitive Touch Sensor Test')
# Create MPR121 instance.
cap = MPR121.MPR121()
# Initialize communication with MPR121 using default I2C bus of device, and
# default I2C address (0x5A). On BeagleBone Black will default to I2C bus 0.
if not cap.begin():
print('Error initializing MPR121. Check your wiring!')
sys.exit(1)
#cap.set_thresholds(6, 12)
# Alternatively, specify a custom I2C address such as 0x5B (ADDR tied to 3.3V),
# 0x5C (ADDR tied to SDA), or 0x5D (ADDR tied to SCL).
#cap.begin(address=0x5B)
# Also you can specify an optional I2C bus with the bus keyword parameter.
#cap.begin(busnum=1)
# Main loop to print a message every time a pin is touched.
print('Press Ctrl-C to quit.')
while True:
current_touched = cap.touched()
print(current_touched)
# # Check each pin's last and current state to see if it was pressed or released.
# for i in range(12):
# # Each pin is represented by a bit in the touched value. A value of 1
# # means the pin is being touched, and 0 means it is not being touched.
# pin_bit = 1 << i
# # First check if transitioned from not touched to touched.
# if current_touched & pin_bit and not last_touched & pin_bit:
# print('{0} touched!'.format(i))
# # Next check if transitioned from touched to not touched.
# if not current_touched & pin_bit and last_touched & pin_bit:
# print('{0} released!'.format(i))
# # Update last state and wait a short period before repeating.
# last_touched = current_touched
# Alternatively, if you only care about checking one or a few pins you can
# call the is_touched method with a pin number to directly check that pin.
# This will be a little slower than the above code for checking a lot of pins.
# if cap.is_touched(2):
# print('Pin 2 is being touched!')
# elif cap.is_touched(7):
# print('Pin 7 is being touched!')
# elif cap.is_touched(8):
# print('Pin 8 is being touched!')
# else:
# # If you're curious or want to see debug info for each pin, uncomment the
#following lines:
# print ('\t\t\t\t\t\t\t\t\t\t\t\t\t 0x{0:0X}'.format(cap.touched()))
# filtered = [cap.filtered_data(i) for i in range(12)]
# print('Filt:', '\t'.join(map(str, filtered)))
# base = [cap.baseline_data(i) for i in range(12)]
# print('Base:', '\t'.join(map(str, base)))
time.sleep(0.1)
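    # Hypothetical addition (not in the original example): decode the bitmask
    # into a list of currently-touched pin numbers, mirroring the bit test
    # described in the commented-out loop above.
    touched_pins = [i for i in range(12) if current_touched & (1 << i)]
    if touched_pins:
        print('Touched pins:', touched_pins)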
|
soltraconpotprojectNLDA/SoltraConpot
|
conpot/protocols/kamstrup/usage_simulator.py
|
Python
|
gpl-2.0
| 4,667
| 0.000214
|
# Copyright (C) 2014 Johnny Vestergaard <jkv@unixcluster.dk>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import logging
import gevent
import conpot.core as conpot_core
logger = logging.getLogger(__name__)
# Simulates power usage for a Kamstrup 382 meter
class UsageSimulator(object):
def __init__(self, *args):
# both highres, lowres will be calculated on the fly
self.energy_in = 0
self.energy_out = 0
# p1, p2, p3
self.voltage = [0, 0, 0]
self.current = [0, 0, 0]
self.power = [0, 0, 0]
gevent.spawn(self.initialize)
def usage_counter(self):
while True:
# since this is gevent, this is actually sleep for _at least_ 1 second
# TODO: measure last entry and figure it out
gevent.sleep(1)
for x in [0, 1, 2]:
self.energy_in += int(self.power[x] * 0.0036)
def initialize(self):
# we need the databus initialized before we can probe values
databus = conpot_core.get_databus()
databus.initialized.wait()
# accumulated counter
energy_in_register = 'register_13'
self.energy_in = databus.get_value(energy_in_register)
databus.set_value(energy_in_register, self._get_energy_in)
        databus.set_value('register_1', self._get_energy_in_lowres)
|
energy_out_register = 'register_14'
self.energy_out = databus.get_value(energy_out_register)
databus.set_value(energy_out_register, self._get_energy_out)
databus.set_value('register_2', self._get_energy_out_lowres)
volt_1_register = 'register_1054'
|
self.voltage[0] = databus.get_value(volt_1_register)
databus.set_value(volt_1_register, self._get_voltage_1)
volt_2_register = 'register_1055'
self.voltage[1] = databus.get_value(volt_2_register)
databus.set_value(volt_2_register, self._get_voltage_2)
volt_3_register = 'register_1056'
self.voltage[2] = databus.get_value(volt_3_register)
databus.set_value(volt_3_register, self._get_voltage_3)
current_1_register = 'register_1076'
self.current[0] = databus.get_value(current_1_register)
databus.set_value(current_1_register, self._get_current_1)
current_2_register = 'register_1077'
self.current[1] = databus.get_value(current_2_register)
databus.set_value(current_2_register, self._get_current_2)
current_3_register = 'register_1078'
self.current[2] = databus.get_value(current_3_register)
databus.set_value(current_3_register, self._get_current_3)
power_1_register = 'register_1080'
self.power[0] = databus.get_value(power_1_register)
databus.set_value(power_1_register, self._get_power_1)
power_2_register = 'register_1081'
self.power[1] = databus.get_value(power_2_register)
databus.set_value(power_2_register, self._get_power_2)
power_3_register = 'register_1082'
self.power[2] = databus.get_value(power_3_register)
databus.set_value(power_3_register, self._get_power_3)
gevent.spawn(self.usage_counter)
def _get_energy_in(self):
return self.energy_in
def _get_energy_out(self):
return self.energy_out
def _get_energy_in_lowres(self):
return self.energy_in / 1000
def _get_energy_out_lowres(self):
return self.energy_out / 1000
def _get_voltage_1(self):
return self.voltage[0]
def _get_voltage_2(self):
return self.voltage[1]
def _get_voltage_3(self):
return self.voltage[2]
def _get_current_1(self):
return self.current[0]
def _get_current_2(self):
return self.current[1]
def _get_current_3(self):
return self.current[2]
def _get_power_1(self):
return self.power[0]
def _get_power_2(self):
return self.power[1]
def _get_power_3(self):
return self.power[2]
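# Illustrative sketch (not conpot code): initialize() above stores bound
# methods in the databus, which implies registers are evaluated on read so
# clients always see the simulator's current values. The stand-in below only
# demonstrates that callable-register pattern; conpot's real databus is more
# involved.
class _StubDatabus(object):
    def __init__(self):
        self._store = {}

    def set_value(self, key, value):
        self._store[key] = value

    def get_value(self, key):
        value = self._store[key]
        # Callables are invoked at read time, plain values are returned as-is.
        return value() if callable(value) else value

# Usage sketch:
#   bus = _StubDatabus()
#   power = [230]
#   bus.set_value('register_1080', lambda: power[0])
#   bus.get_value('register_1080')   # -> 230, and tracks later changes to power[0]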
|
madoodia/codeLab
|
pyside/signal_slot_tests/test_signal_slot.py
|
Python
|
mit
| 752
| 0.00133
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
In this example, we connect a signal
of a QSlider to a slot
of a QLCDNumber.
"""
import sys
from PySide.QtGui import *
from PySide.QtCore import *
class Example(QWidget):
def __init__(self):
super(Example, self).__init__()
lcd = QLCDNumber()
sld = QSlider(Qt.Horizontal)
vbox = QVBoxLayout()
        vbox.addWidget(lcd)
|
vbox.addWidget(sld)
sld.valueChanged.connect(lcd.display)
self.setLayout(vbox)
self.setGeometry(300, 300, 250, 150)
self.setWindowTitle('Signal & slot')
def main():
app = QApplication(sys.argv)
ex = Example()
ex.show()
    sys.exit(app.exec_())
|
if __name__ == '__main__':
main()
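# Variation sketch (not part of the original example): the slider's
# valueChanged(int) signal can also drive a plain Python callable or an
# explicitly decorated slot, not only a built-in slot like QLCDNumber.display.
# Slot is exported by PySide.QtCore, which is star-imported above.
#
#   @Slot(int)
#   def print_value(value):
#       print('slider moved to %d' % value)
#
#   # inside Example.__init__:
#   sld.valueChanged.connect(print_value)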
|
incnone/necrobot
|
necrobot/ladder/__init__.py
|
Python
|
mit
| 678
| 0.004425
|
"""
Package for managing a ranked ladder, which is a special kind of ongoing League.
|
Package Requirements
--------------------
botbase
match
user
Dependencies
------------
cmd_ladder
botbase/
commandtype
ladder/
ratingsdb
match/
cmd_match
matchinfo
user/
userlib
ladder
util/
server
ladderadminchannel
botbase/
botchannel
cmd_seedgen
ladder/
|
cmd_ladder
race/
cmd_racestats
user/
cmd_user
rating
ratingsdb
database/
dbconnect
ladder/
rating
ratingutil
ratingutil
ladder/
rating
util/
console
"""
|
monetate/sqlalchemy
|
test/orm/inheritance/test_abc_polymorphic.py
|
Python
|
mit
| 3,596
| 0
|
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.fixtures import fixture_session
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
class ABCTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global a, b, c
a = Table(
"a",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("adata", String(30)),
Column("type", String(30)),
)
b = Table(
"b",
metadata,
Column("id", Integer, ForeignKey("a.id"), primary_key=True),
Column("bdata", String(30)),
|
)
c = Table(
"c",
metadata,
Column("id", Integer, ForeignKey("b.id"), primary_key=True)
|
,
Column("cdata", String(30)),
)
@testing.combinations(("union",), ("none",))
def test_abc_poly_roundtrip(self, fetchtype):
class A(fixtures.ComparableEntity):
pass
class B(A):
pass
class C(B):
pass
if fetchtype == "union":
abc = a.outerjoin(b).outerjoin(c)
bc = a.join(b).outerjoin(c)
else:
abc = bc = None
self.mapper_registry.map_imperatively(
A,
a,
with_polymorphic=("*", abc),
polymorphic_on=a.c.type,
polymorphic_identity="a",
)
self.mapper_registry.map_imperatively(
B,
b,
with_polymorphic=("*", bc),
inherits=A,
polymorphic_identity="b",
)
self.mapper_registry.map_imperatively(
C, c, inherits=B, polymorphic_identity="c"
)
a1 = A(adata="a1")
b1 = B(bdata="b1", adata="b1")
b2 = B(bdata="b2", adata="b2")
b3 = B(bdata="b3", adata="b3")
c1 = C(cdata="c1", bdata="c1", adata="c1")
c2 = C(cdata="c2", bdata="c2", adata="c2")
c3 = C(cdata="c2", bdata="c2", adata="c2")
sess = fixture_session()
for x in (a1, b1, b2, b3, c1, c2, c3):
sess.add(x)
sess.flush()
sess.expunge_all()
# for obj in sess.query(A).all():
# print obj
eq_(
[
A(adata="a1"),
B(bdata="b1", adata="b1"),
B(bdata="b2", adata="b2"),
B(bdata="b3", adata="b3"),
C(cdata="c1", bdata="c1", adata="c1"),
C(cdata="c2", bdata="c2", adata="c2"),
C(cdata="c2", bdata="c2", adata="c2"),
],
sess.query(A).order_by(A.id).all(),
)
eq_(
[
B(bdata="b1", adata="b1"),
B(bdata="b2", adata="b2"),
B(bdata="b3", adata="b3"),
C(cdata="c1", bdata="c1", adata="c1"),
C(cdata="c2", bdata="c2", adata="c2"),
C(cdata="c2", bdata="c2", adata="c2"),
],
sess.query(B).order_by(A.id).all(),
)
eq_(
[
C(cdata="c1", bdata="c1", adata="c1"),
C(cdata="c2", bdata="c2", adata="c2"),
C(cdata="c2", bdata="c2", adata="c2"),
],
sess.query(C).order_by(A.id).all(),
)
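# Illustrative sketch (not part of the original test): the same three-level
# joined-table hierarchy expressed with declarative mappings instead of
# map_imperatively(). Wrapped in a helper so nothing is mapped at import time;
# table and column names mirror define_tables() above.
def _declarative_equivalent_sketch():
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class A(Base):
        __tablename__ = "a"
        id = Column(Integer, primary_key=True)
        adata = Column(String(30))
        type = Column(String(30))
        __mapper_args__ = {"polymorphic_on": type, "polymorphic_identity": "a"}

    class B(A):
        __tablename__ = "b"
        id = Column(Integer, ForeignKey("a.id"), primary_key=True)
        bdata = Column(String(30))
        __mapper_args__ = {"polymorphic_identity": "b"}

    class C(B):
        __tablename__ = "c"
        id = Column(Integer, ForeignKey("b.id"), primary_key=True)
        cdata = Column(String(30))
        __mapper_args__ = {"polymorphic_identity": "c"}

    return A, B, C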
|
rjschwei/azure-sdk-for-python
|
azure-mgmt-logic/azure/mgmt/logic/operations/integration_account_schemas_operations.py
|
Python
|
mit
| 14,610
| 0.002806
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
import uuid
from .. import models
class IntegrationAccountSchemasOperations(object):
"""IntegrationAccountSchemasOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
:ivar api_version: The API version. Constant value: "2015-08-01-preview".
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
        self.api_version = "2015-08-01-preview"
|
self.config = config
def list(
|
self, resource_group_name, integration_account_name, top=None, filter=None, custom_headers=None, raw=False, **operation_config):
"""Gets a list of integration account schemas.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param integration_account_name: The integration account name.
:type integration_account_name: str
:param top: The number of items to be included in the result.
:type top: int
:param filter: The filter to apply on the operation.
:type filter: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`IntegrationAccountSchemaPaged
<azure.mgmt.logic.models.IntegrationAccountSchemaPaged>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/integrationAccounts/{integrationAccountName}/schemas'
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'integrationAccountName': self._serialize.url("integration_account_name", integration_account_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query("top", top, 'int')
if filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.IntegrationAccountSchemaPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.IntegrationAccountSchemaPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
def get(
self, resource_group_name, integration_account_name, schema_name, custom_headers=None, raw=False, **operation_config):
"""Gets an integration account schema.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param integration_account_name: The integration account name.
:type integration_account_name: str
:param schema_name: The integration account schema name.
:type schema_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`IntegrationAccountSchema
<azure.mgmt.logic.models.IntegrationAccountSchema>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/integrationAccounts/{integrationAccountName}/schemas/{schemaName}'
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'integrationAccountName': self._serialize.url("integration_account_name", integration_account_name, 'str'),
'schemaName': self._serialize.url("schema_name", schema_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('IntegrationAccountSchema', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
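    # Usage sketch (illustrative, not generated code): these operations are
    # normally reached through a configured LogicManagementClient; the
    # resource group, account and schema names below are placeholders.
    #
    #   client = LogicManagementClient(credentials, subscription_id)
    #   for schema in client.integration_account_schemas.list(
    #           'my-resource-group', 'my-integration-account'):
    #       print(schema.name)
    #   schema = client.integration_account_schemas.get(
    #       'my-resource-group', 'my-integration-account', 'my-schema')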
def create_or_update(
|
ininex/geofire-python
|
resource/lib/python2.7/site-packages/gcloud/monitoring/test_timeseries.py
|
Python
|
mit
| 6,799
| 0
|
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
METRIC_TYPE = 'compute.googleapis.com/instance/uptime'
METRIC_LABELS = {'instance_name': 'instance-1'}
RESOURCE_TYPE = 'gce_instance'
RESOURCE_LABELS = {
'project_id': 'my-project',
'zone': 'us-east1-a',
'instance_id': '1234567890123456789',
}
METRIC_KIND = 'DELTA'
VALUE_TYPE = 'DOUBLE'
TS0 = '2016-04-06T22:05:00.042Z'
TS1 = '2016-04-06T22:05:01.042Z'
TS2 = '2016-04-06T22:05:02.042Z'
class TestTimeSeries(unittest2.TestCase):
def _getTargetClass(self):
from gcloud.monitoring.timeseries import TimeSeries
return TimeSeries
def _makeOne(self, *args, **kwargs):
return self._getTargetClass()(*args, **kwargs)
def test_constructor(self):
from gcloud.monitoring.metric import Metric
from gcloud.monitoring.resource import Resource
from gcloud.monitoring.timeseries import Point
VALUE = 60 # seconds
METRIC = Metric(type=METRIC_TYPE, labels=METRIC_LABELS)
RESOURCE = Resource(type=RESOURCE_TYPE, labels=RESOURCE_LABELS)
POINTS = [
Point(start_time=TS0, end_time=TS1, value=VALUE),
Point(start_time=TS1, end_time=TS2, value=VALUE),
]
series = self._makeOne(metric=METRIC,
resource=RESOURCE,
metric_kind=METRIC_KIND,
value_type=VALUE_TYPE,
points=POINTS)
self.assertEqual(series.metric, METRIC)
self.assertEqual(series.resource, RESOURCE)
self.assertEqual(series.metric_kind, METRIC_KIND)
self.assertEqual(series.value_type, VALUE_TYPE)
self.assertEqual(series.points, POINTS)
def test_from_dict(self):
VALUE = 60 # seconds
info = {
'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS},
'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS},
'metricKind': METRIC_KIND,
'valueType': VALUE_TYPE,
'points': [
{
'interval': {'startTime': TS0, 'endTime': TS1},
'value': {'doubleValue': VALUE},
},
{
'interval': {'startTime': TS1, 'endTime': TS2},
                    'value': {'doubleValue': VALUE},
|
},
],
}
series = self._getTargetClass()._from_dict(info)
|
self.assertEqual(series.metric.type, METRIC_TYPE)
self.assertEqual(series.metric.labels, METRIC_LABELS)
self.assertEqual(series.resource.type, RESOURCE_TYPE)
self.assertEqual(series.resource.labels, RESOURCE_LABELS)
self.assertEqual(series.metric_kind, METRIC_KIND)
self.assertEqual(series.value_type, VALUE_TYPE)
self.assertEqual(len(series.points), 2)
point1, point2 = series.points
self.assertEqual(point1.start_time, TS0)
self.assertEqual(point1.end_time, TS1)
self.assertEqual(point1.value, VALUE)
self.assertEqual(point2.start_time, TS1)
self.assertEqual(point2.end_time, TS2)
self.assertEqual(point2.value, VALUE)
def test_from_dict_no_points(self):
info = {
'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS},
'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS},
'metricKind': METRIC_KIND,
'valueType': VALUE_TYPE,
}
series = self._getTargetClass()._from_dict(info)
self.assertEqual(series.metric.type, METRIC_TYPE)
self.assertEqual(series.metric.labels, METRIC_LABELS)
self.assertEqual(series.resource.type, RESOURCE_TYPE)
self.assertEqual(series.resource.labels, RESOURCE_LABELS)
self.assertEqual(series.metric_kind, METRIC_KIND)
self.assertEqual(series.value_type, VALUE_TYPE)
self.assertEqual(series.points, [])
def test_labels(self):
info = {
'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS},
'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS},
'metricKind': METRIC_KIND,
'valueType': VALUE_TYPE,
}
series = self._getTargetClass()._from_dict(info)
labels = {'resource_type': RESOURCE_TYPE}
labels.update(RESOURCE_LABELS)
labels.update(METRIC_LABELS)
self.assertIsNone(series._labels)
self.assertEqual(series.labels, labels)
self.assertIsNotNone(series._labels)
self.assertEqual(series.labels, labels)
class TestPoint(unittest2.TestCase):
def _getTargetClass(self):
from gcloud.monitoring.timeseries import Point
return Point
def _makeOne(self, *args, **kwargs):
return self._getTargetClass()(*args, **kwargs)
def test_constructor(self):
VALUE = 3.14
point = self._makeOne(start_time=TS0, end_time=TS1, value=VALUE)
self.assertEqual(point.start_time, TS0)
self.assertEqual(point.end_time, TS1)
self.assertEqual(point.value, VALUE)
def test_from_dict(self):
VALUE = 3.14
info = {
'interval': {'startTime': TS0, 'endTime': TS1},
'value': {'doubleValue': VALUE},
}
point = self._getTargetClass()._from_dict(info)
self.assertEqual(point.start_time, TS0)
self.assertEqual(point.end_time, TS1)
self.assertEqual(point.value, VALUE)
def test_from_dict_defaults(self):
VALUE = 3.14
info = {
'interval': {'endTime': TS1},
'value': {'doubleValue': VALUE},
}
point = self._getTargetClass()._from_dict(info)
self.assertIsNone(point.start_time)
self.assertEqual(point.end_time, TS1)
self.assertEqual(point.value, VALUE)
def test_from_dict_int64(self):
VALUE = 2 ** 63 - 1
info = {
'interval': {'endTime': TS1},
'value': {'int64Value': str(VALUE)},
}
point = self._getTargetClass()._from_dict(info)
self.assertIsNone(point.start_time)
self.assertEqual(point.end_time, TS1)
self.assertEqual(point.value, VALUE)
|
rooshilp/CMPUT410Lab6
|
virt_env/virt1/lib/python2.7/site-packages/django/core/handlers/wsgi.py
|
Python
|
apache-2.0
| 9,514
| 0.000736
|
from __future__ import unicode_literals
import cgi
import codecs
import logging
import sys
from io import BytesIO
from threading import Lock
import warnings
from django import http
from django.conf import settings
from django.core import signals
from django.core.handlers import base
from django.core.urlresolvers import set_script_prefix
from django.utils import datastructures
from django.utils.deprecation import RemovedInDjango19Warning
from django.utils.encoding import force_str, force_text
from django.utils.functional import cached_property
from django.utils import six
# For backwards compatibility -- lots of code uses this in the wild!
from django.http.response import REASON_PHRASES as STATUS_CODE_TEXT # NOQA
logger = logging.getLogger('django.request')
# encode() and decode() expect the charset to be a native string.
ISO_8859_1, UTF_8 = str('iso-8859-1'), str('utf-8')
class LimitedStream(object):
'''
LimitedStream wraps another stream in order to not allow reading from it
    past a specified number of bytes.
'''
def __init__(self, stream, limit, buf_size=64 * 1024 * 1024):
self.stream = stream
self.remaining = limit
self.buffer = b''
self.buf_size = buf_size
def _read_limited(self, size=None):
if size is None or size > self.remaining:
size = self.remaining
if size == 0:
return b''
result = self.stream.read(size)
self.remaining -= len(result)
return result
def read(self, size=None):
if size is None:
result = self.buffer + self._read_limited()
self.buffer = b''
elif size < len(self.buffer):
result = self.buffer[:size]
self.buffer = self.buffer[size:]
else: # size >= len(self.buffer)
result = self.buffer + self._read_limited(size - len(self.buffer))
self.buffer = b''
return result
def readline(self, size=None):
while b'\n' not in self.buffer and \
(size is None or len(self.buffer) < size):
if size:
# since size is not None here, len(self.buffer) < size
chunk = self._read_limited(size - len(self.buffer))
else:
chunk = self._read_limited()
if not chunk:
break
self.buffer += chunk
sio = BytesIO(self.buffer)
if size:
line = sio.readline(size)
else:
line = sio.readline()
self.buffer = sio.read()
return line
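# Behaviour sketch (illustrative, not part of Django): LimitedStream caps how
# many bytes can ever be read from the wrapped stream, regardless of how much
# data that stream actually holds.
#
#   stream = LimitedStream(BytesIO(b'hello world'), limit=5)
#   stream.read()      # -> b'hello'
#   stream.read()      # -> b'' (limit exhausted)
#   stream.readline()  # -> b''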
class WSGIRequest(http.HttpRequest):
def __init__(self, environ):
script_name = get_script_name(environ)
path_info = get_path_info(environ)
if not path_info:
# Sometimes PATH_INFO exists, but is empty (e.g. accessing
# the SCRIPT_NAME URL without a trailing slash). We really need to
# operate as if they'd requested '/'. Not amazingly nice to force
# the path like this, but should be harmless.
path_info = '/'
self.environ = environ
self.path_info = path_info
self.path = '%s/%s' % (script_name.rstrip('/'), path_info.lstrip('/'))
self.META = environ
self.META['PATH_INFO'] = path_info
self.META['SCRIPT_NAME'] = script_name
self.method = environ['REQUEST_METHOD'].upper()
_, content_params = cgi.parse_header(environ.get('CONTENT_TYPE', ''))
if 'charset' in content_params:
try:
codecs.lookup(content_params['charset'])
except LookupError:
pass
else:
self.encoding = content_params['charset']
self._post_parse_error = False
try:
content_length = int(environ.get('CONTENT_LENGTH'))
except (ValueError, TypeError):
content_length = 0
self._stream = LimitedStream(self.environ['wsgi.input'], content_length)
self._read_started = False
self.resolver_match = None
def _get_scheme(self):
return self.environ.get('wsgi.url_scheme')
def _get_request(self):
warnings.warn('`request.REQUEST` is deprecated, use `request.GET` or '
'`request.POST` instead.', RemovedInDjango19Warning, 2)
if not hasattr(self, '_request'):
self._request = datastructures.MergeDict(self.POST, self.GET)
return self._request
@cached_property
def GET(self):
# The WSGI spec says 'QUERY_STRING' may be absent.
raw_query_string = get_bytes_from_wsgi(self.environ, 'QUERY_STRING', '')
return http.QueryDict(raw_query_string, encoding=self._encoding)
def _get_post(self):
if not hasattr(self, '_post'):
self._load_post_and_files()
return self._post
def _set_post(self, post):
self._post = post
@cached_property
def COOKIES(self):
raw_cookie = get_str_from_wsgi(self.environ, 'HTTP_COOKIE', '')
return http.parse_cookie(raw_cookie)
def _get_files(self):
if not hasattr(self, '_files'):
self._load_post_and_files()
return self._files
POST = property(_get_post, _set_post)
FILES = property(_get_files)
REQUEST = property(_get_request)
class WSGIHandler(base.BaseHandler):
initLock = Lock()
request_class = WSGIRequest
def __call__(self, environ, start_response):
# Set up middleware if needed. We couldn't do this earlier, because
# settings weren't available.
if self._request_middleware is None:
with self.initLock:
try:
# Check that middleware is still uninitialized.
if self._request_middleware is None:
self.load_middleware()
except:
# Unload whatever middleware we got
self._request_middleware = None
raise
set_script_prefix(get_script_name(environ))
signals.request_started.send(sender=self.__class__)
try:
request = self.request_class(environ)
except UnicodeDecodeError:
logger.warning('Bad Request (UnicodeDecodeError)',
exc_info=sys.exc_info(),
extra={
'status_code': 400,
}
)
response = http.HttpResponseBadRequest()
else:
response = self.get_response(request)
response._handler_class = self.__class__
status = '%s %s' % (response.status_code, response.reason_phrase)
response_headers = [(str(k), str(v)) for k, v in response.items()]
for c in response.cookies.values():
response_headers.append((str('Set-Cookie'), str(c.output(header=''))))
start_response(force_str(status), response_headers)
return response
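# Usage sketch (illustrative): a WSGI server is pointed at an instance of this
# handler. In this Django version django.core.wsgi.get_wsgi_application()
# returns exactly that, so the manual form below is only for illustration.
#
#   application = WSGIHandler()
#   # e.g. with the stdlib reference server:
#   from wsgiref.simple_server import make_server
#   make_server('localhost', 8000, application).serve_forever()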
def get_path_info(environ):
"""
Returns the HTTP request's PATH_INFO as a unicode string.
"""
path_info = get_bytes_from_wsgi(environ, 'PATH_INFO', '/')
# It'd be better to implement URI-to-IRI decoding, see #19508.
return path_info.decode(UTF_8)
def get_script_name(environ):
"""
Returns the equivalent of the HTTP request's SCRIPT_NAME environment
    variable. If Apache mod_rewrite has been used, returns what would have been
|
the script name prior to any rewriting (so it's the script name as seen
from the client's perspective), unless the FORCE_SCRIPT_NAME setting is
set (to anything).
"""
    if settings.FORCE_SCRIPT_NAME is not None:
|
return force_text(settings.FORCE_SCRIPT_NAME)
# If Apache's mod_rewrite had a whack at the URL, Apache set either
# SCRIPT_URL or REDIRECT_URL to the full resource URL before applying any
# rewrites. Unfortunately not every Web server (lighttpd!) passes this
# information through all the time, so FORCE_SCRIPT_NAME, above, is still
# needed.
script_url = get_bytes_from_wsgi(environ, 'SCRIPT_URL', '')
if not script_url:
script_url = get_bytes_from_wsgi(environ, 'REDIRECT_URL', '')
if sc
|
0verchenko/Addressbook
|
fixture/db.py
|
Python
|
apache-2.0
| 1,388
| 0.005043
|
import mysql.connector
from model.group import Group
from model.contact import Contact
class DbFixture:
def __init__(self, host, dbname, username, password):
self.host = host
self.dbname = dbname
self.username = username
self.password = password
        self.connection = mysql.connector.connect(host=host, database=dbname, user=username, password=password)
|
self.connection.autocommit = True
def get_group_list(self):
list = []
cursor = self.connection.cursor()
try:
cursor.execute("select group_id,
|
group_name, group_header, group_footer from group_list")
for row in cursor:
(id, name, header, footer) = row
list.append(Group(id=str(id), name=name, header=header, footer=footer))
finally:
cursor.close()
return list
def get_contact_list(self):
list = []
cursor = self.connection.cursor()
try:
cursor.execute("select id, firstname, lastname from addressbook where deprecated = '0000-00-00 00:00:00'")
for row in cursor:
(id, firstname, lastname) = row
list.append(Contact(id=str(id), firstname=firstname, lastname=lastname))
finally:
cursor.close()
return list
def destroy(self):
self.connection.close()
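# Usage sketch (illustrative; host and credentials are placeholders for a
# local test database, not values taken from this project):
#
#   db = DbFixture(host="127.0.0.1", dbname="addressbook",
#                  username="root", password="")
#   groups = db.get_group_list()
#   contacts = db.get_contact_list()
#   db.destroy()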
|