| column | dtype | range / classes |
|---|---|---|
| repo_name | string | lengths 5-92 |
| path | string | lengths 4-221 |
| copies | string | 19 classes |
| size | string | lengths 4-6 |
| content | string | lengths 766-896k |
| license | string | 15 classes |
| hash | int64 | -9,223,277,421,539,062,000 to 9,223,102,107B |
| line_mean | float64 | 6.51-99.9 |
| line_max | int64 | 32-997 |
| alpha_frac | float64 | 0.25-0.96 |
| autogenerated | bool | 1 class |
| ratio | float64 | 1.5-13.6 |
| config_test | bool | 2 classes |
| has_no_keywords | bool | 2 classes |
| few_assignments | bool | 1 class |
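The columns above are standard code-corpus quality signals: size and line statistics, an alphabetic-character fraction, and boolean heuristics such as `autogenerated` and `config_test`. As a hedged illustration, rows can be filtered on these columns with the Hugging Face `datasets` library; the dataset path below is a placeholder, not this dataset's real identifier:

```python
from datasets import load_dataset

# Placeholder dataset path; substitute the actual dataset identifier.
ds = load_dataset("org/code-corpus", split="train")

# Drop generated files and keep rows with a plausible alphabetic fraction.
ds = ds.filter(lambda row: not row["autogenerated"]
               and 0.25 <= row["alpha_frac"] <= 0.96)
```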
**samuelcolvin/django-importexport · views.py** (copies: 1, size: 5063 bytes)
from django import forms
from django.core.urlresolvers import reverse
from django.db import models
from django.shortcuts import redirect

import settings
import Imex
import Imex.models as m
import Imex.tasks as tasks
import HotDjango.views_base as viewb

import_groups, export_groups = Imex.get_imex_groups()
actions = {'imex_import': import_groups, 'imex_export': export_groups}
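# 'actions' maps the 'command' URL kwarg (dispatched by Process.choose_func below)
# to the (group, label) choice tuples listed on the export and import pages.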
class Export(viewb.TemplateBase):
    template_name = 'export.html'
    menu_active = 'imex_export'
    side_menu = False
    show_crums = False

    def get_context_data(self, **kw):
        self._context['title'] = 'Export'
        self._context['page_menu'] = self.set_links()
        return self._context

    def set_links(self):
        links = []
        for group, label in actions['imex_export']:
            links.append({'url': reverse('imex_process', kwargs={'command': 'imex_export', 'group': group}), 'name': label})
        return links
class ExcelUploadForm(forms.Form):
    xlfile = forms.FileField(
        label='Select Excel (xlsx) File to Upload',
        help_text='should be in standard format for this system'
    )
    import_group = forms.ChoiceField(widget=forms.RadioSelect, choices=import_groups,
                                     label='Import Type', initial=import_groups[0][0])
class Import(viewb.TemplateBase):
    template_name = 'import.html'
    menu_active = 'imex_import'
    side_menu = False
    show_crums = False

    def get_context_data(self, **kw):
        self._context['title'] = 'Import'
        self._context['process_url'] = reverse('imex_process', kwargs={'command': 'imex_import'})
        self._context['upload_form'] = ExcelUploadForm()
        if 'errors' in self.request.session:
            self._context['errors'] = self.request.session['errors']
        return self._context
class Process(viewb.TemplateBase):
    template_name = 'process.html'
    side_menu = False
    show_crums = False
    _redirect = None
    _act_map = {'imex_export': 'EX', 'imex_import': 'IM'}

    def get(self, request, *args, **kw):
        if 'menu_active' in request.session:
            self.menu_active = request.session['menu_active']
        return super(Process, self).get(request, *args, **kw)

    def post(self, request, *args, **kw):
        page = self.get(request, *args, **kw)
        if self._redirect:
            return self._redirect
        return page

    def get_context_data(self, **kw):
        self._context['expected_ms'] = 0
        act = self._act_map[kw['command']]
        self._context['act'] = act
        prev_successful = m.Process.objects.filter(complete=True, successful=True, action=act)
        if prev_successful.exists():
            # Estimate the expected duration from the average of previous successful runs.
            expected_time = prev_successful.aggregate(expected_time=models.Avg('time_taken'))['expected_time']
            self._context['expected_ms'] = '%0.0f' % (expected_time * 1000)
        success = self.choose_func(kw)
        if not success:
            return self._context
        self._context['media_url'] = settings.MEDIA_URL
        self._context['json_url'] = '%s/%d.json' % (reverse('rest-Imex-Process-list'), self._pid)
        return self._context

    def choose_func(self, kw):
        if 'command' in kw:
            command = kw['command']
            if command in self._act_map:
                return getattr(self, command)(kw)
            else:
                self._context['errors'] = ['No function called %s' % command]
    def imex_export(self, kw):
        group = kw['group']
        assert group in [g for g, _ in export_groups], \
            'group %s not found in export_groups: %r' % (group, export_groups)
        processor = m.Process.objects.create(action='EX', group=group)
        self._pid = processor.id
        tasks.perform_export(self._pid)
        return True

    def imex_import(self, kw):
        error = None
        if self.request.method != 'POST':
            error = "No post data"
        else:
            form = ExcelUploadForm(self.request.POST, self.request.FILES)
            import_group = form['import_group'].value()
            if not form.is_valid():
                error = "Form not valid"
            elif not str(self.request.FILES['xlfile']).endswith('.xlsx'):
                error = 'File must be xlsx, not xls or any other format.'
            elif import_group not in [g for g, _ in import_groups]:
                error = 'Group %s is not one of the import groups: %r' % (import_group, import_groups)
        if error:
            print 'refused: %s' % error
            self.request.session['errors'] = [error]
            self._redirect = redirect(reverse('imex_import'))
            return
        p = m.Process.objects.create(action='IM', imex_file=self.request.FILES['xlfile'], group=import_group)
        msg = tasks.perform_import(p.id)
        if msg:
            # 'errors' may not be in the context yet; create the list on demand.
            self._context.setdefault('errors', []).append(msg)
        self._pid = p.id
        return True
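The `reverse()` calls above assume named URL patterns. A minimal `urls.py` sketch consistent with those names (Django 1.x-era syntax to match the imports; the regexes, and the assumption that `viewb.TemplateBase` is a class-based view exposing `as_view()`, are illustrative, not the project's actual routes):

```python
from django.conf.urls import url
import Imex.views as views

urlpatterns = [
    url(r'^export/$', views.Export.as_view(), name='imex_export'),
    url(r'^import/$', views.Import.as_view(), name='imex_import'),
    # Process serves both commands; 'group' is only supplied for exports.
    url(r'^process/(?P<command>\w+)/(?:(?P<group>\w+)/)?$', views.Process.as_view(), name='imex_process'),
]
```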
*license: gpl-2.0 · hash: 4,235,616,666,778,978,300 · line_mean: 37.648855 · line_max: 135 · alpha_frac: 0.596089 · autogenerated: false · ratio: 3.7146 · config_test: false · has_no_keywords: false · few_assignments: false*
**ghostop14/sparrow-wifi · sparrowwifiagent.py** (copies: 1, size: 114857 bytes)
#!/usr/bin/python3
#
# Copyright 2017 ghostop14
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import os
import sys
import datetime
import json
import re
import argparse
import configparser
# import subprocess
from socket import *
from time import sleep
from threading import Thread, Lock
from dateutil import parser
from http import server as HTTPServer
from socketserver import ThreadingMixIn
from wirelessengine import WirelessEngine
from sparrowgps import GPSEngine, GPSEngineStatic, GPSStatus, SparrowGPS
try:
    from sparrowdrone import SparrowDroneMavlink
    hasDroneKit = True
except:
    hasDroneKit = False

from sparrowrpi import SparrowRPi
from sparrowbluetooth import SparrowBluetooth, BluetoothDevice
from sparrowhackrf import SparrowHackrf
from sparrowcommon import gzipCompress

try:
    from manuf import manuf
    hasOUILookup = True
except:
    hasOUILookup = False
# ------ Global setup ------------
gpsEngine = None
curTime = datetime.datetime.now()
useMavlink = False
vehicle = None
mavlinkGPSThread = None
hasFalcon = False
hasBluetooth = False
hasUbertooth = False
falconWiFiRemoteAgent = None
bluetooth = None
hackrf = SparrowHackrf()
debugHTTP = False
allowCors = False
# Lock list is a dictionary of thread locks for scanning interfaces
lockList = {}
allowedIPs = []
useRPILeds = False
# runningcfg is created in main
runningcfg = None
recordThread = None
announceThread = None
# ------ Global functions ------------
def stringtobool(instr):
    return instr in ('True', 'true')

def TwoDigits(instr):
    # Left-pad to two characters with zeros, e.g. '7' -> '07'.
    return instr.zfill(2)
def deleteRecordingFiles(filelist):
    dirname, filename = os.path.split(os.path.abspath(__file__))
    recordingsDir = dirname + '/recordings'
    retVal = ''

    for curFilename in filelist:
        # This split is simply a safety check to prevent path traversal attacks
        dirname, filename = os.path.split(curFilename)
        if len(filename) > 0:
            fullpath = recordingsDir + '/' + filename
            try:
                os.remove(fullpath)
            except:
                if len(retVal) == 0:
                    retVal = filename
                else:
                    retVal += ',' + filename

    return retVal
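# deleteRecordingFiles() returns a comma-separated list of filenames it could not
# delete (empty string on full success); the HTTP handler surfaces that as 'errmsg'.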
def getRecordingFiles():
    dirname, filename = os.path.split(os.path.abspath(__file__))
    recordingsDir = dirname + '/recordings'

    if not os.path.exists(recordingsDir):
        os.makedirs(recordingsDir)

    retVal = []
    try:
        for filename in os.listdir(recordingsDir):
            fullPath = recordingsDir + '/' + filename
            if not os.path.isdir(fullPath):
                curFile = FileSystemFile()
                curFile.filename = filename
                curFile.size = os.path.getsize(fullPath)
                try:
                    curFile.timestamp = datetime.datetime.fromtimestamp(os.path.getmtime(fullPath))
                except:
                    curFile.timestamp = None
                retVal.append(curFile.toJsondict())
    except:
        pass

    return retVal
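# Each entry above is FileSystemFile.toJsondict() output, e.g. (illustrative values):
#   {'filename': 'host_wifi_2018-01-01_12_00_00.csv', 'size': 4096,
#    'timestamp': '2018-01-01 12:00:00'}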
def restartAgent():
    global bluetooth

    if mavlinkGPSThread:
        mavlinkGPSThread.signalStop = True
        print('Waiting for mavlink GPS thread to terminate...')
        while (mavlinkGPSThread.threadRunning):
            sleep(0.2)

    stopRecord()
    stopAnnounceThread()

    if bluetooth:
        bluetooth.stopScanning()

    if runningcfg.useRPiLEDs:
        SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_OFF)
        SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)

    if hasFalcon:
        falconWiFiRemoteAgent.cleanup()

    if os.path.isfile('/usr/local/bin/python3.5') or os.path.isfile('/usr/bin/python3.5'):
        exefile = 'python3.5'
    else:
        exefile = 'python3'

    # Relaunch this script in the background, then hard-kill the current process.
    newCommand = exefile + ' ' + __file__ + ' --delaystart=2 &'
    os.system(newCommand)
    os.kill(os.getpid(), 9)
def updateRunningConfig(newCfg):
    global runningcfg

    if runningcfg.ipAllowedList != newCfg.ipAllowedList:
        buildAllowedIPs(newCfg.ipAllowedList)

    # port is ignored since we're already running, and useRPiLEDs just updates.

    # Announce
    if runningcfg.announce != newCfg.announce:
        if not newCfg.announce:
            stopAnnounceThread()
        else:
            # start will check if it's already running
            startAnnounceThread()

    # mavlinkGPS requires a restart to change, so just carry the old value forward.
    newCfg.mavlinkGPS = runningcfg.mavlinkGPS

    # recordInterface
    if runningcfg.recordInterface != newCfg.recordInterface:
        if len(newCfg.recordInterface) == 0:
            stopRecord()
        else:
            # start will check if it's already running
            startRecord(newCfg.recordInterface)

    # Finally swap out the config
    runningcfg = newCfg
def startRecord(interface):
    global recordThread

    if recordThread:
        return

    if len(interface) > 0:
        interfaces = WirelessEngine.getInterfaces()
        if interface in interfaces:
            recordThread = AutoAgentScanThread(interface)
            recordThread.start()
        else:
            print('ERROR: Record was requested on ' + interface + ' but that interface was not found.')
    else:
        recordThread = None

def stopRecord():
    global recordThread

    if recordThread:
        recordThread.signalStop = True
        print('Waiting for record thread to terminate...')

        # Wait up to ~2 seconds, polling every 0.2 s.
        i = 0
        maxCycles = int(2 / 0.2)
        while (recordThread.threadRunning) and (i < maxCycles):
            sleep(0.2)
            i += 1

        # Clear the handle so a later startRecord() can start a new thread.
        recordThread = None
def stopAnnounceThread():
    global announceThread

    if announceThread:
        announceThread.signalStop = True
        print('Waiting for announce thread to terminate...')
        sleep(0.2)
        announceThread = None

def startAnnounceThread():
    global runningcfg
    global announceThread

    # Start announce if needed
    if announceThread:
        # It's already running
        return

    print('Sending agent announcements on port ' + str(runningcfg.port) + '.')
    announceThread = AnnounceThread(runningcfg.port)
    announceThread.start()
def buildAllowedIPs(allowedIPstr):
    global allowedIPs

    allowedIPs = []

    if len(allowedIPstr) > 0:
        ippattern = re.compile(r'([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3})')

        if ',' in allowedIPstr:
            tmpList = allowedIPstr.split(',')
            for curItem in tmpList:
                ipStr = curItem.replace(' ', '')
                try:
                    ipValue = ippattern.search(ipStr).group(1)
                except:
                    ipValue = ""
                    print('ERROR: Unknown IP pattern: ' + ipStr)
                    exit(3)

                if len(ipValue) > 0:
                    allowedIPs.append(ipValue)
        else:
            ipStr = allowedIPstr.replace(' ', '')
            try:
                ipValue = ippattern.search(ipStr).group(1)
            except:
                ipValue = ""
                print('ERROR: Unknown IP pattern: ' + ipStr)
                return False

            if len(ipValue) > 0:
                allowedIPs.append(ipValue)

    return True
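# Example (sketch): buildAllowedIPs('127.0.0.1, 192.168.1.10') leaves
# allowedIPs == ['127.0.0.1', '192.168.1.10']; an empty string clears the list,
# which the HTTP handlers treat as "all client IPs allowed".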
# ------ OUI lookup functions ------------
def getOUIDB():
    ouidb = None

    if hasOUILookup:
        if os.path.isfile('manuf'):
            # We have the file, but let's not update it on every run;
            # every 90 days should be plenty.
            last_modified_date = datetime.datetime.fromtimestamp(os.path.getmtime('manuf'))
            now = datetime.datetime.now()
            age = now - last_modified_date
            updateflag = age.days > 90
        else:
            # We don't have the file, let's get it
            updateflag = True

        try:
            ouidb = manuf.MacParser(update=updateflag)
        except:
            ouidb = None
    else:
        ouidb = None

    return ouidb
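# Note: manuf.MacParser(update=True) re-downloads the OUI database that backs
# get_manuf() lookups; the 90-day age check above keeps that network call rare.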
# ------------------ File ------------------------------
class FileSystemFile(object):
    def __init__(self):
        self.filename = ""
        self.size = 0
        self.timestamp = None

    def __str__(self):
        return self.filename

    def toJsondict(self):
        jsondict = {}
        jsondict['filename'] = self.filename
        jsondict['size'] = self.size
        jsondict['timestamp'] = str(self.timestamp)
        return jsondict

    def fromJsondict(self, jsondict):
        self.filename = jsondict['filename']
        self.size = jsondict['size']
        if jsondict['timestamp'] == 'None':
            self.timestamp = None
        else:
            self.timestamp = parser.parse(jsondict['timestamp'])
# ------------------ Config Settings ------------------------------
class AgentConfigSettings(object):
    def __init__(self):
        self.cancelStart = False
        self.port = 8020
        self.announce = False
        self.useRPiLEDs = False
        self.recordInterface = ""
        self.recordRunning = False
        self.mavlinkGPS = ""
        self.ipAllowedList = ""
        self.allowCors = False

    def __str__(self):
        retVal = "Cancel Start: " + str(self.cancelStart) + "\n"
        retVal += "Port: " + str(self.port) + "\n"
        retVal += "Announce Agent: " + str(self.announce) + "\n"
        retVal += "Use RPi LEDs: " + str(self.useRPiLEDs) + "\n"
        retVal += "Record Interface: " + self.recordInterface + "\n"
        retVal += "Record Running (for running configs): " + str(self.recordRunning) + "\n"
        retVal += "Mavlink GPS: " + self.mavlinkGPS + "\n"
        retVal += "IP Allowed List: " + self.ipAllowedList + "\n"
        retVal += "Allow CORS: " + str(self.allowCors) + "\n"
        return retVal

    def __eq__(self, obj):
        # Equivalence (==). Note that recordRunning is not compared.
        if not isinstance(obj, AgentConfigSettings):
            return False
        if self.cancelStart != obj.cancelStart:
            return False
        if self.port != obj.port:
            return False
        if self.announce != obj.announce:
            return False
        if self.useRPiLEDs != obj.useRPiLEDs:
            return False
        if self.recordInterface != obj.recordInterface:
            return False
        if self.mavlinkGPS != obj.mavlinkGPS:
            return False
        if self.ipAllowedList != obj.ipAllowedList:
            return False
        if self.allowCors != obj.allowCors:
            return False
        return True

    def __ne__(self, other):
        return not self.__eq__(other)

    def toJsondict(self):
        dictjson = {}
        dictjson['cancelstart'] = str(self.cancelStart)
        dictjson['port'] = self.port
        dictjson['announce'] = str(self.announce)
        dictjson['recordrunning'] = str(self.recordRunning)
        dictjson['userpileds'] = str(self.useRPiLEDs)
        dictjson['recordinterface'] = self.recordInterface
        dictjson['mavlinkgps'] = self.mavlinkGPS
        dictjson['allowedips'] = self.ipAllowedList
        dictjson['allowcors'] = str(self.allowCors)
        return dictjson

    def toJson(self):
        return json.dumps(self.toJsondict())
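    # Round-trip sketch: serialize and restore a config, then compare with __eq__
    # (recordRunning is not part of the comparison):
    #   cfg = AgentConfigSettings()
    #   restored = AgentConfigSettings()
    #   restored.fromJson(cfg.toJson())
    #   assert restored == cfg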
    def fromJsondict(self, dictjson):
        try:
            self.cancelStart = stringtobool(dictjson['cancelstart'])
            self.port = int(dictjson['port'])
            self.announce = stringtobool(dictjson['announce'])
            self.recordRunning = stringtobool(dictjson['recordrunning'])
            self.useRPiLEDs = stringtobool(dictjson['userpileds'])
            self.recordInterface = dictjson['recordinterface']
            self.mavlinkGPS = dictjson['mavlinkgps']
            self.ipAllowedList = dictjson['allowedips']
            self.allowCors = stringtobool(dictjson['allowcors'])
        except Exception as e:
            print(e)

    def fromJson(self, jsonstr):
        dictjson = json.loads(jsonstr)
        self.fromJsondict(dictjson)

    def toConfigFile(self, cfgFile):
        config = configparser.ConfigParser()
        config['agent'] = self.toJsondict()
        try:
            with open(cfgFile, 'w') as configfile:
                config.write(configfile)
            return True
        except:
            return False
    def fromConfigFile(self, cfgFile):
        if not os.path.isfile(cfgFile):
            return False

        cfgParser = configparser.ConfigParser()
        try:
            cfgParser.read(cfgFile)
            section = "agent"
            options = cfgParser.options(section)
            for option in options:
                try:
                    if option == 'cancelstart':
                        self.cancelStart = stringtobool(cfgParser.get(section, option))
                    elif option in ('announce', 'sendannounce'):
                        # Accept either key name for the announce flag; toConfigFile()
                        # writes 'announce'.
                        self.announce = stringtobool(cfgParser.get(section, option))
                    elif option == 'userpileds':
                        self.useRPiLEDs = stringtobool(cfgParser.get(section, option))
                    elif option == 'port':
                        self.port = int(cfgParser.get(section, option))
                    elif option == 'recordinterface':
                        self.recordInterface = cfgParser.get(section, option)
                    elif option == 'mavlinkgps':
                        self.mavlinkGPS = cfgParser.get(section, option)
                    elif option == 'allowedips':
                        self.ipAllowedList = cfgParser.get(section, option)
                    elif option == 'allowcors':
                        self.allowCors = stringtobool(cfgParser.get(section, option))
                except:
                    print("exception on %s!" % option)
        except:
            print("ERROR: Unable to read config file: ", cfgFile)
            return False

        return True
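# toConfigFile() writes an INI file like the following (sketch; values shown are
# the defaults from __init__):
#
#   [agent]
#   cancelstart = False
#   port = 8020
#   announce = False
#   recordrunning = False
#   userpileds = False
#   recordinterface =
#   mavlinkgps =
#   allowedips =
#   allowcors = False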
# ------------------ Agent auto scan thread ------------------------------
class AutoAgentScanThread(Thread):
    def __init__(self, interface):
        global lockList
        global hasBluetooth

        super(AutoAgentScanThread, self).__init__()
        self.interface = interface
        self.signalStop = False
        self.scanDelay = 0.5  # seconds
        self.threadRunning = False
        self.discoveredNetworks = {}
        self.discoveredBluetoothDevices = {}
        self.daemon = True

        try:
            self.hostname = os.uname()[1]
        except:
            self.hostname = 'unknown'

        if len(self.hostname) == 0:
            self.hostname = 'unknown'

        self.ouiLookupEngine = getOUIDB()

        if interface not in lockList.keys():
            lockList[interface] = Lock()

        if not os.path.exists('./recordings'):
            os.makedirs('./recordings')

        now = datetime.datetime.now()
        stamp = (str(now.year) + "-" + TwoDigits(str(now.month)) + "-" + TwoDigits(str(now.day)) +
                 "_" + TwoDigits(str(now.hour)) + "_" + TwoDigits(str(now.minute)) + "_" + TwoDigits(str(now.second)))
        self.filename = './recordings/' + self.hostname + '_wifi_' + stamp + '.csv'
        self.btfilename = './recordings/' + self.hostname + '_bt_' + stamp + '.csv'

        print('Capturing on ' + interface + ' and writing wifi to ' + self.filename)
        if hasBluetooth:
            print('and writing bluetooth to ' + self.btfilename)
    def run(self):
        global lockList
        global hasBluetooth

        self.threadRunning = True

        if self.interface not in lockList.keys():
            lockList[self.interface] = Lock()

        curLock = lockList[self.interface]

        if hasBluetooth:
            # Start normal discovery
            bluetooth.startDiscovery(False)

        lastState = -1

        while (not self.signalStop):
            # Scan all / normal mode
            if (curLock):
                curLock.acquire()

            retCode, errString, wirelessNetworks = WirelessEngine.scanForNetworks(self.interface)

            if (curLock):
                curLock.release()

            if (retCode == 0):
                if useMavlink:
                    gpsCoord = GPSStatus()
                    gpsCoord.gpsInstalled = True
                    gpsCoord.gpsRunning = True
                    gpsCoord.isValid = mavlinkGPSThread.synchronized
                    gpsCoord.latitude = mavlinkGPSThread.latitude
                    gpsCoord.longitude = mavlinkGPSThread.longitude
                    gpsCoord.altitude = mavlinkGPSThread.altitude
                    gpsCoord.speed = mavlinkGPSThread.vehicle.getAirSpeed()
                elif gpsEngine.gpsValid():
                    gpsCoord = gpsEngine.lastCoord
                    if useRPILeds and (lastState != SparrowRPi.LIGHT_STATE_ON):
                        SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
                        lastState = SparrowRPi.LIGHT_STATE_ON
                else:
                    gpsCoord = GPSStatus()
                    if useRPILeds and (lastState != SparrowRPi.LIGHT_STATE_HEARTBEAT):
                        SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_HEARTBEAT)
                        lastState = SparrowRPi.LIGHT_STATE_HEARTBEAT

                if wirelessNetworks and (len(wirelessNetworks) > 0) and (not self.signalStop):
                    for netKey in wirelessNetworks.keys():
                        curNet = wirelessNetworks[netKey]
                        curNet.gps.copy(gpsCoord)
                        curNet.strongestgps.copy(gpsCoord)
                        curKey = curNet.getKey()
                        if curKey not in self.discoveredNetworks.keys():
                            self.discoveredNetworks[curKey] = curNet
                        else:
                            # Network exists; update it, but carry forward firstSeen
                            # and the strongest signal / GPS fix.
                            pastNet = self.discoveredNetworks[curKey]
                            curNet.firstSeen = pastNet.firstSeen
                            if pastNet.strongestsignal > curNet.signal:
                                curNet.strongestsignal = pastNet.strongestsignal
                                curNet.strongestgps.latitude = pastNet.strongestgps.latitude
                                curNet.strongestgps.longitude = pastNet.strongestgps.longitude
                                curNet.strongestgps.altitude = pastNet.strongestgps.altitude
                                curNet.strongestgps.speed = pastNet.strongestgps.speed
                                curNet.strongestgps.isValid = pastNet.strongestgps.isValid
                            self.discoveredNetworks[curKey] = curNet

                if not self.signalStop:
                    self.exportNetworks()

                    # Now if we have bluetooth running, export those devices too:
                    if hasBluetooth and bluetooth.discoveryRunning():
                        bluetooth.deviceLock.acquire()
                        # Update GPS
                        now = datetime.datetime.now()
                        for curKey in bluetooth.devices.keys():
                            curDevice = bluetooth.devices[curKey]
                            elapsedTime = now - curDevice.lastSeen
                            # This is a little bit of a hack for the BlueHydra side since it
                            # can take a while for devices to show up in the db. For LE
                            # discovery scans this will always be pretty quick.
                            if elapsedTime.total_seconds() < 120:
                                curDevice.gps.copy(gpsCoord)
                            if curDevice.rssi >= curDevice.strongestRssi:
                                curDevice.strongestRssi = curDevice.rssi
                                curDevice.strongestgps.copy(gpsCoord)
                        # export
                        self.exportBluetoothDevices(bluetooth.devices)
                        bluetooth.deviceLock.release()

            sleep(self.scanDelay)

        if hasBluetooth:
            bluetooth.stopDiscovery()

        self.threadRunning = False
    def ouiLookup(self, macAddr):
        clientVendor = ""
        if hasOUILookup:
            try:
                if self.ouiLookupEngine:
                    clientVendor = self.ouiLookupEngine.get_manuf(macAddr)
            except:
                clientVendor = ""
        return clientVendor

    def exportBluetoothDevices(self, devices):
        try:
            btOutputFile = open(self.btfilename, 'w')
        except:
            print('ERROR: Unable to write to bluetooth file ' + self.btfilename)
            return

        btOutputFile.write('uuid,Address,Name,Company,Manufacturer,Type,RSSI,TX Power,Strongest RSSI,Est Range (m),Last Seen,GPS Valid,Latitude,Longitude,Altitude,Speed,Strongest GPS Valid,Strongest Latitude,Strongest Longitude,Strongest Altitude,Strongest Speed\n')

        for curKey in devices.keys():
            curData = devices[curKey]

            if curData.btType == BluetoothDevice.BT_LE:
                btType = "BTLE"
            else:
                btType = "Classic"

            if curData.txPowerValid:
                txPower = str(curData.txPower)
            else:
                txPower = 'Unknown'

            btOutputFile.write(curData.uuid + ',' + curData.macAddress + ',"' + curData.name + '","' + curData.company + '","' + curData.manufacturer)
            # Write in header order: RSSI, TX Power, Strongest RSSI, Est Range.
            btOutputFile.write('","' + btType + '",' + str(curData.rssi) + ',' + txPower + ',' + str(curData.strongestRssi) + ',' + str(curData.iBeaconRange) + ',' +
                               curData.lastSeen.strftime("%m/%d/%Y %H:%M:%S") + ',' +
                               str(curData.gps.isValid) + ',' + str(curData.gps.latitude) + ',' + str(curData.gps.longitude) + ',' + str(curData.gps.altitude) + ',' + str(curData.gps.speed) + ',' +
                               str(curData.strongestgps.isValid) + ',' + str(curData.strongestgps.latitude) + ',' + str(curData.strongestgps.longitude) + ',' + str(curData.strongestgps.altitude) + ',' + str(curData.strongestgps.speed) + '\n')

        btOutputFile.close()

    def exportNetworks(self):
        try:
            self.outputFile = open(self.filename, 'w')
        except:
            print('ERROR: Unable to write to wifi file ' + self.filename)
            return

        self.outputFile.write('macAddr,vendor,SSID,Security,Privacy,Channel,Frequency,Signal Strength,Strongest Signal Strength,Bandwidth,Last Seen,First Seen,GPS Valid,Latitude,Longitude,Altitude,Speed,Strongest GPS Valid,Strongest Latitude,Strongest Longitude,Strongest Altitude,Strongest Speed\n')

        for netKey in self.discoveredNetworks.keys():
            curData = self.discoveredNetworks[netKey]
            vendor = self.ouiLookup(curData.macAddr)
            if vendor is None:
                vendor = ''

            self.outputFile.write(curData.macAddr + ',' + vendor + ',"' + curData.ssid + '",' + curData.security + ',' + curData.privacy)
            self.outputFile.write(',' + curData.getChannelString() + ',' + str(curData.frequency) + ',' + str(curData.signal) + ',' + str(curData.strongestsignal) + ',' + str(curData.bandwidth) + ',' +
                                  curData.lastSeen.strftime("%m/%d/%Y %H:%M:%S") + ',' + curData.firstSeen.strftime("%m/%d/%Y %H:%M:%S") + ',' +
                                  str(curData.gps.isValid) + ',' + str(curData.gps.latitude) + ',' + str(curData.gps.longitude) + ',' + str(curData.gps.altitude) + ',' + str(curData.gps.speed) + ',' +
                                  str(curData.strongestgps.isValid) + ',' + str(curData.strongestgps.latitude) + ',' + str(curData.strongestgps.longitude) + ',' + str(curData.strongestgps.altitude) + ',' + str(curData.strongestgps.speed) + '\n')

        self.outputFile.close()
# ------------------ Announce thread ------------------------------
class AnnounceThread(Thread):
    def __init__(self, port):
        super(AnnounceThread, self).__init__()
        self.signalStop = False
        self.sendDelay = 4.0  # seconds
        self.threadRunning = False
        self.daemon = True

        self.broadcastSocket = socket(AF_INET, SOCK_DGRAM)
        self.broadcastSocket.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
        self.broadcastSocket.setsockopt(SOL_SOCKET, SO_BROADCAST, 1)
        self.broadcastPort = port
        self.broadcastAddr = ('255.255.255.255', self.broadcastPort)

    def sendAnnounce(self):
        try:
            self.broadcastSocket.sendto(bytes('sparrowwifiagent', "utf-8"), self.broadcastAddr)
        except:
            pass

    def run(self):
        self.threadRunning = True

        while (not self.signalStop):
            self.sendAnnounce()

            # 4 second delay, but check every second for termination signal
            i = 0
            while i < 4 and not self.signalStop:
                sleep(1.0)
                i += 1

        self.threadRunning = False
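# Any UDP socket bound to the same port can receive these announcements (sketch;
# assumes broadcast reception is permitted on the receiving interface):
#   sock = socket(AF_INET, SOCK_DGRAM)
#   sock.bind(('', port))
#   data, addr = sock.recvfrom(1024)   # data == b'sparrowwifiagent'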
# ------------------ Mavlink GPS thread ------------------------------
class MavlinkGPSThread(Thread):
    def __init__(self, vehicle):
        super(MavlinkGPSThread, self).__init__()
        self.signalStop = False
        self.scanDelay = 0.5  # seconds
        self.threadRunning = False
        self.vehicle = vehicle
        self.synchronized = False
        self.latitude = 0.0
        self.longitude = 0.0
        self.altitude = 0.0
        self.daemon = True

    def run(self):
        self.threadRunning = True
        lastState = -1

        while (not self.signalStop):
            self.synchronized, self.latitude, self.longitude, self.altitude = self.vehicle.getGlobalGPS()

            if self.synchronized:
                # Solid red LED when synchronized
                if useRPILeds and (lastState != SparrowRPi.LIGHT_STATE_ON):
                    SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
                    lastState = SparrowRPi.LIGHT_STATE_ON
            else:
                # Heartbeat when unsynchronized
                if useRPILeds and (lastState != SparrowRPi.LIGHT_STATE_HEARTBEAT):
                    SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_HEARTBEAT)
                    lastState = SparrowRPi.LIGHT_STATE_HEARTBEAT

            sleep(self.scanDelay)

        self.threadRunning = False
class SparrowWiFiAgent(object):
    # See https://docs.python.org/3/library/http.server.html for HTTP server info
    def run(self, port):
        global useRPILeds
        global hackrf
        global bluetooth
        global falconWiFiRemoteAgent

        server_address = ('', port)
        try:
            httpd = MultithreadHTTPServer(server_address, SparrowWiFiAgentRequestHandler)
        except OSError as e:
            curTime = datetime.datetime.now()
            print('[' + curTime.strftime("%m/%d/%Y %H:%M:%S") + "] Unable to bind to port " + str(port) + ". " + e.strerror)
            if runningcfg.useRPiLEDs:
                SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_OFF)
                SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
            exit(1)

        curTime = datetime.datetime.now()
        print('[' + curTime.strftime("%m/%d/%Y %H:%M:%S") + "] Starting Sparrow-wifi agent on port " + str(port))

        if useRPILeds:
            SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)

        try:
            httpd.serve_forever()
        except KeyboardInterrupt:
            pass

        httpd.server_close()

        if useRPILeds:
            SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_OFF)

        if hasFalcon:
            falconWiFiRemoteAgent.cleanup()

        if bluetooth:
            bluetooth.stopScanning()

        if hackrf.scanRunning():
            hackrf.stopScanning()

        curTime = datetime.datetime.now()
        print('[' + curTime.strftime("%m/%d/%Y %H:%M:%S") + "] Sparrow-wifi agent stopped.")
# --------------- Multithreaded HTTP Server ------------------------------
class MultithreadHTTPServer(ThreadingMixIn, HTTPServer.HTTPServer):
    pass
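# Each request is handled on its own thread via ThreadingMixIn; this is why
# scans serialize on the per-interface locks kept in lockList above.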
# --------------- HTTP Request Handler --------------------
# Sample handler: https://wiki.python.org/moin/BaseHttpServer
class SparrowWiFiAgentRequestHandler(HTTPServer.BaseHTTPRequestHandler):
    def log_message(self, format, *args):
        global debugHTTP
        if debugHTTP:
            # Delegate to the default logger only when HTTP debugging is enabled.
            HTTPServer.BaseHTTPRequestHandler.log_message(self, format, *args)

    def do_HEAD(s):
        global allowCors
        s.send_response(200)
        s.send_header("Content-type", "text/html")
        if allowCors:
            s.send_header("Access-Control-Allow-Origin", "*")
        s.end_headers()
def do_POST(s):
global runningcfg
global falconWiFiRemoteAgent
if len(s.client_address) == 0:
# This should have the connecting client IP. If this isn't at least 1, something is wrong
return
if len(allowedIPs) > 0:
if s.client_address[0] not in allowedIPs:
try:
s.send_response(403)
s.send_header("Content-type", "text/html")
s.end_headers()
s.wfile.write("<html><body><p>Connections not authorized from your IP address</p>".encode("utf-8"))
s.wfile.write("</body></html>".encode("UTF-8"))
except:
pass
return
if (not s.isValidPostURL()):
try:
s.send_response(404)
s.send_header("Content-type", "text/html")
s.end_headers()
s.wfile.write("<html><body><p>Page not found.</p>".encode("utf-8"))
s.wfile.write("</body></html>".encode("UTF-8"))
except:
pass
return
# Get the size of the posted data
try:
length = int(s.headers['Content-Length'])
except:
length = 0
if length <= 0:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Agent received a zero-length request.'
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
# get the POSTed payload
jsonstr_data = s.rfile.read(length).decode('utf-8')
# Try to convert it to JSON
try:
jsondata = json.loads(jsonstr_data)
except:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'bad posted data.'
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
if s.path == '/system/config':
# ------------- Update startup config ------------------
try:
scfg = jsondata['startup']
startupCfg = AgentConfigSettings()
startupCfg.fromJsondict(scfg)
dirname, filename = os.path.split(os.path.abspath(__file__))
cfgFile = dirname + '/sparrowwifiagent.cfg'
retVal = startupCfg.toConfigFile(cfgFile)
if not retVal:
# HTML 400 = Bad request
s.send_response(400)
responsedict = {}
responsedict['errcode'] = 2
responsedict['errmsg'] = 'An error occurred saving the startup config.'
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
except:
responsedict = {}
responsedict['errcode'] = 3
responsedict['errmsg'] = 'Bad startup config.'
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
# ------------- Check if we should reboot ------------------
if 'rebootagent' in jsondata:
rebootFlag = jsondata['rebootagent']
if rebootFlag:
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = 'Restarting agent.'
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
restartAgent()
# If we're restarting, we'll never get to running config.
# ------------- Update Running config ------------------
try:
rcfg = jsondata['running']
tmpcfg = AgentConfigSettings()
tmpcfg.fromJsondict(rcfg)
updateRunningConfig(tmpcfg)
try:
s.send_response(200)
s.send_header("Content-Length", 0)
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
except:
pass
except Exception as e:
print(e)
responsedict = {}
responsedict['errcode'] = 4
responsedict['errmsg'] = 'Bad running config.'
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
# ------------- Done updating config ------------------
elif s.path == '/system/deleterecordings':
try:
filelist = jsondata['files']
problemfiles=deleteRecordingFiles(filelist)
responsedict = {}
if len(problemfiles) == 0:
responsedict['errcode'] = 0
responsedict['errmsg'] = ""
else:
responsedict['errcode'] = 1
responsedict['errmsg'] = problemfiles
jsonstr = json.dumps(responsedict)
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
except:
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing json"
except:
pass
elif s.path == '/falcon/stopdeauth':
if not hasFalcon:
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
# Should get a FalconDeauth object
# This is in jsondata
try:
apMacAddr = jsondata['apmacaddr']
clientMacAddr = jsondata['stationmacaddr']
channel = jsondata['channel']
curInterface = jsondata['interface']
falconWiFiRemoteAgent.stopDeauth(apMacAddr, clientMacAddr, curInterface, channel)
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ""
jsonstr = json.dumps(responsedict)
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
except:
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing json"
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/falcon/deauth':
if not hasFalcon:
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
# Should get a FalconDeauth object
# This is in jsondata
try:
apMacAddr = jsondata['apmacaddr']
clientMacAddr = jsondata['stationmacaddr']
channel = jsondata['channel']
curInterface = jsondata['interface']
continuous = jsondata['continuous']
if len(clientMacAddr) == 0:
newDeauth = falconWiFiRemoteAgent.deauthAccessPoint(apMacAddr, curInterface, channel, continuous)
else:
newDeauth = falconWiFiRemoteAgent.deauthAccessPointAndClient(apMacAddr, clientMacAddr, curInterface, channel, continuous)
if not continuous:
# There's nothing to check. Just return
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ""
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
if newDeauth:
# Deauth was started
try:
s.send_response(200)
#s.send_header("Content-type", "text/html")
s.send_header("Content-type", "application/json")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ""
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
# Something went wrong with the start
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = "An error occurred starting the deauth process."
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
except:
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing json"
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/falcon/startcrack':
if not hasFalcon:
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
# Extract necessary info for cracking
try:
crackType = jsondata['cracktype'] # This will be wep or wpapsk
curInterface = jsondata['interface']
channel = jsondata['channel']
ssid = jsondata['ssid']
apMacAddr=jsondata['apmacaddr']
hasClient = jsondata['hasclient']
# For now you can only run 1 crack globally due to tmp flie naming.
# At some point I'll scale it out
if crackType == 'wep':
if curInterface in falconWiFiRemoteAgent.WEPCrackList:
wepCrack = falconWiFiRemoteAgent.WEPCrackList[curInterface]
# Stop one if it was already running
wepCrack.stopCrack()
else:
wepCrack = WEPCrack()
falconWiFiRemoteAgent.WEPCrackList[curInterface] = wepCrack
wepCrack.cleanupTempFiles()
retVal, errMsg = wepCrack.startCrack(curInterface, channel, ssid, apMacAddr, hasClient)
else:
if curInterface in falconWiFiRemoteAgent.WPAPSKCrackList:
wpaPSKCrack = falconWiFiRemoteAgent.WPAPSKCrackList[curInterface]
# Stop one if it was already running
wpaPSKCrack.stopCrack()
else:
wpaPSKCrack = WPAPSKCrack()
falconWiFiRemoteAgent.WPAPSKCrackList[curInterface] = wpaPSKCrack
wpaPSKCrack.cleanupTempFiles()
retVal, errMsg = wpaPSKCrack.startCrack(curInterface, channel, ssid, apMacAddr, hasClient)
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
responsedict = {}
# For start, retVal is True/False
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
except:
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing json"
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
try:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = 'Bad request.'
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
def isValidPostURL(s):
allowedfullurls = ['/system/config',
'/falcon/startcrack',
'/falcon/deauth',
'/falcon/stopdeauth',
'/system/deleterecordings']
allowedstarturls=[]
if s.path in allowedfullurls:
return True
else:
for curURL in allowedstarturls:
if s.path.startswith(curURL):
return True
return False
def isValidGetURL(s):
# Full urls
allowedfullurls = ['/wireless/interfaces',
'/wireless/moninterfaces',
'/falcon/getscanresults',
'/falcon/getalldeauths',
'/system/getrecordings',
'/bluetooth/present',
'/bluetooth/scanstart',
'/bluetooth/scanstop',
'/bluetooth/scanstatus',
'/bluetooth/running',
'/bluetooth/beaconstart',
'/bluetooth/beaconstop',
'/bluetooth/discoverystartp',
'/bluetooth/discoverystarta',
'/bluetooth/discoverystop',
'/bluetooth/discoveryclear',
'/bluetooth/discoverystatus',
'/spectrum/scanstart24',
'/spectrum/scanstart5',
'/spectrum/scanstop',
'/spectrum/scanstatus',
'/spectrum/hackrfstatus',
'/gps/status']
# partials that have more in the URL
allowedstarturls=['/wireless/networks/',
'/falcon/startmonmode/',
'/falcon/stopmonmode/',
'/falcon/scanrunning/',
'/falcon/startscan/',
'/falcon/stopscan/',
'/falcon/stopalldeauths',
'/falcon/crackstatuswpapsk',
'/falcon/crackstatuswep',
'/falcon/stopcrack',
'/system/config',
'/system/startrecord',
'/system/stoprecord',
'/system/getrecording']
if s.path in allowedfullurls:
return True
else:
for curURL in allowedstarturls:
if s.path.startswith(curURL):
return True
return False
def sendFile(s, passedfilename):
# Directory traversal safety check
dirname, runfilename = os.path.split(os.path.abspath(__file__))
tmpdirname, filename = os.path.split(passedfilename)
recordingsDir = dirname + '/recordings'
fullPath = recordingsDir + '/' + filename
if not os.path.isfile(fullPath):
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'File not found.'
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
return
try:
f = open(fullPath, 'rb')
except:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 2
responsedict['errmsg'] = 'Unable to open file.'
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
return
fileExtension = filename.split(".")[-1]
if fileExtension in ['txt', 'csv', 'json', 'xml']:
contentType = 'text/plain'
elif fileExtension == 'html':
contentType = 'text/html'
else:
contentType = 'application/octet-stream'
s.send_response(200)
#s.send_header("Content-type", "text/html")
s.send_header("Content-type", contentType)
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
try:
s.wfile.write(f.read())
except:
pass
f.close()
return
def do_GET(s):
global gpsEngine
global useMavlink
global mavlinkGPSThread
global lockList
global allowedIPs
global runningcfg
global falconWiFiRemoteAgent
global hasBluetooth
global hasUbertooth
global bluetooth
global allowCors
# For RPi LED's, using it during each get request wasn't completely working. Short transactions like
# status and interface list were so quick the light would get "confused" and stay off. So
# the LED is only used for long calls like scan
if len(s.client_address) == 0:
# This should have the connecting client IP. If this isn't at least 1, something is wrong
return
try:
# If the pipe gets broken mid-stream it'll throw an exception
if len(allowedIPs) > 0:
if s.client_address[0] not in allowedIPs:
try:
s.send_response(403)
s.send_header("Content-type", "text/html")
s.end_headers()
s.wfile.write("<html><body><p>Connections not authorized from your IP address</p>".encode("utf-8"))
s.wfile.write("</body></html>".encode("UTF-8"))
except:
pass
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
return
if not s.isValidGetURL():
try:
s.send_response(404)
s.send_header("Content-type", "text/html")
s.end_headers()
s.wfile.write("<html><body><p>Bad Request</p>".encode("utf-8"))
s.wfile.write("</body></html>".encode("UTF-8"))
except:
pass
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
return
"""Respond to a GET request."""
if (not s.path.startswith('/system/getrecording/') and (not s.path == ('/bluetooth/scanstatus')) and
(not s.path == ('/spectrum/scanstatus'))):
# In getrecording we may adjust the content type header based on file extension
# Spectrum we'll gzip
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
except:
pass
# NOTE: In python 3, string is a bit different. Examples write strings directly for Python2,
# In python3 you have to convert it to UTF-8 bytes
# s.wfile.write("<html><head><title>Sparrow-wifi agent</title></head><body>".encode("utf-8"))
if s.path == '/wireless/interfaces':
wirelessInterfaces = WirelessEngine.getInterfaces()
jsondict={}
jsondict['interfaces']=wirelessInterfaces
jsonstr = json.dumps(jsondict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/wireless/networks/' in s.path:
# THIS IS THE NORMAL SCAN
inputstr = s.path.replace('/wireless/networks/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
fieldValue = p.search(inputstr).group(1)
except:
fieldValue = ""
if len(fieldValue) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
if '?' in inputstr:
splitlist = inputstr.split('?')
curInterface = splitlist[0]
else:
curInterface = inputstr
p = re.compile('.*Frequencies=(.*)', re.IGNORECASE)
try:
channelStr = p.search(inputstr).group(1)
except:
channelStr = ""
huntChannelList = []
if ',' in channelStr:
tmpList = channelStr.split(',')
else:
tmpList = []
if len(tmpList) > 0:
for curItem in tmpList:
try:
if len(curItem) > 0:
huntChannelList.append(int(curItem))
# Get results for the specified interface
# Need to iterate through the channels and aggregate the results
except:
pass
if useRPILeds:
# Green will heartbeat when servicing requests
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_OFF)
sleep(0.1)
if curInterface not in lockList.keys():
lockList[curInterface] = Lock()
curLock = lockList[curInterface]
if (curLock):
curLock.acquire()
if useMavlink:
gpsCoord = GPSStatus()
gpsCoord.gpsInstalled = True
gpsCoord.gpsRunning = True
gpsCoord.isValid = mavlinkGPSThread.synchronized
gpsCoord.latitude = mavlinkGPSThread.latitude
gpsCoord.longitude = mavlinkGPSThread.longitude
gpsCoord.altitude = mavlinkGPSThread.altitude
gpsCoord.speed = mavlinkGPSThread.vehicle.getAirSpeed()
retCode, errString, jsonstr=WirelessEngine.getNetworksAsJson(fieldValue, gpsCoord, huntChannelList)
elif gpsEngine.gpsValid():
retCode, errString, jsonstr=WirelessEngine.getNetworksAsJson(fieldValue, gpsEngine.lastCoord, huntChannelList)
if useRPILeds:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
else:
retCode, errString, jsonstr=WirelessEngine.getNetworksAsJson(fieldValue, None, huntChannelList)
if useRPILeds:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_HEARTBEAT)
if (curLock):
curLock.release()
s.wfile.write(jsonstr.encode("UTF-8"))
elif s.path == '/gps/status':
jsondict={}
if not useMavlink:
jsondict['gpsinstalled'] = str(GPSEngine.GPSDInstalled())
jsondict['gpsrunning'] = str(GPSEngine.GPSDRunning())
jsondict['gpssynch'] = str(gpsEngine.gpsValid())
if gpsEngine.gpsValid():
gpsPos = {}
gpsPos['latitude'] = gpsEngine.lastCoord.latitude
gpsPos['longitude'] = gpsEngine.lastCoord.longitude
gpsPos['altitude'] = gpsEngine.lastCoord.altitude
gpsPos['speed'] = gpsEngine.lastCoord.speed
jsondict['gpspos'] = gpsPos
if useRPILeds:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
else:
if useRPILeds:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_HEARTBEAT)
else:
jsondict['gpsinstalled'] = 'True'
jsondict['gpsrunning'] = 'True'
jsondict['gpssynch'] = str(mavlinkGPSThread.synchronized)
gpsPos = {}
gpsPos['latitude'] = mavlinkGPSThread.latitude
gpsPos['longitude'] = mavlinkGPSThread.longitude
gpsPos['altitude'] = mavlinkGPSThread.altitude
gpsPos['speed'] = mavlinkGPSThread.vehicle.getAirSpeed()
jsondict['gpspos'] = gpsPos
jsonstr = json.dumps(jsondict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/wireless/moninterfaces':
wirelessInterfaces = WirelessEngine.getMonitoringModeInterfaces()
jsondict={}
jsondict['interfaces']=wirelessInterfaces
jsonstr = json.dumps(jsondict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/system/getrecordings':
filelist = getRecordingFiles()
responsedict = {}
responsedict['files'] = filelist
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path.startswith('/system/getrecording/'):
filename = s.path.replace('/system/getrecording/', '')
s.sendFile(filename)
elif s.path == '/bluetooth/present':
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
responsedict['hasbluetooth'] = hasBluetooth
if hasBluetooth:
responsedict['scanrunning'] = bluetooth.scanRunning()
else:
responsedict['scanrunning'] = False
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path.startswith('/bluetooth/beacon'):
if not hasBluetooth:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Bluetooth not supported on this agent'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
function=s.path.replace('/bluetooth/beacon', '')
function = function.replace('/', '')
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
if function=='start':
if bluetooth.discoveryRunning():
bluetooth.stopDiscovery()
retVal = bluetooth.startBeacon()
if not retVal:
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Unable to start beacon.'
elif function == 'stop':
bluetooth.stopBeacon()
else:
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Unknown command'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path.startswith('/bluetooth/scan'):
if not hasBluetooth:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Bluetooth not supported on this agent'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
function=s.path.replace('/bluetooth/scan', '')
function = function.replace('/', '')
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
if function=='start':
bluetooth.startScanning()
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif function == 'stop':
bluetooth.stopScanning()
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif function == 'status':
channelData = bluetooth.spectrumToChannels()
responsedict['channeldata'] = channelData
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
s.send_header("Content-Encoding", "gzip")
s.end_headers()
except:
pass
jsonstr = json.dumps(responsedict)
gzipBytes = gzipCompress(jsonstr)
# s.wfile.write(jsonstr.encode("UTF-8"))
try:
s.wfile.write(gzipBytes)
except:
pass
else:
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Unknown command'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path.startswith('/bluetooth/discovery'):
if not hasBluetooth:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Bluetooth not supported on this agent'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
function=s.path.replace('/bluetooth/discovery', '')
function = function.replace('/', '')
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
if function=='startp':
# Promiscuous with ubertooth
if hasUbertooth:
bluetooth.startDiscovery(True)
else:
responsedict['errcode'] = 2
responsedict['errmsg'] = 'Ubertooth not supported on this agent'
elif function == 'starta':
# Normal with Bluetooth
bluetooth.startDiscovery(False)
elif function == 'stop':
bluetooth.stopDiscovery()
elif function == 'clear':
# Device list accumulates in the bluetooth class over time
# If you want a fresh list every time, you need to clear the old list.
bluetooth.clearDeviceList()
# Add in successful response
responsedict['errcode'] = 0
responsedict['errmsg'] = ""
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif function == 'status':
# have to get the GPS:
gpsCoord = SparrowGPS()
if useMavlink:
gpsCoord.gpsInstalled = True
gpsCoord.gpsRunning = True
gpsCoord.isValid = mavlinkGPSThread.synchronized
gpsCoord.latitude = mavlinkGPSThread.latitude
gpsCoord.longitude = mavlinkGPSThread.longitude
gpsCoord.altitude = mavlinkGPSThread.altitude
gpsCoord.speed = mavlinkGPSThread.vehicle.getAirSpeed()
elif gpsEngine.gpsValid():
gpsCoord.copy(gpsEngine.lastCoord)
# errcode, devices = bluetooth.getDiscoveredDevices()
bluetooth.updateDeviceList()
bluetooth.deviceLock.acquire()
devdict = []
now = datetime.datetime.now()
for curKey in bluetooth.devices.keys():
curDevice = bluetooth.devices[curKey]
elapsedTime = now - curDevice.lastSeen
# This is a little bit of a hack for the BlueHydra side since it can take a while to see devices or have
# them show up in the db. For LE discovery scans this will always be pretty quick.
if elapsedTime.total_seconds() < 120:
curDevice.gps.copy(gpsCoord)
if curDevice.rssi >= curDevice.strongestRssi:
curDevice.strongestRssi = curDevice.rssi
curDevice.strongestgps.copy(gpsCoord)
entryDict = curDevice.toJsondict()
devdict.append(entryDict)
bluetooth.deviceLock.release()
responsedict['devices'] = devdict
else:
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Unknown command'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/bluetooth/running':
if not hasBluetooth:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Bluetooth not supported on this agent'
responsedict['hasbluetooth'] = hasBluetooth
responsedict['hasubertooth'] = hasUbertooth
responsedict['spectrumscanrunning'] = False
responsedict['discoveryscanrunning'] = False
responsedict['beaconrunning'] = False
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
responsedict['hasbluetooth'] = hasBluetooth
responsedict['hasubertooth'] = hasUbertooth
responsedict['spectrumscanrunning'] = bluetooth.scanRunning()
responsedict['discoveryscanrunning'] = bluetooth.discoveryRunning()
responsedict['beaconrunning'] = bluetooth.beaconRunning()
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/spectrum/hackrfstatus':
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
responsedict['hashackrf'] = hackrf.hasHackrf
responsedict['scan24running'] = hackrf.scanRunning24()
responsedict['scan5running'] = hackrf.scanRunning5()
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path.startswith('/spectrum/scan'):
if not hackrf.hasHackrf:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'HackRF is not supported on this agent'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
function=s.path.replace('/spectrum/scan', '')
function = function.replace('/', '')
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
if function=='start24':
hackrf.startScanning24()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
elif function == 'start5':
hackrf.startScanning5()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
elif function == 'stop':
hackrf.stopScanning()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
elif function == 'status':
if hackrf.scanRunning24():
channelData = hackrf.spectrum24ToChannels()
responsedict['scanrunning'] = hackrf.scanRunning24()
elif hackrf.scanRunning5():
channelData = hackrf.spectrum5ToChannels()
responsedict['scanrunning'] = hackrf.scanRunning5()
else:
channelData = {} # Shouldn't be here but just in case.
responsedict['scanrunning'] = False
responsedict['channeldata'] = channelData
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
s.send_header("Content-Encoding", "gzip")
s.end_headers()
jsonstr = json.dumps(responsedict)
gzipBytes = gzipCompress(jsonstr)
# s.wfile.write(jsonstr.encode("UTF-8"))
s.wfile.write(gzipBytes)
except:
pass
else:
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Unknown command'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/system/config':
cfgSettings = AgentConfigSettings()
cfgSettings.fromConfigFile('sparrowwifiagent.cfg')
responsedict = {}
responsedict['startup'] = cfgSettings.toJsondict()
if recordThread:
runningcfg.recordRunning = True
runningcfg.recordInterface = recordThread.interface
responsedict['running'] = runningcfg.toJsondict()
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path.startswith('/system/startrecord'):
recordinterface = s.path.replace('/system/startrecord/', '')
# Check that the specified interface is valid:
interfaces = WirelessEngine.getInterfaces()
if recordinterface in interfaces:
startRecord(recordinterface)
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
jsonstr = json.dumps(responsedict)
else:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'The requested interface was not found on the system.'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/system/stoprecord':
stopRecord()
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/startmonmode' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
inputstr = s.path.replace('/falcon/startmonmode/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
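# e.g. p.search('wlan0mon').group(1) -> 'wlan0mon'; anything after the first
# non-alphanumeric character is dropped, blocking shell-metacharacter injection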
try:
fieldValue = p.search(inputstr).group(1)
except:
fieldValue = ""
if len(fieldValue) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
retVal, errMsg = falconWiFiRemoteAgent.startMonitoringInterface(fieldValue)
responsedict = {}
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/stopmonmode' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
inputstr = s.path.replace('/falcon/stopmonmode/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
fieldValue = p.search(inputstr).group(1)
except:
fieldValue = ""
if len(fieldValue) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
retVal, errMsg = falconWiFiRemoteAgent.stopMonitoringInterface(fieldValue)
responsedict = {}
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/scanrunning' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
inputstr = s.path.replace('/falcon/scanrunning/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
fieldValue = p.search(inputstr).group(1)
except:
fieldValue = ""
if len(fieldValue) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
scanrunning = falconWiFiRemoteAgent.isScanRunning(fieldValue)
if scanrunning:
retVal = 0
errMsg = "scan for " + fieldValue + " is running"
else:
retVal = 1
errMsg = "scan for " + fieldValue + " is not running"
responsedict = {}
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/startscan' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
inputstr = s.path.replace('/falcon/startscan/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
fieldValue = p.search(inputstr).group(1)
except:
fieldValue = ""
if len(fieldValue) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
scanProc = falconWiFiRemoteAgent.startCapture(fieldValue)
if scanProc is not None:
retVal = 0
errMsg = ""
else:
retVal = -1
errMsg = "Unable to start scanning process."
responsedict = {}
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/stopscan' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
inputstr = s.path.replace('/falcon/stopscan/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
fieldValue = p.search(inputstr).group(1)
except:
fieldValue = ""
if len(fieldValue) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
retVal = falconWiFiRemoteAgent.stopCapture(fieldValue)
if retVal == 0:
errMsg = ""
else:
errMsg = "Unable to stop scanning process."
responsedict = {}
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/stopcrack' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
inputstr = s.path.replace('/falcon/stopcrack/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
curInterface = p.search(inputstr).group(1)
except:
curInterface = ""
if len(curInterface) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
try:
if curInterface in falconWiFiRemoteAgent.WEPCrackList:
falconWiFiRemoteAgent.WEPCrackList[curInterface].stopCrack()
falconWiFiRemoteAgent.WEPCrackList[curInterface].cleanupTempFiles()
del falconWiFiRemoteAgent.WEPCrackList[curInterface]
if curInterface in falconWiFiRemoteAgent.WPAPSKCrackList:
falconWiFiRemoteAgent.WPAPSKCrackList[curInterface].stopCrack()
falconWiFiRemoteAgent.WPAPSKCrackList[curInterface].cleanupTempFiles()
del falconWiFiRemoteAgent.WPAPSKCrackList[curInterface]
except:
pass
retVal = 0
errMsg = ""
responsedict = {}
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/crackstatus' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
if 'crackstatuswep' in s.path:
cracktype = 'wep'
else:
cracktype = 'wpapsk'
inputstr = s.path.replace('/falcon/crackstatus' + cracktype + '/', '')
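# e.g. '/falcon/crackstatuswep/wlan0mon' gives cracktype 'wep' and inputstr 'wlan0mon'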
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
curInterface = p.search(inputstr).group(1)
except:
curInterface = ""
if len(curInterface) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + curInterface
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
responsedict = {}
retVal = -1
errMsg = "Unable to find running crack."
try:
if cracktype == 'wep':
if curInterface in falconWiFiRemoteAgent.WEPCrackList:
wepCrack = falconWiFiRemoteAgent.WEPCrackList[curInterface]
retVal = 0
errMsg = ""
responsedict['isrunning'] = wepCrack.isRunning()
responsedict['ivcount'] = wepCrack.getIVCount()
responsedict['ssid'] = wepCrack.SSID
responsedict['crackedpasswords'] = wepCrack.getCrackedPasswords()
else:
if curInterface in falconWiFiRemoteAgent.WPAPSKCrackList:
wpaPSKCrack = falconWiFiRemoteAgent.WPAPSKCrackList[curInterface]
retVal = 0
errMsg = ""
responsedict['isrunning'] = wpaPSKCrack.isRunning()
hasHandshake = wpaPSKCrack.hasHandshake()
responsedict['hashandshake'] = hasHandshake
if hasHandshake:
# For WPAPSK, let's copy the capture file to our recording directory for recovery
dirname, filename = os.path.split(os.path.abspath(__file__))
fullpath, filename = wpaPSKCrack.copyCaptureFile(dirname + '/recordings')
responsedict['capturefile'] = filename
else:
responsedict['capturefile'] = ""
except:
pass
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/falcon/getscanresults':
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
if useMavlink:
gpsCoord = GPSStatus()
gpsCoord.gpsInstalled = True
gpsCoord.gpsRunning = True
gpsCoord.isValid = mavlinkGPSThread.synchronized
gpsCoord.latitude = mavlinkGPSThread.latitude
gpsCoord.longitude = mavlinkGPSThread.longitude
gpsCoord.altitude = mavlinkGPSThread.altitude
gpsCoord.speed = mavlinkGPSThread.vehicle.getAirSpeed()
retCode, errString, jsonstr = falconWiFiRemoteAgent.getNetworksAsJson(gpsCoord)
elif gpsEngine.gpsValid():
retCode, errString, jsonstr = falconWiFiRemoteAgent.getNetworksAsJson(gpsEngine.lastCoord)
if useRPILeds:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
else:
retCode, errString, jsonstr = falconWiFiRemoteAgent.getNetworksAsJson(None)
if useRPILeds:
# This just signals that the GPS isn't synced
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_HEARTBEAT)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/stopalldeauths' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
inputstr = s.path.replace('/falcon/stopalldeauths/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
fieldValue = p.search(inputstr).group(1)
except:
fieldValue = ""
if len(fieldValue) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
return
falconWiFiRemoteAgent.stopAllDeauths(fieldValue)
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ""
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/getalldeauths' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
responsedict = falconWiFiRemoteAgent.getAllDeauthsAsJsonDict()
# Add in successful response
responsedict['errcode'] = 0
responsedict['errmsg'] = ""
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
# Catch-all. Should never be here
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
except:
pass
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
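# NOTE: every endpoint above repeats the same build-responsedict / json.dumps /
# wfile.write sequence. A helper along these lines (hypothetical, not part of
# the original agent) would remove the duplication:
#
# def sendJsonResponse(s, responsedict):
#     jsonstr = json.dumps(responsedict)
#     try:
#         s.wfile.write(jsonstr.encode("UTF-8"))
#     except:
#         pass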
# ----------------- Bluetooth check -----------------------------
def checkForBluetooth():
global hasBluetooth
global hasUbertooth
global bluetooth
numBtAdapters = len(SparrowBluetooth.getBluetoothInterfaces())
if numBtAdapters > 0:
hasBluetooth = True
if SparrowBluetooth.getNumUbertoothDevices() > 0:
#SparrowBluetooth.ubertoothStopSpecan()
errcode, errmsg = SparrowBluetooth.hasUbertoothTools()
# errcode, errmsg = SparrowBluetooth.ubertoothOnline()
if errcode == 0:
hasUbertooth = True
bluetooth = SparrowBluetooth()
if hasBluetooth:
print("Found bluetooth hardware. Bluetooth capabilities enabled.")
else:
print("Bluetooth hardware not found. Bluetooth capabilities disabled.")
if hasUbertooth:
print("Found ubertooth hardware and software. Ubertooth capabilities enabled.")
else:
print("Ubertooth hardware and/or software not found. Ubertooth capabilities disabled.")
# ----------------- Main -----------------------------
if __name__ == '__main__':
argparser = argparse.ArgumentParser(description='Sparrow-wifi agent')
argparser.add_argument('--port', help='Port for HTTP server to listen on. Default is 8020.', default=8020, required=False)
argparser.add_argument('--allowedips', help="IP addresses allowed to connect to this agent. Default is any. This can be a comma-separated list for multiple IP addresses", default='', required=False)
argparser.add_argument('--staticcoord', help="Use user-defined lat,long,altitude(m) rather than GPS. Ex: 40.1,-75.3,150", default='', required=False)
argparser.add_argument('--mavlinkgps', help="Use Mavlink (drone) for GPS. Options are: '3dr' for a Solo, 'sitl' for local simulator, or full connection string ('udp/tcp:<ip>:<port>' such as: 'udp:10.1.1.10:14550')", default='', required=False)
argparser.add_argument('--sendannounce', help="Send a UDP broadcast packet on the specified port to announce presence", action='store_true', default=False, required=False)
argparser.add_argument('--userpileds', help="Use RPi LEDs to signal state. Red=GPS [off=None,blinking=Unsynchronized,solid=synchronized], Green=Agent Running [On=Running, blinking=servicing HTTP request]", action='store_true', default=False, required=False)
argparser.add_argument('--recordinterface', help="Automatically start recording locally with the given wireless interface (headless mode) in a recordings directory", default='', required=False)
argparser.add_argument('--ignorecfg', help="Don't load any config files (useful for overriding and/or testing)", action='store_true', default=False, required=False)
argparser.add_argument('--cfgfile', help="Use the specified config file rather than the default sparrowwifiagent.cfg file", default='', required=False)
argparser.add_argument('--allowcors', help="Allow Cross Domain Resource Sharing", action='store_true', default=False, required=False)
argparser.add_argument('--delaystart', help="Wait <delaystart> seconds before initializing", default=0, required=False)
argparser.add_argument('--debughttp', help="Print each URL request", action='store_true', default=False, required=False)
args = argparser.parse_args()
if len(args.staticcoord) > 0:
coord_array = args.staticcoord.split(",")
if len(coord_array) < 3:
print("ERROR: Provided static coordinates are not in the format latitude,longitude,altitude.")
exit(1)
usingStaticGPS = True
gpsEngine = GPSEngineStatic(float(coord_array[0]), float(coord_array[1]), float(coord_array[2]))
else:
usingStaticGPS = False
gpsEngine = GPSEngine()
debugHTTP = args.debughttp
if os.geteuid() != 0:
print("ERROR: You need to have root privileges to run this script. Please try again, this time using 'sudo'. Exiting.\n")
exit(2)
# Code to add paths
dirname, filename = os.path.split(os.path.abspath(__file__))
if dirname not in sys.path:
sys.path.insert(0, dirname)
# Check for Falcon offensive plugin
pluginsdir = dirname+'/plugins'
if os.path.exists(pluginsdir):
if pluginsdir not in sys.path:
sys.path.insert(0,pluginsdir)
if os.path.isfile(pluginsdir + '/falconwifi.py'):
from falconwifi import FalconWiFiRemoteAgent, WPAPSKCrack, WEPCrack
hasFalcon = True
falconWiFiRemoteAgent = FalconWiFiRemoteAgent()
if not falconWiFiRemoteAgent.toolsInstalled():
print("ERROR: aircrack suite of tools does not appear to be installed. Please install it.")
exit(4)
checkForBluetooth()
# See if we have a config file:
dirname, filename = os.path.split(os.path.abspath(__file__))
settings = {}
runningcfg=AgentConfigSettings()
if len(args.cfgfile) == 0:
cfgFile = dirname + '/sparrowwifiagent.cfg'
else:
cfgFile = args.cfgfile
# Since it's user-specified, let's see if it exists.
if not os.path.isfile(cfgFile):
print("ERROR: Unable to find the specified config file.")
exit(3)
if os.path.isfile(cfgFile) and (not args.ignorecfg):
cfgParser = configparser.ConfigParser()
try:
cfgParser.read(cfgFile)
section="agent"
options = cfgParser.options(section)
for option in options:
try:
if (option == 'sendannounce' or option == 'userpileds' or
option == 'cancelstart' or option == 'allowcors'):
settings[option] = stringtobool(cfgParser.get(section, option))
else:
settings[option] = cfgParser.get(section, option)
except:
print("exception on %s!" % option)
settings[option] = None
except:
print("ERROR: Unable to read config file: ", cfgFile)
exit(1)
# Set up parameters
if 'cancelstart' in settings.keys():
if settings['cancelstart']:
exit(0)
delayStart = int(args.delaystart)
if delayStart > 0:
sleep(delayStart)
runningcfg.cancelStart = False
if 'port' not in settings.keys():
# args.port is a string when supplied on the command line (argparse has no type=int here)
port = int(args.port)
else:
port = int(settings['port'])
runningcfg.port = port
if 'sendannounce' not in settings.keys():
sendannounce = args.sendannounce
else:
sendannounce = settings['sendannounce']
runningcfg.announce = sendannounce
if 'userpileds' not in settings.keys():
useRPILeds = args.userpileds
else:
useRPILeds = settings['userpileds']
runningcfg.useRPiLEDs = useRPILeds
if 'allowedips' not in settings.keys():
allowedIPstr = args.allowedips
else:
allowedIPstr = settings['allowedips']
runningcfg.ipAllowedList = allowedIPstr
if 'mavlinkgps' not in settings.keys():
mavlinksetting = args.mavlinkgps
else:
mavlinksetting = settings['mavlinkgps']
runningcfg.mavlinkGPS = mavlinksetting
if 'recordinterface' not in settings.keys():
recordinterface = args.recordinterface
else:
recordinterface = settings['recordinterface']
runningcfg.recordInterface = recordinterface
if 'allowcors' not in settings.keys():
allowCors = args.allowcors
else:
allowCors = settings['allowcors']
runningcfg.allowCors = allowCors
print("Allow CORS: " + str(runningcfg.allowCors))
# Now start logic
if runningcfg.useRPiLEDs:
# One extra check that the LED's are really present
runningcfg.useRPiLEDs = SparrowRPi.hasLights()
if not runningcfg.useRPiLEDs:
# we changed state. Print warning
print('WARNING: RPi LEDs were requested but were not found on this platform.')
# Now check again:
if runningcfg.useRPiLEDs:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_OFF)
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_OFF)
buildAllowedIPs(allowedIPstr)
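# e.g. with --allowedips 10.0.0.1,10.0.0.2 only those two clients are served;
# an empty string (the default) allows any address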
if len(runningcfg.mavlinkGPS) > 0 and hasDroneKit:
vehicle = SparrowDroneMavlink()
print('Connecting to ' + runningcfg.mavlinkGPS)
connected = False
synchronized = False
if runningcfg.useRPiLEDs:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_OFF)
# If we're in drone gps mode, wait for the drone to be up and gps synchronized before starting.
while (not connected) or (not synchronized):
if not connected:
if runningcfg.mavlinkGPS == '3dr':
retVal = vehicle.connectToSolo()
elif (runningcfg.mavlinkGPS == 'sitl'):
retVal = vehicle.connectToSimulator()
else:
retVal = vehicle.connect(runningcfg.mavlinkGPS)
connected = retVal
if connected:
if runningcfg.useRPiLEDs:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_HEARTBEAT)
print('Mavlink connected.')
print('Current GPS Info:')
# get synchronized flag and position
synchronized, latitude, longitude, altitude = vehicle.getGlobalGPS()
print('Synchronized: ' + str(synchronized))
print('Latitude: ' + str(latitude))
print('Longitude: ' + str(longitude))
print('Altitude (m): ' + str(altitude))
print('Heading: ' + str(vehicle.getHeading()))
if synchronized:
useMavlink = True
mavlinkGPSThread = MavlinkGPSThread(vehicle)
mavlinkGPSThread.start()
print('Mavlink GPS synchronized. Continuing.')
else:
print('Mavlink GPS not synchronized yet. Waiting...')
sleep(2)
else:
print("ERROR: Unable to connect to " + mavlinksetting + '. Retrying...')
sleep(2)
if runningcfg.useRPiLEDs:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
else:
# No mavlink specified. Check the local GPS.
if GPSEngine.GPSDRunning():
if runningcfg.useRPiLEDs:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_HEARTBEAT)
gpsEngine.start()
if usingStaticGPS:
print('[' + curTime.strftime("%m/%d/%Y %H:%M:%S") + "] Using static lat/long/altitude(m): " + args.staticcoord)
else:
print('[' + curTime.strftime("%m/%d/%Y %H:%M:%S") + "] Local gpsd found. Providing GPS coordinates when synchronized.")
if useRPILeds:
sleep(1)
if gpsEngine.gpsValid():
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
else:
print('[' + curTime.strftime("%m/%d/%Y %H:%M:%S") + "] No local gpsd running. No GPS data will be provided.")
if runningcfg.announce:
startAnnounceThread()
if len(runningcfg.recordInterface) > 0:
startRecord(runningcfg.recordInterface)
# -------------- Run HTTP Server / Main Loop--------------
server = SparrowWiFiAgent()
server.run(runningcfg.port)
# -------------- This is the shutdown process --------------
if mavlinkGPSThread:
mavlinkGPSThread.signalStop = True
print('Waiting for mavlink GPS thread to terminate...')
while (mavlinkGPSThread.threadRunning):
sleep(0.2)
stopRecord()
if hasDroneKit and useMavlink and vehicle:
vehicle.close()
stopAnnounceThread()
if runningcfg.useRPiLEDs:
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_OFF)
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
#for curKey in lockList.keys():
# curLock = lockList[curKey]
# try:
# curLock.release()
# except:
# pass
# os._exit(0)
exit(0)
|
gpl-3.0
| 6,750,538,930,191,065,000
| 39.385724
| 300
| 0.488999
| false
| 4.707833
| false
| false
| false
|
roxxup/PartialTuring
|
WebcamSound.py
|
1
|
3038
|
import threading
from threading import Thread
import cv2
import sys
#import wikipedia
#from chatterbot import ChatBot
import shlex, subprocess
import speech_recognition as sr
import pyvona
from googlesearch import GoogleSearch
import xml.etree.ElementTree as ET
import requests
cascPath = sys.argv[1]
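# NOTE: wikileaks() below depends on the 'wikipedia' import commented out above;
# re-enable that import before calling it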
def wikileaks(string):
string = wikipedia.summary(string, sentences=1)
chatvoice(string)
def speak():
# obtain audio from the microphone
r = sr.Recognizer()
with sr.Microphone() as source:
r.adjust_for_ambient_noise(source) # listen for 1 second to calibrate the energy threshold for ambient noise levels
print("Say something!")
audio = r.listen(source)
# recognize speech using Google Speech Recognition
try:
# for testing purposes, we're just using the default API key
# to use another API key, use `r.recognize_google(audio, key="GOOGLE_SPEECH_RECOGNITION_API_KEY")`
# instead of `r.recognize_google(audio)`
string = r.recognize_google(audio)
print "you said "+string
return string
except sr.UnknownValueError:
print("Google Speech Recognition could not understand audio")
except sr.RequestError as e:
print("Could not request results from Google Speech Recognition service; {0}".format(e))
def Google1(string):
gs = GoogleSearch(string)
for hit in gs.top_results():
#send(hit[u'content'])
chatvoice(hit[u'content'])
break
def chatvoice(string):
v = pyvona.create_voice('username','password')
#v.region('en-IN')
#print v.list_voices()
v.speak(string)
#v.speak(a)
def intelbot(string):
payload = {'input':string,'botid':'9fa364f2fe345a10'}
r = requests.get("http://fiddle.pandorabots.com/pandora/talk-xml", params=payload)
for child in ET.fromstring(r.text):
if child.tag == "that":
chatvoice(child.text)
def Camera():
faceCascade = cv2.CascadeClassifier(cascPath)
video_capture = cv2.VideoCapture(1)
while True:
# Capture frame-by-frame
ret, frame = video_capture.read()
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
faces = faceCascade.detectMultiScale(
gray,
scaleFactor=1.1,
minNeighbors=5,
minSize=(30, 30),
#flags=cv2.cv.CV_HAAR_SCALE_IMAGE
flags = 0
)
# Draw a rectangle around the faces
for (x, y, w, h) in faces:
cv2.rectangle(gray, (x, y), (x+w, y+h), (0, 255, 0), 2)
# Display the resulting frame
cv2.imshow('Video', gray)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# When everything is done, release the capture
video_capture.release()
cv2.destroyAllWindows()
def Sound():
while True:
takeString = speak()
# speak() returns None when recognition fails; only respond to real input
if takeString:
intelbot(takeString)
if __name__ == '__main__':
Thread(target = Camera).start()
Thread(target = Sound).start()
|
gpl-3.0
| 5,042,716,747,227,126,000
| 26.369369
| 123
| 0.628045
| false
| 3.544924
| false
| false
| false
|
yohanyee/simple-neural-net
|
classes/example_pipelines.py
|
1
|
1478
|
import numpy as np
from data import *
from construct import *
from train import *
from hippocampi_to_patches import *
class DigitsPipeline(object):
def __init__(self):
self.D = Data()
self.D.load_digits_data()
self.D.reshape([16,16],[1])
self.N = FeedForwardNetwork()
self.N.auto_add_layer_fullyconnected(NeuronLayer(self.D.input_shape, neuron_activation='identity'))
self.N.auto_add_layer_fullyconnected(NeuronLayer(10, neuron_activation='logistic', neuron_bias=True))
self.N.auto_add_layer_fullyconnected(NeuronLayer(self.D.output_shape, neuron_activation='logistic', neuron_bias=True))
self.Trainer = BackpropagationTrainer(self.D, self.N)
class DigitsConvolutionPipeline(object):
def __init__(self):
self.D = Data()
self.D.load_digits_data()
self.D.reshape([16,16],[1])
self.N = FeedForwardNetwork()
self.N.auto_add_layer_fullyconnected(NeuronLayer(self.D.input_shape, neuron_activation='identity'))
self.N.auto_add_layer_convolution(NeuronLayer([4,8,8], neuron_activation='logistic', neuron_bias=True))
self.N.auto_add_layer_fullyconnected(NeuronLayer(10, neuron_activation='logistic', neuron_bias=True))
self.N.auto_add_layer_fullyconnected(NeuronLayer(self.D.output_shape, neuron_activation='logistic', neuron_bias=True))
self.Trainer = BackpropagationTrainer(self.D, self.N)
|
mit
| -7,236,642,412,433,993,000
| 42.470588
| 126
| 0.673207
| false
| 3.485849
| false
| false
| false
|
anurag03/integration_tests
|
cfme/tests/services/test_provision_stack.py
|
1
|
9381
|
import fauxfactory
import pytest
from widgetastic_patternfly import DropdownItemDisabled
from cfme import test_requirements
from cfme.cloud.provider import CloudProvider
from cfme.cloud.provider.azure import AzureProvider
from cfme.cloud.provider.ec2 import EC2Provider
from cfme.cloud.provider.openstack import OpenStackProvider
from cfme.services.myservice import MyService
from cfme.services.service_catalogs import ServiceCatalogs
from cfme.utils.blockers import BZ
from cfme.utils.conf import credentials
from cfme.utils.datafile import load_data_file
from cfme.utils.path import orchestration_path
pytestmark = [
pytest.mark.meta(server_roles='+automate'),
pytest.mark.ignore_stream('upstream'),
test_requirements.stack,
pytest.mark.tier(2),
pytest.mark.usefixtures("setup_provider_modscope"),
pytest.mark.provider([CloudProvider],
required_fields=[['provisioning', 'stack_provisioning']],
scope='module'),
]
@pytest.fixture
def stack_data(appliance, provider, provisioning):
random_base = fauxfactory.gen_alphanumeric()
stackname = 'test{}'.format(random_base)
vm_name = 'test-{}'.format(random_base)
stack_timeout = '20'
if provider.one_of(AzureProvider):
try:
template = provider.data.templates.small_template
vm_user = credentials[template.creds].username
vm_password = credentials[template.creds].password
except AttributeError:
pytest.skip('Could not find small_template or credentials for {}'.format(provider.name))
stack_data = {
'stack_name': stackname,
'resource_group': provisioning.get('resource_group'),
'deploy_mode': provisioning.get('mode'),
'location': provisioning.get('region_api'),
'vmname': vm_name,
'vmuser': vm_user,
'vmpassword': vm_password,
'vmsize': provisioning.get('vm_size'),
'cloudnetwork': provisioning.get('cloud_network').split()[0],
'cloudsubnet': provisioning.get('cloud_subnet').split()[0]
}
elif provider.one_of(OpenStackProvider):
stack_prov = provisioning['stack_provisioning']
stack_data = {
'stack_name': stackname,
'key': stack_prov['key_name'],
'flavor': stack_prov['instance_type'],
}
else:
stack_prov = provisioning['stack_provisioning']
if appliance.version < '5.9':
stack_data = {
'stack_name': stackname,
'stack_timeout': stack_timeout,
'virtualMachineName': vm_name,
'KeyName': stack_prov['key_name'],
'InstanceType': stack_prov['instance_type'],
'SSHLocation': provisioning['ssh_location']
}
else:
stack_data = {
'stack_name': stackname,
'stack_timeout': stack_timeout,
'param_virtualMachineName': vm_name,
'param_KeyName': stack_prov['key_name']
}
return stack_data
@pytest.fixture
def dialog_name():
return 'dialog_{}'.format(fauxfactory.gen_alphanumeric())
@pytest.fixture
def template(appliance, provider, provisioning, dialog_name, stack):
template_group = provisioning['stack_provisioning']['template_type']
template_type = provisioning['stack_provisioning']['template_type_dd']
template_name = fauxfactory.gen_alphanumeric()
file = provisioning['stack_provisioning']['data_file']
data_file = load_data_file(str(orchestration_path.join(file)))
content = data_file.read().replace('CFMETemplateName', template_name)
collection = appliance.collections.orchestration_templates
template = collection.create(template_group=template_group, template_name=template_name,
template_type=template_type, description="my template",
content=content)
template.create_service_dialog_from_template(dialog_name)
yield template
if stack.exists:
stack.retire_stack()
if template.exists:
template.delete()
@pytest.fixture
def catalog(appliance):
cat_name = "cat_{}".format(fauxfactory.gen_alphanumeric())
catalog = appliance.collections.catalogs.create(name=cat_name, description="my catalog")
yield catalog
if catalog.exists:
catalog.delete()
@pytest.fixture
def catalog_item(appliance, dialog, catalog, template, provider, dialog_name):
item_name = fauxfactory.gen_alphanumeric()
catalog_item = appliance.collections.catalog_items.create(
appliance.collections.catalog_items.ORCHESTRATION,
name=item_name,
description="my catalog",
display_in=True,
catalog=catalog,
dialog=dialog_name,
orch_template=template,
provider_name=provider.name,
)
yield catalog_item
if catalog_item.exists:
catalog_item.delete()
@pytest.fixture
def service_catalogs(appliance, catalog_item, stack_data):
return ServiceCatalogs(appliance, catalog_item.catalog, catalog_item.name, stack_data)
@pytest.fixture
def stack(appliance, provider, stack_data):
return appliance.collections.cloud_stacks.instantiate(stack_data['stack_name'],
provider=provider)
@pytest.fixture
def order_stack(appliance, request, service_catalogs, stack):
"""Fixture which prepares provisioned stack"""
provision_request = service_catalogs.order()
provision_request.wait_for_request(method='ui')
request.addfinalizer(lambda: _cleanup(appliance, provision_request))
assert provision_request.is_succeeded()
stack.wait_for_exists()
return provision_request, stack
def _cleanup(appliance=None, provision_request=None, service=None):
if not service:
last_message = provision_request.get_request_row_from_ui()['Last Message'].text
service_name = last_message.split()[2].strip('[]')
myservice = MyService(appliance, service_name)
else:
myservice = service
if myservice.exists:
myservice.delete()
@pytest.mark.meta(blockers=[BZ(1628384, forced_streams=['5.10'])])
def test_provision_stack(order_stack):
"""Tests stack provisioning
Metadata:
test_flag: provision
"""
provision_request, stack = order_stack
assert provision_request.is_succeeded()
@pytest.mark.meta(blockers=[BZ(1628384, forced_streams=['5.10'])])
def test_reconfigure_service(appliance, service_catalogs, request):
"""Tests service reconfiguring
Metadata:
test_flag: provision
"""
provision_request = service_catalogs.order()
provision_request.wait_for_request(method='ui')
last_message = provision_request.get_request_row_from_ui()['Last Message'].text
service_name = last_message.split()[2].strip('[]')
myservice = MyService(appliance, service_name)
request.addfinalizer(lambda: _cleanup(service=myservice))
assert provision_request.is_succeeded()
myservice.reconfigure_service()
@pytest.mark.uncollectif(lambda provider: provider.one_of(EC2Provider),
reason='EC2 locks template between Stack order and template removal')
@pytest.mark.meta(blockers=[BZ(1628384, forced_streams=['5.10'])])
def test_remove_non_read_only_orch_template(appliance, provider, template, service_catalogs,
request):
"""
Steps:
1. Order Service which uses Orchestration template
2. Try to remove this Orchestration template
Metadata:
test_flag: provision
"""
provision_request = service_catalogs.order()
request.addfinalizer(lambda: _cleanup(appliance, provision_request))
template.delete()
assert (provision_request.rest.message == 'Service_Template_Provisioning failed' or
provision_request.status == 'Error')
assert not template.exists
@pytest.mark.uncollectif(lambda provider: not provider.one_of(EC2Provider),
reason='Only EC2 locks orchestration template')
@pytest.mark.meta(blockers=[BZ(1628384, forced_streams=['5.10'])])
def test_remove_read_only_orch_template_neg(appliance, provider, template, service_catalogs,
request):
"""
For RHOS/Azure the original template will remain stand-alone while the stack links
to a new template read from the RHOS/Azure provider. Hence we can delete used orchestration
template for RHOS/Azure.
Steps:
1. Order Service which uses Orchestration template
2. Try to remove this Orchestration template
Metadata:
test_flag: provision
"""
provision_request = service_catalogs.order()
request.addfinalizer(lambda: _cleanup(appliance, provision_request))
provision_request.wait_for_request(method='ui')
with pytest.raises(DropdownItemDisabled):
template.delete()
@pytest.mark.meta(blockers=[BZ(1628384, forced_streams=['5.10'])])
def test_retire_stack(order_stack):
"""Tests stack retirement.
Steps:
1. Retire Orchestration stack
2. Verify it doesn't exist in UI
Metadata:
test_flag: provision
"""
_, stack = order_stack
stack.retire_stack()
assert not stack.exists, "Stack still visible in UI"
|
gpl-2.0
| -7,078,827,523,268,368,000
| 35.933071
| 100
| 0.663895
| false
| 4.057526
| true
| false
| false
|
Telestream/telestream-cloud-python-sdk
|
telestream_cloud_flip_sdk/telestream_cloud_flip/models/extra_file.py
|
1
|
4957
|
# coding: utf-8
"""
Flip API
Flip # noqa: E501
The version of the OpenAPI document: 3.1
Contact: cloudsupport@telestream.net
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from telestream_cloud_flip.configuration import Configuration
class ExtraFile(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'tag': 'str',
'file_size': 'int',
'file_name': 'str'
}
attribute_map = {
'tag': 'tag',
'file_size': 'file_size',
'file_name': 'file_name'
}
def __init__(self, tag=None, file_size=None, file_name=None, local_vars_configuration=None): # noqa: E501
"""ExtraFile - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._tag = None
self._file_size = None
self._file_name = None
self.discriminator = None
self.tag = tag
self.file_size = file_size
self.file_name = file_name
@property
def tag(self):
"""Gets the tag of this ExtraFile. # noqa: E501
:return: The tag of this ExtraFile. # noqa: E501
:rtype: str
"""
return self._tag
@tag.setter
def tag(self, tag):
"""Sets the tag of this ExtraFile.
:param tag: The tag of this ExtraFile. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and tag is None: # noqa: E501
raise ValueError("Invalid value for `tag`, must not be `None`") # noqa: E501
self._tag = tag
@property
def file_size(self):
"""Gets the file_size of this ExtraFile. # noqa: E501
:return: The file_size of this ExtraFile. # noqa: E501
:rtype: int
"""
return self._file_size
@file_size.setter
def file_size(self, file_size):
"""Sets the file_size of this ExtraFile.
:param file_size: The file_size of this ExtraFile. # noqa: E501
:type: int
"""
if self.local_vars_configuration.client_side_validation and file_size is None: # noqa: E501
raise ValueError("Invalid value for `file_size`, must not be `None`") # noqa: E501
self._file_size = file_size
@property
def file_name(self):
"""Gets the file_name of this ExtraFile. # noqa: E501
:return: The file_name of this ExtraFile. # noqa: E501
:rtype: str
"""
return self._file_name
@file_name.setter
def file_name(self, file_name):
"""Sets the file_name of this ExtraFile.
:param file_name: The file_name of this ExtraFile. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and file_name is None: # noqa: E501
raise ValueError("Invalid value for `file_name`, must not be `None`") # noqa: E501
self._file_name = file_name
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ExtraFile):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, ExtraFile):
return True
return self.to_dict() != other.to_dict()
|
mit
| 4,721,568,431,818,250,000
| 27.164773
| 110
| 0.556788
| false
| 3.934127
| true
| false
| false
|
pombreda/https-gitorious.org-appstream-software-center
|
softwarecenter/ui/gtk3/widgets/buttons.py
|
1
|
21538
|
# Copyright (C) 2011 Canonical
#
# Authors:
# Matthew McGowan
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; version 3.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import cairo
from gi.repository import Gtk, Gdk, Pango, GObject, GdkPixbuf
from gettext import gettext as _
from softwarecenter.backend import get_install_backend
from softwarecenter.db.application import AppDetails
from softwarecenter.enums import Icons
from softwarecenter.ui.gtk3.em import StockEms, em
from softwarecenter.ui.gtk3.drawing import darken
from softwarecenter.ui.gtk3.widgets.stars import Star, StarSize
_HAND = Gdk.Cursor.new(Gdk.CursorType.HAND2)
def _update_icon(image, icon, icon_size):
if isinstance(icon, GdkPixbuf.Pixbuf):
image = image.set_from_pixbuf(icon)
elif isinstance(icon, Gtk.Image):
image = image.set_from_pixbuf(icon.get_pixbuf())
elif isinstance(icon, str):
image = image.set_from_icon_name(icon, icon_size)
else:
msg = "Acceptable icon values: None, GdkPixbuf, GtkImage or str"
raise TypeError(msg)
return image
class _Tile(object):
MIN_WIDTH = em(7)
def __init__(self):
self.set_focus_on_click(False)
self.set_relief(Gtk.ReliefStyle.NONE)
self.box = Gtk.Box.new(Gtk.Orientation.VERTICAL, 0)
self.box.set_size_request(self.MIN_WIDTH, -1)
self.add(self.box)
def build_default(self, label, icon, icon_size):
if icon is not None:
if isinstance(icon, Gtk.Image):
self.image = icon
else:
self.image = Gtk.Image()
_update_icon(self.image, icon, icon_size)
self.box.pack_start(self.image, True, True, 0)
self.label = Gtk.Label.new(label)
self.box.pack_start(self.label, True, True, 0)
class TileButton(Gtk.Button, _Tile):
def __init__(self):
Gtk.Button.__init__(self)
_Tile.__init__(self)
class TileToggleButton(Gtk.RadioButton, _Tile):
def __init__(self):
Gtk.RadioButton.__init__(self)
self.set_mode(False)
_Tile.__init__(self)
class LabelTile(TileButton):
MIN_WIDTH = -1
def __init__(self, label, icon, icon_size=Gtk.IconSize.MENU):
TileButton.__init__(self)
self.build_default(label, icon, icon_size)
self.label.set_line_wrap(True)
context = self.label.get_style_context()
context.add_class("label-tile")
self.connect("enter-notify-event", self.on_enter)
self.connect("leave-notify-event", self.on_leave)
def do_draw(self, cr):
cr.save()
A = self.get_allocation()
if self.has_focus():
Gtk.render_focus(self.get_style_context(),
cr,
3, 3,
A.width - 6, A.height - 6)
for child in self:
self.propagate_draw(child, cr)
cr.restore()
def on_enter(self, widget, event):
window = self.get_window()
window.set_cursor(_HAND)
def on_leave(self, widget, event):
window = self.get_window()
window.set_cursor(None)
class CategoryTile(TileButton):
def __init__(self, label, icon, icon_size=Gtk.IconSize.DIALOG):
TileButton.__init__(self)
self.set_size_request(em(8), -1)
self.build_default(label, icon, icon_size)
self.label.set_justify(Gtk.Justification.CENTER)
self.label.set_alignment(0.5, 0.0)
self.label.set_line_wrap(True)
self.box.set_border_width(StockEms.SMALL)
context = self.label.get_style_context()
context.add_class("category-tile")
self.connect("enter-notify-event", self.on_enter)
self.connect("leave-notify-event", self.on_leave)
def do_draw(self, cr):
cr.save()
A = self.get_allocation()
if self.has_focus():
Gtk.render_focus(self.get_style_context(),
cr,
3, 3,
A.width - 6, A.height - 6)
for child in self:
self.propagate_draw(child, cr)
cr.restore()
def on_enter(self, widget, event):
window = self.get_window()
window.set_cursor(_HAND)
def on_leave(self, widget, event):
window = self.get_window()
window.set_cursor(None)
_global_featured_tile_width = em(11)
class FeaturedTile(TileButton):
INSTALLED_OVERLAY_SIZE = 22
_MARKUP = '<b><small>%s</small></b>'
def __init__(self, helper, doc, icon_size=48):
TileButton.__init__(self)
self._pressed = False
label = helper.get_appname(doc)
icon = helper.get_icon_at_size(doc, icon_size, icon_size)
stats = helper.get_review_stats(doc)
helper.update_availability(doc)
helper.connect("needs-refresh", self._on_needs_refresh, doc, icon_size)
self.is_installed = helper.is_installed(doc)
self._overlay = helper.icons.load_icon(Icons.INSTALLED_OVERLAY,
self.INSTALLED_OVERLAY_SIZE,
0) # flags
self.box.set_orientation(Gtk.Orientation.HORIZONTAL)
self.box.set_spacing(StockEms.SMALL)
self.content_left = Gtk.Box.new(Gtk.Orientation.VERTICAL,
StockEms.MEDIUM)
self.content_right = Gtk.Box.new(Gtk.Orientation.VERTICAL, 1)
self.box.pack_start(self.content_left, False, False, 0)
self.box.pack_start(self.content_right, False, False, 0)
self.image = Gtk.Image()
_update_icon(self.image, icon, icon_size)
self.content_left.pack_start(self.image, False, False, 0)
self.title = Gtk.Label.new(self._MARKUP %
GObject.markup_escape_text(label))
self.title.set_alignment(0.0, 0.5)
self.title.set_use_markup(True)
self.title.set_ellipsize(Pango.EllipsizeMode.END)
self.content_right.pack_start(self.title, False, False, 0)
categories = helper.get_categories(doc)
if categories is not None:
self.category = Gtk.Label.new('<span font_desc="%i">%s</span>' %
(em(0.6), GObject.markup_escape_text(categories)))
self.category.set_use_markup(True)
self.category.set_alignment(0.0, 0.5)
self.category.set_ellipsize(Pango.EllipsizeMode.END)
self.content_right.pack_start(self.category, False, False, 4)
stats_a11y = None
if stats is not None:
self.stars = Star(size=StarSize.SMALL)
self.stars.render_outline = True
self.stars.set_rating(stats.ratings_average)
self.rating_box = Gtk.Box.new(Gtk.Orientation.HORIZONTAL,
StockEms.SMALL)
self.rating_box.pack_start(self.stars, False, False, 0)
self.n_ratings = Gtk.Label.new(
'<span font_desc="%i"> (%i)</span>' % (
em(0.45), stats.ratings_total))
self.n_ratings.set_use_markup(True)
self.n_ratings.set_name("subtle-label")
self.n_ratings.set_alignment(0.0, 0.5)
self.rating_box.pack_start(self.n_ratings, False, False, 0)
self.content_right.pack_start(self.rating_box, False, False, 0)
# TRANSLATORS: this is an accessibility description for eg orca and
# is not visible in the ui
stats_a11y = _('%(stars)d stars - %(reviews)d reviews') % {
'stars': stats.ratings_average, 'reviews': stats.ratings_total}
# work out width tile needs to be to ensure ratings text is all
# visible
req_width = (self.stars.size_request().width +
self.image.size_request().width +
self.n_ratings.size_request().width +
StockEms.MEDIUM * 3
)
global _global_featured_tile_width
_global_featured_tile_width = max(_global_featured_tile_width,
req_width)
details = AppDetails(db=helper.db, doc=doc)
# TRANSLATORS: Free here means Gratis
price = details.price or _("Free")
if price == '0.00':
# TRANSLATORS: Free here means Gratis
price = _("Free")
# TRANSLATORS: Free here means Gratis
if price != _("Free"):
price = 'US$ ' + price
self.price = Gtk.Label.new(
'<span font_desc="%i">%s</span>' % (em(0.6), price))
self.price.set_use_markup(True)
self.price.set_name("subtle-label")
self.price.set_alignment(0.0, 0.5)
self.content_right.pack_start(self.price, False, False, 0)
self.set_name("featured-tile")
a11y_name = '. '.join([t
for t in [label, categories, stats_a11y, price] if t])
self.get_accessible().set_name(a11y_name)
backend = get_install_backend()
backend.connect("transaction-finished",
self.on_transaction_finished,
helper, doc)
self.connect("enter-notify-event", self.on_enter)
self.connect("leave-notify-event", self.on_leave)
self.connect("button-press-event", self.on_press)
self.connect("button-release-event", self.on_release)
def _on_needs_refresh(self, helper, pkgname, doc, icon_size):
icon = helper.get_icon_at_size(doc, icon_size, icon_size)
_update_icon(self.image, icon, icon_size)
def do_get_preferred_width(self):
w = _global_featured_tile_width
return w, w
def do_draw(self, cr):
cr.save()
A = self.get_allocation()
if self._pressed:
cr.translate(1, 1)
if self.has_focus():
Gtk.render_focus(self.get_style_context(),
cr,
3, 3,
A.width - 6, A.height - 6)
for child in self:
self.propagate_draw(child, cr)
if self.is_installed:
# paint installed tick overlay
if self.get_direction() != Gtk.TextDirection.RTL:
x = y = 36
else:
x = A.width - 56
y = 36
Gdk.cairo_set_source_pixbuf(cr, self._overlay, x, y)
cr.paint()
cr.restore()
def on_transaction_finished(self, backend, result, helper, doc):
trans_pkgname = str(result.pkgname)
pkgname = helper.get_pkgname(doc)
if trans_pkgname != pkgname:
return
# update installed state
helper.update_availability(doc)
self.is_installed = helper.is_installed(doc)
self.queue_draw()
def on_enter(self, widget, event):
window = self.get_window()
window.set_cursor(_HAND)
return True
def on_leave(self, widget, event):
window = self.get_window()
window.set_cursor(None)
self._pressed = False
return True
def on_press(self, widget, event):
self._pressed = True
def on_release(self, widget, event):
if not self._pressed:
return
self.emit("clicked")
self._pressed = False
class ChannelSelector(Gtk.Button):
PADDING = 0
def __init__(self, section_button):
Gtk.Button.__init__(self)
alignment = Gtk.Alignment.new(0.5, 0.5, 0.0, 1.0)
alignment.set_padding(self.PADDING, self.PADDING,
self.PADDING, self.PADDING)
self.add(alignment)
self.arrow = Gtk.Arrow.new(Gtk.ArrowType.DOWN, Gtk.ShadowType.IN)
alignment.add(self.arrow)
# vars
self.parent_style_type = Gtk.Toolbar
self.section_button = section_button
self.popup = None
self.connect("button-press-event", self.on_button_press)
def do_draw(self, cr):
cr.save()
parent_style = self.get_ancestor(self.parent_style_type)
context = parent_style.get_style_context()
color = darken(context.get_border_color(Gtk.StateFlags.ACTIVE), 0.2)
cr.set_line_width(1)
a = self.get_allocation()
lin = cairo.LinearGradient(0, 0, 0, a.height)
lin.add_color_stop_rgba(0.1,
color.red,
color.green,
color.blue,
0.0) # alpha
lin.add_color_stop_rgba(0.5,
color.red,
color.green,
color.blue,
1.0) # alpha
lin.add_color_stop_rgba(1.0,
color.red,
color.green,
color.blue,
0.1) # alpha
cr.set_source(lin)
cr.move_to(0.5, 0.5)
cr.rel_line_to(0, a.height)
cr.stroke()
cr.move_to(a.width - 0.5, 0.5)
cr.rel_line_to(0, a.height)
cr.stroke()
cr.restore()
for child in self:
self.propagate_draw(child, cr)
def on_button_press(self, button, event):
if self.popup is None:
self.build_channel_selector()
self.show_channel_sel_popup(self, event)
#~
#~ def on_style_updated(self, widget):
#~ context = widget.get_style_context()
#~ context.save()
#~ context.add_class("menu")
#~ bgcolor = context.get_background_color(Gtk.StateFlags.NORMAL)
#~ context.restore()
#~
#~ self._dark_color = darken(bgcolor, 0.5)
def show_channel_sel_popup(self, widget, event):
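# NOTE: the tuple parameter '(window, a)' below is Python 2-only syntax
# (removed by PEP 3113); this module targets Python 2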
def position_func(menu, (window, a)):
if self.get_direction() != Gtk.TextDirection.RTL:
tmpx = a.x
else:
tmpx = a.x + a.width - self.popup.get_allocation().width
x, y = window.get_root_coords(tmpx,
a.y + a.height)
return (x, y, False)
a = self.section_button.get_allocation()
window = self.section_button.get_window()
self.popup.popup(None, None, position_func, (window, a),
event.button, event.time)
def set_build_func(self, build_func):
self.build_func = build_func
def build_channel_selector(self):
self.popup = Gtk.Menu()
self.popup.set_name('toolbar-popup') # to set 'padding: 0;'
self.popup.get_style_context().add_class('primary-toolbar')
self.build_func(self.popup)
class SectionSelector(TileToggleButton):
MIN_WIDTH = em(5)
_MARKUP = '<small>%s</small>'
def __init__(self, label, icon, icon_size=Gtk.IconSize.DIALOG):
TileToggleButton.__init__(self)
markup = self._MARKUP % label
self.build_default(markup, icon, icon_size)
self.label.set_use_markup(True)
self.label.set_justify(Gtk.Justification.CENTER)
context = self.get_style_context()
context.add_class("section-sel-bg")
context = self.label.get_style_context()
context.add_class("section-sel")
self.draw_hint_has_channel_selector = False
self._alloc = None
self._bg_cache = {}
self.connect('size-allocate', self.on_size_allocate)
self.connect('style-updated', self.on_style_updated)
def on_size_allocate(self, *args):
alloc = self.get_allocation()
if (self._alloc is None or
self._alloc.width != alloc.width or
self._alloc.height != alloc.height):
self._alloc = alloc
# reset the bg cache
self._bg_cache = {}
def on_style_updated(self, *args):
# also reset the bg cache
self._bg_cache = {}
def _cache_bg_for_state(self, state):
a = self.get_allocation()
# tmp surface on which we render the button bg as per the gtk
# theme engine
_surf = cairo.ImageSurface(cairo.FORMAT_ARGB32,
a.width, a.height)
cr = cairo.Context(_surf)
context = self.get_style_context()
context.save()
context.set_state(state)
Gtk.render_background(context, cr,
-5, -5, a.width + 10, a.height + 10)
Gtk.render_frame(context, cr,
-5, -5, a.width + 10, a.height + 10)
del cr
# new surface which will be cached which
surf = cairo.ImageSurface(cairo.FORMAT_ARGB32,
a.width, a.height)
cr = cairo.Context(surf)
# gradient for masking
lin = cairo.LinearGradient(0, 0, 0, a.height)
lin.add_color_stop_rgba(0.0, 1, 1, 1, 0.1)
lin.add_color_stop_rgba(0.25, 1, 1, 1, 0.7)
lin.add_color_stop_rgba(0.5, 1, 1, 1, 1.0)
lin.add_color_stop_rgba(0.75, 1, 1, 1, 0.7)
lin.add_color_stop_rgba(1.0, 1, 1, 1, 0.1)
cr.set_source_surface(_surf, 0, 0)
cr.mask(lin)
del cr
# cache the resulting surf...
self._bg_cache[state] = surf
def do_draw(self, cr):
state = self.get_state_flags()
if self.get_active():
if state not in self._bg_cache:
self._cache_bg_for_state(state)
cr.set_source_surface(self._bg_cache[state], 0, 0)
cr.paint()
for child in self:
self.propagate_draw(child, cr)
class Link(Gtk.Label):
__gsignals__ = {
"clicked": (GObject.SignalFlags.RUN_LAST,
None,
(),)
}
def __init__(self, markup="", uri="none"):
Gtk.Label.__init__(self)
self._handler = 0
self.set_markup(markup, uri)
def set_markup(self, markup="", uri="none"):
markup = '<a href="%s">%s</a>' % (uri, markup)
Gtk.Label.set_markup(self, markup)
if self._handler == 0:
self._handler = self.connect("activate-link",
self.on_activate_link)
# synonyms for set_markup
def set_label(self, label):
return self.set_markup(label)
def set_text(self, text):
return self.set_markup(text)
def on_activate_link(self, uri, data):
self.emit("clicked")
def disable(self):
self.set_sensitive(False)
self.set_name("subtle-label")
def enable(self):
self.set_sensitive(True)
self.set_name("label")
class MoreLink(Gtk.Button):
_MARKUP = '<b>%s</b>'
_MORE = _("More")
def __init__(self):
Gtk.Button.__init__(self)
self.label = Gtk.Label()
self.label.set_padding(StockEms.SMALL, 0)
self.label.set_markup(self._MARKUP % _(self._MORE))
self.add(self.label)
self._init_event_handling()
context = self.get_style_context()
context.add_class("more-link")
def _init_event_handling(self):
self.connect("enter-notify-event", self.on_enter)
self.connect("leave-notify-event", self.on_leave)
def do_draw(self, cr):
if self.has_focus():
layout = self.label.get_layout()
a = self.get_allocation()
e = layout.get_pixel_extents()[1]
xo, yo = self.label.get_layout_offsets()
Gtk.render_focus(self.get_style_context(), cr,
xo - a.x - 3, yo - a.y - 1,
e.width + 6, e.height + 2)
for child in self:
self.propagate_draw(child, cr)
def on_enter(self, widget, event):
window = self.get_window()
window.set_cursor(_HAND)
def on_leave(self, widget, event):
window = self.get_window()
window.set_cursor(None)
def _build_channels_list(popup):
for i in range(3):
item = Gtk.MenuItem.new()
label = Gtk.Label.new("channel_name %s" % i)
box = Gtk.Box.new(Gtk.Orientation.HORIZONTAL, StockEms.MEDIUM)
box.pack_start(label, False, False, 0)
item.add(box)
item.show_all()
popup.attach(item, 0, 1, i, i + 1)
def get_test_buttons_window():
win = Gtk.Window()
win.set_size_request(200, 200)
vb = Gtk.VBox(spacing=12)
win.add(vb)
link = Link("<small>test link</small>", uri="www.google.co.nz")
vb.pack_start(link, False, False, 0)
button = Gtk.Button()
button.set_label("channels")
channels_button = ChannelSelector(button)
channels_button.parent_style_type = Gtk.Window
channels_button.set_build_func(_build_channels_list)
hb = Gtk.HBox()
hb.pack_start(button, False, False, 0)
hb.pack_start(channels_button, False, False, 0)
vb.pack_start(hb, False, False, 0)
win.show_all()
win.connect("destroy", Gtk.main_quit)
return win
if __name__ == "__main__":
win = get_test_buttons_window()
Gtk.main()
|
gpl-3.0
| -1,711,772,411,216,868,400
| 31.782344
| 79
| 0.562308
| false
| 3.589068
| false
| false
| false
|
lucperkins/heron
|
integration_test/src/python/integration_test/topology/one_spout_bolt_multi_tasks/one_spout_bolt_multi_tasks.py
|
1
|
1368
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright 2016 Twitter. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=missing-docstring
from heronpy.api.stream import Grouping
from integration_test.src.python.integration_test.core import TestTopologyBuilder
from integration_test.src.python.integration_test.common.bolt import IdentityBolt
from integration_test.src.python.integration_test.common.spout import ABSpout
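# Test topology: a 3-task ABSpout shuffle-grouped into a 3-task IdentityBolt,
# which re-emits each tuple unchanged (with 'word' declared as an optional
# output field).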
def one_spout_bolt_multi_tasks_builder(topology_name, http_server_url):
builder = TestTopologyBuilder(topology_name, http_server_url)
ab_spout = builder.add_spout("ab-spout", ABSpout, 3)
builder.add_bolt("identity-bolt", IdentityBolt,
inputs={ab_spout: Grouping.SHUFFLE},
par=3,
optional_outputs=['word'])
return builder.create_topology()
|
apache-2.0
| 9,151,532,406,057,182,000
| 39.235294
| 81
| 0.741228
| false
| 3.768595
| true
| false
| false
|
burtgulash/PaTrie
|
patrie.py
|
1
|
2824
|
#!/usr/bin/python3
class TNode:
def __init__(self, children):
self.children = children
def is_leaf(self):
return self.children is None
def __repr__(self):
return repr(self.children)
class PaTrie:
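    """
    Patricia (radix) trie: each edge carries a string label, and a word is
    contained iff the labels along some root-to-leaf path spell it out.
    An empty label marks a stored word that is a prefix of a longer one.
    """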
def __init__(self):
self.root = None
def __contains__(self, word):
cur = self.root
if cur is None:
return False
i = 0
while cur is not None and not cur.is_leaf():
for label, child in cur.children.items():
if len(label) == 0 and i < len(word):
continue
if word[i:i + len(label)] == label:
cur = child
i += len(label)
break
else:
return False
return i == len(word)
def insert(self, word):
cur = self.root
if cur is None:
            # store the leaf as TNode(None) so every child is a TNode; a bare
            # None child would crash the is_leaf() walk on a later insert
            self.root = TNode({word: TNode(None)})
return
i = 0
while not cur.is_leaf():
for label, child in cur.children.items():
cl = self.common_prefix_len(word[i:], label)
if cl:
if cl == len(label):
cur = child
i += len(label)
break
del cur.children[label]
cur.children[label[:cl]] = TNode({
label[cl:]: child,
word[i + cl:]: TNode(None),
})
return
else:
cur.children[word[i:]] = TNode(None)
return
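        # we walked down to an existing leaf: split it into an empty label
        # (the word already stored here) and the remaining suffix of word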
cur.children = {
"": TNode(None),
word[i:]: TNode(None)
}
def __str__(self):
s = []
def _str(tnode, sofar, label, prepend):
if tnode is None:
return
if tnode.is_leaf():
if label:
s.append(prepend + "+ " + label)
s.append(prepend + " {"+sofar+"}")
else:
s.append(prepend + "+ " + label)
for label, child in tnode.children.items():
_str(child, sofar + label, label, prepend + " ")
if self.root is not None:
_str(self.root, "", "", "")
return "\n".join(s)
def common_prefix_len(self, a, b):
i = 0
for x, y in zip(a, b):
if x == y:
i += 1
else:
break
return i
if __name__ == "__main__":
t = PaTrie()
words = "autobus", "auto", "abraka", "dabra", "abrakadabra", "honza", "honirna", "honicka", "hony", "ho", "h"
for w in words:
t.insert(w)
print("AFTER INSERTING", w)
print(t.root)
print(t)
print()
|
mit
| -5,929,119,131,326,029,000
| 24.672727
| 113
| 0.410765
| false
| 4.092754
| false
| false
| false
|
stringertheory/names
|
update_meter.py
|
1
|
4118
|
import sys
import string
import termcolor
import pymongo
import pronouncing
import unidecode
import distance
# a decorator that caches functions but stores results with a db backend would be nice.
def mongo_collection():
collection = pymongo.MongoClient().poetry.poems
return collection
def word_tokenize(sentence):
ascii_version = unidecode.unidecode(sentence.lower())
word_list = []
for word in ascii_version.split():
stripped = word.strip(string.punctuation).strip()
if stripped:
word_list.append(stripped)
return word_list
def phones_for_sentence(word_list):
approximate_words = []
phones_list = []
for word in word_list:
replacement, phones = distance.phones_for_word(word)
approximate_words.append(replacement)
# for now, just pick first alternative from list
phones_list.append(phones[0])
return approximate_words, phones_list
def stress_pattern(phones):
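    # pronouncing.stresses() keeps only the stress digits of a phone string,
    # e.g. "HH AH0 L OW1" -> "01" (0 unstressed, 1 primary, 2 secondary)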
    return pronouncing.stresses(''.join(phones))
collection = mongo_collection()
for index, document in enumerate(collection.find(no_cursor_timeout=True).sort("_id", pymongo.DESCENDING).batch_size(5), 1):
    if 'analyzed' in document or 'lines' not in document:
print('skipping %s' % document['_id'], file=sys.stderr)
continue
else:
print('analyzing %s' % document['_id'], file=sys.stderr)
normalized = [word_tokenize(sentence) for sentence in document['lines']]
approximate = []
phones = []
for sentence in normalized:
a, p = phones_for_sentence(sentence)
approximate.append(a)
phones.append(p)
stresses = [stress_pattern(sentence) for sentence in phones]
# zip up for easier storage
analyzed = []
for n, a, p in zip(normalized, approximate, phones):
sentence = []
for n_, a_, p_ in zip(n, a, p):
word = {
'ascii': n_,
'closest': a_,
'phones': p_,
}
sentence.append(word)
analyzed.append(sentence)
document['analyzed'] = analyzed
document['stresses'] = stresses
collection.update_one(
{'_id': document.get('_id')},
{'$set': {'analyzed': analyzed, 'stresses': stresses}},
)
print(index, 'inserted', document['_id'])
row_list = []
for signal, line in zip(stresses, document['lines']):
terminal = []
block_list = []
for i in signal:
if int(i):
block_list.append('<div class="diagram stressed"></div>')
terminal.append(termcolor.colored(' ', 'green', 'on_blue'))
else:
terminal.append(termcolor.colored(' ', 'green', 'on_yellow'))
block_list.append('<div class="diagram unstressed"></div>')
row = '<div class="diagram sentence">%s</div>' % ''.join(block_list)
row_list.append(row)
print(''.join(terminal), file=sys.stderr)
diagram = '<div class="diagram container">%s</div>' % ''.join(row_list)
with open('formatted/%s.html' % document['_id'], 'w') as outfile:
outfile.write('<html>')
outfile.write('<head>')
outfile.write('<link rel="stylesheet" type="text/css" href="diagram.css">')
outfile.write('</head>')
outfile.write('<body>')
outfile.write(document['html'])
outfile.write('\n')
outfile.write(diagram)
outfile.write('\n')
outfile.write('</body>')
outfile.write('</html>')
# c_a, c_d = pywt.dwt(signal, 'haar')
# for i, j in enumerate(signal):
# print i, j
# print ''
# # for i in c_a:
# # print i
# # print ''
# # for i in c_d:
# # print i
# # print ''
# ps = np.abs(np.fft.fft(signal))**2
# # for i, j in enumerate(ps):
# # print i, j
# time_step = 1
# freqs = np.fft.fftfreq(len(signal), time_step)
# print >> sys.stderr, freqs
# idx = np.argsort(freqs)
# for x, y in zip(freqs[idx], ps[idx]):
# print x, y
|
mit
| -8,512,225,107,606,674,000
| 29.503704
| 123
| 0.575765
| false
| 3.706571
| false
| false
| false
|
chromatic-universe/imap2017
|
src/imap-python-gadget/cci_imap_gadget/core_on_login.py
|
1
|
4756
|
# core_on_login.py chromatic universe william k. johnson 2018
from time import sleep
#cci
from cci_imap_gadget.imap_gadget_base import cci_chilkat , \
cci_ecosys , \
cci_mini_imap_mail
from cci_imap_gadget.core_on_logout import on_logout
import cci_utils.cci_io_tools as io
# ----------------------------------------------------------------------------------------
class on_login( object ) :
"""
on_login
"""
def __init__( self ,
cci_chilkat = None ,
cci_mail = None ,
cci_ico = None ) :
"""
:param cci_chilkat:
:param cci_mail:
:param cci_ico:
"""
# logging
self._logger = io.init_logging( self.__class__.__name__ )
self._logger.info( self.__class__.__name__ + '...' )
self._cci = cci_chilkat
self._mail = cci_mail
self._ecosys = cci_ico
self._imap_states = set()
self._imap_states.add( 'non-authenticated' )
@property
def cci( self ) :
return self._cci
@cci.setter
def cci( self , cc ) :
self._cci = cc
@property
def mail( self ) :
return self._mail
@mail.setter
def mail( self , m ) :
self._mail = m
@property
def eco( self ) :
return self._ecosys
    @eco.setter
    def eco( self , ec ) :
        self._ecosys = ec
@property
def logger( self ) :
return self._logger
@logger.setter
def logger( self , log ) :
self._logger = log
@property
def imap_states( self ) :
return self._imap_states
@imap_states.setter
def imap_states( self , states ) :
self._imap_states = states
def perform( self ) :
"""
:return:
"""
        # connect to an imap server.
self.cci.imap.put_KeepSessionLog( True )
self.cci.imap.put_VerboseLogging( True )
print( self.mail.imap_addr )
b_ret = self.cci.imap.Connect( self.mail.imap_addr )
if b_ret :
#login
b_ret = self.cci.imap.Login( self.mail.imap_mail_account ,
self.mail.imap_mail_auth )
            if not b_ret :
                self.logger.error( '...login failed....')
                #self.logger.info( self.cci.imap.lastErrorText() )
            else :
                # only move to the authenticated state when login succeeded
                self.imap_states.remove( 'non-authenticated' )
                self.imap_states.add( 'authenticated' )
            self.logger.info( self.cci.imap.sessionLog() )
        else :
            self.logger.error( '...connect failed....' )
sleep( 2 )
# ----------------------------------------------------------------------------------------
if __name__ == '__main__' :
try :
#imap default params
mini_mail = cci_mini_imap_mail( mail_account="wiljoh" , mail_auth="Argentina1" )
mini_mail.logger.info( repr( mini_mail) )
#cci microkernel
mini_ecosys = cci_ecosys( mta_addr='127.0.0.1' ,
mda_addr='127.0.0.1' ,
imap_mail=mini_mail )
mini_ecosys.logger.info( repr( mini_ecosys ) )
#chilkat instances
cci = cci_chilkat()
for idx in range( 0 , 9 ) :
login = on_login( cci , mini_mail , mini_ecosys )
login.perform()
#logout
logout = on_logout( cci , mini_mail , mini_ecosys )
logout.perform()
sleep( 8 )
except Exception as e :
print( str( e ) )
|
mit
| 2,335,134,393,039,799,300
| 34.22963
| 108
| 0.353869
| false
| 4.858018
| false
| false
| false
|
uclmr/inferbeddings
|
tests/inferbeddings/adversarial/closedform/test_lifted_simple_distmult_unit_cube.py
|
1
|
3989
|
# -*- coding: utf-8 -*-
import numpy as np
import tensorflow as tf
from inferbeddings.models import base as models
from inferbeddings.models import similarities
from inferbeddings.knowledgebase import Fact, KnowledgeBaseParser
from inferbeddings.parse import parse_clause
from inferbeddings.models.training import constraints
from inferbeddings.adversarial import Adversarial
from inferbeddings.adversarial.closedform import ClosedForm
import logging
import pytest
logger = logging.getLogger(__name__)
triples = [
('a', 'p', 'b'),
('c', 'p', 'd'),
('a', 'q', 'b')
]
facts = [Fact(predicate_name=p, argument_names=[s, o]) for s, p, o in triples]
parser = KnowledgeBaseParser(facts)
nb_entities = len(parser.entity_to_index)
nb_predicates = len(parser.predicate_to_index)
# Clauses
clause_str = 'q(X, Y) :- p(X, Y)'
clauses = [parse_clause(clause_str)]
# Instantiating the model parameters
model_class = models.get_function('DistMult')
similarity_function = similarities.get_function('dot')
model_parameters = dict(similarity_function=similarity_function)
@pytest.mark.closedform
def test_distmult_unit_cube():
for seed in range(32):
tf.reset_default_graph()
np.random.seed(seed)
tf.set_random_seed(seed)
entity_embedding_size = np.random.randint(low=1, high=5)
predicate_embedding_size = entity_embedding_size
# Instantiating entity and predicate embedding layers
entity_embedding_layer = tf.get_variable('entities',
shape=[nb_entities + 1, entity_embedding_size],
initializer=tf.contrib.layers.xavier_initializer())
predicate_embedding_layer = tf.get_variable('predicates',
shape=[nb_predicates + 1, predicate_embedding_size],
initializer=tf.contrib.layers.xavier_initializer())
# Adversary - used for computing the adversarial loss
adversarial = Adversarial(clauses=clauses, parser=parser,
entity_embedding_layer=entity_embedding_layer,
predicate_embedding_layer=predicate_embedding_layer,
model_class=model_class,
model_parameters=model_parameters,
batch_size=1)
adv_projection_steps = [constraints.unit_cube(adv_emb_layer) for adv_emb_layer in adversarial.parameters]
adversarial_loss = adversarial.loss
v_optimizer = tf.train.AdagradOptimizer(learning_rate=1e-1)
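        # maximize the adversarial loss by minimizing its negative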
v_training_step = v_optimizer.minimize(- adversarial_loss, var_list=adversarial.parameters)
init_op = tf.global_variables_initializer()
closed_form_lifted = ClosedForm(parser=parser,
predicate_embedding_layer=predicate_embedding_layer,
model_class=model_class, model_parameters=model_parameters,
is_unit_cube=True)
opt_adversarial_loss = closed_form_lifted(clauses[0])
with tf.Session() as session:
session.run(init_op)
for finding_epoch in range(1, 100 + 1):
_ = session.run([v_training_step])
for projection_step in adv_projection_steps:
session.run([projection_step])
violation_loss_val, opt_adversarial_loss_val = session.run([adversarial_loss, opt_adversarial_loss])
if violation_loss_val + 1e-1 > opt_adversarial_loss_val:
print('{} <= {}'.format(violation_loss_val, opt_adversarial_loss_val))
assert violation_loss_val <= (opt_adversarial_loss_val + 1e-4)
tf.reset_default_graph()
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
pytest.main([__file__])
|
mit
| -1,489,150,325,355,351,800
| 36.632075
| 116
| 0.610679
| false
| 3.922321
| false
| false
| false
|
uskudnik/ggrc-core
|
src/ggrc_workflows/migrations/versions/20150514130212_1431e7094e26_add_new_notification_type.py
|
1
|
1859
|
"""add new notification type
Revision ID: 1431e7094e26
Revises: 2b89912f95f1
Create Date: 2015-05-14 13:02:12.165612
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column
from datetime import timedelta, date
from sqlalchemy import and_
from ggrc import db
from ggrc_workflows.models import Workflow
from ggrc_workflows.notification.notification_handler import (
get_notification_type,
add_notif,
)
# revision identifiers, used by Alembic.
revision = '1431e7094e26'
down_revision = '2b89912f95f1'
def upgrade():
notification_types_table = table(
'notification_types',
column('id', sa.Integer),
column('name', sa.String),
column('description', sa.Text),
column('template', sa.String),
column('instant', sa.Boolean),
column('advance_notice', sa.Integer),
column('advance_notice_end', sa.Integer),
column('created_at', sa.DateTime),
column('modified_by_id', sa.Integer),
column('updated_at', sa.DateTime),
column('context_id', sa.Integer),
)
notification_types = [
      # cycle created notifications
{"name": "cycle_start_failed",
"description": ("Notify workflow owners that a cycle has failed to"
"start for a recurring workflow"),
"template": "cycle_start_failed",
"advance_notice": 0,
"instant": False,
},
]
op.bulk_insert(notification_types_table, notification_types)
existing_wfs = Workflow.query.filter(and_(
Workflow.frequency.in_(["weekly", "monthly", "quarterly", "annually"]),
Workflow.next_cycle_start_date >= date.today()
))
for wf in existing_wfs:
notif_type = get_notification_type("cycle_start_failed")
add_notif(wf, notif_type, wf.next_cycle_start_date + timedelta(1))
db.session.commit()
def downgrade():
pass
|
apache-2.0
| 2,134,548,577,250,535,400
| 26.338235
| 77
| 0.669715
| false
| 3.568138
| false
| false
| false
|
shimpe/frescobaldi
|
frescobaldi_app/matcher.py
|
1
|
7388
|
# This file is part of the Frescobaldi project, http://www.frescobaldi.org/
#
# Copyright (c) 2008 - 2014 by Wilbert Berendsen
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# See http://www.gnu.org/licenses/ for more information.
"""
Highlights matching tokens such as { and }, << and >> etc.
"""
from __future__ import unicode_literals
import weakref
from PyQt4.QtGui import QAction
import app
import plugin
import ly.lex
import lydocument
import viewhighlighter
import actioncollection
import actioncollectionmanager
class AbstractMatcher(object):
def __init__(self, view=None):
"""Initialize with an optional View. (Does not keep a reference.)"""
self._view = lambda: None
if view:
self.setView(view)
app.settingsChanged.connect(self.updateSettings)
self.updateSettings()
def updateSettings(self):
from PyQt4.QtCore import QSettings
s = QSettings()
s.beginGroup("editor_highlighting")
self._match_duration = s.value("match", 1, int) * 1000
def setView(self, view):
"""Set the current View (to monitor for cursor position changes)."""
old = self._view()
if old:
old.cursorPositionChanged.disconnect(self.showMatches)
if view:
self._view = weakref.ref(view)
view.cursorPositionChanged.connect(self.showMatches)
else:
self._view = lambda: None
def view(self):
"""Return the current View."""
return self._view()
def highlighter(self):
"""Implement to return an ArbitraryHighlighter for the current View."""
pass
def showMatches(self):
"""Highlights matching tokens if the view's cursor is at such a token."""
cursors = matches(self.view().textCursor(), self.view())
if cursors:
self.highlighter().highlight("match", cursors, 2, self._match_duration)
else:
self.highlighter().clear("match")
class Matcher(AbstractMatcher, plugin.MainWindowPlugin):
"""One Matcher automatically handling the current View."""
def __init__(self, mainwindow):
super(Matcher, self).__init__()
ac = self.actionCollection = Actions()
actioncollectionmanager.manager(mainwindow).addActionCollection(ac)
ac.view_matching_pair.triggered.connect(self.moveto_match)
ac.view_matching_pair_select.triggered.connect(self.select_match)
mainwindow.currentViewChanged.connect(self.setView)
view = mainwindow.currentView()
if view:
self.setView(view)
def highlighter(self):
return viewhighlighter.highlighter(self.view())
def moveto_match(self):
"""Jump to the matching token."""
self.goto_match(False)
def select_match(self):
"""Select from the current to the matching token."""
self.goto_match(True)
def goto_match(self, select=False):
"""Jump to the matching token, selecting the text if select is True."""
cursor = self.view().textCursor()
cursors = matches(cursor)
if len(cursors) < 2:
return
if select:
if cursors[0] < cursors[1]:
anchor, pos = cursors[0].selectionStart(), cursors[1].selectionEnd()
else:
anchor, pos = cursors[0].selectionEnd(), cursors[1].selectionStart()
cursor.setPosition(anchor)
cursor.setPosition(pos, cursor.KeepAnchor)
else:
cursor.setPosition(cursors[1].selectionStart())
self.view().setTextCursor(cursor)
class Actions(actioncollection.ActionCollection):
name = "matchingpair"
def createActions(self, parent):
self.view_matching_pair = QAction(parent)
self.view_matching_pair_select = QAction(parent)
def translateUI(self):
self.view_matching_pair.setText(_("Matching Pai&r"))
self.view_matching_pair_select.setText(_("&Select Matching Pair"))
def matches(cursor, view=None):
"""Return a list of zero to two cursors specifying matching tokens.
If the list is empty, the cursor was not at a MatchStart/MatchEnd token,
if the list only contains one cursor the matching token could not be found,
if the list contains two cursors, the first is the token the cursor was at,
and the second is the matching token.
If view is given, only the visible part of the document is searched.
"""
block = cursor.block()
column = cursor.position() - block.position()
tokens = lydocument.Runner(lydocument.Document(cursor.document()))
tokens.move_to_block(block)
if view is not None:
first_block = view.firstVisibleBlock()
bottom = view.contentOffset().y() + view.viewport().height()
pred_forward = lambda: view.blockBoundingGeometry(tokens.block).top() <= bottom
pred_backward = lambda: tokens.block >= first_block
else:
pred_forward = lambda: True
pred_backward = lambda: True
source = None
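    # depending on whether the cursor is on a MatchStart or MatchEnd token,
    # build a generator scanning forward or backward (limited to the visible
    # blocks when a view was given)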
for token in tokens.forward_line():
if token.pos <= column <= token.end:
if isinstance(token, ly.lex.MatchStart):
match, other = ly.lex.MatchStart, ly.lex.MatchEnd
def source_gen():
while pred_forward():
for t in tokens.forward_line():
yield t
if not tokens.next_block():
break
source = source_gen()
break
elif isinstance(token, ly.lex.MatchEnd):
match, other = ly.lex.MatchEnd, ly.lex.MatchStart
def source_gen():
while pred_backward():
for t in tokens.backward_line():
yield t
if not tokens.previous_block():
break
source = source_gen()
break
elif token.pos > column:
break
cursors = []
if source:
# we've found a matcher item
cursors.append(tokens.cursor())
nest = 0
for token2 in source:
if isinstance(token2, other) and token2.matchname == token.matchname:
if nest == 0:
# we've found the matching item!
cursors.append(tokens.cursor())
break
else:
nest -= 1
elif isinstance(token2, match) and token2.matchname == token.matchname:
nest += 1
return cursors
app.mainwindowCreated.connect(Matcher.instance)
|
gpl-2.0
| -4,223,175,629,480,719,000
| 34.864078
| 87
| 0.611668
| false
| 4.330598
| false
| false
| false
|
TriggeredMessaging/pydotmailer
|
pydotmailer.py
|
1
|
26995
|
# pydotmailer - A lightweight wrapper for the dotMailer API, written in Python.
# Copyright (c) 2012 Triggered Messaging Ltd, released under the MIT license
# Home page:
# https://github.com/TriggeredMessaging/pydotmailer/
# See README and LICENSE files.
#
# dotMailer API docs are at http://www.dotmailer.co.uk/api/
# This class was influenced by earllier work: https://github.com/JeremyJones/dotmailer-client/blob/master/dotmailer.py
import base64
import time
from datetime import datetime, timedelta
from suds.client import Client as SOAPClient
__version__ = '0.1.2'
try:
import simplejson as json
except ImportError:
import json # fall back to traditional json module.
import logging
logger = logging.getLogger(__name__)
from dotmailersudsplugin import DotMailerSudsPlugin
class PyDotMailer(object):
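    """
    Thin SOAP (suds) wrapper around the dotMailer API.
    Minimal usage sketch (the credentials and addresses below are
    placeholders, not a real account):
        mailer = PyDotMailer(api_username='apiuser@example.com',
                             api_password='secret')
        result = mailer.get_contact_by_email('someone@example.com')
        if result.get('ok'):
            print result.get('d_fields')
    """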
version = '0.1'
class RESULT_FIELDS_ERROR_CODE:
"""
Defines for RESULT_FIELDS.ERROR_CODE error codes which we're deriving from the string the ESP
e.g dotMailer returns.
"""
ERROR_CAMPAIGN_NOT_FOUND = 'ERROR_CAMPAIGN_NOT_FOUND' # no email template
ERROR_CAMPAIGN_SENDNOTPERMITTED = 'ERROR_CAMPAIGN_SENDNOTPERMITTED'
# not paid enough? dotMailer tends to return this if you've run out of campaign credits or a similar issue.
ERROR_CAMPAIGN_APINOTPERMITTED = 'ERROR_CAMPAIGN_APINOTPERMITTED' # e,g, exceeded dotmailer API limits (API_USAGE_EXCEEDED)
ERROR_GENERIC = 'ERROR_UNKNOWN' # code which couldn't be parsed.
ERROR_CONTACT_NOT_FOUND = 'ERROR_CONTACT_NOT_FOUND' # no email address?
ERROR_CONTACT_UNSUBSCRIBED = 'ERROR_CONTACT_UNSUBSCRIBED' # no send permission
ERROR_CONTACT_BLACKHOLED = 'ERROR_CONTACT_BLACKHOLED' # address blackholed
ERROR_OTHER = 'ERROR_OTHER' # Etc
TIMEOUT_ERROR = 'Timeout Error' # Timeout from ESP
ERROR_UNFINISHED = "ERROR_UNFINISHED" # Load had not finished
ERROR_ESP_LOAD_FAIL = 'ERROR_ESP_LOAD_FAIL' # Data not loaded
# Cache the information on the API location on the server
api_url = ''
def __init__(self, api_username='', api_password='', secure=True):
"""
Connect to the dotMailer API at apiconnector.com, using SUDS.
        Note: there is no api_key parameter, because the dotMailer API doesn't support API keys.
@param api_username Your dotMailer user name
@param api_password Your dotMailer password
@param secure Whether or not this should use a secure connection (HTTPS).
Always True if the ESP doesn't support an insecure API.
"""
# Remember the HTTPS flag
        self.secure = bool(secure)  # normalise to a plain boolean
# Choose the dotMailer API URL
if secure:
self.api_url = 'https://apiconnector.com/API.asmx?WSDL'
else:
self.api_url = 'http://apiconnector.com/API.asmx?WSDL'
# Connect to the API, using SUDS. Log before and after to track the time taken.
logger.debug("Connecting to web service")
self.client = SOAPClient(self.api_url,
plugins=[DotMailerSudsPlugin()]) # Plugin makes a tiny XML patch for dotMailer
logger.debug("Connected to web service")
# Change the logging level to CRITICAL to avoid logging errors for every API call which fails via suds
logging.getLogger('suds.client').setLevel(logging.CRITICAL)
# Remember the username and password. There's no API key to remember with dotMailer
self.api_username = api_username
self.api_password = api_password
if (not api_username) or (not api_password):
raise Exception('Bad username or password')
self.last_exception = None
def unpack_exception(self, e):
""" unpack the exception thrown by suds. This contains a string code in e.fault.faultstring containing text e.g.
Server was unable to process request. ---> Campaign not found ERROR_CAMPAIGN_NOT_FOUND
Use this to set a suitable value for dict_result
@param e exception
@return dict_result, e.g. {'ok':False,
'errors':[e.message],
                                   'error_code':PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CAMPAIGN_NOT_FOUND }
"""
self.last_exception = e # in case caller cares
fault_string = ''
# http://stackoverflow.com/questions/610883/how-to-know-if-an-object-has-an-attribute-in-python
if e and hasattr(e, 'fault') and hasattr(e.fault, 'faultstring'):
fault_string = e.fault.faultstring
# todo clearly a more generic way of doing this would be good.
if 'ERROR_CAMPAIGN_NOT_FOUND' in fault_string:
error_code = PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CAMPAIGN_NOT_FOUND
elif 'ERROR_CAMPAIGN_SENDNOTPERMITTED' in fault_string:
error_code = PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CAMPAIGN_SENDNOTPERMITTED
elif 'ERROR_APIUSAGE_EXCEEDED' in fault_string:
error_code = PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CAMPAIGN_APINOTPERMITTED
elif 'ERROR_CONTACT_NOT_FOUND' in fault_string:
error_code = PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CONTACT_NOT_FOUND
elif 'ERROR_CONTACT_SUPPRESSED' in fault_string:
# Server was unable to process request. ---> Contact is suppressed. ERROR_CONTACT_SUPPRESSED
error_code = PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CONTACT_UNSUBSCRIBED
else:
error_code = PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_OTHER
dict_result = {'ok': False, 'errors': [e.message], 'error_code': error_code}
return dict_result
def add_contacts_to_address_book(self, address_book_id, s_contacts, wait_to_complete_seconds=False):
"""
Add a list of contacts to the address book
@param address_book_id the id of the address book
@param s_contacts containing the contacts to be added. You may upload either a .csv or .xls file.
It must contain one column with the heading "Email".
            Other columns will be mapped to your custom data fields where possible.
@param wait_to_complete_seconds seconds to wait.
@return dict e.g. {'progress_id': 15edf1c4-ce5f-42e3-b182-3b20c880bcf8, 'ok': True, 'result': Finished}
http://www.dotmailer.co.uk/api/address_books/add_contacts_to_address_book_with_progress.aspx
"""
dict_result = {'ok': True}
return_code = None
base64_data = base64.b64encode(s_contacts)
try:
progress_id = self.client.service.AddContactsToAddressBookWithProgress(username=self.api_username,
password=self.api_password,
addressbookID=address_book_id,
data=base64_data,
dataType='CSV')
dict_result = {'ok': True}
if wait_to_complete_seconds:
# retry loop...
dt_wait_until = datetime.utcnow() + timedelta(seconds=wait_to_complete_seconds) # wait for max
sleep_time = 0.2 # start with short sleep between retries
while (not return_code or return_code.get('result') == 'NotFinished') and \
datetime.utcnow() < dt_wait_until:
time.sleep(sleep_time)
return_code = self.get_contact_import_progress(progress_id) # E.g: {'error_code': 'ERROR_UNFINISHED', 'ok': False, 'result': NotFinished}
# gradually backoff with longer sleep intervals up to a max of 5 seconds
sleep_time = min(sleep_time * 2, 5.0)
if return_code:
dict_result = return_code
dict_result.update({'progress_id': progress_id})
except Exception as e:
dict_result = self.unpack_exception(e)
return dict_result
def add_contact_to_address_book(self, address_book_id, email_address, d_fields, email_type="Html",
audience_type="Unknown",
opt_in_type="Unknown"):
"""
add a single contact into an address book. - uses AddContactToAddressBook
@param address_book_id the id of the address book
@param email_address The email address to add
@param d_fields - dict containing the data to be added. e.g. { 'firstname': 'mike', 'lastname': 'austin'}.
columns must map to standard fields in DM or will attempt to map to your custom data fields in DM.
@param email_type = "Html" - the new contact will be set to receive this format by default.
@return dict e.g. {'contact_id': 123532543, 'ok': True, 'contact': APIContact object }
"""
# Initialise the result dictionary
dict_result = {'ok': False}
# Create an APIContact object with the details of the record to load. For example:
# APIContact: (APIContact){
# ID = None, Email = None,
# AudienceType = (ContactAudienceTypes){ value = None, }
# DataFields = (ContactDataFields){ Keys = (ArrayOfString){ string[] = <empty> }
# Values = (ArrayOfAnyType){ anyType[] = <empty> }
# OptInType = (ContactOptInTypes){ value = None }
# EmailType = (ContactEmailTypes){ value = None }
# Notes = None }
contact = self.client.factory.create('APIContact')
del contact.ID
contact.Email = email_address
# Copy field data into the call
for field_name in d_fields:
if field_name != 'email' and d_fields.get(field_name):
contact.DataFields.Keys[0].append(field_name)
contact.DataFields.Values[0].append(d_fields.get(field_name))
# remove some empty values that will upset suds/dotMailer
####del contact.AudienceType
####del contact.OptInType
contact.AudienceType = audience_type
contact.OptInType = opt_in_type
contact.EmailType = email_type
#### logging.getLogger('suds.client').setLevel(logging.DEBUG)
try:
created_contact = self.client.service.AddContactToAddressBook(username=self.api_username,
password=self.api_password,
contact=contact,
addressbookId=address_book_id)
# Example dict_result contents:
# { 'contact': (APIContact){ ID = 417373614, Email = "test.mailings+unit_tests@triggeredmessaging.com",
# AudienceType = "Unknown",
# DataFields = (ContactDataFields){
# Keys = (ArrayOfString){ string[] = "Postcode", }
# Values = (ArrayOfAnyType){ anyType[] = "SW1A 0AA", } }
# OptInType = "Unknown", EmailType = "Html" },
# 'ok': True, 'contact_id': 417373614}
dict_result = ({'ok': True, 'contact_id': created_contact.ID, 'contact': created_contact})
except Exception as e:
dict_result = self.unpack_exception(e)
return dict_result
def get_contact_import_progress(self, progress_id):
"""
@param progress_id the progress_id from add_contacts_to_address_book
@return dict e.g. {'ok': False, 'result': NotFinished} or dict: {'ok': True, 'result': Finished}
http://www.dotmailer.co.uk/api/contacts/get_contact_import_progress.aspx
"""
dict_result = {'ok': True}
try:
return_code = self.client.service.GetContactImportProgress(username=self.api_username,
password=self.api_password,
progressID=progress_id)
if return_code == 'Finished':
dict_result = {'ok': True, 'result': return_code, 'errors': [' Load OK. See report at https://r1-app.dotmailer.com/Contacts/Import/WatchdogReport.aspx?g=%s ' % progress_id] }
elif return_code == 'RejectedByWatchdog':
# API call AddContactsToAddressBookWithProgress has triggered "RejectedByWatchdog" for one client and (we believe) dotMailer blocked the whole upload.
# https://support.dotmailer.com/entries/44346548-Data-Watchdog-FAQs
# https://support.dotmailer.com/entries/21449156-Better-API-feedback-for-Reject...
dict_result = {'ok': False, 'result': return_code, 'error_code':PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_ESP_LOAD_FAIL,
'errors': [' Load Fail. See report at https://r1-app.dotmailer.com/Contacts/Import/WatchdogReport.aspx?g=%s ' % progress_id]}
else:
dict_result = {'ok': False, 'result': return_code, 'error_code':PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_UNFINISHED,
'errors': [' Load Unfinished. See report at https://r1-app.dotmailer.com/Contacts/Import/WatchdogReport.aspx?g=%s ' % progress_id]}
except Exception as e:
dict_result = self.unpack_exception(e)
return dict_result # E.g: {'ok': True, 'result': Finished, 'errors': [u'<a href="https://r1-app.dotmailer.com/Contacts/Import/WatchdogReport.aspx?g=d82602bb-adfb-4e2d-aabc-5fb77af2ae3d">Load OK Report</a>']}
def send_campaign_to_contact(self, campaign_id, contact_id, send_date=None):
"""
@param campaign_id
@param contact_id
@param send_date date/time in server time when the campaign should be sent.
@return dict e.g. {'ok': True} or {'ok': False,
'result': <return code if there is one>,
'errors':['sample error']}
http://www.dotmailer.co.uk/api/campaigns/send_campaign_to_contact.aspx
"""
# format the date in ISO format, e.g. "2012-03-28T19:51:00" for sending via SOAP call.
if not send_date:
send_date = datetime.utcnow()
dict_result = {'ok': True}
iso_send_date = self.dt_to_iso_date(send_date)
return_code = None
try:
return_code = self.client.service.SendCampaignToContact(username=self.api_username,
password=self.api_password,
campaignId=campaign_id,
contactid=contact_id,
sendDate=iso_send_date) # note inconsistent case
# in DM API
if return_code:
# return code, which means an error
dict_result = {'ok': False, 'result': return_code}
except Exception as e:
dict_result = self.unpack_exception(e)
return dict_result
def get_contact_by_email(self, email):
"""
@param email email address to search for.
@return dict e.g. {'ok': True,
contact_id: 32323232, # the dotMailer contact ID
email: # the email address of the returned record
d_fields: { field_name: field_value }, # dictionary with multiple fields, keyed by field name
# The result member is the raw return from dotMailer.
'result': (APIContact){
ID = 367568124
Email = "test@blackhole.triggeredmessaging.com"
AudienceType = "Unknown"
DataFields =
(ContactDataFields){
Keys =
(ArrayOfString){
string[] =
"FIRSTNAME",
"FULLNAME",
"GENDER",
"LASTNAME",
"POSTCODE",
}
Values =
(ArrayOfAnyType){
anyType[] =
None,
None,
None,
None,
}
}
OptInType = "Unknown"
EmailType = "Html"
Notes = None
}}
http://www.dotmailer.co.uk/api/contacts/get_contact_by_email.aspx
"""
dict_result = {'ok': True}
data_fields = None
try:
return_code = self.client.service.GetContactByEmail(username=self.api_username,
password=self.api_password,
email=email)
dict_result = {'ok': True, 'result': return_code}
if dict_result.get('ok'):
# create a dictionary with structure { field_name: field_value }
try:
data_fields = dict_result.get('result').DataFields
d_fields = self._clean_returned_data_fields(data_fields=data_fields)
dict_result.update({'d_fields': d_fields})
except:
logger.exception("Exception unpacking fields in GetContactByEmail for email=%s" % email)
# log additional info separately in case something bad has happened
# which'll cause this logging line to raise.
logger.error("Further info: data_fields=%s" % data_fields)
contact_id = return_code.ID
dict_result.update({'contact_id': contact_id})
returned_email_address = return_code.Email
dict_result.update({'email': returned_email_address})
except Exception as e:
dict_result = self.unpack_exception(e)
error_code = dict_result.get("error_code")
if error_code == PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CONTACT_NOT_FOUND:
pass # ignore these expected errors
elif error_code == PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CAMPAIGN_APINOTPERMITTED:
pass
else:
logger.exception("Exception in GetContactByEmail")
return dict_result
def dt_to_iso_date(self, dt):
""" convert a python datetime to an iso date, e.g. "2012-03-28T19:51:00"
ready to send via SOAP
http://www.iso.org/iso/date_and_time_format
"""
try:
iso_dt = dt.strftime('%Y-%m-%dT%H:%M:%S')
except:
logger.exception('Exception converting dt to iso')
iso_dt = None
return iso_dt
def _clean_returned_data_fields(self, data_fields):
"""
Case 1886: If there's an empty first name/last name key, then dotMailer fails to return a value,
so the lengths don't match.
If this happens, scan through the keys and add an extra value of None just before the dodgy key(s)
len_data_fields_names = len(data_fields_keys)
len_data_fields_values = len(data_fields_values)
if len_data_fields_names > len_data_fields_values:
# Different number of keys and values, so do a copy but insert None when necessary
name_index = 0
value_index = 0
while name_index < len_data_fields_names:
field_name = data_fields_keys[name_index]
if name_index+1 < len_data_fields_names:
next_field_name = data_fields_keys[name_index+1]
else:
next_field_name = ""
if ((len_data_fields_names > len_data_fields_values)
and (next_field_name=="FIRSTNAME" or next_field_name=="LASTNAME" or next_field_name=="FULLNAME")):
d_fields.update({field_name: None }) # Insert new value Null
len_data_fields_values += 1 # Count one more value, but don't step on to next value
else:
d_fields.update({field_name: data_fields_values[value_index] }) # Copy the real value
value_index += 1 # Step on to next value
name_index += 1 # Next key
"""
d_fields = {}
data_fields_keys = data_fields.Keys[0]
data_fields_values = data_fields.Values[0]
# Case 1886: If there's an empty first name/last name key, then dotMailer fails to return a value,
# so the lengths don't match
# If this happens, scan through the keys and add an extra value of None just before the dodgy key(s)
len_data_fields_names = len(data_fields_keys)
len_data_fields_values = len(data_fields_values)
if len_data_fields_names > len_data_fields_values:
# Different number of keys and values, so do a copy but insert None when necessary
name_index = 0
value_index = 0
while name_index < len_data_fields_names:
field_name = data_fields_keys[name_index]
if name_index+1 < len_data_fields_names:
next_field_name = data_fields_keys[name_index+1]
else:
next_field_name = ""
if ((len_data_fields_names > len_data_fields_values)
and (next_field_name == "FIRSTNAME"
or next_field_name == "LASTNAME"
or next_field_name == "FULLNAME")):
d_fields.update({field_name: None}) # Insert new value Null
len_data_fields_values += 1 # Count one more value, but don't step on to next value
else:
d_fields.update({field_name: data_fields_values[value_index]}) # Copy the real value
value_index += 1 # Step on to next value
name_index += 1 # Next key
else:
# Same number of keys and values, so just do a straightforward copy
for idx, field_name in enumerate(data_fields_keys):
                logger.debug("%s %s %s", idx, field_name, data_fields_values[idx])
d_fields.update({field_name: data_fields_values[idx]})
return d_fields
def get_contact_by_id(self, contact_id):
"""
@param contact_id - id to search for
@return dict e.g. {'ok': True,
contact_id: 32323232, # the dotMailer contact ID
email: # the email address of the returned record
d_fields: { field_name: field_value }, # dictionary with multiple fields, keyed by field name
# The result member is the raw return from dotMailer.
'result': (APIContact){
ID = 367568124
Email = "test@blackhole.triggeredmessaging.com"
AudienceType = "Unknown"
DataFields =
(ContactDataFields){
Keys =
(ArrayOfString){
string[] =
"FIRSTNAME",
"FULLNAME",
"GENDER",
"LASTNAME",
"POSTCODE",
}
Values =
(ArrayOfAnyType){
anyType[] =
None,
None,
None,
None,
}
}
OptInType = "Unknown"
EmailType = "Html"
Notes = None
}}
http://www.dotmailer.co.uk/api/contacts/get_contact_by_id.aspx
"""
dict_result = {'ok': True}
data_fields = None
try:
return_code = self.client.service.GetContactById(username=self.api_username, password=self.api_password,
id=contact_id)
dict_result = {'ok': True, 'result': return_code}
if dict_result.get('ok'):
# create a dictionary with structure { field_name: field_value }
try:
                data_fields = dict_result.get('result').DataFields
                d_fields = self._clean_returned_data_fields(data_fields=data_fields)
dict_result.update({'d_fields': d_fields })
except:
logger.exception("Exception unpacking fields in GetContactById for id=%s" % contact_id)
# log additional info separately in case something bad has happened
# which'll cause this logging line to raise.
logger.error("Further info: data_fields=%s" % data_fields)
contact_id = return_code.ID
dict_result.update({'contact_id': contact_id})
returned_email_address = return_code.Email
dict_result.update({'email': returned_email_address})
except Exception as e:
dict_result = self.unpack_exception(e)
error_code = dict_result.get('error_code')
if error_code == PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CONTACT_NOT_FOUND:
pass # Don't log these expected errors
elif error_code == PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CAMPAIGN_APINOTPERMITTED:
pass
return dict_result
"""
might implement a command line at some point.
def main():
try:
addressbookid = sys.argv[2] #should use argparse or similar.
contactsfilename = sys.argv[3]
except IndexError:
print "Usage: dotmailer addcontactstoaddressbook addressbookid contactsfilename\n"
sys.exit(1)
initial_data = open(contactsfilename, 'r').read()
"""
|
mit
| 7,280,185,620,400,163,000
| 54.204499
| 215
| 0.544619
| false
| 4.276097
| false
| false
| false
|
Runscope/pysaml2
|
tests/test_30_mdstore.py
|
1
|
7860
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import re
from saml2.httpbase import HTTPBase
from saml2.mdstore import MetadataStore, MetaDataMDX
from saml2.mdstore import destinations
from saml2.mdstore import name
from saml2 import md
from saml2 import sigver
from saml2 import BINDING_SOAP
from saml2 import BINDING_HTTP_REDIRECT
from saml2 import BINDING_HTTP_POST
from saml2 import BINDING_HTTP_ARTIFACT
from saml2 import saml
from saml2 import config
from saml2.attribute_converter import ac_factory
from saml2.attribute_converter import d_to_local_name
from saml2.extension import mdui
from saml2.extension import idpdisc
from saml2.extension import dri
from saml2.extension import mdattr
from saml2.extension import ui
from saml2.s_utils import UnknownPrincipal
import xmldsig
import xmlenc
from pathutils import full_path
sec_config = config.Config()
#sec_config.xmlsec_binary = sigver.get_xmlsec_binary(["/opt/local/bin"])
ONTS = {
saml.NAMESPACE: saml,
mdui.NAMESPACE: mdui,
mdattr.NAMESPACE: mdattr,
dri.NAMESPACE: dri,
ui.NAMESPACE: ui,
idpdisc.NAMESPACE: idpdisc,
md.NAMESPACE: md,
xmldsig.NAMESPACE: xmldsig,
xmlenc.NAMESPACE: xmlenc
}
ATTRCONV = ac_factory(full_path("attributemaps"))
METADATACONF = {
"1": {
"local": [full_path("swamid-1.0.xml")]
},
"2": {
"local": [full_path("InCommon-metadata.xml")]
},
"3": {
"local": [full_path("extended.xml")]
},
"7": {
"local": [full_path("metadata_sp_1.xml"),
full_path("InCommon-metadata.xml")],
"remote": [
{"url": "https://kalmar2.org/simplesaml/module.php/aggregator/?id=kalmarcentral2&set=saml2",
"cert": full_path("kalmar2.pem")}]
},
"4": {
"local": [full_path("metadata_example.xml")]
},
"5": {
"local": [full_path("metadata.aaitest.xml")]
},
"8": {
"mdfile": [full_path("swamid.md")]
}
}
def _eq(l1, l2):
return set(l1) == set(l2)
def _fix_valid_until(xmlstring):
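    # push validUntil a day into the future so the bundled metadata files
    # don't fail freshness checks when the tests are run later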
new_date = datetime.datetime.now() + datetime.timedelta(days=1)
new_date = new_date.strftime("%Y-%m-%dT%H:%M:%SZ")
return re.sub(r' validUntil=".*?"', ' validUntil="%s"' % new_date,
xmlstring)
def test_swami_1():
UMU_IDP = 'https://idp.umu.se/saml2/idp/metadata.php'
mds = MetadataStore(ONTS.values(), ATTRCONV, sec_config,
disable_ssl_certificate_validation=True)
mds.imp(METADATACONF["1"])
assert len(mds) == 1 # One source
idps = mds.with_descriptor("idpsso")
assert idps.keys()
idpsso = mds.single_sign_on_service(UMU_IDP)
assert len(idpsso) == 1
assert destinations(idpsso) == [
'https://idp.umu.se/saml2/idp/SSOService.php']
_name = name(mds[UMU_IDP])
assert _name == u'Umeå University (SAML2)'
certs = mds.certs(UMU_IDP, "idpsso", "signing")
assert len(certs) == 1
sps = mds.with_descriptor("spsso")
assert len(sps) == 108
wants = mds.attribute_requirement('https://connect8.sunet.se/shibboleth')
lnamn = [d_to_local_name(mds.attrc, attr) for attr in wants["optional"]]
assert _eq(lnamn, ['eduPersonPrincipalName', 'mail', 'givenName', 'sn',
'eduPersonScopedAffiliation'])
wants = mds.attribute_requirement('https://beta.lobber.se/shibboleth')
assert wants["required"] == []
lnamn = [d_to_local_name(mds.attrc, attr) for attr in wants["optional"]]
assert _eq(lnamn, ['eduPersonPrincipalName', 'mail', 'givenName', 'sn',
'eduPersonScopedAffiliation', 'eduPersonEntitlement'])
def test_incommon_1():
mds = MetadataStore(ONTS.values(), ATTRCONV, sec_config,
disable_ssl_certificate_validation=True)
mds.imp(METADATACONF["2"])
print mds.entities()
assert mds.entities() > 1700
idps = mds.with_descriptor("idpsso")
print idps.keys()
assert len(idps) > 300 # ~ 18%
try:
_ = mds.single_sign_on_service('urn:mace:incommon:uiuc.edu')
except UnknownPrincipal:
pass
idpsso = mds.single_sign_on_service('urn:mace:incommon:alaska.edu')
assert len(idpsso) == 1
print idpsso
assert destinations(idpsso) == [
'https://idp.alaska.edu/idp/profile/SAML2/Redirect/SSO']
sps = mds.with_descriptor("spsso")
acs_sp = []
for nam, desc in sps.items():
if "attribute_consuming_service" in desc:
acs_sp.append(nam)
assert len(acs_sp) == 0
# Look for attribute authorities
aas = mds.with_descriptor("attribute_authority")
print aas.keys()
assert len(aas) == 180
def test_ext_2():
mds = MetadataStore(ONTS.values(), ATTRCONV, sec_config,
disable_ssl_certificate_validation=True)
mds.imp(METADATACONF["3"])
# No specific binding defined
ents = mds.with_descriptor("spsso")
for binding in [BINDING_SOAP, BINDING_HTTP_POST, BINDING_HTTP_ARTIFACT,
BINDING_HTTP_REDIRECT]:
assert mds.single_logout_service(ents.keys()[0], binding, "spsso")
def test_example():
mds = MetadataStore(ONTS.values(), ATTRCONV, sec_config,
disable_ssl_certificate_validation=True)
mds.imp(METADATACONF["4"])
assert len(mds.keys()) == 1
idps = mds.with_descriptor("idpsso")
assert idps.keys() == [
'http://xenosmilus.umdc.umu.se/simplesaml/saml2/idp/metadata.php']
certs = mds.certs(
'http://xenosmilus.umdc.umu.se/simplesaml/saml2/idp/metadata.php',
"idpsso", "signing")
assert len(certs) == 1
def test_switch_1():
mds = MetadataStore(ONTS.values(), ATTRCONV, sec_config,
disable_ssl_certificate_validation=True)
mds.imp(METADATACONF["5"])
assert len(mds.keys()) > 160
idps = mds.with_descriptor("idpsso")
print idps.keys()
idpsso = mds.single_sign_on_service(
'https://aai-demo-idp.switch.ch/idp/shibboleth')
assert len(idpsso) == 1
print idpsso
assert destinations(idpsso) == [
'https://aai-demo-idp.switch.ch/idp/profile/SAML2/Redirect/SSO']
assert len(idps) > 30
aas = mds.with_descriptor("attribute_authority")
print aas.keys()
aad = aas['https://aai-demo-idp.switch.ch/idp/shibboleth']
print aad.keys()
assert len(aad["attribute_authority_descriptor"]) == 1
assert len(aad["idpsso_descriptor"]) == 1
sps = mds.with_descriptor("spsso")
dual = [eid for eid, ent in idps.items() if eid in sps]
print len(dual)
assert len(dual) == 0
def test_metadata_file():
sec_config.xmlsec_binary = sigver.get_xmlsec_binary(["/opt/local/bin"])
mds = MetadataStore(ONTS.values(), ATTRCONV, sec_config,
disable_ssl_certificate_validation=True)
mds.imp(METADATACONF["8"])
print len(mds.keys())
assert len(mds.keys()) == 560
def test_mdx_service():
sec_config.xmlsec_binary = sigver.get_xmlsec_binary(["/opt/local/bin"])
http = HTTPBase(verify=False, ca_bundle=None)
mdx = MetaDataMDX(ONTS.values(), ATTRCONV, "http://pyff-test.nordu.net",
sec_config, None, http)
foo = mdx.service("https://idp.umu.se/saml2/idp/metadata.php",
"idpsso_descriptor", "single_sign_on_service")
assert len(foo) == 1
assert foo.keys()[0] == BINDING_HTTP_REDIRECT
def test_mdx_certs():
sec_config.xmlsec_binary = sigver.get_xmlsec_binary(["/opt/local/bin"])
http = HTTPBase(verify=False, ca_bundle=None)
mdx = MetaDataMDX(ONTS.values(), ATTRCONV, "http://pyff-test.nordu.net",
sec_config, None, http)
foo = mdx.certs("https://idp.umu.se/saml2/idp/metadata.php", "idpsso")
assert len(foo) == 1
if __name__ == "__main__":
test_mdx_certs()
|
bsd-2-clause
| 1,031,400,579,156,348,700
| 30.063241
| 104
| 0.630233
| false
| 3.077134
| true
| false
| false
|
python-xlib/python-xlib
|
examples/xdamage.py
|
1
|
4638
|
#!/usr/bin/python
#
# examples/xdamage.py -- demonstrate damage extension
#
# Copyright (C) 2019 Mohit Garg <mrmohitgarg1990@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place,
# Suite 330,
# Boston, MA 02111-1307 USA
# Python 2/3 compatibility.
from __future__ import print_function
import sys
import os
# Change path so we find Xlib
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from Xlib import display, X, threaded, Xutil
import time
try:
import thread
except ModuleNotFoundError:
import _thread as thread
from Xlib.ext import damage
from PIL import Image, ImageTk
import traceback
def redraw(win, gc):
# win.clear_area()
win.fill_rectangle(gc, 0, 0, 60, 60)
def blink(display, win, gc, cols):
while 1:
time.sleep(2)
print('Changing color', cols[0])
gc.change(foreground = cols[0])
cols = (cols[1], cols[0])
redraw(win, gc)
display.flush()
def get_image_from_win(win, pt_w, pt_h, pt_x=0, pt_y=0):
try:
raw = win.get_image(pt_x, pt_y, pt_w, pt_h, X.ZPixmap, 0xffffffff)
image = Image.frombytes("RGB", (pt_w, pt_h), raw.data, "raw", "BGRX")
return image
except Exception:
traceback.print_exc()
def check_ext(disp):
# Check for extension
if not disp.has_extension('DAMAGE'):
sys.stderr.write('server does not have the DAMAGE extension\n')
sys.stderr.write("\n".join(disp.list_extensions()))
if disp.query_extension('DAMAGE') is None:
sys.exit(1)
else:
r = disp.damage_query_version()
print('DAMAGE version {}.{}'.format(r.major_version, r.minor_version))
def main():
d = display.Display()
root = d.screen().root
check_ext(d)
colormap = d.screen().default_colormap
red = colormap.alloc_named_color("red").pixel
blue = colormap.alloc_named_color("blue").pixel
background = colormap.alloc_named_color("white").pixel
window1 = root.create_window(100, 100, 250, 100, 1,
X.CopyFromParent, X.InputOutput,
X.CopyFromParent,
background_pixel = background,
event_mask = X.StructureNotifyMask | X.ExposureMask)
window1.set_wm_name('Changing Window')
window1.map()
gc = window1.create_gc(foreground = red)
thread.start_new_thread(blink, (d, window1, gc, (blue, red)))
window1.damage_create(damage.DamageReportRawRectangles)
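    # DamageReportRawRectangles asks the server for a DamageNotify event for
    # every damaged rectangle as it happens, with no coalescing of reports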
window1.set_wm_normal_hints(
flags=(Xutil.PPosition | Xutil.PSize | Xutil.PMinSize),
min_width=50,
min_height=50
)
window2 = root.create_window(100, 250, 250, 100, 1,
X.CopyFromParent, X.InputOutput,
X.CopyFromParent,
background_pixel = background,
event_mask = X.StructureNotifyMask | X.ExposureMask)
window2.set_wm_normal_hints(
flags=(Xutil.PPosition | Xutil.PSize | Xutil.PMinSize),
min_width=50,
min_height=50
)
window2.set_wm_name('Tracking Window')
window2.map()
while 1:
event = d.next_event()
if event.type == X.Expose:
if event.count == 0:
redraw(window1, gc)
elif event.type == d.extension_event.DamageNotify:
image = get_image_from_win(window1, event.area.width, event.area.height, event.area.x, event.area.y)
bgpm = window2.create_pixmap(image.width, image.height, d.screen().root_depth)
bggc = window2.create_gc(foreground=0, background=0)
bgpm.put_pil_image(bggc, 0, 0, image)
window2.copy_area(bggc, bgpm, 0, 0, image.width, image.height, 0, 0)
# bggc.free()
elif event.type == X.DestroyNotify:
sys.exit(0)
if __name__ == "__main__":
main()
|
lgpl-2.1
| -16,081,992,391,398,216
| 32.366906
| 112
| 0.609314
| false
| 3.540458
| false
| false
| false
|
lindegroup/lookback
|
config/models.py
|
1
|
1764
|
# This Python file uses the following encoding: utf-8
# Part of the Lookback project (https://github.com/lindegroup/lookback)
# Copyright 2015 The Linde Group Computer Support, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
from django.db import models
from config.crypt import CryptoHelper
class Configuration(models.Model):
"""This model stores all configuration options for accessing the external
system."""
url = models.URLField('URL')
user = models.CharField('Username', max_length=255)
password = models.CharField('Password', max_length=512)
# System type choices
JSS = 'JSS'
SYSTEM_TYPE_CHOICES = (
(JSS, 'JSS'),
)
system_type = models.CharField('System Type',
choices=SYSTEM_TYPE_CHOICES,
default=JSS,
max_length=32)
def save(self, *args, **kwargs):
"""automatically encrypt during save"""
helper = CryptoHelper()
self.password = helper.encrypt(self.password)
super(Configuration, self).save(*args, **kwargs)
def decrypt_password(self):
helper = CryptoHelper()
return helper.decrypt(self.password)
|
apache-2.0
| 6,610,376,122,505,525,000
| 35.75
| 77
| 0.670635
| false
| 4.281553
| false
| false
| false
|
louiejtaylor/pyViKO
|
examples/batch.py
|
1
|
1173
|
if __name__ == '__main__':
#####
##temporary dev hack
import os,time
os.chdir('..')
#####
from pyviko import core, mutation, restriction
#ovr = core.readFasta('test/dem/over/2000.fasta')
#toKO = core.readFasta('test/dem/ko/2000.fasta')
'''#True batch script
t1 = time.time()
for i in range(len(toKO)):
m = mutation.Mutant(toKO[i][1],numMutations=1,regEx=True)
m.setOverGene(overSeq = ovr[i][1])
#print toKO[i][1]
print m.findMutants(ignoreRxSites = False, rSiteLength='all')[:5]
print
print m.findMutants()[:5]
print "done "+str(i)
print
print time.time()-t1
'''
#testing RC search
ovr=['ATGATTACCCGGGTTTCCCAAAGGGTTTCATCCTAA']
z=''' TTACCCGGGTTTCCCAAAGGGTTTCAT'''
toKO = ['ATGAAACCCTTTGGGAAACCCGGGTAA']
t1 = time.time()
for i in range(len(toKO))[:1]:
	# toKO/ovr hold bare sequence strings here (not (name, seq) tuples)
	m = mutation.Mutant(toKO[i], numMutations=1, regEx=True)
	m.setOverGene(overSeq = ovr[i])
	#print toKO[i]
	print m.findMutants(ignoreRxSites = False, rSiteLength='all')[:5]
	print
	print m.findMutants()[:5]
	print "done "+str(i)
	print
print time.time()-t1
#overlaps = [core.findOverlap(toKO[i][1],ovr[i][1]) for i in range(len(toKO))]
#print overlaps
|
mit
| 7,485,453,507,291,470,000
| 26.302326
| 79
| 0.658994
| false
| 2.423554
| false
| false
| false
|
tangledhelix/dp_pp_utils
|
make_project.py
|
1
|
11438
|
#!/usr/bin/env python3
import json
import requests
import os
import sys
import re
import shutil
from os.path import basename
from jinja2 import Template
from subprocess import call
from trello import TrelloClient
from zipfile import ZipFile
AUTH_CONFIG = "auth-config.json"
TRELLO_TEMPLATE = "TEMPLATE: PPgen workflow"
PGDP_URL = "https://www.pgdp.net"
GITHUB_REMOTE = "origin"
GITHUB_BRANCH = "main"
# Set true to assume we'll use ppgen; false otherwise
PPGEN = True
class MakeProject():
def __init__(self):
self.dp_base = f"{os.environ['HOME']}/dp"
self.projects_base = f"{self.dp_base}/pp"
self.template_dir = f"{self.dp_base}/util/templates"
self.params = {}
self.trello_template = TRELLO_TEMPLATE
with open(f"{self.dp_base}/util/{AUTH_CONFIG}") as file:
self.auth = json.loads(file.read())
def get_param(self, param_name, prompt_text):
param_answer = input(f"{prompt_text}: ")
if param_name == "project_id":
param_answer = param_answer.replace("projectID", "")
self.params[param_name] = param_answer
def get_params(self):
self.get_param("project_name", 'Project name, e.g. "missfairfax"')
self.get_param("project_id", 'Project ID, e.g. "projectID5351bd1e5eca9"')
self.project_dir = f"{self.projects_base}/{self.params['project_name']}"
#self.params["kindlegen_dir"] = self.dp_base + "/kindlegen"
def pgdp_login(self):
payload = {
"destination": "/c/",
"userNM": self.auth["pgdp"]["username"],
"userPW": self.auth["pgdp"]["password"],
}
r = requests.post(f"{PGDP_URL}/c/accounts/login.php", data=payload)
if r.status_code != 200:
print("Error: unable to log into DP site")
sys.exit(1)
self.dp_cookie = r.headers["Set-Cookie"].split(";")[0]
def scrape_project_info(self):
r = requests.post(
f"{PGDP_URL}/c/project.php?id=projectID{self.params['project_id']}",
headers={"Cookie": self.dp_cookie}
)
if r.status_code != 200:
print("Error: unable to retrieve DP project info")
sys.exit(1)
html_doc = re.sub(r"\n", "", r.text)
self.params["title"] = re.sub(
#
# This version broke on cavalry. Changing " to <, see if it works
# r'.*<td[^>]+><b>Title</b></td><td[^>]+>([^"]+)</td>.*',
#
# This version broke on irishjournal, the site updated
# to use th instead of tr... updating to match site.
#r'.*<td[^>]+><b>Title</b></td><td[^>]+>([^<]+)</td>.*',
#
r'.*<th\s+class=["\']label["\']>Title</th>\s*<td[^>]+>([^<]+)</td>.*',
r"\1",
html_doc
)
self.params["author"] = re.sub(
# This version broke on irishjournal, the site updated
# to use th instead of tr... updating to match site.
#r'.*<td[^>]+><b>Author</b></td><td[^>]+>([^<]+)</td>.*',
#
r'.*<th\s+class=["\']label["\']>Author</th>\s*<td[^>]+>([^<]+)</td>.*',
r"\1",
html_doc
)
#<tr><th class='label'>Forum</th><td colspan='4'><a href='https://www.pgdp.net/phpBB3/viewtopic.php?t=63502'>Discuss this project</a> (19 replies)</td></tr>
self.params["forum_link"] = re.sub(
# This version broke on irishjournal, the site updated
# to use th instead of tr... updating to match site.
#r".*<td[^>]+><b>Forum</b></td><td[^>]+><a href='([^']+)'>.*",
#<a href='([^']+)'>
#
r".*<th\s+class=.label.>Forum</th>\s*<td[^>]+>\s*<a href='([^']+)'.*",
r"\1",
html_doc
)
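        # For reference, the Title regex above expects a row shaped roughly
        # like the Forum row quoted above (values are assumed examples):
        # <tr><th class='label'>Title</th><td colspan='4'>Miss Fairfax of Virginia</td></tr>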
def create_directories(self):
os.mkdir(self.project_dir, mode=0o755)
os.chdir(self.project_dir)
os.mkdir("images", mode=0o755)
os.mkdir("illustrations", mode=0o755)
os.mkdir("pngs", mode=0o755)
def create_git_repository(self):
call(["git", "init"])
call(["git", "add", "."])
call(["git", "commit", "-m", "Initial import from DP"])
call(["git", "remote", "add", GITHUB_REMOTE, self.git_remote_url])
call(["git", "push", "-u", GITHUB_REMOTE, GITHUB_BRANCH])
def process_template(self, src_filename, dst_filename=None):
if not dst_filename:
dst_filename = src_filename
with open(f"{self.template_dir}/{src_filename}") as file:
template = Template(file.read())
with open(f"{self.project_dir}/{dst_filename}", "w") as file:
file.write(template.render(self.params))
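        # Minimal sketch of what process_template does, using a hypothetical
        # template line:
        #     Title: {{ title }} ({{ project_id }})
        # Template(...).render(self.params) would then yield, e.g.:
        #     Title: Miss Fairfax of Virginia (5351bd1e5eca9)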
def copy_text_file(self):
project_id = self.params["project_id"]
project_name = self.params["project_name"]
project_dir = self.project_dir
input_file = f"{project_dir}/projectID{project_id}.txt"
if PPGEN:
output_file = f"{project_dir}/{project_name}-src.txt"
else:
output_file = f"{project_dir}/{project_name}-utf8.txt"
shutil.copyfile(input_file, output_file)
def make_github_repo(self):
headers = {
"Accept": "application/vnd.github.v3+json",
"Content-Type": "application/json",
}
payload = {
"name": f"DP_{self.params['project_name']}",
"description": 'DP PP project "{self.params["title"]}" ID {self.params["project_id"]}',
"private": False,
"has_issues": False,
"has_wiki": False,
"has_downloads": False,
"auto_init": False,
}
auth_data = (
self.auth["github"]["username"],
self.auth["github"]["password"],
)
r = requests.post("https://api.github.com/user/repos",
auth=auth_data, headers=headers,
data=json.dumps(payload))
if r.status_code == 201:
print("Created GitHub repository")
json_response = json.loads(r.text)
self.git_remote_url = json_response["clone_url"].replace(
"github.com",
self.auth["github"]["username"] + "@github.com"
)
else:
print(f"ERROR: GitHub response code {r.status_code} unexpected.")
def make_gitlab_repo(self):
headers = {
"Content-Type": "application/json",
"PRIVATE-TOKEN": self.auth["gitlab"],
}
payload = {
"name": f"DP_{self.params['project_name']}",
"description": f'DP PP project "{self.params["title"]}" ID {self.params["project_id"]}',
"visibility": "private",
"issues_enabled": False,
"merge_requests_enabled": False,
"jobs_enabled": False,
"wiki_enabled": False,
"snippets_enabled": False,
"container_registry_enabled": False,
"shared_runners_enabled": False,
"lfs_enabled": False,
"request_access_enabled": False,
}
r = requests.post("https://gitlab.com/api/v4/projects",
headers=headers,
data=json.dumps(payload))
if r.status_code == 201:
print("Created Gitlab repository")
json_response = json.loads(r.text)
self.git_remote_url = json_response["ssh_url_to_repo"]
else:
print(f"ERROR: Gitlab response code {r.status_code} unexpected.")
print(r.text)
def make_online_repo(self):
if self.auth["git_site"] == "github":
project.make_github_repo()
elif self.auth["git_site"] == "gitlab":
project.make_gitlab_repo()
def make_trello_board(self):
client = TrelloClient(
api_key=self.auth["trello"]["api_key"],
api_secret=self.auth["trello"]["api_secret"],
token=self.auth["trello"]["token"],
token_secret=self.auth["trello"]["token_secret"],
)
template = None
for board in client.list_boards():
if board.name == self.trello_template:
template = board
break
new_board = client.add_board(
f"DP: {self.params['title']}",
source_board=template,
permission_level="public"
)
for _list in new_board.list_lists():
if _list.name == "Notes":
for _card in _list.list_cards():
if _card.name == "Project info":
info_card = _card
break
break
new_description = info_card.desc.replace(
"{{PROJECT_NAME}}", self.params["project_name"]
).replace(
"{{PROJECT_ID}}", self.params["project_id"]
)
info_card.set_description(new_description)
self.params["trello_url"] = new_board.url
print(f"Created Trello board - {new_board.url}")
def download_text(self):
print("Downloading text from DP ...", end="", flush=True)
zipfile = f"projectID{self.params['project_id']}.zip"
url = f"{PGDP_URL}/projects/projectID{self.params['project_id']}/projectID{self.params['project_id']}.zip"
r = requests.get(url, headers={"Cookie": self.dp_cookie})
with open(zipfile, "wb") as file:
file.write(r.content)
self.unzip_file(zipfile, self.project_dir)
print(" done.")
def download_images(self):
print("Downloading images from DP ...", end="", flush=True)
zipfile = f"projectID{self.params['project_id']}images.zip"
url = f"{PGDP_URL}/c/tools/download_images.php?projectid=projectID{self.params['project_id']}"
r = requests.get(url, headers={"Cookie": self.dp_cookie})
with open(zipfile, "wb") as file:
file.write(r.content)
self.unzip_file(zipfile, f"{self.project_dir}/pngs")
print(" done.")
def unzip_file(self, filename, path):
with ZipFile(filename, "r") as zip_ref:
zip_ref.extractall(path)
os.remove(filename)
if __name__ == "__main__":
# By default, create remote resources like Trello & GitHub.
CREATE_REMOTE = True
# Process arguments, if any
if len(sys.argv) >= 2:
if sys.argv[1] == "-h" or sys.argv[1] == "--help":
print(f"Usage: {sys.argv[0]} [<option(s)>]")
print(" -h, --help: print this help")
print(" -l, --local: only create local resources (for debug)")
sys.exit(1)
elif sys.argv[1] == "-l" or sys.argv[1] == "--local":
CREATE_REMOTE = False
project = MakeProject()
project.get_params()
project.pgdp_login()
project.scrape_project_info()
project.create_directories()
project.download_text()
project.download_images()
# Make a copy of the text to work on
project.copy_text_file()
if CREATE_REMOTE:
project.make_online_repo()
project.make_trello_board()
project.process_template("Makefile")
project.process_template("README.md")
project.process_template("pp-gitignore", ".gitignore")
if CREATE_REMOTE:
# This is only done if remote, because it will try to push.
project.create_git_repository()
|
mit
| -6,135,142,467,199,165,000
| 34.302469
| 164
| 0.539255
| false
| 3.633418
| false
| false
| false
|
sahlinet/fastapp
|
fastapp/api_serializers.py
|
1
|
3087
|
from rest_framework import serializers
from rest_framework.reverse import reverse
from fastapp.models import Base, Apy, Setting, Counter, TransportEndpoint, Transaction, LogEntry
import logging
logger = logging.getLogger(__name__)
class CounterSerializer(serializers.ModelSerializer):
class Meta:
model = Counter
fields = ('executed', 'failed')
class LogSerializer(serializers.ModelSerializer):
class Meta:
model = LogEntry
fields = ('level', 'msg', 'created', )
class TransactionSerializer(serializers.ModelSerializer):
logs = LogSerializer(many=True, read_only=True)
class Meta:
model = Transaction
fields = ('rid', 'tin', 'tout', 'status', 'created', 'modified', 'async', 'logs', )
class ApySerializer(serializers.ModelSerializer):
counter = CounterSerializer(many=False, read_only=True)
class Meta:
model = Apy
fields = ('id', 'name', 'module', 'counter', 'description', 'public', 'schedule', 'everyone')
def save_object(self, obj, **kwargs):
logger.debug("save_and_sync")
obj.save_and_sync(**kwargs)
class PublicApySerializer(serializers.ModelSerializer):
"""
Return all Apy objects which are made public. Enrich
"""
first_lastname = serializers.SerializerMethodField(method_name="creator")
base = serializers.SerializerMethodField(method_name="base_name")
url = serializers.SerializerMethodField(method_name="detail_view")
class Meta:
model = Apy
fields = ('id', 'name', 'module', 'description',
'first_lastname', 'url', 'base')
def creator(self, obj):
try:
user = obj.base.user
return user.first_name + " " + user.last_name
except Base.DoesNotExist, e:
logger.warn(e)
def base_name(self, obj):
return obj.base.name
def detail_view(self, obj):
return reverse('public-apy-detail', args=[obj.pk],
request=self.context['request'])
class SettingSerializer(serializers.ModelSerializer):
class Meta:
model = Setting
fields = ('id', 'key', 'value', 'public')
class TransportEndpointSerializer(serializers.ModelSerializer):
class Meta:
model = TransportEndpoint
fields = ('id', 'url', 'override_settings_priv',
'override_settings_pub', 'token')
class BaseSerializer(serializers.ModelSerializer):
apy = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
state = serializers.Field()
executors = serializers.Field()
foreign_apys = serializers.HyperlinkedRelatedField(
many=True,
read_only=False,
view_name='public-apy-detail'
)
class Meta:
model = Base
fields = ('id', 'name', 'state', 'uuid',
'executors', 'content', 'foreign_apys', 'public', 'static_public',)
def save_object(self, obj, **kwargs):
super(BaseSerializer, self).save_object(obj, **kwargs)
logger.debug("save_and_sync")
obj.save_and_sync(**kwargs)
|
mit
| -5,149,677,560,269,219,000
| 29.564356
| 101
| 0.63816
| false
| 4.056505
| false
| false
| false
|
kern3020/opportunity
|
opportunity/tracker/migrations/0002_auto__add_mentorship.py
|
1
|
13807
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Mentorship'
db.create_table('tracker_mentorship', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('jobseeker', self.gf('django.db.models.fields.related.ForeignKey')(related_name='jobseeker', to=orm['tracker.UserProfile'])),
('mentor', self.gf('django.db.models.fields.related.ForeignKey')(related_name='mentor', to=orm['tracker.UserProfile'])),
('coach', self.gf('django.db.models.fields.related.ForeignKey')(related_name='coach', to=orm['tracker.UserProfile'])),
('startDate', self.gf('django.db.models.fields.DateField')()),
))
db.send_create_signal('tracker', ['Mentorship'])
def backwards(self, orm):
# Deleting model 'Mentorship'
db.delete_table('tracker_mentorship')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'tracker.apply': {
'Meta': {'ordering': "['-when', 'time']", 'object_name': 'Apply'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'position': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Position']", 'unique': 'True'}),
'time': ('django.db.models.fields.TimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'when': ('django.db.models.fields.DateField', [], {})
},
'tracker.company': {
'Meta': {'ordering': "['name']", 'object_name': 'Company'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'division': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'state_province': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'zipCode': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'})
},
'tracker.conversation': {
'Meta': {'ordering': "['-when', 'time']", 'object_name': 'Conversation'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Person']", 'unique': 'True'}),
'time': ('django.db.models.fields.TimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'via': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'when': ('django.db.models.fields.DateField', [], {})
},
'tracker.gratitude': {
'Meta': {'ordering': "['-when', 'time']", 'object_name': 'Gratitude'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Person']", 'unique': 'True'}),
'time': ('django.db.models.fields.TimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'when': ('django.db.models.fields.DateField', [], {})
},
'tracker.interview': {
'Meta': {'ordering': "['-when', 'time']", 'object_name': 'Interview'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'position': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Position']", 'unique': 'True'}),
'time': ('django.db.models.fields.TimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'when': ('django.db.models.fields.DateField', [], {}),
'withWhom': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Person']"})
},
'tracker.lunch': {
'Meta': {'ordering': "['-when', 'time']", 'object_name': 'Lunch'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'time': ('django.db.models.fields.TimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'venue': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'when': ('django.db.models.fields.DateField', [], {}),
'withWhom': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Person']"})
},
'tracker.mentorship': {
'Meta': {'object_name': 'Mentorship'},
'coach': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'coach'", 'to': "orm['tracker.UserProfile']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jobseeker': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'jobseeker'", 'to': "orm['tracker.UserProfile']"}),
'mentor': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'mentor'", 'to': "orm['tracker.UserProfile']"}),
'startDate': ('django.db.models.fields.DateField', [], {})
},
'tracker.networking': {
'Meta': {'ordering': "['-when', 'time']", 'object_name': 'Networking'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'time': ('django.db.models.fields.TimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'venue': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Company']", 'unique': 'True'}),
'when': ('django.db.models.fields.DateField', [], {})
},
'tracker.onlinepresence': {
'Meta': {'ordering': "['name']", 'object_name': 'OnlinePresence'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"})
},
'tracker.par': {
'Meta': {'ordering': "['question']", 'object_name': 'PAR'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'par_response': ('django.db.models.fields.TextField', [], {}),
'question': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"})
},
'tracker.person': {
'Meta': {'object_name': 'Person'},
'company': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Company']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"})
},
'tracker.pitch': {
'Meta': {'object_name': 'Pitch'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'thePitch': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"})
},
'tracker.position': {
'Meta': {'ordering': "['title']", 'object_name': 'Position'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'company': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Company']", 'unique': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'tracker.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
}
}
complete_apps = ['tracker']
|
mit
| -6,980,998,740,839,843,000
| 71.673684
| 182
| 0.539581
| false
| 3.700616
| false
| false
| false
|
adhish20/TwitterWithCassandra
|
users/forms.py
|
1
|
1902
|
import uuid
from django import forms
import cass
class LoginForm(forms.Form):
username = forms.CharField(max_length=30)
password = forms.CharField(widget=forms.PasswordInput(render_value=False))
def clean(self):
username = self.cleaned_data['username']
password = self.cleaned_data['password']
try:
user = cass.get_user_by_username(username)
except cass.DatabaseError:
raise forms.ValidationError(u'Invalid username and/or password')
if user.get('password') != password:
raise forms.ValidationError(u'Invalid username and/or password')
return self.cleaned_data
def get_username(self):
return self.cleaned_data['username']
class RegistrationForm(forms.Form):
username = forms.RegexField(regex=r'^\w+$', max_length=30)
password1 = forms.CharField(widget=forms.PasswordInput(render_value=False))
password2 = forms.CharField(widget=forms.PasswordInput(render_value=False))
def clean_username(self):
username = self.cleaned_data['username']
try:
cass.get_user_by_username(username)
raise forms.ValidationError(u'Username is already taken')
except cass.DatabaseError:
pass
return username
def clean(self):
if ('password1' in self.cleaned_data and 'password2' in
self.cleaned_data):
password1 = self.cleaned_data['password1']
password2 = self.cleaned_data['password2']
if password1 != password2:
raise forms.ValidationError(
u'You must type the same password each time')
return self.cleaned_data
def save(self):
username = self.cleaned_data['username']
password = self.cleaned_data['password1']
cass.save_user(username, password)
return username
|
mit
| 3,853,147,340,635,723,000
| 34.222222
| 79
| 0.638275
| false
| 4.303167
| false
| false
| false
|
tobiasgehring/qudi
|
logic/sequence_generator_logic.py
|
1
|
52322
|
# -*- coding: utf-8 -*-
"""
This file contains the Qudi sequence generator logic for general sequence structure.
Qudi is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Qudi is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Qudi. If not, see <http://www.gnu.org/licenses/>.
Copyright (c) the Qudi Developers. See the COPYRIGHT.txt file at the
top-level directory of this distribution and at <https://github.com/Ulm-IQO/qudi/>
"""
import numpy as np
import pickle
import os
import time
from qtpy import QtCore
from collections import OrderedDict
import inspect
import importlib
import sys
from logic.pulse_objects import PulseBlockElement
from logic.pulse_objects import PulseBlock
from logic.pulse_objects import PulseBlockEnsemble
from logic.pulse_objects import PulseSequence
from logic.generic_logic import GenericLogic
from logic.sampling_functions import SamplingFunctions
from logic.samples_write_methods import SamplesWriteMethods
class SequenceGeneratorLogic(GenericLogic, SamplingFunctions, SamplesWriteMethods):
"""unstable: Nikolas Tomek
This is the Logic class for the pulse (sequence) generation.
The basis communication with the GUI should be done as follows:
The logic holds all the created objects in its internal lists. The GUI is
able to view this list and get the element of this list.
How the logic will contruct its objects according to configuration dicts.
The configuration dicts contain essentially, which parameters of either the
PulseBlockElement objects or the PulseBlock objects can be changed and
set via the GUI.
In the end the information transfer happend through lists (read by the GUI)
and dicts (set by the GUI). The logic sets(creats) the objects in the list
and read the dict, which tell it which parameters to expect from the GUI.
"""
_modclass = 'sequencegeneratorlogic'
_modtype = 'logic'
# define signals
sigBlockDictUpdated = QtCore.Signal(dict)
sigEnsembleDictUpdated = QtCore.Signal(dict)
sigSequenceDictUpdated = QtCore.Signal(dict)
sigSampleEnsembleComplete = QtCore.Signal(str, np.ndarray, np.ndarray)
sigSampleSequenceComplete = QtCore.Signal(str, list)
sigCurrentBlockUpdated = QtCore.Signal(object)
sigCurrentEnsembleUpdated = QtCore.Signal(object)
sigCurrentSequenceUpdated = QtCore.Signal(object)
sigSettingsUpdated = QtCore.Signal(list, str, float, dict, str)
sigPredefinedSequencesUpdated = QtCore.Signal(dict)
sigPredefinedSequenceGenerated = QtCore.Signal(str)
def __init__(self, config, **kwargs):
super().__init__(config=config, **kwargs)
self.log.info('The following configuration was found.')
# checking for the right configuration
for key in config.keys():
self.log.info('{0}: {1}'.format(key, config[key]))
# Get all the attributes from the SamplingFunctions module:
SamplingFunctions.__init__(self)
# Get all the attributes from the SamplesWriteMethods module:
SamplesWriteMethods.__init__(self)
# here the currently shown data objects of the editors should be stored
self.current_block = None
self.current_ensemble = None
self.current_sequence = None
# The created PulseBlock objects are saved in this dictionary. The keys are the names.
self.saved_pulse_blocks = OrderedDict()
# The created PulseBlockEnsemble objects are saved in this dictionary.
# The keys are the names.
self.saved_pulse_block_ensembles = OrderedDict()
# The created Sequence objects are saved in this dictionary. The keys are the names.
self.saved_pulse_sequences = OrderedDict()
if 'pulsed_file_dir' in config.keys():
self.pulsed_file_dir = config['pulsed_file_dir']
if not os.path.exists(self.pulsed_file_dir):
homedir = self.get_home_dir()
self.pulsed_file_dir = os.path.join(homedir, 'pulsed_files')
self.log.warning('The directort defined in "pulsed_file_dir" in the config for '
'SequenceGeneratorLogic class does not exist! The default home '
'directory\n{0}'
'\nwill be taken instead.'.format(self.pulsed_file_dir))
else:
homedir = self.get_home_dir()
self.pulsed_file_dir = os.path.join(homedir, 'pulsed_files')
self.log.warning('No directory with the attribute "pulsed_file_dir" is defined for the '
'SequenceGeneratorLogic! The default home directory\n{0}\nwill be '
'taken instead.'.format(self.pulsed_file_dir))
# Byte size of the max. memory usage during sampling/write-to-file process
if 'overhead_bytes' in config.keys():
self.sampling_overhead_bytes = config['overhead_bytes']
else:
self.sampling_overhead_bytes = None
self.log.warning('No max. memory overhead specified in config.\nIn order to avoid '
'memory overflow during sampling/writing of Pulse objects you must '
'set "overhead_bytes".')
# directory for additional generate methods to import
# (other than qudi/logic/predefined_methods)
if 'additional_methods_dir' in config.keys():
if os.path.exists(config['additional_methods_dir']):
self.additional_methods_dir = config['additional_methods_dir']
else:
self.additional_methods_dir = None
self.log.error('Specified path "{0}" for import of additional generate methods '
'does not exist.'.format(config['additional_methods_dir']))
else:
self.additional_methods_dir = None
self.block_dir = self._get_dir_for_name('pulse_block_objects')
self.ensemble_dir = self._get_dir_for_name('pulse_ensemble_objects')
self.sequence_dir = self._get_dir_for_name('sequence_objects')
self.waveform_dir = self._get_dir_for_name('sampled_hardware_files')
self.temp_dir = self._get_dir_for_name('temporary_files')
# Information on used channel configuration for sequence generation
# IMPORTANT: THIS CONFIG DOES NOT REPRESENT THE ACTUAL SETTINGS ON THE HARDWARE
self.analog_channels = 2
self.digital_channels = 4
self.activation_config = ['a_ch1', 'd_ch1', 'd_ch2', 'a_ch2', 'd_ch3', 'd_ch4']
self.laser_channel = 'd_ch1'
self.amplitude_dict = OrderedDict({'a_ch1': 0.5, 'a_ch2': 0.5, 'a_ch3': 0.5, 'a_ch4': 0.5})
self.sample_rate = 25e9
# The file format for the sampled hardware-compatible waveforms and sequences
self.waveform_format = 'wfmx' # can be 'wfmx', 'wfm' or 'fpga'
self.sequence_format = 'seq' # only .seq file format
# a dictionary with all predefined generator methods and measurement sequence names
self.generate_methods = None
def on_activate(self):
""" Initialisation performed during activation of the module.
"""
self._get_blocks_from_file()
self._get_ensembles_from_file()
self._get_sequences_from_file()
self._attach_predefined_methods()
if 'activation_config' in self._statusVariables:
self.activation_config = self._statusVariables['activation_config']
if 'laser_channel' in self._statusVariables:
self.laser_channel = self._statusVariables['laser_channel']
if 'amplitude_dict' in self._statusVariables:
self.amplitude_dict = self._statusVariables['amplitude_dict']
if 'sample_rate' in self._statusVariables:
self.sample_rate = self._statusVariables['sample_rate']
if 'waveform_format' in self._statusVariables:
self.waveform_format = self._statusVariables['waveform_format']
self.analog_channels = len([chnl for chnl in self.activation_config if 'a_ch' in chnl])
self.digital_channels = len([chnl for chnl in self.activation_config if 'd_ch' in chnl])
self.sigSettingsUpdated.emit(self.activation_config, self.laser_channel, self.sample_rate,
self.amplitude_dict, self.waveform_format)
def on_deactivate(self):
""" Deinitialisation performed during deactivation of the module.
"""
self._statusVariables['activation_config'] = self.activation_config
self._statusVariables['laser_channel'] = self.laser_channel
self._statusVariables['amplitude_dict'] = self.amplitude_dict
self._statusVariables['sample_rate'] = self.sample_rate
self._statusVariables['waveform_format'] = self.waveform_format
def _attach_predefined_methods(self):
"""
Retrieve in the folder all files for predefined methods and attach their methods to the
@return:
"""
self.generate_methods = OrderedDict()
filenames_list = []
additional_filenames_list = []
# The assumption is that in the directory predefined_methods, there are
# *.py files, which contain only methods!
path = os.path.join(self.get_main_dir(), 'logic', 'predefined_methods')
for entry in os.listdir(path):
filepath = os.path.join(path, entry)
if os.path.isfile(filepath) and entry.endswith('.py'):
filenames_list.append(entry[:-3])
# Also attach methods from the non-default additional methods directory if defined in config
if self.additional_methods_dir is not None:
# attach to path
sys.path.append(self.additional_methods_dir)
for entry in os.listdir(self.additional_methods_dir):
filepath = os.path.join(self.additional_methods_dir, entry)
if os.path.isfile(filepath) and entry.endswith('.py'):
additional_filenames_list.append(entry[:-3])
for filename in filenames_list:
mod = importlib.import_module('logic.predefined_methods.{0}'.format(filename))
# To allow changes in predefined methods during runtime by simply reloading
# sequence_generator_logic.
importlib.reload(mod)
for method in dir(mod):
try:
# Check for callable function or method:
ref = getattr(mod, method)
if callable(ref) and (inspect.ismethod(ref) or inspect.isfunction(ref)):
# Bind the method as an attribute to the Class
setattr(SequenceGeneratorLogic, method, getattr(mod, method))
# Add method to dictionary if it is a generator method
if method.startswith('generate_'):
self.generate_methods[method[9:]] = eval('self.'+method)
except:
self.log.error('It was not possible to import element {0} from {1} into '
'SequenceGenerationLogic.'.format(method, filename))
for filename in additional_filenames_list:
mod = importlib.import_module(filename)
for method in dir(mod):
try:
# Check for callable function or method:
ref = getattr(mod, method)
if callable(ref) and (inspect.ismethod(ref) or inspect.isfunction(ref)):
# Bind the method as an attribute to the Class
setattr(SequenceGeneratorLogic, method, getattr(mod, method))
# Add method to dictionary if it is a generator method
if method.startswith('generate_'):
self.generate_methods[method[9:]] = eval('self.'+method)
except:
self.log.error('It was not possible to import element {0} from {1} into '
'SequenceGenerationLogic.'.format(method, filepath))
self.sigPredefinedSequencesUpdated.emit(self.generate_methods)
return
def _get_dir_for_name(self, name):
""" Get the path to the pulsed sub-directory 'name'.
@param str name: name of the folder
@return: str, absolute path to the directory with folder 'name'.
"""
path = os.path.join(self.pulsed_file_dir, name)
if not os.path.exists(path):
os.makedirs(os.path.abspath(path))
return os.path.abspath(path)
def request_init_values(self):
"""
@return:
"""
self.sigBlockDictUpdated.emit(self.saved_pulse_blocks)
self.sigEnsembleDictUpdated.emit(self.saved_pulse_block_ensembles)
self.sigSequenceDictUpdated.emit(self.saved_pulse_sequences)
self.sigCurrentBlockUpdated.emit(self.current_block)
self.sigCurrentEnsembleUpdated.emit(self.current_ensemble)
self.sigCurrentSequenceUpdated.emit(self.current_sequence)
self.sigSettingsUpdated.emit(self.activation_config, self.laser_channel, self.sample_rate,
self.amplitude_dict, self.waveform_format)
self.sigPredefinedSequencesUpdated.emit(self.generate_methods)
return
def set_settings(self, activation_config, laser_channel, sample_rate, amplitude_dict, waveform_format):
"""
Sets all settings for the generator logic.
@param activation_config:
@param laser_channel:
@param sample_rate:
@param amplitude_dict:
@param waveform_format:
@return:
"""
# check if the currently chosen laser channel is part of the config and adjust if this
# is not the case. Choose first digital channel in that case.
if laser_channel not in activation_config:
laser_channel = None
for channel in activation_config:
if 'd_ch' in channel:
laser_channel = channel
break
if laser_channel is None:
self.log.warning('No digital channel present in sequence generator activation '
'config.')
self.laser_channel = laser_channel
self.activation_config = activation_config
self.analog_channels = len([chnl for chnl in activation_config if 'a_ch' in chnl])
self.digital_channels = len([chnl for chnl in activation_config if 'd_ch' in chnl])
self.amplitude_dict = amplitude_dict
self.sample_rate = sample_rate
self.waveform_format = waveform_format
self.sigSettingsUpdated.emit(activation_config, laser_channel, sample_rate, amplitude_dict,
waveform_format)
return self.activation_config, self.laser_channel, self.sample_rate, self.amplitude_dict, \
waveform_format
# -----------------------------------------------------------------------------
# BEGIN sequence/block generation
# -----------------------------------------------------------------------------
def get_saved_asset(self, name):
"""
Returns the data object for a saved Ensemble/Sequence with name "name". Searches in the
saved assets for a Sequence object first. If no Sequence by that name could be found search
for Ensembles instead. If neither could be found return None.
@param name: Name of the Sequence/Ensemble
@return: PulseSequence | PulseBlockEnsemble | None
"""
if name == '':
asset_obj = None
elif name in list(self.saved_pulse_sequences):
asset_obj = self.saved_pulse_sequences[name]
elif name in list(self.saved_pulse_block_ensembles):
asset_obj = self.saved_pulse_block_ensembles[name]
else:
asset_obj = None
self.log.warning('No PulseSequence or PulseBlockEnsemble by the name "{0}" could be '
'found in saved assets. Returning None.'.format(name))
return asset_obj
def save_block(self, name, block):
""" Serialize a PulseBlock object to a *.blk file.
@param name: string, name of the block to save
@param block: PulseBlock object which will be serialized
"""
# TODO: Overwrite handling
block.name = name
self.current_block = block
self.saved_pulse_blocks[name] = block
self._save_blocks_to_file()
self.sigBlockDictUpdated.emit(self.saved_pulse_blocks)
self.sigCurrentBlockUpdated.emit(self.current_block)
return
def load_block(self, name):
"""
@param name:
@return:
"""
if name not in self.saved_pulse_blocks:
self.log.error('PulseBlock "{0}" could not be found in saved pulse blocks. Load failed.'
''.format(name))
return
block = self.saved_pulse_blocks[name]
self.current_block = block
self.sigCurrentBlockUpdated.emit(self.current_block)
return
def delete_block(self, name):
""" Remove the serialized object "name" from the block list and HDD.
@param name: string, name of the PulseBlock object to be removed.
"""
if name in list(self.saved_pulse_blocks):
del(self.saved_pulse_blocks[name])
if hasattr(self.current_block, 'name'):
if self.current_block.name == name:
self.current_block = None
self.sigCurrentBlockUpdated.emit(self.current_block)
self._save_blocks_to_file()
self.sigBlockDictUpdated.emit(self.saved_pulse_blocks)
else:
self.log.warning('PulseBlock object with name "{0}" not found in saved '
'blocks.\nTherefore nothing is removed.'.format(name))
return
def _get_blocks_from_file(self):
""" Update the saved_pulse_block dict from file """
block_files = [f for f in os.listdir(self.block_dir) if 'block_dict.blk' in f]
if len(block_files) == 0:
self.log.info('No serialized block dict was found in {0}.'.format(self.block_dir))
self.saved_pulse_blocks = OrderedDict()
self.sigBlockDictUpdated.emit(self.saved_pulse_blocks)
return
# raise error if more than one file is present
if len(block_files) > 1:
self.log.error('More than one serialized block dict was found in {0}.\n'
'Using {1}.'.format(self.block_dir, block_files[-1]))
block_files = block_files[-1]
try:
with open(os.path.join(self.block_dir, block_files), 'rb') as infile:
self.saved_pulse_blocks = pickle.load(infile)
except:
self.saved_pulse_blocks = OrderedDict()
self.log.error('Failed to deserialize ensemble dict "{0}" from "{1}".'
''.format(block_files, self.block_dir))
self.sigBlockDictUpdated.emit(self.saved_pulse_blocks)
return
def _save_blocks_to_file(self):
""" Saves the saved_pulse_block dict to file """
try:
with open(os.path.join(self.block_dir, 'block_dict.blk.tmp'), 'wb') as outfile:
pickle.dump(self.saved_pulse_blocks, outfile)
except:
self.log.error('Failed to serialize ensemble dict in "{0}".'
''.format(os.path.join(self.block_dir, 'block_dict.blk.tmp')))
return
# remove old file and rename temp file
try:
os.rename(os.path.join(self.block_dir, 'block_dict.blk.tmp'),
os.path.join(self.block_dir, 'block_dict.blk'))
except WindowsError:
os.remove(os.path.join(self.block_dir, 'block_dict.blk'))
os.rename(os.path.join(self.block_dir, 'block_dict.blk.tmp'),
os.path.join(self.block_dir, 'block_dict.blk'))
return
def save_ensemble(self, name, ensemble):
""" Saves a PulseBlockEnsemble with name name to file.
@param str name: name of the ensemble, which will be serialized.
@param obj ensemble: a PulseBlockEnsemble object
"""
# TODO: Overwrite handling
ensemble.name = name
self.current_ensemble = ensemble
self.saved_pulse_block_ensembles[name] = ensemble
self._save_ensembles_to_file()
self.sigEnsembleDictUpdated.emit(self.saved_pulse_block_ensembles)
self.sigCurrentEnsembleUpdated.emit(self.current_ensemble)
return
def load_ensemble(self, name):
"""
@param name:
@return:
"""
if name not in self.saved_pulse_block_ensembles:
self.log.error('PulseBlockEnsemble "{0}" could not be found in saved pulse block '
'ensembles. Load failed.'.format(name))
return
ensemble = self.saved_pulse_block_ensembles[name]
# set generator settings if found in ensemble metadata
if ensemble.sample_rate is not None:
self.sample_rate = ensemble.sample_rate
if ensemble.amplitude_dict is not None:
self.amplitude_dict = ensemble.amplitude_dict
if ensemble.activation_config is not None:
self.activation_config = ensemble.activation_config
if ensemble.laser_channel is not None:
self.laser_channel = ensemble.laser_channel
self.sigSettingsUpdated.emit(self.activation_config, self.laser_channel, self.sample_rate,
self.amplitude_dict, self.waveform_format)
self.current_ensemble = ensemble
self.sigCurrentEnsembleUpdated.emit(ensemble)
return
def delete_ensemble(self, name):
""" Remove the ensemble with 'name' from the ensemble list and HDD. """
if name in list(self.saved_pulse_block_ensembles):
del(self.saved_pulse_block_ensembles[name])
if hasattr(self.current_ensemble, 'name'):
if self.current_ensemble.name == name:
self.current_ensemble = None
self.sigCurrentEnsembleUpdated.emit(self.current_ensemble)
self._save_ensembles_to_file()
self.sigEnsembleDictUpdated.emit(self.saved_pulse_block_ensembles)
else:
self.log.warning('PulseBlockEnsemble object with name "{0}" not found in saved '
'ensembles.\nTherefore nothing is removed.'.format(name))
return
def _get_ensembles_from_file(self):
""" Update the saved_pulse_block_ensembles dict from file """
ensemble_files = [f for f in os.listdir(self.ensemble_dir) if 'ensemble_dict.ens' in f]
if len(ensemble_files) == 0:
self.log.info('No serialized ensembles dict was found in {0}.'
''.format(self.ensemble_dir))
self.saved_pulse_block_ensembles = OrderedDict()
self.sigEnsembleDictUpdated.emit(self.saved_pulse_block_ensembles)
return
# raise error if more than one file is present
if len(ensemble_files) > 1:
self.log.error('More than one serialized ensemble dict was found in {0}.\n'
'Using {1}.'.format(self.ensemble_dir, ensemble_files[-1]))
ensemble_files = ensemble_files[-1]
try:
with open(os.path.join(self.ensemble_dir, ensemble_files), 'rb') as infile:
self.saved_pulse_block_ensembles = pickle.load(infile)
except:
self.saved_pulse_block_ensembles = OrderedDict()
self.log.error('Failed to deserialize ensemble dict "{0}" from "{1}".'
''.format(ensemble_files, self.ensemble_dir))
self.sigEnsembleDictUpdated.emit(self.saved_pulse_block_ensembles)
return
def _save_ensembles_to_file(self):
""" Saves the saved_pulse_block_ensembles dict to file """
try:
with open(os.path.join(self.ensemble_dir, 'ensemble_dict.ens.tmp'), 'wb') as outfile:
pickle.dump(self.saved_pulse_block_ensembles, outfile)
except:
self.log.error('Failed to serialize ensemble dict in "{0}".'
''.format(os.path.join(self.ensemble_dir, 'ensemble_dict.ens.tmp')))
return
# remove old file and rename temp file
try:
os.rename(os.path.join(self.ensemble_dir, 'ensemble_dict.ens.tmp'),
os.path.join(self.ensemble_dir, 'ensemble_dict.ens'))
except WindowsError:
os.remove(os.path.join(self.ensemble_dir, 'ensemble_dict.ens'))
os.rename(os.path.join(self.ensemble_dir, 'ensemble_dict.ens.tmp'),
os.path.join(self.ensemble_dir, 'ensemble_dict.ens'))
return
def save_sequence(self, name, sequence):
""" Serialize the PulseSequence object with name 'name' to file.
@param str name: name of the sequence object.
@param object sequence: a PulseSequence object, which is going to be
serialized to file.
@return: str: name of the serialized object, if needed.
"""
# TODO: Overwrite handling
sequence.name = name
self.current_sequence = sequence
self.saved_pulse_sequences[name] = sequence
self._save_sequences_to_file()
self.sigSequenceDictUpdated.emit(self.saved_pulse_sequences)
self.sigCurrentSequenceUpdated.emit(self.current_sequence)
def load_sequence(self, name):
"""
@param name:
@return:
"""
if name not in self.saved_pulse_sequences:
self.log.error('PulseSequence "{0}" could not be found in saved pulse sequences. '
'Load failed.'.format(name))
return
sequence = self.saved_pulse_sequences[name]
# set generator settings if found in seqeunce metadata
if sequence.sample_rate is not None:
self.sample_rate = sequence.sample_rate
if sequence.amplitude_dict is not None:
self.amplitude_dict = sequence.amplitude_dict
if sequence.activation_config is not None:
self.activation_config = sequence.activation_config
if sequence.laser_channel is not None:
self.laser_channel = sequence.laser_channel
self.sigSettingsUpdated.emit(self.activation_config, self.laser_channel, self.sample_rate,
self.amplitude_dict, self.waveform_format)
self.current_sequence = sequence
self.sigCurrentSequenceUpdated.emit(sequence)
return
def delete_sequence(self, name):
""" Remove the sequence "name" from the sequence list and HDD.
@param str name: name of the sequence object, which should be deleted.
"""
if name in list(self.saved_pulse_sequences):
del(self.saved_pulse_sequences[name])
if hasattr(self.current_sequence, 'name'):
if self.current_sequence.name == name:
self.current_sequence = None
self.sigCurrentSequenceUpdated.emit(self.current_sequence)
self._save_sequences_to_file()
self.sigSequenceDictUpdated.emit(self.saved_pulse_sequences)
else:
self.log.warning('PulseBlockEnsemble object with name "{0}" not found in saved '
'ensembles.\nTherefore nothing is removed.'.format(name))
return
def generate_predefined_sequence(self, predefined_sequence_name, args):
"""
@param predefined_sequence_name:
@param args:
@return:
"""
gen_method = self.generate_methods[predefined_sequence_name]
try:
gen_method(*args)
except:
self.log.error('Generation of predefined sequence "{0}" failed.'
''.format(predefined_sequence_name))
return
self.sigPredefinedSequenceGenerated.emit(predefined_sequence_name)
return
def _get_sequences_from_file(self):
""" Update the saved_pulse_sequences dict from file """
sequence_files = [f for f in os.listdir(self.sequence_dir) if 'sequence_dict.sequ' in f]
if len(sequence_files) == 0:
self.log.info('No serialized sequence dict was found in {0}.'.format(self.sequence_dir))
self.saved_pulse_sequences = OrderedDict()
self.sigSequenceDictUpdated.emit(self.saved_pulse_sequences)
return
# raise error if more than one file is present
if len(sequence_files) > 1:
self.log.error('More than one serialized sequence dict was found in {0}.\n'
'Using {1}.'.format(self.sequence_dir, sequence_files[-1]))
sequence_files = sequence_files[-1]
try:
with open(os.path.join(self.sequence_dir, sequence_files), 'rb') as infile:
self.saved_pulse_sequences = pickle.load(infile)
except:
self.saved_pulse_sequences = OrderedDict()
self.log.error('Failed to deserialize sequence dict "{0}" from "{1}".'
''.format(sequence_files, self.sequence_dir))
self.sigSequenceDictUpdated.emit(self.saved_pulse_sequences)
return
def _save_sequences_to_file(self):
""" Saves the saved_pulse_sequences dict to file """
try:
with open(os.path.join(self.sequence_dir, 'sequence_dict.sequ.tmp'), 'wb') as outfile:
pickle.dump(self.saved_pulse_sequences, outfile)
except:
self.log.error('Failed to serialize ensemble dict in "{0}".'
''.format(os.path.join(self.sequence_dir, 'sequence_dict.sequ.tmp')))
return
# remove old file and rename temp file
try:
os.rename(os.path.join(self.sequence_dir, 'sequence_dict.sequ.tmp'),
os.path.join(self.sequence_dir, 'sequence_dict.sequ'))
except WindowsError:
os.remove(os.path.join(self.sequence_dir, 'sequence_dict.sequ'))
os.rename(os.path.join(self.sequence_dir, 'sequence_dict.sequ.tmp'),
os.path.join(self.sequence_dir, 'sequence_dict.sequ'))
return
#---------------------------------------------------------------------------
# END sequence/block generation
#---------------------------------------------------------------------------
#---------------------------------------------------------------------------
# BEGIN sequence/block sampling
#---------------------------------------------------------------------------
def _analyze_block_ensemble(self, ensemble):
"""
@param ensemble:
@return:
"""
state_length_bins_arr = np.array([], dtype=int)
number_of_elements = 0
for block, reps in ensemble.block_list:
number_of_elements += (reps+1)*len(block.element_list)
num_state_changes = (reps+1) * len(block.element_list)
tmp_length_bins = np.zeros(num_state_changes, dtype=int)
# Iterate over all repertitions of the current block
state_index = 0
for rep_no in range(reps+1):
# Iterate over the Block_Elements inside the current block
for elem_index, block_element in enumerate(block.element_list):
init_length_s = block_element.init_length_s
increment_s = block_element.increment_s
element_length_s = init_length_s + (rep_no * increment_s)
tmp_length_bins[state_index] = int(np.rint(element_length_s * self.sample_rate))
state_index += 1
state_length_bins_arr = np.append(state_length_bins_arr, tmp_length_bins)
number_of_samples = np.sum(state_length_bins_arr)
number_of_states = len(state_length_bins_arr)
return number_of_samples, number_of_elements, number_of_states, state_length_bins_arr
def sample_pulse_block_ensemble(self, ensemble_name, write_to_file=True, offset_bin=0,
name_tag=None):
""" General sampling of a PulseBlockEnsemble object, which serves as the construction plan.
@param str ensemble_name: Name, which should correlate with the name of on of the displayed
ensembles.
@param bool write_to_file: Write either to RAM or to File (depends on the available space
in RAM). If set to FALSE, this method will return the samples
(digital and analog) as numpy arrays
@param int offset_bin: If many pulse ensembles are samples sequentially, then the
offset_bin of the previous sampling can be passed to maintain
rotating frame across pulse_block_ensembles
@param str name_tag: a name tag, which is used to keep the sampled files together, which
where sampled from the same PulseBlockEnsemble object but where
different offset_bins were used.
@return tuple: of length 4 with
(analog_samples, digital_samples, [<created_files>], offset_bin).
analog_samples:
numpy arrays containing the sampled voltages
digital_samples:
numpy arrays containing the sampled logic levels
[<created_files>]:
list of strings, with the actual created files through the pulsing
device
offset_bin:
integer, which is used for maintaining the rotation frame.
This method is creating the actual samples (voltages and logic states) for each time step
of the analog and digital channels specified in the PulseBlockEnsemble.
Therefore it iterates through all blocks, repetitions and elements of the ensemble and
calculates the exact voltages (float64) according to the specified math_function. The
samples are later on stored inside a float32 array.
So each element is calculated with high precision (float64) and then down-converted to
float32 to be stored.
To preserve the rotating frame, an offset counter is used to indicate the absolute time
within the ensemble. All calculations are done with time bins (dtype=int) to avoid rounding
errors. Only in the last step when a single PulseBlockElement object is sampled these
integer bin values are translated into a floating point time.
The chunkwise write mode is used to save memory usage at the expense of time. Here for each
PulseBlockElement the write_to_file method in the HW module is called to avoid large
arrays inside the memory. In other words: The whole sample arrays are never created at any
time. This results in more function calls and general overhead causing the much longer time
to complete.
"""
# lock module if it's not already locked (sequence sampling in progress)
if self.getState() == 'idle':
self.lock()
sequence_sampling_in_progress = False
else:
sequence_sampling_in_progress = True
# determine if chunkwise writing is enabled (the overhead byte size is set)
chunkwise = self.sampling_overhead_bytes is not None
# Set the filename (excluding the channel naming suffix, i.e. '_ch1')
if name_tag is None:
filename = ensemble_name
else:
filename = name_tag
# check for old files associated with the new ensemble and delete them from host PC
if write_to_file:
# get sampled filenames on host PC referring to the same ensemble
# be careful, in contrast to linux os, windows os is in general case
# insensitive! Therefore one needs to check and remove all files
# matching the case insensitive case for windows os.
if 'win' in sys.platform:
# make it simple and make everything lowercase.
filename_list = [f for f in os.listdir(self.waveform_dir) if
f.lower().startswith(filename.lower() + '_ch')]
else:
filename_list = [f for f in os.listdir(self.waveform_dir) if
f.startswith(filename + '_ch')]
# delete all filenames in the list
for file in filename_list:
os.remove(os.path.join(self.waveform_dir, file))
if len(filename_list) != 0:
self.log.info('Found old sampled ensembles for name "{0}". Files deleted before '
'sampling: {1}'.format(filename, filename_list))
start_time = time.time()
# get ensemble
ensemble = self.saved_pulse_block_ensembles[ensemble_name]
# Ensemble parameters to determine the shape of sample arrays
ana_channels = ensemble.analog_channels
dig_channels = ensemble.digital_channels
ana_chnl_names = [chnl for chnl in self.activation_config if 'a_ch' in chnl]
dig_chnl_names = [chnl for chnl in self.activation_config if 'd_ch' in chnl]
if self.digital_channels != dig_channels or self.analog_channels != ana_channels:
self.log.error('Sampling of PulseBlockEnsemble "{0}" failed!\nMismatch in number of '
'analog and digital channels between logic ({1}, {2}) and '
'PulseBlockEnsemble ({3}, {4}).'
''.format(ensemble_name, self.analog_channels, self.digital_channels,
ana_channels, dig_channels))
return np.array([]), np.array([]), -1
number_of_samples, number_of_elements, number_of_states, state_length_bins_arr = self._analyze_block_ensemble(ensemble)
# The time bin offset for each element to be sampled to preserve rotating frame.
if chunkwise and write_to_file:
# Flags and counter for chunkwise writing
is_first_chunk = True
is_last_chunk = False
element_count = 0
else:
# Allocate huge sample arrays if chunkwise writing is disabled.
analog_samples = np.empty([ana_channels, number_of_samples], dtype = 'float32')
digital_samples = np.empty([dig_channels, number_of_samples], dtype = bool)
# Starting index for the sample array entrys
entry_ind = 0
# Iterate over all blocks within the PulseBlockEnsemble object
for block, reps in ensemble.block_list:
# Iterate over all repertitions of the current block
for rep_no in range(reps+1):
# Iterate over the Block_Elements inside the current block
for elem_ind, block_element in enumerate(block.element_list):
parameters = block_element.parameters
init_length_s = block_element.init_length_s
increment_s = block_element.increment_s
digital_high = block_element.digital_high
pulse_function = block_element.pulse_function
element_length_s = init_length_s + (rep_no*increment_s)
element_length_bins = int(np.rint(element_length_s * self.sample_rate))
# create floating point time array for the current element inside rotating frame
time_arr = (offset_bin + np.arange(element_length_bins, dtype='float64')) / self.sample_rate
if chunkwise and write_to_file:
# determine it the current element is the last one to be sampled.
# Toggle the is_last_chunk flag accordingly.
element_count += 1
if element_count == number_of_elements:
is_last_chunk = True
# allocate temporary sample arrays to contain the current element
analog_samples = np.empty([ana_channels, element_length_bins], dtype='float32')
digital_samples = np.empty([dig_channels, element_length_bins], dtype=bool)
# actually fill the allocated sample arrays with values.
for i, state in enumerate(digital_high):
digital_samples[i] = np.full(element_length_bins, state, dtype=bool)
for i, func_name in enumerate(pulse_function):
analog_samples[i] = np.float32(self._math_func[func_name](time_arr, parameters[i])/self.amplitude_dict[ana_chnl_names[i]])
# write temporary sample array to file
self._write_to_file[self.waveform_format](filename, analog_samples,
digital_samples,
number_of_samples, is_first_chunk,
is_last_chunk)
# set flag to FALSE after first write
is_first_chunk = False
else:
# if the ensemble should be sampled as a whole (chunkwise = False) fill the
# entries in the huge sample arrays
for i, state in enumerate(digital_high):
digital_samples[i, entry_ind:entry_ind+element_length_bins] = np.full(element_length_bins, state, dtype=bool)
for i, func_name in enumerate(pulse_function):
analog_samples[i, entry_ind:entry_ind+element_length_bins] = np.float32(self._math_func[func_name](time_arr, parameters[i])/self.amplitude_dict[ana_chnl_names[i]])
# increment the index offset of the overall sample array for the next
# element
entry_ind += element_length_bins
# if the rotating frame should be preserved (default) increment the offset
# counter for the time array.
if ensemble.rotating_frame:
offset_bin += element_length_bins
if not write_to_file:
# return a status message with the time needed for sampling the entire ensemble as a
# whole without writing to file.
self.log.info('Time needed for sampling and writing PulseBlockEnsemble to file as a '
'whole: {0} sec.'.format(int(np.rint(time.time() - start_time))))
# return the sample arrays since write_to_file was set to False
if not sequence_sampling_in_progress:
self.unlock()
self.sigSampleEnsembleComplete.emit(filename, analog_samples, digital_samples)
return analog_samples, digital_samples, offset_bin
elif chunkwise:
# return a status message with the time needed for sampling and writing the ensemble
# chunkwise.
self.log.info('Time needed for sampling and writing to file chunkwise: {0} sec'
''.format(int(np.rint(time.time()-start_time))))
if not sequence_sampling_in_progress:
self.unlock()
self.sigSampleEnsembleComplete.emit(filename, np.array([]), np.array([]))
return np.array([]), np.array([]), offset_bin
else:
# If the sampling should not be chunkwise and write to file is enabled call the
# write_to_file method only once with both flags set to TRUE
is_first_chunk = True
is_last_chunk = True
self._write_to_file[self.waveform_format](filename, analog_samples, digital_samples,
number_of_samples, is_first_chunk,
is_last_chunk)
# return a status message with the time needed for sampling and writing the ensemble as
# a whole.
self.log.info('Time needed for sampling and writing PulseBlockEnsemble to file as a '
'whole: {0} sec'.format(int(np.rint(time.time()-start_time))))
if not sequence_sampling_in_progress:
self.unlock()
self.sigSampleEnsembleComplete.emit(filename, np.array([]), np.array([]))
return np.array([]), np.array([]), offset_bin
def sample_pulse_sequence(self, sequence_name, write_to_file=True):
""" Samples the PulseSequence object, which serves as the construction plan.
@param str sequence_name: Name, which should correlate with the name of one of the
displayed sequences.
@param bool write_to_file: Write either to RAM or to File (depends on the available space
in RAM). If set to FALSE, this method will return the samples
(digital and analog) as numpy arrays
The sequence object is sampled by subsequently calling the sampling routine for the
PulseBlockEnsemble objects and passing, if needed, the rotating frame option.
Only those PulseBlockEnsemble objects are sampled that are different! These can be
directly obtained from the internal attribute different_ensembles_dict of a PulseSequence.
Right now two 'simple' methods of sampling are implemented, which reuse the sample
function for the Pulse_Block_Ensembles. One samples while preserving the phase (i.e.
staying in the rotating frame) and the other samples without keeping a phase
relationship between the different entries of the PulseSequence object.
More sophisticated sequence sampling methods can be implemented here.
"""
# lock module
if self.getState() == 'idle':
self.lock()
else:
self.log.error('Cannot sample sequence "{0}" because the sequence generator logic is '
'still busy (locked).\nFunction call ignored.'.format(sequence_name))
return
if write_to_file:
# get sampled filenames on host PC referring to the same ensemble
filename_list = [f for f in os.listdir(self.sequence_dir) if
f.startswith(sequence_name + '.seq')]
# delete all filenames in the list
for file in filename_list:
os.remove(os.path.join(self.sequence_dir, file))
if len(filename_list) != 0:
self.log.warning('Found old sequence for name "{0}". Files deleted before '
'sampling: {1}'.format(sequence_name, filename_list))
start_time = time.time()
ana_chnl_names = [chnl for chnl in self.activation_config if 'a_ch' in chnl]
ana_chnl_num = [int(chnl.split('ch')[-1]) for chnl in ana_chnl_names]
# get ensemble
sequence_obj = self.saved_pulse_sequences[sequence_name]
sequence_param_dict_list = []
# if all the Pulse_Block_Ensembles should be in the rotating frame, then each ensemble
# will be created in general with a different offset_bin. Therefore, in order to keep track
# of the sampled Pulse_Block_Ensembles one has to introduce a running number as an
# additional name tag to keep the sampled files separate.
if sequence_obj.rotating_frame:
ensemble_index = 0 # that will indicate the ensemble index
offset_bin = 0 # that will be used for phase preserving
for ensemble_obj, seq_param in sequence_obj.ensemble_param_list:
# to make something like 001
name_tag = sequence_name + '_' + str(ensemble_index).zfill(3)
dummy1, \
dummy2, \
offset_bin_return = self.sample_pulse_block_ensemble(ensemble_obj.name,
write_to_file=write_to_file,
offset_bin=offset_bin,
name_tag=name_tag)
# the temp_dict is a format how the sequence parameter will be saved
temp_dict = dict()
name_list = []
for ch_num in ana_chnl_num:
name_list.append(name_tag + '_ch' + str(ch_num) + '.' + self.waveform_format)
temp_dict['name'] = name_list
# update the sequence parameter to the temp dict:
temp_dict.update(seq_param)
# add the whole dict to the list of dicts, containing information about how to
# write the sequence properly in the hardware file:
sequence_param_dict_list.append(temp_dict)
# for the next run, the returned offset_bin will serve as starting point for
# phase preserving.
offset_bin = offset_bin_return
ensemble_index += 1
else:
# if phase preservation between the sequence entries is not needed, then only the
# different ensembles will be sampled, since the offset_bin does not matter for them:
for ensemble_name in sequence_obj.different_ensembles_dict:
self.sample_pulse_block_ensemble(ensemble_name, write_to_file=write_to_file,
offset_bin=0, name_tag=None)
# go now through the sequence list and replace all the entries with the output of the
# sampled ensemble file:
for ensemble_obj, seq_param in sequence_obj.ensemble_param_list:
temp_dict = dict()
name_list = []
for ch_num in ana_chnl_num:
name_list.append(ensemble_obj.name + '_ch' + str(ch_num) + '.' + self.waveform_format)
temp_dict['name'] = name_list
# update the sequence parameter to the temp dict:
temp_dict.update(seq_param)
sequence_param_dict_list.append(temp_dict)
if write_to_file:
# pass the whole information to the sequence creation method:
self._write_to_file[self.sequence_format](sequence_name, sequence_param_dict_list)
self.log.info('Time needed for sampling and writing Pulse Sequence to file: {0} sec.'
''.format(int(np.rint(time.time() - start_time))))
else:
self.log.info('Time needed for sampling Pulse Sequence: {0} sec.'
''.format(int(np.rint(time.time() - start_time))))
# unlock module
self.unlock()
self.sigSampleSequenceComplete.emit(sequence_name, sequence_param_dict_list)
return
#---------------------------------------------------------------------------
# END sequence/block sampling
#---------------------------------------------------------------------------
|
gpl-3.0
| -7,307,263,554,439,006,000
| 50.447394
| 191
| 0.595218
| false
| 4.409405
| true
| false
| false
|
rcgee/oq-hazardlib
|
openquake/hazardlib/site.py
|
1
|
18827
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2012-2016 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
"""
Module :mod:`openquake.hazardlib.site` defines :class:`Site`.
"""
import numpy
from openquake.baselib.python3compat import range
from openquake.baselib.slots import with_slots
from openquake.baselib.general import split_in_blocks
from openquake.hazardlib.geo.mesh import Mesh
from openquake.hazardlib.geo.utils import cross_idl
@with_slots
class Site(object):
"""
Site object represents a geographical location defined by its position
as well as its soil characteristics.
:param location:
Instance of :class:`~openquake.hazardlib.geo.point.Point` representing
where the site is located.
:param vs30:
Average shear wave velocity in the top 30 m, in m/s.
:param vs30measured:
Boolean value, ``True`` if ``vs30`` was measured on that location
and ``False`` if it was inferred.
:param z1pt0:
Vertical distance from earth surface to the layer where seismic waves
start to propagate with a speed above 1.0 km/sec, in meters.
:param z2pt5:
Vertical distance from earth surface to the layer where seismic waves
start to propagate with a speed above 2.5 km/sec, in km.
:param backarc:
Boolean value, ``True`` if the site is in the subduction backarc and
``False`` if it is in the subduction forearc or is unknown.
:raises ValueError:
If any of ``vs30``, ``z1pt0`` or ``z2pt5`` is zero or negative.
.. note::
:class:`Sites <Site>` are pickleable
"""
_slots_ = 'location vs30 vs30measured z1pt0 z2pt5 backarc'.split()
def __init__(self, location, vs30, vs30measured, z1pt0, z2pt5,
backarc=False):
if not vs30 > 0:
raise ValueError('vs30 must be positive')
if not z1pt0 > 0:
raise ValueError('z1pt0 must be positive')
if not z2pt5 > 0:
raise ValueError('z2pt5 must be positive')
self.location = location
self.vs30 = vs30
self.vs30measured = vs30measured
self.z1pt0 = z1pt0
self.z2pt5 = z2pt5
self.backarc = backarc
def __str__(self):
"""
>>> import openquake.hazardlib
>>> loc = openquake.hazardlib.geo.point.Point(1, 2, 3)
>>> str(Site(loc, 760.0, True, 100.0, 5.0))
'<Location=<Latitude=2.000000, Longitude=1.000000, Depth=3.0000>, \
Vs30=760.0000, Vs30Measured=True, Depth1.0km=100.0000, Depth2.5km=5.0000, \
Backarc=False>'
"""
return (
"<Location=%s, Vs30=%.4f, Vs30Measured=%r, Depth1.0km=%.4f, "
"Depth2.5km=%.4f, Backarc=%r>") % (
self.location, self.vs30, self.vs30measured, self.z1pt0,
self.z2pt5, self.backarc)
def __hash__(self):
return hash((self.location.x, self.location.y))
def __eq__(self, other):
return (self.location.x, self.location.y) == (
other.location.x, other.location.y)
def __repr__(self):
"""
>>> import openquake.hazardlib
>>> loc = openquake.hazardlib.geo.point.Point(1, 2, 3)
>>> site = Site(loc, 760.0, True, 100.0, 5.0)
>>> str(site) == repr(site)
True
"""
return self.__str__()
def _extract(array_or_float, indices):
try: # if array
return array_or_float[indices]
except TypeError: # if float
return array_or_float
@with_slots
class SiteCollection(object):
"""
A collection of :class:`sites <Site>`.
Instances of this class are intended to represent a large collection
of sites in a most efficient way in terms of memory usage.
.. note::
Because calculations assume that :class:`Sites <Site>` are on the
Earth's surface, all `depth` information in a :class:`SiteCollection`
is discarded. The collection `mesh` will only contain lon and lat. So
even if a :class:`SiteCollection` is created from sites containing
`depth` in their geometry, iterating over the collection will yield
:class:`Sites <Site>` with a reference depth of 0.0.
:param sites:
A list of instances of :class:`Site` class.
"""
dtype = numpy.dtype([
('sids', numpy.uint32),
('lons', numpy.float64),
('lats', numpy.float64),
('_vs30', numpy.float64),
('_vs30measured', numpy.bool),
('_z1pt0', numpy.float64),
('_z2pt5', numpy.float64),
('_backarc', numpy.bool),
])
_slots_ = dtype.names
@classmethod
def from_points(cls, lons, lats, sitemodel):
"""
Build the site collection from
:param lons:
a sequence of longitudes
:param lats:
a sequence of latitudes
:param sitemodel:
an object containing the attributes
reference_vs30_value,
reference_vs30_type,
reference_depth_to_1pt0km_per_sec,
reference_depth_to_2pt5km_per_sec,
reference_backarc
"""
assert len(lons) == len(lats), (len(lons), len(lats))
self = cls.__new__(cls)
self.complete = self
self.total_sites = len(lons)
self.sids = numpy.arange(len(lons), dtype=numpy.uint32)
self.lons = numpy.array(lons)
self.lats = numpy.array(lats)
self._vs30 = sitemodel.reference_vs30_value
self._vs30measured = sitemodel.reference_vs30_type == 'measured'
self._z1pt0 = sitemodel.reference_depth_to_1pt0km_per_sec
self._z2pt5 = sitemodel.reference_depth_to_2pt5km_per_sec
self._backarc = sitemodel.reference_backarc
return self
def __init__(self, sites):
self.complete = self
self.total_sites = n = len(sites)
self.sids = numpy.zeros(n, dtype=int)
self.lons = numpy.zeros(n, dtype=float)
self.lats = numpy.zeros(n, dtype=float)
self._vs30 = numpy.zeros(n, dtype=float)
self._vs30measured = numpy.zeros(n, dtype=bool)
self._z1pt0 = numpy.zeros(n, dtype=float)
self._z2pt5 = numpy.zeros(n, dtype=float)
self._backarc = numpy.zeros(n, dtype=bool)
for i in range(n):
self.sids[i] = i
self.lons[i] = sites[i].location.longitude
self.lats[i] = sites[i].location.latitude
self._vs30[i] = sites[i].vs30
self._vs30measured[i] = sites[i].vs30measured
self._z1pt0[i] = sites[i].z1pt0
self._z2pt5[i] = sites[i].z2pt5
self._backarc[i] = sites[i].backarc
# protect arrays from being accidentally changed. it is useful
# because we pass these arrays directly to a GMPE through
# a SiteContext object and if a GMPE is implemented poorly it could
# modify the site values, thereby corrupting site and all the
# subsequent calculation. note that this doesn't protect arrays from
# being changed by calling itemset()
for arr in (self._vs30, self._vs30measured, self._z1pt0, self._z2pt5,
self.lons, self.lats, self._backarc, self.sids):
arr.flags.writeable = False
def __toh5__(self):
array = numpy.zeros(self.total_sites, self.dtype)
for slot in self._slots_:
array[slot] = getattr(self, slot)
attrs = dict(total_sites=self.total_sites)
return array, attrs
def __fromh5__(self, array, attrs):
for slot in self._slots_:
setattr(self, slot, array[slot])
vars(self).update(attrs)
self.complete = self
@property
def mesh(self):
"""Return a mesh with the given lons and lats"""
return Mesh(self.lons, self.lats, depths=None)
@property
def indices(self):
"""The full set of indices from 0 to total_sites - 1"""
return numpy.arange(0, self.total_sites)
def split_in_tiles(self, hint):
"""
Split a SiteCollection into a set of tiles (SiteCollection instances).
:param hint: hint for how many tiles to generate
"""
tiles = []
for seq in split_in_blocks(range(len(self)), hint or 1):
indices = numpy.array(seq, int)
sc = SiteCollection.__new__(SiteCollection)
sc.complete = sc
sc.total_sites = len(indices)
sc.sids = self.sids[indices]
sc.lons = self.lons[indices]
sc.lats = self.lats[indices]
sc._vs30 = _extract(self._vs30, indices)
sc._vs30measured = _extract(self._vs30measured, indices)
sc._z1pt0 = _extract(self._z1pt0, indices)
sc._z2pt5 = _extract(self._z2pt5, indices)
sc._backarc = _extract(self._backarc, indices)
tiles.append(sc)
return tiles
def __iter__(self):
"""
Iterate through all :class:`sites <Site>` in the collection, yielding
one at a time.
"""
if isinstance(self._vs30, float):  # from points
for i, location in enumerate(self.mesh):
yield Site(location, self._vs30, self._vs30measured,
self._z1pt0, self._z2pt5, self._backarc)
else: # from sites
for i, location in enumerate(self.mesh):
yield Site(location, self.vs30[i], self.vs30measured[i],
self.z1pt0[i], self.z2pt5[i], self.backarc[i])
def filter(self, mask):
"""
Create a FilteredSiteCollection with only a subset of sites
from this one.
:param mask:
Numpy array of boolean values of the same length as this sites
collection. ``True`` values should indicate that site with that
index should be included into the filtered collection.
:returns:
A new :class:`FilteredSiteCollection` instance, unless all the
values in ``mask`` are ``True``, in which case this site collection
is returned, or if all the values in ``mask`` are ``False``,
in which case method returns ``None``. New collection has data
of only those sites that were marked for inclusion in mask.
See also :meth:`expand`.
"""
assert len(mask) == len(self), (len(mask), len(self))
if mask.all():
# all sites satisfy the filter, return
# this collection unchanged
return self
if not mask.any():
# no sites pass the filter, return None
return None
# extract indices of Trues from the mask
[indices] = mask.nonzero()
return FilteredSiteCollection(indices, self)
def expand(self, data, placeholder):
"""
For non-filtered site collections just checks that data
has the right number of elements and returns it. It is
here just for API compatibility with filtered site collections.
"""
assert len(data) == len(self), (len(data), len(self))
return data
def __len__(self):
"""
Return the number of sites in the collection.
"""
return self.total_sites
def __repr__(self):
return '<SiteCollection with %d sites>' % self.total_sites
# adding a number of properties for the site model data
for name in 'vs30 vs30measured z1pt0 z2pt5 backarc'.split():
def getarray(sc, name=name): # sc is a SiteCollection
value = getattr(sc, '_' + name)
if isinstance(value, (float, bool)):
arr = numpy.array([value] * len(sc), dtype=type(value))
arr.flags.writeable = False
return arr
else:
return value
setattr(SiteCollection, name, property(getarray, doc='%s array' % name))
@with_slots
class FilteredSiteCollection(object):
"""
A class meant to store proper subsets of a complete collection of sites
in a memory-efficient way.
:param indices:
an array of indices referring to the complete site collection
:param complete:
the complete site collection the filtered collection was
derived from
Notice that if you filter a FilteredSiteCollection `fsc`, you will
get a different FilteredSiteCollection referring to the complete
SiteCollection `fsc.complete`, not to the filtered collection `fsc`.
"""
_slots_ = 'indices complete'.split()
def __init__(self, indices, complete):
if complete is not complete.complete:
raise ValueError(
'You should pass a full site collection, not %s' % complete)
self.indices = indices
self.complete = complete
@property
def total_sites(self):
"""The total number of the original sites, without filtering"""
return self.complete.total_sites
@property
def mesh(self):
"""Return a mesh with the given lons and lats"""
return Mesh(self.lons, self.lats, depths=None)
def filter(self, mask):
"""
Create a FilteredSiteCollection with only a subset of sites
from this one.
:param mask:
Numpy array of boolean values of the same length as this
filtered sites collection. ``True`` values should indicate
that site with that index should be included into the
filtered collection.
:returns:
A new :class:`FilteredSiteCollection` instance, unless all the
values in ``mask`` are ``True``, in which case this site collection
is returned, or if all the values in ``mask`` are ``False``,
in which case method returns ``None``. New collection has data
of only those sites that were marked for inclusion in mask.
See also :meth:`expand`.
"""
assert len(mask) == len(self), (len(mask), len(self))
if mask.all():
return self
elif not mask.any():
return None
indices = self.indices.take(mask.nonzero()[0])
return FilteredSiteCollection(indices, self.complete)
def expand(self, data, placeholder):
"""
Expand a short array `data` over a filtered site collection of the
same length and return a long array of size `total_sites` filled
with the placeholder.
The typical workflow is the following: there is a whole site
collection, the one that has an information about all the sites.
Then it gets filtered for performing some calculation on a limited
set of sites (like for instance filtering sites by their proximity
to a rupture). That filtering process can be repeated an arbitrary
number of times, i.e. a collection that is already filtered can
be filtered for further limiting the set of sites to compute on.
Then the (supposedly expensive) computation is done on a limited
set of sites which still appears as just a :class:`SiteCollection`
instance, so that computation code doesn't need to worry about
filtering, it just needs to handle site collection objects. The
calculation result comes in a form of 1d or 2d numpy array (that
is, either one value per site or one 1d array per site) with length
equal to number of sites in a filtered collection. That result
needs to be expanded to an array of similar structure but the one
that holds values for all the sites in the original (unfiltered)
collection. This is what :meth:`expand` is for. It creates a result
array of ``total_sites`` length and puts values from ``data`` into
appropriate places in it remembering indices of sites that were
chosen for actual calculation and leaving ``placeholder`` value
everywhere else.
:param data:
1d or 2d numpy array with first dimension representing values
computed for site from this collection.
:param placeholder:
A scalar value to be put in result array for those sites that
were filtered out and no real calculation was performed for them.
:returns:
Array of length ``total_sites`` with values from ``data``
distributed in the appropriate places.
"""
len_data = data.shape[0]
assert len_data == len(self), (len_data, len(self))
assert len_data <= self.total_sites
assert self.indices[-1] < self.total_sites, (
self.indices[-1], self.total_sites)
if data.ndim == 1:
# single-dimensional array
result = numpy.empty(self.total_sites)
result.fill(placeholder)
result.put(self.indices, data)
return result
assert data.ndim == 2
# two-dimensional array
num_values = data.shape[1]
result = numpy.empty((self.total_sites, num_values))
result.fill(placeholder)
for i in range(num_values):
result[:, i].put(self.indices, data[:, i])
return result
def __iter__(self):
"""
Iterate through all :class:`sites <Site>` in the collection, yielding
one at a time.
"""
for i, location in enumerate(self.mesh):
yield Site(location, self.vs30[i], self.vs30measured[i],
self.z1pt0[i], self.z2pt5[i], self.backarc[i])
def __len__(self):
"""Return the number of filtered sites"""
return len(self.indices)
def __repr__(self):
return '<FilteredSiteCollection with %d of %d sites>' % (
len(self.indices), self.total_sites)
def _extract_site_param(fsc, name):
# extract the site parameter 'name' from the filtered site collection
return getattr(fsc.complete, name).take(fsc.indices)
# attach a number of properties filtering the arrays
for name in 'vs30 vs30measured z1pt0 z2pt5 backarc lons lats sids'.split():
prop = property(
lambda fsc, name=name: _extract_site_param(fsc, name),
doc='Extract %s array from FilteredSiteCollection' % name)
setattr(FilteredSiteCollection, name, prop)
|
agpl-3.0
| 877,749,385,576,351,600
| 37.738683
| 79
| 0.616296
| false
| 3.922292
| false
| false
| false
|
gsnbng/erpnext
|
erpnext/loan_management/doctype/loan_disbursement/test_loan_disbursement.py
|
1
|
3110
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
from frappe.utils import (nowdate, add_days, get_datetime, get_first_day, get_last_day, date_diff, flt, add_to_date)
from erpnext.loan_management.doctype.loan.test_loan import (create_loan_type, create_loan_security_pledge, create_repayment_entry,
make_loan_disbursement_entry, create_loan_accounts, create_loan_security_type, create_loan_security, create_demand_loan, create_loan_security_price)
from erpnext.loan_management.doctype.process_loan_interest_accrual.process_loan_interest_accrual import process_loan_interest_accrual_for_demand_loans
from erpnext.loan_management.doctype.loan_interest_accrual.loan_interest_accrual import days_in_year
from erpnext.selling.doctype.customer.test_customer import get_customer_dict
class TestLoanDisbursement(unittest.TestCase):
def setUp(self):
create_loan_accounts()
create_loan_type("Demand Loan", 2000000, 13.5, 25, 0, 5, 'Cash', 'Payment Account - _TC', 'Loan Account - _TC',
'Interest Income Account - _TC', 'Penalty Income Account - _TC')
create_loan_security_type()
create_loan_security()
create_loan_security_price("Test Security 1", 500, "Nos", get_datetime(), get_datetime(add_to_date(nowdate(), hours=24)))
create_loan_security_price("Test Security 2", 250, "Nos", get_datetime(), get_datetime(add_to_date(nowdate(), hours=24)))
if not frappe.db.exists("Customer", "_Test Loan Customer"):
frappe.get_doc(get_customer_dict('_Test Loan Customer')).insert(ignore_permissions=True)
self.applicant = frappe.db.get_value("Customer", {'name': '_Test Loan Customer'}, 'name')
def test_loan_topup(self):
pledges = []
pledges.append({
"loan_security": "Test Security 1",
"qty": 4000.00,
"haircut": 50,
"loan_security_price": 500.00
})
loan_security_pledge = create_loan_security_pledge(self.applicant, pledges)
loan = create_demand_loan(self.applicant, "Demand Loan", loan_security_pledge.name,
posting_date=get_first_day(nowdate()))
loan.submit()
first_date = get_first_day(nowdate())
last_date = get_last_day(nowdate())
no_of_days = date_diff(last_date, first_date) + 1
accrued_interest_amount = (loan.loan_amount * loan.rate_of_interest * no_of_days) \
/ (days_in_year(get_datetime().year) * 100)
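# Illustrative numbers (assuming a 1,000,000 loan at 13.5% over a 30-day
# month in a 365-day year): 1000000 * 13.5 * 30 / (365 * 100) ~= 11095.89,
# presumably the interest component of the 611095.89 repayment below.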
make_loan_disbursement_entry(loan.name, loan.loan_amount, disbursement_date=first_date)
process_loan_interest_accrual_for_demand_loans(posting_date=add_days(last_date, 1))
# Should not be able to create loan disbursement entry before repayment
self.assertRaises(frappe.ValidationError, make_loan_disbursement_entry, loan.name,
500000, first_date)
repayment_entry = create_repayment_entry(loan.name, self.applicant, add_days(get_last_day(nowdate()), 5),
"Regular Payment", 611095.89)
repayment_entry.submit()
loan.reload()
# After repayment loan disbursement entry should go through
make_loan_disbursement_entry(loan.name, 500000, disbursement_date=add_days(last_date, 16))
|
agpl-3.0
| 3,905,159,435,813,598,700
| 41.60274
| 150
| 0.736334
| false
| 2.871653
| true
| false
| false
|
jmhal/CCAPython
|
framework/manage/services.py
|
1
|
9027
|
from CCAPython.gov.cca import Services
from CCAPython.gov.cca.ports import ConnectionEventService
from CCAPython.gov.cca.ports import EventType
from CCAPython.framework.info.connectioninfo import ConnectionEvent
from CCAPython.framework.common.typemap import TypeMapDict
from CCAPython.framework.common.exceptions import PortNotFoundException
class ServicesHandle(Services, ConnectionEventService):
def __init__(self):
# Maps strings portName to a list (CCAPython.gov.cca.Ports, CCAPython.gov.cca.TypeMap).
# (portName) -> [Port, TypeMap]
self.d_usesPort = {}
self.d_providesPorts = {}
# Maps string ports names to string ports types
# (portName) -> (portType)
self.d_portType = {}
# Maps a CCAPython.gov.cca.ports.EventType value to a list of CCAPython.gov.cca.ports.EventListener
# (EventType) -> (ConnectionEventListener [])
self.d_listeners = {}
# A CCAPython.gov.cca.Type containing the properties of the component instance
self.d_instanceProperties = TypeMapDict()
# New methods
def initialize(self, fwk, componentID, properties, is_alias):
"""
input: a CCAPython.gov.cca.AbstractFramework fwk, a CCAPython.gov.cca.ComponentID componentID and a CCAPython.gov.cca.TypeMap properties
ouput: void
"""
self.framework = fwk
self.componentID = componentID
self.properties = properties
self.d_is_alias = is_alias
def getInstanceProperties(self):
"""
input: none
output: a CCAPython.gov.cca.TypeMap object
"""
return self.d_instanceProperties
def setInstanceProperties(self, properties):
"""
input: a CCAPython.gov.cca.TypeMap properties
output: none
"""
self.d_instanceProperties = properties
return
def setPortProperties(self, portName, properties):
"""
input: a string portName, a CCAPython.gov.cca.TypeMap properties
output: none
"""
if portName in self.d_providesPorts:
self.d_providesPorts[portName][1] = properties
elif portName in self.d_usesPort:
self.d_usesPort[portName][1] = properties
else:
raise PortNotFoundException(portName)
def getProvidedPortNames(self):
"""
input: none
output: a list of strings
"""
return self.d_providesPorts.keys()
def getUsedPortNames(self):
"""
input: none
output: a list of strings
"""
return self.d_usesPort.keys()
def bindPort(self, portName, port):
"""
input: a string portName, a CCAPython.gov.cca.Port object
output: void
"""
if portName not in self.d_usesPort.keys():
raise PortNotFoundException(portName)
self.d_usesPort[portName] = [port, TypeMapDict()]
return
def getProvidesPort(self, name):
"""
input: string name
output: void
"""
if name not in self.d_providesPorts.keys():
raise PortNotFoundException(name)
return self.d_providesPorts[name][0]
def notifyConnectionEvent(self, portName, event):
"""
This method will notify the component from the calling Services of an event
input: string portName, a CCAPython.gov.cca.ports.EventType value event
output: void
"""
listenerList = []
for ev in self.d_listeners:
if ev == event:
listenerList += self.d_listeners[event]
tm = TypeMapDict()
tm.putString("cca.PortName", portName)
tm.putString("cca.PortType", self.d_portType[portName])
ce = ConnectionEvent(event, tm)
for listener in listenerList:
listener.connectionActivity(ce)
return
# Methods from CCAPython.gov.cca.Services
def getComponentID(self):
"""
input: none
output: a ComponentID object
"""
return self.componentID
def createTypeMap(self):
"""
input: none
output: a TypeMap object
throws CCAException
"""
return TypeMapDict()
def registerUsesPort(self, portName, _type, properties):
"""
input: string portName, string type, and TypeMap properties
output: void
throws CCAException
"""
if portName in self.d_providesPorts or portName in self.d_usesPort:
print portName + " is not unique. Not doing anything."
return
else:
self.d_usesPort[portName] = [None, properties]
self.d_portType[portName] = _type
if self.framework is not None:
if self.framework.isProvidedService(_type):
self.framework.provideRequestedServices(self.componentID, portName, _type)
def unregisterUsesPort(self, portName):
"""
input: string portName
output: void
throws CCAException
"""
self.d_usesPort.pop(portName, None)
self.d_portType.pop(portName, None)
return
def addProvidesPort(self, inPort, portName, _type, properties):
"""
input: Port inPort, string portName, string type, and TypeMap properties
output: void
throws CCAException
"""
if portName in self.d_providesPorts or portName in self.d_usesPort:
print portName + " is not unique. Not doing anything."
return
if not self.d_is_alias and not inPort.isType(_type):
print "Port instance is not an instance of specified type"
return
self.d_providesPorts[portName] = [inPort, properties]
self.d_portType[portName] = _type
return
def removeProvidesPort(self, portName):
"""
input: string portName
output: void
throws CCAException
"""
self.d_providesPorts.pop(portName, None)
self.d_portType.pop(portName, None)
return
def getPortProperties(self, portName):
"""
input: string portName
output: a TypeMap object
"""
if portName in self.d_usesPort:
return self.d_usesPort[portName][1]
elif portName in self.d_providesPorts:
return self.d_providesPorts[portName][1]
else:
return None
def getPort(self, portName):
"""
input: string portName
output: a Port object
throws CCAException
"""
if portName in self.d_usesPort:
return self.d_usesPort[portName][0]
def getPortNonblocking(self, portName):
"""
input: string portName
output: a Port object
throws CCAException
"""
return self.getPort(portName)
def releasePort(self, portName):
"""
input: string portName
output: void
throws CCAException
"""
if portName in self.d_usesPort:
self.d_usesPort[portName] = None
def registerForRelease(self, callback):
"""
input: a CCAPython.gov.cca.ComponentRelease object callback
output: void
"""
self.framework.setInstanceRelease(self.componentID, callback)
# Methods from CCAPython.gov.cca.ports.ServiceRegistry
def addService(self, serviceType, portProvider):
"""
input: a string serviceType, a CCAPython.gov.cca.ports.ServiceProvider object portProvider
output: a boolean
throws CCAException
"""
self.framework.addServiceProvider(serviceType, self.componentID, portProvider)
return True
def addSingletonService(self, serviceType, server):
"""
input: a string serviceType, a CCAPython.gov.cca.Port object server
output: a boolean
throws CCAException
"""
self.framework.addServicePort(serviceType, server)
return True
def removeService(self, serviceType):
"""
input: a string serviceType
output: none
throws CCAException
"""
self.framework.removeFromRegistry(serviceType)
return None
# Methods from CCAPython.gov.cca.ports.ConnectionEventService
def addConnectionEventListener(self, et, cel):
"""
input: a CCAPython.gov.cca.ports.EventType et, a CCAPython.gov.cca.ports.ConnectionEventListener cel
output: void
"""
if et == EventType.Error:
return
if et == EventType.ALL:
self.addConnectionEventListener(EventType.ConnectPending, cel)
self.addConnectionEventListener(EventType.Connected, cel)
self.addConnectionEventListener(EventType.DisconnectPending, cel)
self.addConnectionEventListener(EventType.Disconnected, cel)
elif cel not in self.d_listeners.setdefault(et, []):
self.d_listeners[et].append(cel)
return
def removeConnectionEventListener(self, et, cel):
"""
input: a CCAPython.gov.cca.ports.EventType et, a CCAPython.gov.cca.ports.ConnectionEventListener cel
output: void
"""
if et == EventType.Error:
return
if et == EventType.ALL:
for event in self.d_listeners:
self.removeConnectionEventListener(event, cel)
return
else:
if cel in self.d_listeners.get(et, []):
self.d_listeners[et].remove(cel)
return
|
apache-2.0
| 4,384,464,468,484,002,300
| 30.127586
| 142
| 0.643625
| false
| 4.006658
| false
| false
| false
|
JensTimmerman/radical.pilot
|
src/radical/pilot/utils/analysis.py
|
1
|
12671
|
import os
# ------------------------------------------------------------------------------
#
def get_experiment_frames(experiments, datadir=None):
"""
read profiles for all sessions in the given 'experiments' dict. That dict
is expected to be like this:
{ 'test 1' : [ [ 'rp.session.thinkie.merzky.016609.0007', 'stampede popen sleep 1/1/1/1 (?)'] ],
'test 2' : [ [ 'rp.session.ip-10-184-31-85.merzky.016610.0112', 'stampede shell sleep 16/8/8/4' ] ],
'test 3' : [ [ 'rp.session.ip-10-184-31-85.merzky.016611.0013', 'stampede shell mdrun 16/8/8/4' ] ],
'test 4' : [ [ 'rp.session.titan-ext4.marksant1.016607.0005', 'titan shell sleep 1/1/1/1 a' ] ],
'test 5' : [ [ 'rp.session.titan-ext4.marksant1.016607.0006', 'titan shell sleep 1/1/1/1 b' ] ],
'test 6' : [ [ 'rp.session.ip-10-184-31-85.merzky.016611.0013', 'stampede - isolated', ],
[ 'rp.session.ip-10-184-31-85.merzky.016612.0012', 'stampede - integrated', ],
[ 'rp.session.titan-ext4.marksant1.016607.0006', 'blue waters - integrated' ] ]
}
ie. it maps experiment names to lists of session/label pairs, where the
label will be later used to label (duh) plots.
we return a similar dict where the session IDs are data frames
"""
import pandas as pd
exp_frames = dict()
if not datadir:
datadir = os.getcwd()
print 'reading profiles in %s' % datadir
for exp in experiments:
print " - %s" % exp
exp_frames[exp] = list()
for sid, label in experiments[exp]:
print " - %s" % sid
import glob
for prof in glob.glob ("%s/%s-pilot.*.prof" % (datadir, sid)):
print " - %s" % prof
frame = get_profile_frame (prof)
exp_frames[exp].append ([frame, label])
return exp_frames
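# Hedged usage sketch (hypothetical session ID and directory):
#   frames = get_experiment_frames(
#       {'test 1': [['rp.session.host.user.016609.0007', 'my label']]},
#       datadir='/tmp/profiles')
#   frames['test 1']  # -> list of [DataFrame, 'my label'] pairs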
# ------------------------------------------------------------------------------
#
def get_profile_frame (prof):
import pandas as pd
return pd.read_csv(prof)
# ------------------------------------------------------------------------------
#
tmp = None
def add_concurrency (frame, tgt, spec):
"""
add a column 'tgt' which is a cumulative sum of conditionals of another column.
The purpose is the following: if a unit enters a component, the tgt row counter is
increased by 1; if the unit leaves the component, the counter is decreased by 1.
For any time, the resulting row contains the number of units which are in the
component. Or state. Or whatever.
The arguments are:
'tgt' : name of the new column
'spec' : a set of filters to determine if a unit enters or leaves
'spec' is expected to be a dict of the following format:
spec = { 'in' : [{'col1' : 'pat1',
'col2' : 'pat2'},
...],
'out' : [{'col3' : 'pat3',
'col4' : 'pat4'},
...]
}
where:
'in' : filter set to determine the unit entering
'out' : filter set to determine the unit leaving
'col' : name of column for which filter is defined
'event' : event which correlates to entering/leaving
'msg' : qualifier on the event, if event is not unique
Example:
spec = {'in' : [{'state' :'Executing'}],
'out' : [{'state' :'Done'},
{'state' :'Failed'},
{'state' :'Cancelled'}]
}
add_concurrency (df, 'concurrently_running', spec)
"""
import numpy
# create a temporary row over which we can do the cumulative sum
# --------------------------------------------------------------------------
def _conc (row, spec):
# row must match any filter dict in 'spec[in/out]'
# for any filter dict it must match all col/pat pairs
# for each in filter
for f in spec['in']:
match = 1
# for each col/val in that filter
for col, pat in f.iteritems():
if row[col] != pat:
match = 0
break
if match:
# one filter matched!
# print " + : %-20s : %.2f : %-20s : %s " % (row['uid'], row['time'], row['event'], row['message'])
return 1
# for each out filter
for f in spec['out']:
match = 1
# for each col/val in that filter
for col, pat in f.iteritems():
if row[col] != pat:
match = 0
break
if match:
# one filter matched!
# print " - : %-20s : %.2f : %-20s : %s " % (row['uid'], row['time'], row['event'], row['message'])
return -1
# no filter matched
# print " : %-20s : %.2f : %-20s : %s " % (row['uid'], row['time'], row['event'], row['message'])
return 0
# --------------------------------------------------------------------------
# we only want to later look at changes of the concurrency -- leading or trailing
# idle times are to be ignored. We thus set repeating values of the cumsum to NaN,
# so that they can be filtered out when plotting: df.dropna().plot(...).
# That specifically will limit the plotted time range to the area of activity.
# The full time range can still be plotted when omitting the dropna() call.
# --------------------------------------------------------------------------
def _time (x):
global tmp
if x != tmp: tmp = x
else : x = numpy.NaN
return x
# --------------------------------------------------------------------------
# sanitize concurrency: negative values indicate incorrect event ordering,
# so we set the respective values to NaN
# --------------------------------------------------------------------------
def _abs (x):
if x < 0:
return numpy.NaN
return x
# --------------------------------------------------------------------------
frame[tgt] = frame.apply(lambda row: _conc(row, spec), axis=1).cumsum()
frame[tgt] = frame.apply(lambda row: _abs (row[tgt]), axis=1)
frame[tgt] = frame.apply(lambda row: _time(row[tgt]), axis=1)
# print frame[[tgt, 'time']]
# ------------------------------------------------------------------------------
#
t0 = None
def calibrate_frame(frame, spec):
"""
move the time axis of a profiling frame so that t_0 is at the first event
matching the given 'spec'. 'spec' has the same format as described in
'add_concurrency' (list of dicts with col:pat filters)
"""
# --------------------------------------------------------------------------
def _find_t0 (row, spec):
# row must match any filter dict in 'spec[in/out]'
# for any filter dict it must match all col/pat pairs
global t0
if t0 is not None:
# already found t0
return
# for each col/val in that filter
for f in spec:
match = 1
for col, pat in f.iteritems():
if row[col] != pat:
match = 0
break
if match:
# one filter matched!
t0 = row['time']
return
# --------------------------------------------------------------------------
# --------------------------------------------------------------------------
def _calibrate (row, t0):
if t0 is None:
# no t0...
return
return row['time'] - t0
# --------------------------------------------------------------------------
# we need to iterate twice over the frame: first to find t0, then to
# calibrate the time axis
global t0
t0 = None # no t0
frame.apply(lambda row: _find_t0 (row, spec), axis=1)
if t0 is None:
print "Can't recalibrate, no matching timestamp found"
return
frame['time'] = frame.apply(lambda row: _calibrate(row, t0 ), axis=1)
# ------------------------------------------------------------------------------
#
def create_plot():
"""
create a plot object and tune its layout to our liking.
"""
import matplotlib.pyplot as plt
fig, plot = plt.subplots(figsize=(12,6))
plot.xaxis.set_tick_params(width=1, length=7)
plot.yaxis.set_tick_params(width=1, length=7)
plot.spines['right' ].set_position(('outward', 10))
plot.spines['top' ].set_position(('outward', 10))
plot.spines['bottom'].set_position(('outward', 10))
plot.spines['left' ].set_position(('outward', 10))
plt.xticks(fontsize=14)
plt.yticks(fontsize=14)
fig.tight_layout()
return fig, plot
# ------------------------------------------------------------------------------
#
def frame_plot (frames, axis, title=None, logx=False, logy=False,
legend=True, figdir=None):
"""
plot the given axis from the give data frame. We create a plot, and plot
all frames given in the list. The list is expected to contain [frame,label]
pairs
frames: list of tuples of dataframes and labels
frames = [[stampede_df_1, 'stampede - popen'],
[stampede_df_2, 'stampede - shell'],
[stampede_df_3, 'stampede - ORTE' ]]
axis: tuple of data frame column index and axis label
axis = ['time', 'time (s)']
"""
# create figure and layout
fig, plot = create_plot()
# set plot title
if title:
plot.set_title(title, y=1.05, fontsize=18)
# plot the data frames
# NOTE: we need to set labels separately, because of
# https://github.com/pydata/pandas/issues/9542
labels = list()
for frame, label in frames:
try:
frame.dropna().plot(ax=plot, logx=logx, logy=logy,
x=axis[0][0], y=axis[1][0],
drawstyle='steps',
label=label, legend=False)
labels.append(label)
except Exception as e:
print "skipping frame '%s': '%s'" % (label, e)
if legend:
plot.legend(labels=labels, loc='upper right', fontsize=14, frameon=True)
# set axis labels
plot.set_xlabel(axis[0][1], fontsize=14)
plot.set_ylabel(axis[1][1], fontsize=14)
plot.set_frame_on(True)
# save as png and pdf. Use the title as base for names
if title: base = title
else : base = "%s_%s" % (axis[0][1], axis[1][1])
# clean up base name -- only keep alphanum and such
import re
base = re.sub(r'[^a-zA-Z0-9.\-]', '_', base)
base = re.sub(r'_+', '_', base)
if not figdir:
figdir = os.getcwd()
print 'saving %s/%s.png' % (figdir, base)
fig.savefig('%s/%s.png' % (figdir, base), bbox_inches='tight')
print 'saving %s/%s.pdf' % (figdir, base)
fig.savefig('%s/%s.pdf' % (figdir, base), bbox_inches='tight')
return fig, plot
# ------------------------------------------------------------------------------
#
def create_analytical_frame (idx, kind, args, limits, step):
"""
create an artificial data frame, ie. a data frame which does not contain
data gathered from an experiment, but data representing an analytical
construct of some 'kind'.
idx: data frame column index to fill (a time column is always created)
kind: construct to use (only 'rate' is supported right now)
args: construct specific parameters
limits: time range for which data are to be created
step: time steps for which data are to be created
"""
import pandas as pd
# --------------------------------------------------------------------------
def _frange(start, stop, step):
while start <= stop:
yield start
start += step
# --------------------------------------------------------------------------
if kind == 'rate' :
t_0 = args.get ('t_0', 0.0)
rate = args.get ('rate', 1.0)
data = list()
for t in _frange(limits[0], limits[1], step):
data.append ({'time': t+t_0, idx: t*rate})
return pd.DataFrame (data)
else:
raise ValueError ("No such frame kind '%s'" % kind)
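# Hedged usage sketch (hypothetical values): a constant-rate reference line
#   ref = create_analytical_frame('unit_count', 'rate',
#                                 {'t_0': 0.0, 'rate': 2.0}, [0.0, 10.0], 1.0)
# yields a frame whose 'time' runs from 0 to 10 and whose 'unit_count'
# column equals 2 * t at each step.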
# ------------------------------------------------------------------------------
|
mit
| -4,651,960,791,394,131,000
| 34.793785
| 113
| 0.475495
| false
| 4.072967
| true
| false
| false
|
ludojmj/treelud
|
server/paramiko/dsskey.py
|
1
|
6975
|
# Copyright (C) 2003-2007 Robey Pointer <robeypointer@gmail.com>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
DSS keys.
"""
import os
from hashlib import sha1
from Crypto.PublicKey import DSA
from paramiko import util
from paramiko.common import zero_byte
from paramiko.py3compat import long
from paramiko.ssh_exception import SSHException
from paramiko.message import Message
from paramiko.ber import BER, BERException
from paramiko.pkey import PKey
class DSSKey (PKey):
"""
Representation of a DSS key which can be used to sign and verify SSH2
data.
"""
def __init__(self, msg=None, data=None, filename=None, password=None, vals=None, file_obj=None):
self.p = None
self.q = None
self.g = None
self.y = None
self.x = None
if file_obj is not None:
self._from_private_key(file_obj, password)
return
if filename is not None:
self._from_private_key_file(filename, password)
return
if (msg is None) and (data is not None):
msg = Message(data)
if vals is not None:
self.p, self.q, self.g, self.y = vals
else:
if msg is None:
raise SSHException('Key object may not be empty')
if msg.get_text() != 'ssh-dss':
raise SSHException('Invalid key')
self.p = msg.get_mpint()
self.q = msg.get_mpint()
self.g = msg.get_mpint()
self.y = msg.get_mpint()
self.size = util.bit_length(self.p)
def asbytes(self):
m = Message()
m.add_string('ssh-dss')
m.add_mpint(self.p)
m.add_mpint(self.q)
m.add_mpint(self.g)
m.add_mpint(self.y)
return m.asbytes()
def __str__(self):
return self.asbytes()
def __hash__(self):
h = hash(self.get_name())
h = h * 37 + hash(self.p)
h = h * 37 + hash(self.q)
h = h * 37 + hash(self.g)
h = h * 37 + hash(self.y)
# h might be a long by now...
return hash(h)
def get_name(self):
return 'ssh-dss'
def get_bits(self):
return self.size
def can_sign(self):
return self.x is not None
def sign_ssh_data(self, data):
digest = sha1(data).digest()
dss = DSA.construct((long(self.y), long(self.g), long(self.p), long(self.q), long(self.x)))
# generate a suitable k
qsize = len(util.deflate_long(self.q, 0))
while True:
k = util.inflate_long(os.urandom(qsize), 1)
if (k > 2) and (k < self.q):
break
r, s = dss.sign(util.inflate_long(digest, 1), k)
m = Message()
m.add_string('ssh-dss')
# apparently, in rare cases, r or s may be shorter than 20 bytes!
rstr = util.deflate_long(r, 0)
sstr = util.deflate_long(s, 0)
if len(rstr) < 20:
rstr = zero_byte * (20 - len(rstr)) + rstr
if len(sstr) < 20:
sstr = zero_byte * (20 - len(sstr)) + sstr
m.add_string(rstr + sstr)
return m
def verify_ssh_sig(self, data, msg):
if len(msg.asbytes()) == 40:
# spies.com bug: signature has no header
sig = msg.asbytes()
else:
kind = msg.get_text()
if kind != 'ssh-dss':
return 0
sig = msg.get_binary()
# pull out (r, s) which are NOT encoded as mpints
sigR = util.inflate_long(sig[:20], 1)
sigS = util.inflate_long(sig[20:], 1)
sigM = util.inflate_long(sha1(data).digest(), 1)
dss = DSA.construct((long(self.y), long(self.g), long(self.p), long(self.q)))
return dss.verify(sigM, (sigR, sigS))
def _encode_key(self):
if self.x is None:
raise SSHException('Not enough key information')
keylist = [0, self.p, self.q, self.g, self.y, self.x]
try:
b = BER()
b.encode(keylist)
except BERException:
raise SSHException('Unable to create ber encoding of key')
return b.asbytes()
def write_private_key_file(self, filename, password=None):
self._write_private_key_file('DSA', filename, self._encode_key(), password)
def write_private_key(self, file_obj, password=None):
self._write_private_key('DSA', file_obj, self._encode_key(), password)
def generate(bits=1024, progress_func=None):
"""
Generate a new private DSS key. This factory function can be used to
generate a new host key or authentication key.
:param int bits: number of bits the generated key should be.
:param function progress_func:
an optional function to call at key points in key generation (used
by ``pyCrypto.PublicKey``).
:return: new `.DSSKey` private key
"""
dsa = DSA.generate(bits, os.urandom, progress_func)
key = DSSKey(vals=(dsa.p, dsa.q, dsa.g, dsa.y))
key.x = dsa.x
return key
generate = staticmethod(generate)
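# Hedged usage sketch (hypothetical path; get_base64() is inherited from PKey):
#   key = DSSKey.generate(bits=1024)
#   key.write_private_key_file('/tmp/test_dss.key', password=None)
#   pub_line = 'ssh-dss ' + key.get_base64()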
### internals...
def _from_private_key_file(self, filename, password):
data = self._read_private_key_file('DSA', filename, password)
self._decode_key(data)
def _from_private_key(self, file_obj, password):
data = self._read_private_key('DSA', file_obj, password)
self._decode_key(data)
def _decode_key(self, data):
# private key file contains:
# DSAPrivateKey = { version = 0, p, q, g, y, x }
try:
keylist = BER(data).decode()
except BERException as e:
raise SSHException('Unable to parse key file: ' + str(e))
if (type(keylist) is not list) or (len(keylist) < 6) or (keylist[0] != 0):
raise SSHException('not a valid DSA private key file (bad ber encoding)')
self.p = keylist[1]
self.q = keylist[2]
self.g = keylist[3]
self.y = keylist[4]
self.x = keylist[5]
self.size = util.bit_length(self.p)
|
mit
| -2,605,472,682,343,415,000
| 33.227273
| 100
| 0.567455
| false
| 3.540609
| false
| false
| false
|
snipsco/snipsskills
|
snipsmanager/utils/microphone_setup.py
|
1
|
1997
|
# -*- coding: utf-8 -*-
""" Microphone setup for Snips assistants. """
import os
import shutil
from .os_helpers import cmd_exists, is_raspi_os, execute_command, pipe_commands
from .. import ASOUNDCONF_DEST_PATH
# pylint: disable=too-few-public-methods
class MicrophoneSetup:
""" Downloader for Snips assistants. """
ASOUNDCONF_PATH = "../config/asound.conf"
@staticmethod
def setup_asoundconf(microphone_id):
if not is_raspi_os():
return
if microphone_id == 'respeaker':
MicrophoneSetup._copy_asoundconf("asound.conf.respeaker")
elif microphone_id == 'jabra':
MicrophoneSetup._copy_asoundconf("asound.conf.jabra")
else:
MicrophoneSetup._copy_asoundconf("asound.conf.default")
@staticmethod
def _copy_asoundconf(asoundconf_file):
""" Copy asound.conf configuration to local path.
:param asoundconf_file: the name of the asound.conf configuration, as
present in the config folder.
"""
this_dir, this_filename = os.path.split(__file__)
asoundconf_path = os.path.join(this_dir, MicrophoneSetup.ASOUNDCONF_PATH, asoundconf_file)
shutil.copy2(asoundconf_path, ASOUNDCONF_DEST_PATH)
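# Hedged usage sketch: on Raspbian,
#   MicrophoneSetup.setup_asoundconf('jabra')
# copies config/asound.conf.jabra to ASOUNDCONF_DEST_PATH; on any other OS
# the call is a no-op.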
class RespeakerMicrophoneSetup:
@staticmethod
def setup(vendor_id, product_id):
if not is_raspi_os():
return
execute_command("sudo rm -f /lib/udev/rules.d/50-rspk.rules")
echo_command = ("echo ACTION==\"add\", SUBSYSTEMS==\"usb\", ATTRS{{idVendor}}==\"{}\", " +
"ATTRS{{idProduct}}==\"{}\", MODE=\"660\", GROUP=\"plugdev\"") \
.format(vendor_id, product_id)
tee_command = "sudo tee --append /lib/udev/rules.d/50-rspk.rules"
pipe_commands(echo_command, tee_command, silent=True)
execute_command("sudo adduser pi plugdev")
execute_command("sudo udevadm control --reload")
execute_command("sudo udevadm trigger")
|
mit
| -8,214,272,238,034,088,000
| 33.431034
| 98
| 0.625939
| false
| 3.503509
| false
| false
| false
|
edickie/ciftify
|
ciftify/bin/ciftify_seed_corr.py
|
1
|
8983
|
#!/usr/bin/env python3
"""
Produces a correlation map of the mean time series within the seed with
every voxel in the functional file.
Usage:
ciftify_seed_corr [options] <func> <seed>
Arguments:
<func> functional data (nifti or cifti)
<seed> seed mask (nifti, cifti or gifti)
Options:
--outputname STR Specify the output filename
--output-ts Also write the mean timeseries from the seed to a text file
--roi-label INT Specify the numeric label of the ROI you want a seedmap for
--hemi HEMI If the seed is a gifti file, specify the hemisphere (R or L) here
--mask FILE brainmask
--fisher-z Apply the fisher-z transform (arctanh) to the correlation map
--weighted compute weighted average timeseries from the seed map
--use-TRs FILE Only use the TRs listed in the file provided (TR numbering in the file starts at 1)
-v,--verbose Verbose logging
--debug Debug logging
-h, --help Prints this message
DETAILS:
The default output filename is created from the <func> and <seed> filenames,
(i.e. func.dscalar.nii + seed.dscalar.nii --> func_seed.dscalar.nii)
and written to same folder as the <func> input. Use the '--outputname'
argument to specify a different outputname. The output datatype matches the <func>
input.
The mean timeseries is calculated using ciftify_meants, '--roi-label', '--hemi',
'--mask', and '--weighted' arguments are passed to it. See ciftify_meants '--help' for
more info on their usage. The timeseries output (*_meants.csv) of this step can be
saved to disk using the '--output-ts' option.
If a mask is provided with the ('--mask') option. (Such as a brainmask) it will be
applied to both the seed and functional file.
The '--use-TRs' argument allows you to calculate the correlation maps from specific
timepoints (TRs) in the timeseries. This option can be used to exclude outlier
timepoints or to limit the calculation to a subsample of the timecourse
(i.e. only the beginning or end). It expects a text file containing the integer numbers
TRs to keep (where the first TR=1).
Written by Erin W Dickie
"""
import os
import sys
import subprocess
import tempfile
import shutil
import logging
import logging.config
import numpy as np
import scipy as sp
import nibabel as nib
from docopt import docopt
import ciftify
from ciftify.utils import run
from ciftify.meants import MeantsSettings
# Read logging.conf
logger = logging.getLogger('ciftify')
logger.setLevel(logging.DEBUG)
class UserSettings(MeantsSettings):
def __init__(self, arguments):
MeantsSettings.__init__(self, arguments)
self.fisher_z = arguments['--fisher-z']
self.output_prefix = self.get_output_prefix(arguments['--outputname'])
self.outputcsv = self.get_outputcsv(arguments['--output-ts'])
self.TR_file = self.get_TRfile(arguments['--use-TRs'])
def get_output_prefix(self, outputname):
'''
output_prefix is outputname if it was specified
if not, it is created from the func and seed input paths
'''
## determine outbase if it has not been specified
if outputname:
output_prefix = outputname.replace('.nii.gz','').replace('.dscalar.nii','')
else:
outbase = '{}_{}'.format(self.func.base, self.seed.base)
output_prefix = os.path.join(os.path.dirname(self.func.path), outbase)
## uses utils function to make sure the output is writable, will sys.exit with error if not the case
ciftify.utils.check_output_writable(output_prefix)
return(output_prefix)
def get_outputcsv(self, output_ts):
'''set outputcsv name if this is asked for'''
if output_ts:
outputcsv = '{}_meants.csv'.format(self.output_prefix)
else:
outputcsv = None
return(outputcsv)
def get_TRfile(self, TRfile):
if TRfile:
ciftify.utils.check_input_readable(TRfile)
return(TRfile)
def main():
arguments = docopt(__doc__)
debug = arguments['--debug']
verbose = arguments['--verbose']
ch = logging.StreamHandler()
ch.setLevel(logging.WARNING)
if verbose:
ch.setLevel(logging.INFO)
if debug:
ch.setLevel(logging.DEBUG)
logger.addHandler(ch)
## set up the top of the log
logger.info('{}{}'.format(ciftify.utils.ciftify_logo(),
ciftify.utils.section_header('Starting ciftify_seed_corr')))
ciftify.utils.log_arguments(arguments)
settings = UserSettings(arguments)
with ciftify.utils.TempDir() as tmpdir:
logger.info('Creating tempdir:{} on host:{}'.format(tmpdir,
os.uname()[1]))
ret = run_ciftify_seed_corr(settings, tmpdir)
logger.info(ciftify.utils.section_header('Done ciftify_seed_corr'))
sys.exit(ret)
def run_ciftify_seed_corr(settings, tempdir):
logger.debug('func: type: {}, base: {}'.format(settings.func.type, settings.func.base))
logger.debug('seed: type: {}, base: {}'.format(settings.seed.type, settings.seed.base))
if ".dlabel.nii" in settings.seed.path:
logger.error("Sorry this function can't handle .dlabel.nii seeds")
sys.exit(1)
seed_ts = ciftify.meants.calc_meants_with_numpy(settings)
logger.debug('seed_ts shape before reshaping {}'.format(seed_ts.shape))
if ((len(seed_ts.shape) != 2) or (seed_ts.shape[0] != 1 and seed_ts.shape[1] != 1)):
logger.error("Incorrect shape dimensions. May have forgotten to indicate the '--weighted' or '--roi-label' option")
sys.exit(1)
seed_ts = seed_ts.reshape(seed_ts.shape[0]*seed_ts.shape[1])
logger.debug('seed_ts shape after reshaping {}'.format(seed_ts.shape))
logger.debug('Writing output with prefix: {}'.format(settings.output_prefix))
logger.debug('Writing meants: {}'.format(settings.outputcsv))
logger.info('Using numpy to calculate seed-correlation')
## convert to nifti
if settings.func.type == "cifti":
func_fnifti = os.path.join(tempdir,'func.nii.gz')
run(['wb_command','-cifti-convert','-to-nifti',settings.func.path, func_fnifti])
func_data, outA, header, dims = ciftify.niio.load_nifti(func_fnifti)
    # import template, store the output parameters
if settings.func.type == "nifti":
func_data, outA, header, dims = ciftify.niio.load_nifti(settings.func.path)
if settings.mask:
if settings.mask.type == "cifti":
mask_fnifti = os.path.join(tempdir,'mask.nii.gz')
run(['wb_command','-cifti-convert','-to-nifti', settings.mask.path, mask_fnifti])
mask_data, _, _, _ = ciftify.niio.load_nifti(mask_fnifti)
if settings.mask.type == "nifti":
mask_data, _, _, _ = ciftify.niio.load_nifti(settings.mask.path)
# decide which TRs go into the correlation
if settings.TR_file:
TR_file = np.loadtxt(settings.TR_file, int)
TRs = TR_file - 1 # shift TR-list to be zero-indexed
else:
TRs = np.arange(dims[3])
# get mean seed timeseries
## even if no mask given, mask out all zero elements..
std_array = np.std(func_data, axis=1)
std_nonzero = np.where(std_array > 0)[0]
idx_mask = std_nonzero
if settings.mask:
idx_of_mask = np.where(mask_data > 0)[0]
idx_mask = np.intersect1d(idx_mask, idx_of_mask)
# create output array
out = np.zeros([dims[0]*dims[1]*dims[2], 1])
    # loop through each time series, calculating r
for i in np.arange(len(idx_mask)):
out[idx_mask[i]] = np.corrcoef(seed_ts[TRs], func_data[idx_mask[i], TRs])[0][1]
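    # Note (not from the original): the per-voxel loop above could also be
    # vectorized; a rough numpy equivalent would be:
    #   dm = func_data[idx_mask][:, TRs]
    #   dm = dm - dm.mean(axis=1, keepdims=True)
    #   sd = seed_ts[TRs] - seed_ts[TRs].mean()
    #   r = (dm @ sd) / (np.linalg.norm(dm, axis=1) * np.linalg.norm(sd))
    #   out[idx_mask, 0] = r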
# create the 3D volume and export
out = out.reshape([dims[0], dims[1], dims[2], 1])
out = nib.nifti1.Nifti1Image(out, outA)
## determine nifti filenames for the next two steps
if settings.func.type == "nifti":
if settings.fisher_z:
nifti_corr_output = os.path.join(tempdir, 'corr_out.nii.gz')
nifti_Zcorr_output = '{}.nii.gz'.format(settings.output_prefix)
else:
nifti_corr_output = '{}.nii.gz'.format(settings.output_prefix)
if settings.func.type == "cifti":
nifti_corr_output = os.path.join(tempdir, 'corr_out.nii.gz')
if settings.fisher_z:
nifti_Zcorr_output = os.path.join(tempdir, 'corrZ_out.nii.gz')
else:
nifti_Zcorr_output = nifti_corr_output
# write out nifti
out.to_filename(nifti_corr_output)
# do fisher-z transform on values
if settings.fisher_z:
run(['wb_command', "-volume-math 'atanh(x)'", nifti_Zcorr_output,
'-var', 'x', nifti_corr_output])
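    # For reference, the transform applied by wb_command above is the Fisher
    # r-to-z transform, z = atanh(r) = 0.5 * ln((1 + r) / (1 - r)); a numpy
    # equivalent on a raw correlation array would be np.arctanh(r_values)
    # (illustrative, not part of the original pipeline).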
if settings.func.type == "cifti":
## convert back
run(['wb_command','-cifti-convert','-from-nifti',
nifti_Zcorr_output,
settings.func.path,
'{}.dscalar.nii'.format(settings.output_prefix),
'-reset-scalars'])
if __name__ == '__main__':
main()
|
mit
| -9,101,947,211,567,938,000
| 36.902954
| 120
| 0.649338
| false
| 3.393653
| false
| false
| false
|
Linutronix/elbe
|
elbepack/initvmaction.py
|
1
|
23779
|
# ELBE - Debian Based Embedded Rootfilesystem Builder
# Copyright (c) 2015-2017 Manuel Traut <manut@linutronix.de>
# Copyright (c) 2015-2018 Torben Hohn <torben.hohn@linutronix.de>
# Copyright (c) 2015 Silvio Fricke <silvio.fricke@gmail.com>
# Copyright (c) 2017 Philipp Arras <philipp.arras@linutronix.de>
# Copyright (c) 2017 Benedikt Spranger <b.spranger@linutronix.de>
# Copyright (c) 2017 John Ogness <john.ogness@linutronix.de>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import sys
import time
import os
import datetime
import libvirt
import elbepack
from elbepack.treeutils import etree
from elbepack.directories import elbe_exe
from elbepack.shellhelper import CommandError, system, command_out_stderr, \
command_out
from elbepack.filesystem import TmpdirFilesystem
from elbepack.elbexml import ElbeXML, ValidationError, ValidationMode
from elbepack.config import cfg
from elbepack.xmlpreprocess import PreprocessWrapper
def is_soap_local():
return cfg["soaphost"] in ("localhost", "127.0.0.1")
def cmd_exists(x):
return any(os.access(os.path.join(path, x), os.X_OK)
for path in os.environ["PATH"].split(os.pathsep))
# Create download directory with timestamp,
# if necessary
def ensure_outdir(opt):
if opt.outdir is None:
opt.outdir = "elbe-build-%s" % (
datetime.datetime.now().strftime("%Y%m%d-%H%M%S"))
print("Saving generated Files to %s" % opt.outdir)
class InitVMError(Exception):
def __init__(self, msg):
Exception.__init__(self, msg)
class InitVMAction:
actiondict = {}
@classmethod
def register(cls, tag):
def _register(action):
action.tag = tag
cls.actiondict[action.tag] = action
return action
return _register
@classmethod
def print_actions(cls):
print("available subcommands are:", file=sys.stderr)
for a in cls.actiondict:
print(" %s" % a, file=sys.stderr)
def __new__(cls, node):
action = cls.actiondict[node]
return object.__new__(action)
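    # The registry above lets each action class register itself under a tag,
    # and __new__ dispatches InitVMAction(tag) to the right subclass. A
    # hypothetical new action would look like:
    #   @InitVMAction.register('mytag')
    #   class MyAction(InitVMAction):
    #       def execute(self, _initvmdir, _opt, _args):
    #           ...
    #   InitVMAction('mytag')  # returns a MyAction instance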
def __init__(self, node, initvmNeeded=True):
self.initvm = None
self.conn = None
self.node = node
# initvm might be running on a different host. Thus there's
# no need to talk with libvirt
if not is_soap_local():
return
# The tag initvmNeeded is required in order to be able to run `elbe
# initvm create`
try:
self.conn = libvirt.open("qemu:///system")
except libvirt.libvirtError as verr:
if not isinstance(verr.args[0], str):
raise
if verr.args[0].startswith('Failed to connect socket to'):
retries = 18
while retries > 0:
retries -= 1
time.sleep(10)
try:
self.conn = libvirt.open("qemu:///system")
except libvirt.libvirtError as verr:
if not isinstance(verr.args[0], str):
raise
if verr.args[0].startswith('Failed to connect socket to'):
pass
if self.conn:
break
if not self.conn:
print("", file=sys.stderr)
print("Accessing libvirt provider system not possible.", file=sys.stderr)
print("Even after waiting 180 seconds.", file=sys.stderr)
print("Make sure that package 'libvirt-daemon-system' is", file=sys.stderr)
print("installed, and the service is running properly", file=sys.stderr)
sys.exit(20)
elif verr.args[0].startswith('authentication unavailable'):
print("", file=sys.stderr)
print("Accessing libvirt provider system not allowed.", file=sys.stderr)
print("Users which want to use elbe need to be members of the 'libvirt' group.", file=sys.stderr)
print("'gpasswd -a <user> libvirt' and logging in again,", file=sys.stderr)
print("should fix the problem.", file=sys.stderr)
sys.exit(20)
elif verr.args[0].startswith('error from service: CheckAuthorization'):
print("", file=sys.stderr)
print("Accessing libvirt failed.", file=sys.stderr)
print("Probably entering the password for accssing libvirt", file=sys.stderr)
print("timed out. If this occured after 'elbe initvm create'", file=sys.stderr)
print("it should be safe to use 'elbe initvm start' to", file=sys.stderr)
print("continue.", file=sys.stderr)
sys.exit(20)
else:
# In case we get here, the exception is unknown, and we want to see it
raise
doms = self.conn.listAllDomains()
for d in doms:
if d.name() == cfg['initvm_domain']:
self.initvm = d
if not self.initvm and initvmNeeded:
sys.exit(20)
def execute(self, _initvmdir, _opt, _args):
raise NotImplementedError('execute() not implemented')
def initvm_state(self):
return self.initvm.info()[0]
@InitVMAction.register('start')
class StartAction(InitVMAction):
def __init__(self, node):
InitVMAction.__init__(self, node)
def execute(self, _initvmdir, _opt, _args):
if self.initvm_state() == libvirt.VIR_DOMAIN_RUNNING:
print('Initvm already running.')
sys.exit(20)
elif self.initvm_state() == libvirt.VIR_DOMAIN_SHUTOFF:
# Domain is shut off. Let's start it!
self.initvm.create()
# Wait five seconds for the initvm to boot
# TODO: Instead of waiting for five seconds
# check whether SOAP server is reachable.
            for _ in range(5):
sys.stdout.write("*")
sys.stdout.flush()
time.sleep(1)
print("*")
@InitVMAction.register('ensure')
class EnsureAction(InitVMAction):
def __init__(self, node):
InitVMAction.__init__(self, node)
def execute(self, _initvmdir, _opt, _args):
# initvm might be running on a different host, thus skipping
# the check
if not is_soap_local():
return
if self.initvm_state() == libvirt.VIR_DOMAIN_SHUTOFF:
system('%s initvm start' % elbe_exe)
elif self.initvm_state() == libvirt.VIR_DOMAIN_RUNNING:
stop = time.time() + 300
while time.time() < stop:
if command_out('%s control list_projects' % elbe_exe)[0] == 0:
break
time.sleep(10)
if time.time() > stop:
print("Waited for 5 minutes and the daemon is still not active."
" Exit.")
sys.exit(20)
else:
print("Elbe initvm in bad state.")
sys.exit(20)
@InitVMAction.register('stop')
class StopAction(InitVMAction):
def __init__(self, node):
InitVMAction.__init__(self, node)
def execute(self, _initvmdir, _opt, _args):
if self.initvm_state() != libvirt.VIR_DOMAIN_RUNNING:
print('Initvm is not running.')
sys.exit(20)
while True:
sys.stdout.write("*")
sys.stdout.flush()
time.sleep(1)
state = self.initvm_state()
if state == libvirt.VIR_DOMAIN_SHUTDOWN:
continue
if state == libvirt.VIR_DOMAIN_SHUTOFF:
break
            self.initvm.shutdown()
print("\nInitvm shutoff")
@InitVMAction.register('attach')
class AttachAction(InitVMAction):
def __init__(self, node):
InitVMAction.__init__(self, node)
def execute(self, _initvmdir, _opt, _args):
if self.initvm_state() != libvirt.VIR_DOMAIN_RUNNING:
print('Error: Initvm not running properly.')
sys.exit(20)
print('Attaching to initvm console.')
system('virsh --connect qemu:///system console %s' % cfg['initvm_domain'])
def submit_and_dl_result(xmlfile, cdrom, opt):
# pylint: disable=too-many-statements
# pylint: disable=too-many-branches
try:
with PreprocessWrapper(xmlfile, opt) as ppw:
xmlfile = ppw.preproc
ret, prjdir, err = command_out_stderr(
'%s control create_project' % (elbe_exe))
if ret != 0:
print("elbe control create_project failed.", file=sys.stderr)
print(err, file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
prjdir = prjdir.strip()
cmd = '%s control set_xml %s %s' % (elbe_exe, prjdir, xmlfile)
ret, _, err = command_out_stderr(cmd)
if ret != 0:
print("elbe control set_xml failed2", file=sys.stderr)
print(err, file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
except CommandError:
# this is the failure from PreprocessWrapper
# it already printed the error message from
# elbe preprocess
print("Giving up", file=sys.stderr)
sys.exit(20)
if opt.writeproject:
with open(opt.writeproject, "w") as wpf:
wpf.write(prjdir)
if cdrom is not None:
print("Uploading CDROM. This might take a while")
try:
system(
'%s control set_cdrom "%s" "%s"' %
(elbe_exe, prjdir, cdrom))
except CommandError:
print("elbe control set_cdrom Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
print("Upload finished")
build_opts = ''
if opt.build_bin:
build_opts += '--build-bin '
if opt.build_sources:
build_opts += '--build-sources '
if cdrom:
build_opts += '--skip-pbuilder '
try:
system(
'%s control build "%s" %s' %
(elbe_exe, prjdir, build_opts))
except CommandError:
print("elbe control build Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
print("Build started, waiting till it finishes")
try:
system('%s control wait_busy "%s"' % (elbe_exe, prjdir))
except CommandError:
print('elbe control wait_busy Failed', file=sys.stderr)
print('', file=sys.stderr)
print('The project will not be deleted from the initvm.',
file=sys.stderr)
        print('The files that have been built can be downloaded using:',
file=sys.stderr)
print('%s control get_files --output "%s" "%s"' % (elbe_exe,
opt.outdir,
prjdir),
file=sys.stderr)
print("", file=sys.stderr)
print('The project can then be removed using:',
file=sys.stderr)
print('%s control del_project "%s"' % (elbe_exe, prjdir),
file=sys.stderr)
print("", file=sys.stderr)
sys.exit(10)
print("")
print("Build finished !")
print("")
if opt.build_sdk:
try:
system(
'%s control build_sdk "%s" %s' %
(elbe_exe, prjdir, build_opts))
except CommandError:
print("elbe control build_sdk Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
print("SDK Build started, waiting till it finishes")
try:
system('%s control wait_busy "%s"' % (elbe_exe, prjdir))
except CommandError:
            print('elbe control wait_busy Failed while waiting for the SDK',
file=sys.stderr)
print('', file=sys.stderr)
print('The project will not be deleted from the initvm.',
file=sys.stderr)
            print('The files that have been built can be downloaded using:',
file=sys.stderr)
print('%s control get_files --output "%s" "%s"' % (elbe_exe,
opt.outdir,
prjdir),
file=sys.stderr)
print("", file=sys.stderr)
print('The project can then be removed using:',
file=sys.stderr)
print('%s control del_project "%s"' % (elbe_exe, prjdir),
file=sys.stderr)
print("", file=sys.stderr)
sys.exit(10)
print("")
print("SDK Build finished !")
print("")
try:
system(
'%s control dump_file "%s" validation.txt' %
(elbe_exe, prjdir))
except CommandError:
print(
"Project failed to generate validation.txt",
file=sys.stderr)
print("Getting log.txt", file=sys.stderr)
try:
system(
'%s control dump_file "%s" log.txt' %
(elbe_exe, prjdir))
except CommandError:
print("Failed to dump log.txt", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
if opt.skip_download:
print("")
print("Listing available files:")
print("")
try:
system('%s control get_files "%s"' % (elbe_exe, prjdir))
except CommandError:
print("elbe control get_files Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
print("")
print(
'Get Files with: elbe control get_file "%s" <filename>' %
prjdir)
else:
print("")
print("Getting generated Files")
print("")
ensure_outdir(opt)
try:
system('%s control get_files --output "%s" "%s"' % (
elbe_exe, opt.outdir, prjdir))
except CommandError:
print("elbe control get_files Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
if not opt.keep_files:
try:
system('%s control del_project "%s"' % (
elbe_exe, prjdir))
except CommandError:
print("remove project from initvm failed",
file=sys.stderr)
sys.exit(20)
def extract_cdrom(cdrom):
""" Extract cdrom iso image
returns a TmpdirFilesystem() object containing
the source.xml, which is also validated.
"""
tmp = TmpdirFilesystem()
system('7z x -o%s "%s" source.xml' % (tmp.path, cdrom))
print("", file=sys.stderr)
if not tmp.isfile('source.xml'):
print(
"Iso image does not contain a source.xml file",
file=sys.stderr)
print(
"This is not supported by 'elbe initvm'",
file=sys.stderr)
print("", file=sys.stderr)
print("Exiting !!!", file=sys.stderr)
sys.exit(20)
try:
exml = ElbeXML(
tmp.fname('source.xml'),
url_validation=ValidationMode.NO_CHECK)
except ValidationError as e:
print(
"Iso image does contain a source.xml file.",
file=sys.stderr)
print(
"But that xml does not validate correctly",
file=sys.stderr)
print("", file=sys.stderr)
print("Exiting !!!", file=sys.stderr)
print(e)
sys.exit(20)
print("Iso Image with valid source.xml detected !")
print(
"Image was generated using Elbe Version %s" %
exml.get_elbe_version())
return tmp
@InitVMAction.register('create')
class CreateAction(InitVMAction):
def __init__(self, node):
InitVMAction.__init__(self, node, initvmNeeded=False)
def execute(self, initvmdir, opt, args):
# pylint: disable=too-many-branches
# pylint: disable=too-many-statements
if self.initvm is not None:
print("Initvm is already defined for the libvirt domain '%s'.\n" % cfg['initvm_domain'])
print("If you want to build in your old initvm, "
"use `elbe initvm submit <xml>`.")
print("If you want to remove your old initvm from libvirt "
"run `virsh --connect qemu:///system undefine %s`.\n" % cfg['initvm_domain'])
print("You can specify another libvirt domain by setting the "
"ELBE_INITVM_DOMAIN environment variable to an unused domain name.\n")
print("Note:")
print("\t1) You can reimport your old initvm via "
"`virsh --connect qemu:///system define <file>`")
print("\t where <file> is the corresponding libvirt.xml")
print("\t2) virsh --connect qemu:///system undefine does not delete the image "
"of your old initvm.")
sys.exit(20)
# Upgrade from older versions which used tmux
try:
system("tmux has-session -t ElbeInitVMSession 2>/dev/null")
print ("ElbeInitVMSession exists in tmux. "
"It may belong to an old elbe version. "
"Please stop it to prevent interfering with this version.", file=sys.stderr)
sys.exit(20)
except CommandError:
pass
# Init cdrom to None, if we detect it, we set it
cdrom = None
if len(args) == 1:
if args[0].endswith('.xml'):
# We have an xml file, use that for elbe init
xmlfile = args[0]
try:
xml = etree(xmlfile)
except ValidationError as e:
print("XML file is invalid: %s" % str(e))
# Use default XML if no initvm was specified
if not xml.has("initvm"):
xmlfile = os.path.join(
elbepack.__path__[0], "init/default-init.xml")
elif args[0].endswith('.iso'):
# We have an iso image, extract xml from there.
tmp = extract_cdrom(args[0])
xmlfile = tmp.fname('source.xml')
cdrom = args[0]
else:
print(
"Unknown file ending (use either xml or iso)",
file=sys.stderr)
sys.exit(20)
else:
# No xml File was specified, build the default elbe-init-with-ssh
xmlfile = os.path.join(
elbepack.__path__[0],
"init/default-init.xml")
try:
init_opts = ''
if opt.devel:
init_opts += ' --devel'
if opt.nesting:
init_opts += ' --nesting'
if not opt.build_bin:
init_opts += ' --skip-build-bin'
if not opt.build_sources:
init_opts += ' --skip-build-source'
with PreprocessWrapper(xmlfile, opt) as ppw:
if cdrom:
system('%s init %s --directory "%s" --cdrom "%s" "%s"' %
(elbe_exe, init_opts, initvmdir, cdrom, ppw.preproc))
else:
system(
'%s init %s --directory "%s" "%s"' %
(elbe_exe, init_opts, initvmdir, ppw.preproc))
except CommandError:
print("'elbe init' Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
# Read xml file for libvirt
with open(os.path.join(initvmdir, 'libvirt.xml')) as f:
xml = f.read()
# Register initvm in libvirt
try:
self.conn.defineXML(xml)
        except libvirt.libvirtError:
print('Registering initvm in libvirt failed', file=sys.stderr)
print('Try `virsh --connect qemu:///system undefine %s` to delete existing initvm' % cfg['initvm_domain'],
file=sys.stderr)
sys.exit(20)
# Build initvm
try:
system('cd "%s"; make' % (initvmdir))
except CommandError:
print("Building the initvm Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
try:
system('%s initvm start' % elbe_exe)
except CommandError:
print("Starting the initvm Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
if len(args) == 1:
# if provided xml file has no initvm section xmlfile is set to a
# default initvm XML file. But we need the original file here
if args[0].endswith('.xml'):
# stop here if no project node was specified
try:
x = etree(args[0])
except ValidationError as e:
print("XML file is invalid: %s" % str(e))
sys.exit(20)
if not x.has('project'):
print("elbe initvm ready: use 'elbe initvm submit "
"myproject.xml' to build a project")
sys.exit(0)
xmlfile = args[0]
elif cdrom is not None:
xmlfile = tmp.fname('source.xml')
submit_and_dl_result(xmlfile, cdrom, opt)
@InitVMAction.register('submit')
class SubmitAction(InitVMAction):
def __init__(self, node):
InitVMAction.__init__(self, node)
def execute(self, _initvmdir, opt, args):
try:
system('%s initvm ensure' % elbe_exe)
except CommandError:
print("Starting the initvm Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
# Init cdrom to None, if we detect it, we set it
cdrom = None
if len(args) == 1:
if args[0].endswith('.xml'):
# We have an xml file, use that for elbe init
xmlfile = args[0]
elif args[0].endswith('.iso'):
# We have an iso image, extract xml from there.
tmp = extract_cdrom(args[0])
xmlfile = tmp.fname('source.xml')
cdrom = args[0]
else:
print(
"Unknown file ending (use either xml or iso)",
file=sys.stderr)
sys.exit(20)
submit_and_dl_result(xmlfile, cdrom, opt)
@InitVMAction.register('sync')
class SyncAction(InitVMAction):
def __init__(self, node):
super(SyncAction, self).__init__(node)
def execute(self, _initvmdir, opt, args):
top_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
try:
system("rsync --info=name1,stats1 --archive --times "
"--exclude='.git*' --exclude='*.pyc' --exclude='elbe-build*' "
"--exclude='initvm' --exclude='__pycache__' --exclude='docs' "
"--exclude='examples' "
"--rsh='ssh -p %s' --chown=root:root "
"%s/ root@localhost:/var/cache/elbe/devel" %
(cfg["sshport"], top_dir))
except CommandError as E:
print(E)
|
gpl-3.0
| 8,525,162,397,998,769,000
| 33.263689
| 118
| 0.529711
| false
| 4.074537
| false
| false
| false
|
Capitains/Nautilus
|
capitains_nautilus/flask_ext.py
|
1
|
7289
|
from pkg_resources import resource_filename
import logging
from copy import deepcopy
from collections import defaultdict
from flask import Blueprint, Response
from capitains_nautilus.apis.cts import CTSApi
from capitains_nautilus.apis.dts import DTSApi
def _all_origins():
return "*"
class FlaskNautilus(object):
""" HTTP API Interfaces for MyCapytains resolvers
:param prefix: Prefix on which to install the extension
:param app: Application on which to register
:param name: Name to use for the blueprint
:param resolver: Resolver
:type resolver: Resolver
:param flask_caching: HTTP Cache should be a FlaskCaching Cache object
:type flask_caching: Cache
    :param access_Control_Allow_Methods: Dictionary with route name and allowed methods over CORS
    :param access_Control_Allow_Origin: Dictionary with route name and allowed host over CORS or "*"
:param logger: Logging handler.
:type logger: logging
:param apis: Set of APIs to connect to Nautilus
:type apis: set of classes
:cvar ROUTES: List of triple length tuples
:cvar Access_Control_Allow_Methods: Dictionary with route name and allowed methods over CORS
:cvar Access_Control_Allow_Origin: Dictionary with route name and allowed host over CORS or "*"
:cvar LoggingHandler: Logging handler to be set for the blueprint
:ivar logger: Logging handler
:type logger: logging.Logger
:ivar resolver: CapiTainS resolver
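    Minimal usage sketch (illustrative; assumes an existing MyCapytain
    resolver instance named ``resolver``)::
        from flask import Flask
        app = Flask(__name__)
        nautilus = FlaskNautilus(prefix="/api", app=app, resolver=resolver)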
"""
Access_Control_Allow_Origin = "*"
LoggingHandler = logging.StreamHandler
def __init__(self, prefix="", app=None, name=None,
resolver=None,
flask_caching=None,
access_Control_Allow_Origin=None,
access_Control_Allow_Methods=None,
logger=None, apis=None
):
self._extensions = {}
self.logger = None
self.retriever = None
self.resolver = resolver
self.setLogger(logger)
self.name = name
self.prefix = prefix
self.blueprint = None
self.ROUTES = []
self.CACHED = []
self.routes = []
if apis is None:
from warnings import warn
warn(
"The parameter `apis` will need to be set-up explicitly starting 2.0.0",
DeprecationWarning
)
apis = {CTSApi(), DTSApi()}
self.Access_Control_Allow_Methods = access_Control_Allow_Methods
if not self.Access_Control_Allow_Methods:
self.Access_Control_Allow_Methods = {}
if access_Control_Allow_Origin:
self.Access_Control_Allow_Origin = defaultdict(_all_origins)
self.Access_Control_Allow_Origin.update(access_Control_Allow_Origin)
else:
self.Access_Control_Allow_Origin = FlaskNautilus.Access_Control_Allow_Origin
for api in apis:
api.init_extension(self)
self.__flask_caching__ = flask_caching
if self.name is None:
self.name = __name__
if app:
self.init_app(app=app)
def register(self, extension, extension_name):
""" Register an extension into the Nautilus Router
:param extension: Extension
:param extension_name: Name of the Extension
:return:
"""
self._extensions[extension_name] = extension
self.ROUTES.extend([
tuple(list(t) + [extension_name])
for t in extension.ROUTES
])
self.CACHED.extend([
(f_name, extension_name)
for f_name in extension.CACHED
])
# This order allows for user defaults to overwrite extension ones
self.Access_Control_Allow_Methods.update({
k: v
for k, v in extension.Access_Control_Allow_Methods.items()
if k not in self.Access_Control_Allow_Methods
})
@property
def flaskcache(self):
return self.__flask_caching__
def setLogger(self, logger):
""" Set up the Logger for the application
:param logger: logging.Logger object
:return: Logger instance
"""
self.logger = logger
if logger is None:
self.logger = logging.getLogger("capitains_nautilus")
formatter = logging.Formatter("[%(asctime)s] {%(pathname)s:%(lineno)d} %(levelname)s - %(message)s")
stream = FlaskNautilus.LoggingHandler()
stream.setLevel(logging.INFO)
stream.setFormatter(formatter)
self.logger.addHandler(stream)
if self.resolver:
self.resolver.logger = self.logger
return self.logger
def init_app(self, app):
""" Initiate the extension on the application
:param app: Flask Application
:return: Blueprint for Flask Nautilus registered in app
:rtype: Blueprint
"""
self.init_blueprint(app)
if self.flaskcache is not None:
for func, extension_name in self.CACHED:
func = getattr(self._extensions[extension_name], func)
setattr(
self._extensions[extension_name],
func.__name__,
self.flaskcache.memoize()(func)
)
return self.blueprint
def init_blueprint(self, app):
""" Properly generates the blueprint, registering routes and filters and connecting the app and the blueprint
:return: Blueprint of the extension
:rtype: Blueprint
"""
self.blueprint = Blueprint(
self.name,
self.name,
template_folder=resource_filename("capitains_nautilus", "data/templates"),
url_prefix=self.prefix
)
# Register routes
for url, name, methods, extension_name in self.ROUTES:
self.blueprint.add_url_rule(
url,
view_func=self.view(name, extension_name),
endpoint=name[2:],
methods=methods
)
app.register_blueprint(self.blueprint)
return self.blueprint
def view(self, function_name, extension_name):
""" Builds response according to a function name
:param function_name: Route name / function name
:param extension_name: Name of the extension holding the function
:return: Function
"""
if isinstance(self.Access_Control_Allow_Origin, dict):
d = {
"Access-Control-Allow-Origin": self.Access_Control_Allow_Origin[function_name],
"Access-Control-Allow-Methods": self.Access_Control_Allow_Methods[function_name]
}
else:
d = {
"Access-Control-Allow-Origin": self.Access_Control_Allow_Origin,
"Access-Control-Allow-Methods": self.Access_Control_Allow_Methods[function_name]
}
def r(*x, **y):
val = getattr(self._extensions[extension_name], function_name)(*x, **y)
if isinstance(val, Response):
val.headers.extend(d)
return val
else:
val = list(val)
val[2].update(d)
return tuple(val)
return r
|
mpl-2.0
| -642,994,500,773,671,200
| 32.131818
| 117
| 0.600494
| false
| 4.406892
| false
| false
| false
|
sbg2133/miscellaneous_projects
|
carina/ItoNH.py
|
1
|
1115
|
import numpy as np
import matplotlib.pyplot as plt
from astropy.io import fits
import aplpy
from astropy.wcs import WCS
import sys, os
from getIQU import IQU
from astropy import coordinates as coord
from astropy.coordinates import SkyCoord
from astropy import units as u
from scipy.interpolate import griddata
plt.ion()
root_dir = '/home/wizwit/miscellaneous_projects/carina/carinaData'
blast250_file = os.path.join(root_dir, 'smooth/3.0_arcmin/carinaneb_250_smoothed_3.0_rl.fits')
beta = 1.27
def getPsi(path_to_file):
I, Q, U, __, wcs = IQU(path_to_file)
Pvals = np.sqrt(Q**2 + U**2)
pvals = Pvals/I
# pvals /= pol_eff[band_idx]
psi = 0.5*np.arctan2(U,Q)
return I, Q, U, wcs, psi
I, __, __, wcs_250, __ = getPsi(blast250_file)
#tau_d = (nu/nu0)**beta
# See Walker pg. 71
# nu0 = frequency at which dust emission becomes optically thin
#nu0 = 0.103 * Td # 0.103 (THz/K) * Td
#Inu_dust = Bnu(Td)*(1.0 - np.exp(-1.0*tau_d))
# See Walker pg. 69
# Av = 1.086*tau_d
# N_H = 1.79e21 * Av # (atoms/cm**2 mag)
# 1) Solve tau_d for temperature
# 2) Plug into Inu_dust equation
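# A rough sketch of steps 1) and 2) (illustrative; assumes nu in THz so it
# matches nu0 = 0.103 * Td, and a Planck-function helper Bnu):
#   def tau_d(nu, Td):
#       nu0 = 0.103 * Td
#       return (nu / nu0)**beta
#   def Inu_dust(nu, Td):
#       return Bnu(nu, Td) * (1.0 - np.exp(-tau_d(nu, Td)))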
|
gpl-3.0
| -8,403,188,222,198,603,000
| 24.340909
| 94
| 0.673543
| false
| 2.488839
| false
| false
| false
|
trasa/sprout
|
sprout/sprout/servicehosts.py
|
1
|
1382
|
import os
from fabric.api import *
def create_objects(cfg, service_hosts):
""" Turn a list of service host info into objects that can do
starting, stopping of services, or other things that
we think up.
"""
return [ServiceHost(
s['hostname'],
s['services'],
cfg.get_remote_user())
for s in service_hosts]
class ServiceHost(object):
def __init__(self, hostname, services, remote_user):
self.hostname = hostname
self.services = services
self.remote_user = remote_user
self.connected = False
def _connect(self):
if not self.connected:
# connect to self.hostname
env.user = self.remote_user
env.host_string = self.hostname
self.connected = True
def _run_service(self, service_name, state):
sudo('/sbin/service %s %s' % (service_name, state))
def _run_all_services(self, state):
for service_name in self.services:
self._run_service(service_name, state)
def start(self):
self._connect()
        self._run_all_services('start')
def stop(self):
self._connect()
self._run_all_services('stop')
def restart(self):
""" Restart the services on this host."""
self._connect()
self._run_all_services('restart')
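# Example wiring (illustrative; cfg is whatever object create_objects
# receives, providing get_remote_user()):
#   hosts = create_objects(cfg, [{'hostname': 'web1.example.com',
#                                 'services': ['httpd', 'memcached']}])
#   for host in hosts:
#       host.restart()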
|
apache-2.0
| -7,280,064,624,537,456,000
| 26.64
| 65
| 0.575977
| false
| 4.175227
| false
| false
| false
|
hqcckes/python-scheduler
|
Server/message.py
|
1
|
2650
|
# coding=utf-8
import os
import sys
import codecs
import json
import logging
import logging.config
from rpyc import Service
from rpyc.utils.server import ThreadedServer
from ConfigParser import SafeConfigParser
class Message(Service):
@staticmethod
def exposed_send(message):
import urllib2
logger.info(u"短信内容:" + message)
lx = u"0"
dlzh = cf.get(u"message", u"username")
dlmm = cf.get(u"message", u"password")
sjhm = cf.get(u"message", u"phone")
url = cf.get(u"message", u"url")
dxnr = urllib2.quote(message.encode(u"GB18030"))
fhls = u"0"
data = u"LX=" + lx + u"&DLZH=" + dlzh + u"&DLMM=" + dlmm + u"&SJHM=" + sjhm + u"&DXNR=" + dxnr + u"&FHLS=" + fhls
url = url + data
request = urllib2.Request(url)
response = urllib2.urlopen(request).read()
response = response.decode(u"GB18030")
if response == u"0":
logger.info(u"警报短信发送成功!")
else:
logger.warning(u"警报短信发送失败,返回码:" + response)
return response
def setup_logging(path=u"message.json", level=logging.INFO, env_key=u"LOG_CFG"):
"""
    Load the logging configuration.
    :param path: default configuration path
    :param level: default logging level
    :param env_key: environment variable that can override the path
:return:
"""
value = os.getenv(env_key, None)
if value:
path = value
if os.path.exists(path):
with codecs.open(filename=path, mode=u"rb", encoding=u"utf8") as f:
config = json.load(f)
logging.config.dictConfig(config)
else:
logging.basicConfig(level=level)
def read_conf(path=u"message.conf"):
"""
    Load the application configuration.
    :param path: path to the configuration file
:return: ConfigParser
"""
if not os.path.exists(path):
logger.error(u"没有找到配置文件:\"message.conf\" !")
sys.exit(2)
config = SafeConfigParser()
with codecs.open(path, u"rb", encoding=u"utf8") as c_file:
config.readfp(c_file)
return config
if __name__ == u'__main__':
    # OS path separator
sep = os.sep
    # Directory of this script, decoded with GB18030 to handle Chinese characters in the path
c_path = os.path.split(os.path.realpath(__file__))[0].decode(u"GB18030")
    # Load the logging configuration
setup_logging(path=os.path.join(c_path, u"config/message.json"))
logger = logging.getLogger(__name__)
    # Load the configuration file
config_file = os.path.join(c_path, u"config/message.conf")
cf = read_conf(path=config_file)
service = ThreadedServer(Message, port=9999, auto_register=False)
service.start()
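    # A client can reach the exposed method over RPyC, e.g. (illustrative):
    #   import rpyc
    #   conn = rpyc.connect("localhost", 9999)
    #   conn.root.send(u"test alert")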
|
gpl-3.0
| -404,603,665,324,014,900
| 26.066667
| 121
| 0.605911
| false
| 2.673985
| true
| false
| false
|
ContinuumIO/ashiba
|
enaml/enaml/qt/qt_stack_item.py
|
1
|
3902
|
#------------------------------------------------------------------------------
# Copyright (c) 2013, Nucleic Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#------------------------------------------------------------------------------
from atom.api import Typed
from enaml.widgets.stack_item import ProxyStackItem
from .QtGui import QFrame
from .q_single_widget_layout import QSingleWidgetLayout
from .qt_container import QtContainer
from .qt_widget import QtWidget
class QStackItem(QFrame):
""" A QFrame subclass which acts as an item QStack.
"""
def __init__(self, *args, **kwargs):
""" Initialize a QStackItem.
Parameters
----------
*args, **kwargs
The position and keyword arguments required to initialize
a QWidget.
"""
super(QStackItem, self).__init__(*args, **kwargs)
self._stack_widget = None
self.setLayout(QSingleWidgetLayout())
def stackWidget(self):
""" Get the stack widget for this stack item.
Returns
-------
result : QWidget or None
The stack widget being managed by this item.
"""
return self._stack_widget
def setStackWidget(self, widget):
""" Set the stack widget for this stack item.
Parameters
----------
widget : QWidget
The QWidget to use as the stack widget in this item.
"""
self._stack_widget = widget
self.layout().setWidget(widget)
class QtStackItem(QtWidget, ProxyStackItem):
""" A Qt implementation of an Enaml ProxyStackItem.
"""
#: A reference to the widget created by the proxy.
widget = Typed(QStackItem)
#--------------------------------------------------------------------------
# Initialization API
#--------------------------------------------------------------------------
def create_widget(self):
""" Create the underlying QStackItem widget.
"""
self.widget = QStackItem(self.parent_widget())
def init_layout(self):
""" Initialize the layout for the underyling widget.
"""
super(QtStackItem, self).init_layout()
self.widget.setStackWidget(self.stack_widget())
#--------------------------------------------------------------------------
# Utility Methods
#--------------------------------------------------------------------------
def stack_widget(self):
""" Find and return the stack widget child for this widget.
"""
d = self.declaration.stack_widget()
if d is not None:
return d.proxy.widget
#--------------------------------------------------------------------------
# Child Events
#--------------------------------------------------------------------------
def child_added(self, child):
""" Handle the child added event for a QtStackItem.
"""
super(QtStackItem, self).child_added(child)
if isinstance(child, QtContainer):
self.widget.setStackWidget(self.stack_widget())
def child_removed(self, child):
""" Handle the child added event for a QtStackItem.
"""
super(QtStackItem, self).child_removed(child)
if isinstance(child, QtContainer):
self.widget.setStackWidget(self.stack_widget())
#--------------------------------------------------------------------------
# Widget Update Methods
#--------------------------------------------------------------------------
def set_visible(self, visible):
""" An overridden visibility setter.
This setter disables changing visibility on the widget since
the visibility is controlled entirely by the parent stack.
"""
pass
|
bsd-3-clause
| -7,036,452,119,728,805,000
| 30.467742
| 79
| 0.494106
| false
| 5.330601
| false
| false
| false
|
forgeservicelab/ansible-roles.django_saml_app
|
templates/settings.py
|
1
|
5840
|
"""
Django settings for samldemo project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
import logging
logging.basicConfig(level=logging.DEBUG)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '@!poms#fy-w!ad&i945blb)arnx!(zj$37x1b$n9l_8*$2=m-0'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'djangosaml2',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
#'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'samldemo.urls'
WSGI_APPLICATION = 'samldemo.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'djangosaml2.backends.Saml2Backend',
)
LOGIN_URL = '/saml2/login/'
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.PickleSerializer'
import saml2
SAML_CONFIG = {
    # full path to the xmlsec1 binary program
'xmlsec_binary': '/usr/bin/xmlsec1',
# your entity id, usually your subdomain plus the url to the metadata view
'entityid': '{{ ansible_fqdn }}/saml2/metadata/',
# directory with attribute mapping
'attribute_map_dir': '/usr/local/lib/python2.7/dist-packages/saml2/attributemaps',
# this block states what services we provide
'service': {
# we are just a lonely SP
'sp' : {
'allow_unsolicited': True,
'name': 'Federated Django sample SP',
'endpoints': {
                # url and binding to the assertion consumer service view
# do not change the binding or service name
'assertion_consumer_service': [
('http://{{ ansible_fqdn }}/saml2/acs/',
saml2.BINDING_HTTP_POST),
],
# url and binding to the single logout service view
# do not change the binding or service name
'single_logout_service': [
('http://{{ ansible_fqdn }}/saml2/ls/',
saml2.BINDING_HTTP_REDIRECT),
],
},
            # attributes that this project needs to identify a user
'required_attributes': ['cn'],
# attributes that may be useful to have but not required
'optional_attributes': ['eduPersonAffiliation'],
# in this section the list of IdPs we talk to are defined
'idp': {
# we do not need a WAYF service since there is
# only an IdP defined here. This IdP should be
# present in our metadata
# the keys of this dictionary are entity ids
'{{ django_saml_app_idp }}/saml2/idp/metadata.php': {
'single_sign_on_service': {
saml2.BINDING_HTTP_REDIRECT: '{{ django_saml_app_idp }}/saml2/idp/SSOService.php',
},
'single_logout_service': {
saml2.BINDING_HTTP_REDIRECT: '{{ django_saml_app_idp }}/saml2/idp/SingleLogoutService.php',
},
},
},
},
},
# where the remote metadata is stored
'metadata': {
'local': [os.path.join(BASE_DIR, 'remote_metadata.xml')],
},
# set to 1 to output debugging information
'debug': 1,
# certificate
'key_file': os.path.join(BASE_DIR, 'key'),
'cert_file': os.path.join(BASE_DIR, 'cert.crt'),
# own metadata settings
'contact_person': [
{'given_name': 'Tomas',
'sur_name': 'Karasek',
'company': 'Digile',
'email_address': 'tomas.karasek@digile.fi',
'contact_type': 'technical'},
{'given_name': 'Tomas',
'sur_name': 'Karasek',
'company': 'Digile',
'email_address': 'tomas.karasek@digile.fi',
'contact_type': 'administrative'},
],
# you can set multilanguage information here
'organization': {
'name': [('Digile', 'en')],
'display_name': [('Digile', 'en')],
'url': [('http://forgeservicelab.fi', 'en')],
},
'valid_for': 24, # how long is our metadata valid
}
SAML_ATTRIBUTE_MAPPING = {
# cn is in the OID notation urn:oid:2.5.4.3
'cn': ('username', ),
'mail': ('email', ),
'givenName': ('first_name', ),
'sn': ('last_name', )
}
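# For illustration: an assertion carrying cn="jdoe" and mail="jdoe@example.org"
# (hypothetical values) would populate user.username and user.email via the
# mapping above.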
|
mit
| 8,473,894,465,641,903,000
| 28.5
| 113
| 0.62089
| false
| 3.582822
| false
| false
| false
|
wil/pyroman
|
examples/example1/03_standard_chains.py
|
1
|
1452
|
"""
Pyroman uses some standard chains, set in its config.
These chains are used by the "allow()", "reject()" and "drop()" commands
for nicer rule writing, and probably should do exactly that.
If you want maximal performance, you'll want to change these to ACCEPT and DROP
directly by calling 'Firewall.accept = "ACCEPT"' and removing the lines below.
The (small) benefits of using this approach are that you can easily disable
the rules (by modifying 'drop' and 'reject') without reloading your firewall
and that you get complete traffic counters in these chains.
The variables "Firewall.accept", "Firewall.drop" and "Firewall.reject" are
used here, so you can change them in one place only.
"""
Firewall.accept="accept"
add_chain(Firewall.accept)
# Check whether the kernel and iptables support the new string matches
if Firewall.iptables_version(min="1.3.4") and \
Firewall.kernel_version(min="2.6.12"):
# Drop bittorrent traffic
iptables(Firewall.accept, '-m string --string "BitTorrent protocol" ' + \
'--algo bm --from 0 --to 100 -j DROP')
# add accept default rule to the chain
iptables(Firewall.accept, "-j ACCEPT")
# this is a silent drop
Firewall.drop="drop"
add_chain(Firewall.drop)
iptables(Firewall.drop, "-j DROP")
# .. these are clean "reject" rules (i.e. send 'connection refused' back)
Firewall.reject="reject"
add_chain(Firewall.reject)
iptables(Firewall.reject, "-p tcp -j REJECT --reject-with tcp-reset")
iptables(Firewall.reject, "-j REJECT")
|
mit
| 4,925,883,790,830,189,000
| 39.333333
| 79
| 0.74449
| false
| 3.585185
| false
| false
| false
|
ViderumGlobal/ckanext-requestdata
|
ckanext/requestdata/controllers/package.py
|
1
|
3021
|
from ckan.lib import base
from ckan.common import c, _
from ckan import logic
import ckan.model as model
import ckan.lib.helpers as h
from ckan.plugins import toolkit
from ckan.controllers.package import PackageController as _PackageController
import ckan.lib.navl.dictization_functions as dict_fns
from ckanext.requestdata.helpers import has_query_param
get_action = logic.get_action
NotAuthorized = logic.NotAuthorized
ValidationError = logic.ValidationError
clean_dict = logic.clean_dict
try:
# Support CKAN 2.6
redirect = base.redirect
except ImportError:
    # base.redirect is gone; use h.redirect_to in CKAN 2.7
redirect = h.redirect_to
abort = base.abort
tuplize_dict = logic.tuplize_dict
parse_params = logic.parse_params
class PackageController(_PackageController):
def create_metadata_package(self):
# Handle metadata-only datasets
if has_query_param('metadata'):
package_type = 'requestdata-metadata-only'
form_vars = {
'errors': {},
'dataset_type': package_type,
'action': 'new',
'error_summary': {},
'data': {
'tag_string': '',
'group_id': None,
'type': package_type
},
'stage': ['active']
}
if toolkit.request.method == 'POST':
context = {'model': model, 'session': model.Session,
'user': c.user, 'auth_user_obj': c.userobj}
data_dict = clean_dict(dict_fns.unflatten(
tuplize_dict(parse_params(toolkit.request.POST))))
data_dict['type'] = package_type
try:
package = get_action('package_create')(context, data_dict)
url = h.url_for(controller='package', action='read',
id=package['name'])
redirect(url)
except NotAuthorized:
abort(403, _('Unauthorized to create a dataset.'))
except ValidationError, e:
errors = e.error_dict
error_summary = e.error_summary
form_vars = {
'errors': errors,
'dataset_type': package_type,
'action': 'new',
'error_summary': error_summary,
'stage': ['active']
}
form_vars['data'] = data_dict
extra_vars = {
'form_vars': form_vars,
'form_snippet': 'package/new_package_form.html',
'dataset_type': package_type
}
return toolkit.render('package/new.html',
extra_vars=extra_vars)
else:
return self.new()
else:
return self.new()
|
agpl-3.0
| -8,142,442,439,098,392,000
| 33.329545
| 78
| 0.503807
| false
| 4.619266
| false
| false
| false
|
Motolea/pentagram
|
aplicatiepentagram/Pentagram/views.py
|
1
|
3440
|
from django.shortcuts import render
from django.contrib.auth.models import User
from rest_framework.response import Response
from rest_framework.decorators import api_view
from rest_framework import status
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.authtoken.models import Token
from Pentagram.models import Photo
from Pentagram.models import Comment
from Pentagram.models import Like
from Pentagram.serializers import UserSerializer
from Pentagram.serializers import PhotoSerializer
from Pentagram.serializers import CommentSerializer
from rest_framework.permissions import AllowAny
from rest_framework.decorators import permission_classes
@api_view(['GET', 'POST'])
def photos(request):
if request.method == "GET":
photos = Photo.objects.all()
serializer = PhotoSerializer(photos, many=True)
return Response(status=status.HTTP_200_OK, data=serializer.data)
if request.method == "POST":
photo_serializer = PhotoSerializer(data=request.data)
if photo_serializer.is_valid():
photo_serializer.save()
return Response(status=status.HTTP_201_CREATED)
return Response(status=status.HTTP_400_BAD_REQUEST, data=photo_serializer.errors)
@api_view(['GET','POST'])
@permission_classes((AllowAny,))
def users(request):
if request.method == "GET":
users = User.objects.all()
serializer = UserSerializer(users, many=True)
return Response(status=status.HTTP_200_OK, data=serializer.data)
if request.method == "POST":
user_serializer = UserSerializer(data=request.data)
if user_serializer.is_valid():
user_serializer.save()
return Response(status=status.HTTP_201_CREATED)
return Response(status=status.HTTP_400_BAD_REQUEST, data=user_serializer.errors)
@api_view(['GET','POST'])
def comments(request, id_photo):
if request.method == "GET":
comments = Comment.objects.filter(photo_id=id_photo)
serializer = CommentSerializer(comments, many=True)
return Response(status=status.HTTP_200_OK, data=serializer.data)
if request.method == "POST":
request.POST['photo'] = id_photo
comment_serializer = CommentSerializer(data=request.data)
if comment_serializer.is_valid():
comment_serializer.save()
return Response(status=status.HTTP_201_CREATED)
return Response(status=status.HTTP_400_BAD_REQUEST, data=comment_serializer.errors)
@api_view(['GET', 'POST'])
def like(request, id_photo):
if request.method == 'GET':
counter = Like.objects.filter(photo_id=id_photo).count()
return Response(status=status.HTTP_200_OK, data=counter)
if request.method == 'POST':
if Like.objects.filter(photo=id_photo, user=request.user.id).count() == 0:
Like.objects.create(photo_id=id_photo, user=request.user).save()
return Response(status=status.HTTP_201_CREATED)
else:
Like.objects.filter(photo=id_photo, user=request.user.id).delete()
return Response(status=status.HTTP_205_RESET_CONTENT)
class CustomObtainAuthToken(ObtainAuthToken):
def post(self, request, *args, **kwargs):
response = super(CustomObtainAuthToken, self).post(request, *args, **kwargs)
token = Token.objects.get(key=response.data['token'])
return Response({'token': token.key, 'id': token.user_id})
|
gpl-3.0
| 409,339,148,438,177,900
| 39.952381
| 91
| 0.701744
| false
| 3.860831
| false
| false
| false
|
hlange/LogSoCR
|
.waf/waflib/extras/ocaml.py
|
1
|
9469
|
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
"ocaml support"
import os, re
from waflib import Utils, Task
from waflib.Logs import error
from waflib.TaskGen import feature, before_method, after_method, extension
EXT_MLL = ['.mll']
EXT_MLY = ['.mly']
EXT_MLI = ['.mli']
EXT_MLC = ['.c']
EXT_ML = ['.ml']
open_re = re.compile('^\s*open\s+([a-zA-Z]+)(;;){0,1}$', re.M)
foo = re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""", re.M)
def filter_comments(txt):
meh = [0]
def repl(m):
if m.group(1): meh[0] += 1
elif m.group(2): meh[0] -= 1
elif not meh[0]: return m.group()
return ''
return foo.sub(repl, txt)
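# Illustrative behaviour (not part of the original file): filter_comments
# strips nested OCaml (* ... *) comments while leaving string literals
# untouched, e.g. roughly:
#   filter_comments('let x = 1 (* a (* nested *) comment *) + 2')
#   # -> 'let x = 1  + 2'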
def scan(self):
node = self.inputs[0]
code = filter_comments(node.read())
global open_re
names = []
import_iterator = open_re.finditer(code)
if import_iterator:
for import_match in import_iterator:
names.append(import_match.group(1))
found_lst = []
raw_lst = []
for name in names:
nd = None
for x in self.incpaths:
nd = x.find_resource(name.lower()+'.ml')
if not nd: nd = x.find_resource(name+'.ml')
if nd:
found_lst.append(nd)
break
else:
raw_lst.append(name)
return (found_lst, raw_lst)
native_lst=['native', 'all', 'c_object']
bytecode_lst=['bytecode', 'all']
@feature('ocaml')
def init_ml(self):
Utils.def_attrs(self,
type = 'all',
incpaths_lst = [],
bld_incpaths_lst = [],
mlltasks = [],
mlytasks = [],
mlitasks = [],
native_tasks = [],
bytecode_tasks = [],
linktasks = [],
bytecode_env = None,
native_env = None,
compiled_tasks = [],
includes = '',
uselib = '',
are_deps_set = 0)
@feature('ocaml')
@after_method('init_ml')
def init_envs_ml(self):
self.islibrary = getattr(self, 'islibrary', False)
global native_lst, bytecode_lst
self.native_env = None
if self.type in native_lst:
self.native_env = self.env.derive()
if self.islibrary: self.native_env['OCALINKFLAGS'] = '-a'
self.bytecode_env = None
if self.type in bytecode_lst:
self.bytecode_env = self.env.derive()
if self.islibrary: self.bytecode_env['OCALINKFLAGS'] = '-a'
if self.type == 'c_object':
self.native_env.append_unique('OCALINKFLAGS_OPT', '-output-obj')
@feature('ocaml')
@before_method('apply_vars_ml')
@after_method('init_envs_ml')
def apply_incpaths_ml(self):
inc_lst = self.includes.split()
lst = self.incpaths_lst
for dir in inc_lst:
node = self.path.find_dir(dir)
if not node:
error("node not found: " + str(dir))
continue
if not node in lst:
lst.append(node)
self.bld_incpaths_lst.append(node)
# now the nodes are added to self.incpaths_lst
@feature('ocaml')
@before_method('process_source')
def apply_vars_ml(self):
for i in self.incpaths_lst:
if self.bytecode_env:
app = self.bytecode_env.append_value
app('OCAMLPATH', ['-I', i.bldpath(), '-I', i.srcpath()])
if self.native_env:
app = self.native_env.append_value
app('OCAMLPATH', ['-I', i.bldpath(), '-I', i.srcpath()])
varnames = ['INCLUDES', 'OCAMLFLAGS', 'OCALINKFLAGS', 'OCALINKFLAGS_OPT']
for name in self.uselib.split():
for vname in varnames:
cnt = self.env[vname+'_'+name]
if cnt:
if self.bytecode_env: self.bytecode_env.append_value(vname, cnt)
if self.native_env: self.native_env.append_value(vname, cnt)
@feature('ocaml')
@after_method('process_source')
def apply_link_ml(self):
if self.bytecode_env:
ext = self.islibrary and '.cma' or '.run'
linktask = self.create_task('ocalink')
linktask.bytecode = 1
linktask.set_outputs(self.path.find_or_declare(self.target + ext))
linktask.env = self.bytecode_env
self.linktasks.append(linktask)
if self.native_env:
if self.type == 'c_object': ext = '.o'
elif self.islibrary: ext = '.cmxa'
else: ext = ''
linktask = self.create_task('ocalinkx')
linktask.set_outputs(self.path.find_or_declare(self.target + ext))
linktask.env = self.native_env
self.linktasks.append(linktask)
# we produce a .o file to be used by gcc
self.compiled_tasks.append(linktask)
@extension(*EXT_MLL)
def mll_hook(self, node):
mll_task = self.create_task('ocamllex', node, node.change_ext('.ml'))
mll_task.env = self.native_env.derive()
self.mlltasks.append(mll_task)
self.source.append(mll_task.outputs[0])
@extension(*EXT_MLY)
def mly_hook(self, node):
mly_task = self.create_task('ocamlyacc', node, [node.change_ext('.ml'), node.change_ext('.mli')])
mly_task.env = self.native_env.derive()
self.mlytasks.append(mly_task)
self.source.append(mly_task.outputs[0])
task = self.create_task('ocamlcmi', mly_task.outputs[1], mly_task.outputs[1].change_ext('.cmi'))
task.env = self.native_env.derive()
@extension(*EXT_MLI)
def mli_hook(self, node):
task = self.create_task('ocamlcmi', node, node.change_ext('.cmi'))
task.env = self.native_env.derive()
self.mlitasks.append(task)
@extension(*EXT_MLC)
def mlc_hook(self, node):
task = self.create_task('ocamlcc', node, node.change_ext('.o'))
task.env = self.native_env.derive()
self.compiled_tasks.append(task)
@extension(*EXT_ML)
def ml_hook(self, node):
if self.native_env:
task = self.create_task('ocamlx', node, node.change_ext('.cmx'))
task.env = self.native_env.derive()
task.incpaths = self.bld_incpaths_lst
self.native_tasks.append(task)
if self.bytecode_env:
task = self.create_task('ocaml', node, node.change_ext('.cmo'))
task.env = self.bytecode_env.derive()
task.bytecode = 1
task.incpaths = self.bld_incpaths_lst
self.bytecode_tasks.append(task)
def compile_may_start(self):
if not getattr(self, 'flag_deps', ''):
self.flag_deps = 1
# the evil part is that we can only compute the dependencies after the
# source files can be read (this means actually producing the source files)
if getattr(self, 'bytecode', ''): alltasks = self.generator.bytecode_tasks
else: alltasks = self.generator.native_tasks
self.signature() # ensure that files are scanned - unfortunately
tree = self.generator.bld
for node in self.inputs:
lst = tree.node_deps[self.uid()]
for depnode in lst:
for t in alltasks:
if t == self: continue
if depnode in t.inputs:
self.set_run_after(t)
# TODO necessary to get the signature right - for now
delattr(self, 'cache_sig')
self.signature()
return Task.Task.runnable_status(self)
class ocamlx(Task.Task):
"""native caml compilation"""
color = 'GREEN'
run_str = '${OCAMLOPT} ${OCAMLPATH} ${OCAMLFLAGS} ${OCAMLINCLUDES} -c -o ${TGT} ${SRC}'
scan = scan
runnable_status = compile_may_start
class ocaml(Task.Task):
"""bytecode caml compilation"""
color = 'GREEN'
run_str = '${OCAMLC} ${OCAMLPATH} ${OCAMLFLAGS} ${OCAMLINCLUDES} -c -o ${TGT} ${SRC}'
scan = scan
runnable_status = compile_may_start
class ocamlcmi(Task.Task):
"""interface generator (the .i files?)"""
color = 'BLUE'
run_str = '${OCAMLC} ${OCAMLPATH} ${OCAMLINCLUDES} -o ${TGT} -c ${SRC}'
before = ['ocamlcc', 'ocaml', 'ocamlcc']
class ocamlcc(Task.Task):
"""ocaml to c interfaces"""
color = 'GREEN'
run_str = 'cd ${TGT[0].bld_dir()} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${OCAMLINCLUDES} -c ${SRC[0].abspath()}'
class ocamllex(Task.Task):
"""lexical generator"""
color = 'BLUE'
run_str = '${OCAMLLEX} ${SRC} -o ${TGT}'
before = ['ocamlcmi', 'ocaml', 'ocamlcc']
class ocamlyacc(Task.Task):
"""parser generator"""
color = 'BLUE'
run_str = '${OCAMLYACC} -b ${tsk.base()} ${SRC}'
before = ['ocamlcmi', 'ocaml', 'ocamlcc']
def base(self):
node = self.outputs[0]
s = os.path.splitext(node.name)[0]
return node.bld_dir() + os.sep + s
def link_may_start(self):
if getattr(self, 'bytecode', 0): alltasks = self.generator.bytecode_tasks
else: alltasks = self.generator.native_tasks
for x in alltasks:
if not x.hasrun:
return Task.ASK_LATER
if not getattr(self, 'order', ''):
# now reorder the inputs given the task dependencies
# this part is difficult, we do not have a total order on the tasks
# if the dependencies are wrong, this may not stop
seen = []
pendant = []+alltasks
while pendant:
task = pendant.pop(0)
if task in seen: continue
for x in task.run_after:
if not x in seen:
pendant.append(task)
break
else:
seen.append(task)
self.inputs = [x.outputs[0] for x in seen]
self.order = 1
return Task.Task.runnable_status(self)
class ocalink(Task.Task):
"""bytecode caml link"""
color = 'YELLOW'
run_str = '${OCAMLC} -o ${TGT} ${OCAMLINCLUDES} ${OCALINKFLAGS} ${SRC}'
runnable_status = link_may_start
after = ['ocaml', 'ocamlcc']
class ocalinkx(Task.Task):
"""native caml link"""
color = 'YELLOW'
run_str = '${OCAMLOPT} -o ${TGT} ${OCAMLINCLUDES} ${OCALINKFLAGS_OPT} ${SRC}'
runnable_status = link_may_start
after = ['ocamlx', 'ocamlcc']
def configure(conf):
opt = conf.find_program('ocamlopt', var='OCAMLOPT', mandatory=False)
occ = conf.find_program('ocamlc', var='OCAMLC', mandatory=False)
if (not opt) or (not occ):
conf.fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH')
v = conf.env
v['OCAMLC'] = occ
v['OCAMLOPT'] = opt
v['OCAMLLEX'] = conf.find_program('ocamllex', var='OCAMLLEX', mandatory=False)
v['OCAMLYACC'] = conf.find_program('ocamlyacc', var='OCAMLYACC', mandatory=False)
v['OCAMLFLAGS'] = ''
where = conf.cmd_and_log(conf.env.OCAMLC + ['-where']).strip()+os.sep
v['OCAMLLIB'] = where
v['LIBPATH_OCAML'] = where
v['INCLUDES_OCAML'] = where
v['LIB_OCAML'] = 'camlrun'
|
agpl-3.0
| 1,413,643,713,363,514,400
| 27.607251
| 117
| 0.659521
| false
| 2.664322
| false
| false
| false
|
vlegoff/tsunami
|
src/primaires/format/editeurs/floatedit/__init__.py
|
1
|
4072
|
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant l'éditeur 'floatedit'.
Si des redéfinitions de contexte-éditeur standard doivent être faites, elles
seront placées dans ce package
Note importante : ce package contient la définition d'un éditeur, mais
celui-ci peut très bien être étendu par d'autres modules. Au quel cas,
les extensions n'apparaîtront pas ici.
"""
from primaires.interpreteur.editeur.presentation import Presentation
from primaires.interpreteur.editeur.description import Description
from primaires.salle.editeurs.redit.edt_details import EdtDetails
class EdtFloatedit(Presentation):
"""Classe définissant l'éditeur de description flottante 'floatedit'."""
nom = "floatedit"
def __init__(self, personnage, flottante):
"""Constructeur de l'éditeur"""
if personnage:
instance_connexion = personnage.instance_connexion
else:
instance_connexion = None
Presentation.__init__(self, instance_connexion, flottante)
if personnage and flottante:
self.construire(flottante)
def __getnewargs__(self):
return (None, None)
def construire(self, flottante):
"""Construction de l'éditeur"""
# Description
description = self.ajouter_choix("description", "d", Description,
flottante)
description.parent = self
description.apercu = "{objet.description.paragraphes_indentes}"
description.aide_courte = \
"| |tit|" + "Description flottante '{}'".format(
flottante.cle).ljust(76) + "|ff||\n" + self.opts.separateur
        # Details
details = self.ajouter_choix("details", "e", EdtDetails, flottante,
"details")
details.parent = self
details.aide_courte = \
"Entrez le nom d'un |cmd|détail existant|ff| pour l'éditer ou " \
"un |cmd|nouveau détail|ff|\n" \
"pour le créer ; |ent|/|ff| pour revenir à la fenêtre parente.\n" \
"Options :\n" \
" - |ent|/s <détail existant> / <synonyme 1> (/ <synonyme 2> / " \
"...)|ff| : permet\n" \
" de modifier les synonymes du détail passée en paramètre. " \
"Pour chaque\n" \
" synonyme donné à l'option, s'il existe, il sera supprimé ; " \
"sinon, il sera\n" \
" ajouté à la liste.\n" \
" - |ent|/d <détail existant>|ff| : supprime le détail " \
"indiqué\n\n"
|
bsd-3-clause
| -354,768,347,464,347,840
| 41.505263
| 79
| 0.679297
| false
| 3.539001
| false
| false
| false
|
brunoabud/ic
|
plugins/ICGRAY2BGR/plugin_object.py
|
1
|
1150
|
# coding: utf-8
# Copyright (C) 2016 Bruno Abude Cardoso
#
# Imagem Cinemática is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Imagem Cinemática is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import cv2
class ICGRAY2BGR(object):
def __init__(self, plugin_path):
self.plugin_path = plugin_path
self.parameters = []
def parameter_changed(self, param_name, value):
return None
    def apply_filter(self, frame):
        # frame arrives as a (colorspace, data, pos, timestamp) tuple; the
        # position and timestamp are dropped from the value returned here.
        colorspace, data, pos, timestamp = frame
        data = cv2.cvtColor(data, cv2.COLOR_GRAY2BGR)
        return ("BGR", data)
def release_plugin(self, error_level=0):
pass
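# Illustrative usage (an assumption, not part of the plugin API contract):
#   plug = ICGRAY2BGR('/path/to/plugin')
#   colorspace, bgr = plug.apply_filter(("GRAY", gray_frame, 0, 0.0))
#   # colorspace == "BGR"; bgr is the 3-channel conversion of gray_frame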
|
gpl-3.0
| 2,798,068,830,518,351,400
| 32.764706
| 75
| 0.705575
| false
| 3.644444
| false
| false
| false
|
meppe/ros-ort
|
src/frcnn/src/lib/pycocotools/mask.py
|
1
|
4062
|
__author__ = 'tsungyi'
import lib.pycocotools._mask as _mask
# Interface for manipulating masks stored in RLE format.
#
# RLE is a simple yet efficient format for storing binary masks. RLE
# first divides a vector (or vectorized image) into a series of piecewise
# constant regions and then for each piece simply stores the length of
# that piece. For example, given M=[0 0 1 1 1 0 1] the RLE counts would
# be [2 3 1 1], or for M=[1 1 1 1 1 1 0] the counts would be [0 6 1]
# (note that the odd counts are always the numbers of zeros). Instead of
# storing the counts directly, additional compression is achieved with a
# variable bitrate representation based on a common scheme called LEB128.
#
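# Illustrative pure-Python sketch of the counts computation described above.
# This helper is an exposition aid added here for clarity and is NOT part of
# the toolbox API; the real, optimized implementation lives in _mask.
def _rle_counts_sketch(m):
    counts, prev, run = [], 0, 0
    for v in m:
        if v == prev:
            run += 1
        else:
            counts.append(run)
            prev, run = v, 1
    counts.append(run)
    return counts
# _rle_counts_sketch([0, 0, 1, 1, 1, 0, 1]) == [2, 3, 1, 1]
# _rle_counts_sketch([1, 1, 1, 1, 1, 1, 0]) == [0, 6, 1]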
# Compression is greatest given large piecewise constant regions.
# Specifically, the size of the RLE is proportional to the number of
# *boundaries* in M (or for an image the number of boundaries in the y
# direction). Assuming fairly simple shapes, the RLE representation is
# O(sqrt(n)) where n is number of pixels in the object. Hence space usage
# is substantially lower, especially for large simple objects (large n).
#
# Many common operations on masks can be computed directly using the RLE
# (without need for decoding). This includes computations such as area,
# union, intersection, etc. All of these operations are linear in the
# size of the RLE, in other words they are O(sqrt(n)) where n is the area
# of the object. Computing these operations on the original mask is O(n).
# Thus, using the RLE can result in substantial computational savings.
#
# The following API functions are defined:
# encode - Encode binary masks using RLE.
# decode - Decode binary masks encoded via RLE.
# merge - Compute union or intersection of encoded masks.
# iou - Compute intersection over union between masks.
# area - Compute area of encoded masks.
# toBbox - Get bounding boxes surrounding encoded masks.
# frPyObjects - Convert polygon, bbox, and uncompressed RLE to encoded RLE mask.
#
# Usage:
# Rs = encode( masks )
# masks = decode( Rs )
# R = merge( Rs, intersect=false )
# o = iou( dt, gt, iscrowd )
# a = area( Rs )
# bbs = toBbox( Rs )
# Rs = frPyObjects( [pyObjects], h, w )
#
# In the API the following formats are used:
# Rs - [dict] Run-length encoding of binary masks
# R - dict Run-length encoding of binary mask
# masks - [hxwxn] Binary mask(s) (must have type np.ndarray(dtype=uint8) in column-major order)
# iscrowd - [nx1] list of np.ndarray. 1 indicates corresponding gt image has crowd region to ignore
# bbs - [nx4] Bounding box(es) stored as [x y w h]
# poly - Polygon stored as [[x1 y1 x2 y2...],[x1 y1 ...],...] (2D list)
# dt,gt - May be either bounding boxes or encoded masks
# Both poly and bbs are 0-indexed (bbox=[0 0 1 1] encloses first pixel).
#
# Finally, a note about the intersection over union (iou) computation.
# The standard iou of a ground truth (gt) and detected (dt) object is
# iou(gt,dt) = area(intersect(gt,dt)) / area(union(gt,dt))
# For "crowd" regions, we use a modified criteria. If a gt object is
# marked as "iscrowd", we allow a dt to match any subregion of the gt.
# Choosing gt' in the crowd gt that best matches the dt can be done using
# gt'=intersect(dt,gt). Since by definition union(gt',dt)=dt, computing
# iou(gt,dt,iscrowd) = iou(gt',dt) = area(intersect(gt,dt)) / area(dt)
# For crowd gt regions we use this modified criterion for the iou.
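# Worked numeric example (illustrative): with area(gt)=100, area(dt)=20 and
# area(intersect(gt,dt))=15, the standard iou is 15/105 ~= 0.143, while the
# crowd criterion gives 15/20 = 0.75, so a dt covering a subregion of a
# crowd gt can still count as a match.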
#
# To compile run "python setup.py build_ext --inplace"
# Please do not contact us for help with compiling.
#
# Microsoft COCO Toolbox. version 2.0
# Data, paper, and tutorials available at: http://mscoco.org/
# Code written by Piotr Dollar and Tsung-Yi Lin, 2015.
# Licensed under the Simplified BSD License [see coco/license.txt]
encode = _mask.encode
decode = _mask.decode
iou = _mask.iou
merge = _mask.merge
area = _mask.area
toBbox = _mask.toBbox
frPyObjects = _mask.frPyObjects
|
gpl-3.0
| -2,878,177,237,587,652,600
| 48.54878
| 100
| 0.699655
| false
| 3.334975
| false
| false
| false
|
joshcai/utdcs
|
processing/views.py
|
1
|
3030
|
# Create your views here.
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.template import RequestContext, loader
from django.core.urlresolvers import reverse
from django.shortcuts import render, get_object_or_404
from django.db.models import Q
from website import secrets
import datetime
from processing.models import Post
#debugging
#import pdb
def render_post(current_post):
newpost = """var sketchProc=function(processingInstance){ with (processingInstance){
var xWidth=400;
var yHeight=400;
frameRate(45);
size(xWidth, yHeight);"""
newpost += current_post
newpost +="}};"
return newpost
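# Illustrative example (assumption): render_post("ellipse(50, 50, 20, 20);")
# returns a string defining a Processing.js sketchProc that wraps the
# submitted drawing code in a 400x400 canvas running at 45 fps.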
def index(request, page_num=1):
post_entries = Post.objects.order_by('-date').exclude(deleted=True)
    context = {
'post_entries': post_entries[(float(page_num)-1)*5:float(page_num)*5],
'page_num': page_num,
'request': request,
}
if float(page_num) > 1:
context['prev'] = True
if float(page_num)*5 < len(post_entries): # this can be optimized later - (code is already hitting database once)
context['next'] = True
return render(request, 'processing/index.html', context)
def submit(request):
if request.method == 'POST':
if request.POST['title'] and request.POST['content']:
d = datetime.datetime.now()
if request.POST['author']:
auth = request.POST['author']
else:
auth = "Anonymous"
p = Post(title=request.POST['title'],
content=request.POST['content'],
content_rendered=render_post(request.POST['content']),
author=auth,
date=d,
date_str=d.strftime('%B %d, %Y %I:%M%p'))
p.save()
return HttpResponseRedirect(reverse('processing:index'))
else:
            context = {
'title': request.POST['title'],
'content': request.POST['content'],
'error_message': "Title and content required<br />",
'url': reverse('processing:submit'),
'request': request,
}
return render(request, 'processing/newpost.html', context)
return render(request, 'processing/newpost.html', {'url': reverse('processing:submit'), 'request': request})
def login(request):
context={'request': request}
if request.method == 'POST':
if request.POST['password'] == secrets.login_password:
request.session['logged_in'] = True
return HttpResponseRedirect(reverse('blog:index'))
else:
context['error_message'] = "Invalid password<br />"
return render(request, 'blog/login.html', context)
def delete(request, post_id):
if 'logged_in' in request.session and request.session['logged_in']:
post = get_object_or_404(Post, pk=post_id)
post.deleted = True
post.save()
return HttpResponseRedirect(reverse('blog:index'))
def post(request, post_id):
post = get_object_or_404(Post, pk=post_id)
context={
'post': post,
'request': request,
}
query = Post.objects.all().exclude(deleted=True)
next = query.filter(pk__gt=post_id)
if next:
context['next'] = next[0]
prev = query.filter(pk__lt=post_id).order_by('id').reverse()
if prev:
context['prev'] = prev[0]
return render(request,'processing/post.html', context)
|
mit
| 2,868,276,417,460,019,000
| 30.894737
| 114
| 0.69604
| false
| 3.196203
| false
| false
| false
|
azumimuo/family-xbmc-addon
|
plugin.video.specto/resources/lib/sources/kissanime_tv.py
|
1
|
4887
|
# -*- coding: utf-8 -*-
'''
Specto Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse,json
from resources.lib.libraries import cleantitle
from resources.lib.libraries import client
from resources.lib.libraries import control
from resources.lib.libraries import cache
from resources.lib import resolvers
class source:
def __init__(self):
self.base_link = 'http://kissanime.io/'
#self.base_link = client.source(self.base_link, output='geturl')
self.search_link = '/wp-admin/admin-ajax.php'
self.movie_list = '/720p-1080p-bluray-movies-list/'
def get_movie(self, imdb, title, year):
try:
leter = title[0]
result = cache.get(self.filmxy_cache,9000,leter)
print "r1",result
years = ['(%s)' % str(year), '(%s)' % str(int(year)+1), '(%s)' % str(int(year)-1)]
result = [i for i in result if cleantitle.movie(title) == cleantitle.movie(i[2])]
print "r2",result
result = [i[0] for i in result if any(x in i[1] for x in years)][0]
print "r3",result
url = client.replaceHTMLCodes(result)
url = url.encode('utf-8')
return url
except Exception as e:
control.log('Filmxy ERROR %s' % e)
return
def filmxy_cache(self, leter=''):
try:
url = urlparse.urljoin(self.base_link, self.search_link)
#control.log('>>>>>>>>>>>>---------- CACHE %s' % url)
headers = {'X-Requested-With':"XMLHttpRequest"}
params = {"action":"ajax_process2", "query":leter.upper()}
params = urllib.urlencode(params)
result = client.request(url, post=params, headers=headers)
result = client.parseDOM(result, 'p')
result = [(client.parseDOM(i, 'a', ret='href')[0], client.parseDOM(i, 'a')[0], client.parseDOM(i, 'a')[0]) for i in result]
result = [(re.sub('http.+?//.+?/','/', i[0]), re.findall("\(\d+\)", i[1]), i[2].split('(')[0]) for i in result]
#control.log('>>>>>>>>>>>>---------- CACHE-4 %s' % result)
result = [(i[0], i[1][0], i[2].strip()) for i in result if len(i[1]) > 0]
return result
except Exception as e:
control.log('Filmxy Cache ERROR %s' % e)
return
def get_sources(self, url, hosthdDict, hostDict, locDict):
try:
print "URL",url
sources = []
if url == None: return sources
url1 = urlparse.urljoin(self.base_link, url)
result = client.request(url1)
url1 = client.parseDOM(result, 'a', attrs = {'id': 'main-down'}, ret='href')[0]
print "LINKS1",url1
result = client.request(url1)
print "LINKS2", result
for quality in ['720p', '1080p']:
links = client.parseDOM(result, 'div', attrs = {'class': '.+?'+quality})[0]
links = client.parseDOM(links, 'li')
links = [(client.parseDOM(i, 'a', ret='href')[0]) for i in links]
if '1080p' in quality: q = '1080p'
elif '720p' in quality or 'hd' in quality: q = 'HD'
else: q = 'SD'
for j in links:
print "j",j
host = j.split('/')[2]
host = host.strip().lower()
host = client.replaceHTMLCodes(host)
if not host in hostDict: raise Exception()
host = host.encode('utf-8')
print "HOST",host, j
sources.append({'source': host, 'quality': q, 'provider': 'Filmxy', 'url': j})
print "LINKS3", links
return sources
except Exception as e:
control.log('Filmxy Source ERROR %s' % e)
return sources
def resolve(self, url):
try:
#url = client.request(url, output='geturl')
#if 'requiressl=yes' in url: url = url.replace('http://', 'https://')
#else: url = url.replace('https://', 'http://')
url = resolvers.request(url)
return url
except:
return
|
gpl-2.0
| 5,392,704,476,561,538,000
| 37.179688
| 135
| 0.544506
| false
| 3.845004
| false
| false
| false
|
openmips/stbgui
|
lib/python/Components/Renderer/NextEpgInfo.py
|
2
|
3268
|
from Components.VariableText import VariableText
from Renderer import Renderer
from enigma import eLabel, eEPGCache, eServiceReference
from time import localtime, strftime
from skin import parseColor
class NextEpgInfo(Renderer, VariableText):
def __init__(self):
Renderer.__init__(self)
VariableText.__init__(self)
self.epgcache = eEPGCache.getInstance()
self.numberOfItems = 1
self.hideLabel = 0
self.timecolor = ""
self.labelcolor = ""
self.foregroundColor = "00?0?0?0"
self.numOfSpaces = 1
GUI_WIDGET = eLabel
def changed(self, what):
self.text = ""
reference = self.source.service
info = reference and self.source.info
if info:
currentEvent = self.source.getCurrentEvent()
if currentEvent:
if not self.epgcache.startTimeQuery(eServiceReference(reference.toString()), currentEvent.getBeginTime() + currentEvent.getDuration()):
spaces = " "*self.numOfSpaces
if self.numberOfItems == 1:
event = self.epgcache.getNextTimeEntry()
if event:
if self.hideLabel:
self.text = "%s%s%s%s%s" % (self.timecolor, strftime("%H:%M", localtime(event.getBeginTime())), spaces, self.foregroundColor, event.getEventName())
else:
self.text = "%s%s:%s%s%s" % (self.labelcolor, pgettext("now/next: 'next' event label", "Next"), spaces, self.foregroundColor, event.getEventName())
else:
for x in range(self.numberOfItems):
event = self.epgcache.getNextTimeEntry()
if event:
self.text += "%s%s%s%s%s\n" % (self.timecolor, strftime("%H:%M", localtime(event.getBeginTime())), spaces, self.foregroundColor, event.getEventName())
if not self.hideLabel:
self.text = self.text and "%s%s\n%s" % (self.labelcolor, pgettext("now/next: 'next' event label", "Next"), self.text) or ""
def applySkin(self, desktop, parent):
attribs = []
for (attrib, value) in self.skinAttributes:
if attrib == "NumberOfItems":
self.numberOfItems = int(value)
attribs.append((attrib, value))
if attrib == "noLabel":
self.hideLabel = int(value)
attribs.append((attrib, value))
if attrib == "numOfSpaces":
self.numOfSpaces = int(value)
attribs.append((attrib, value))
if attrib == "timeColor":
self.timecolor = self.hex2strColor(parseColor(value).argb())
attribs.append((attrib, value))
if attrib == "labelColor":
self.labelcolor = self.hex2strColor(parseColor(value).argb())
attribs.append((attrib, value))
if attrib == "foregroundColor":
self.foregroundColor = self.hex2strColor(parseColor(value).argb())
attribs.append((attrib, value))
for (attrib, value) in attribs:
self.skinAttributes.remove((attrib, value))
self.timecolor = self.formatColorString(self.timecolor)
self.labelcolor = self.formatColorString(self.labelcolor)
self.foregroundColor = self.formatColorString(self.foregroundColor)
return Renderer.applySkin(self, desktop, parent)
# hex:
# 0 1 2 3 4 5 6 7 8 9 a b c d e f
# converts to:
# 0 1 2 3 4 5 6 7 8 9 : ; < = > ?
def hex2strColor(self, rgb):
out = ""
for i in range(28,-1,-4):
out += "%s" % chr(0x30 + (rgb>>i & 0xf))
return out
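	# e.g. hex2strColor(0x00ff0000) returns "00??0000": each nibble maps to
	# chr(0x30 + nibble), so 0xf becomes '?'.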
def formatColorString(self, color):
if color:
return "%s%s" % ('\c', color)
return "%s%s" % ('\c', self.foregroundColor)
|
gpl-2.0
| 6,980,544,276,022,878,000
| 37.011628
| 158
| 0.678703
| false
| 3.091769
| false
| false
| false
|
WilliamMayor/pinscher
|
pinscher/Keyfile.py
|
1
|
1219
|
import string
import pickle
import os
import utilities
class Keyfile:
LENGTH = 32
CHARACTERS = string.digits + string.letters + string.punctuation + ' '
@staticmethod
def create(path, database_path, **kwargs):
k = Keyfile()
k.path = path
k.database_path = os.path.abspath(database_path)
k.key = kwargs.get('key', utilities.generate_key())
k.iv = kwargs.get('iv', utilities.generate_iv())
k.length = kwargs.get('length', Keyfile.LENGTH)
k.characters = kwargs.get('characters', Keyfile.CHARACTERS)
k.save()
return Keyfile.load(path)
@staticmethod
def load(path):
k = pickle.load(open(path, 'rb'))
k.path = path
return k
    def __getstate__(self):
        # Drop the runtime-only 'path' attribute before pickling;
        # Keyfile.load() restores it from the file location.
        _dict = self.__dict__.copy()
        del _dict['path']
        return _dict
def __setstate__(self, _dict):
self.__dict__.update(_dict)
def __hash__(self):
return self.path.__hash__()
def __eq__(self, other):
return self.path == other.path
def save(self):
pickle.dump(self, open(self.path, 'wb'))
def delete(self):
os.remove(self.path)
os.remove(self.database_path)
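# Illustrative usage (assumption):
#   k = Keyfile.create('/tmp/demo.keyfile', '/tmp/demo.db')
#   k2 = Keyfile.load('/tmp/demo.keyfile')
#   assert k == k2  # equality is based on the keyfile path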
|
gpl-3.0
| -6,969,443,440,557,586,000
| 23.38
| 74
| 0.575062
| false
| 3.693939
| false
| false
| false
|
PyCQA/astroid
|
astroid/brain/brain_type.py
|
1
|
2187
|
"""
Astroid hooks for type support.
Starting from python3.9, type object behaves as it had __class_getitem__ method.
However it was not possible to simply add this method inside type's body, otherwise
all types would also have this method. In this case it would have been possible
to write str[int].
Guido Van Rossum proposed a hack to handle this in the interpreter:
https://github.com/python/cpython/blob/67e394562d67cbcd0ac8114e5439494e7645b8f5/Objects/abstract.c#L181-L184
This brain follows the same logic. It is no wise to add permanently the __class_getitem__ method
to the type object. Instead we choose to add it only in the case of a subscript node
which inside name node is type.
Doing this type[int] is allowed whereas str[int] is not.
Thanks to Lukasz Langa for fruitful discussion.
"""
from astroid import extract_node, inference_tip, nodes
from astroid.const import PY39_PLUS
from astroid.exceptions import UseInferenceDefault
from astroid.manager import AstroidManager
def _looks_like_type_subscript(node):
"""
Try to figure out if a Name node is used inside a type related subscript
:param node: node to check
:type node: astroid.node_classes.NodeNG
:return: true if the node is a Name node inside a type related subscript
:rtype: bool
"""
if isinstance(node, nodes.Name) and isinstance(node.parent, nodes.Subscript):
return node.name == "type"
return False
def infer_type_sub(node, context=None):
"""
Infer a type[...] subscript
:param node: node to infer
:type node: astroid.node_classes.NodeNG
:param context: inference context
:type context: astroid.context.InferenceContext
:return: the inferred node
:rtype: nodes.NodeNG
"""
node_scope, _ = node.scope().lookup("type")
if node_scope.qname() != "builtins":
raise UseInferenceDefault()
class_src = """
class type:
def __class_getitem__(cls, key):
return cls
"""
node = extract_node(class_src)
return node.infer(context=context)
if PY39_PLUS:
AstroidManager().register_transform(
nodes.Name, inference_tip(infer_type_sub), _looks_like_type_subscript
)
|
lgpl-2.1
| -6,311,474,582,028,243,000
| 32.646154
| 108
| 0.715592
| false
| 3.663317
| false
| false
| false
|
gwwfps/boxrps
|
admin.py
|
1
|
15675
|
#!/usr/bin/env python
import logging
import yaml
import cgi
from xml.dom import minidom as md
from datetime import datetime, timedelta
from collections import defaultdict
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
from google.appengine.ext import db
from django.utils import simplejson
from models import *
from utils import render_to, parse_item
class AdminHandler(webapp.RequestHandler):
def get(self):
self.response.out.write('Hello world!')
class ParseHandler(webapp.RequestHandler):
def get(self):
render_to(self.response, 'admin/parse.html')
def post(self):
all_members = Member.all()
parsed = md.parseString(self.request.get('log').encode('utf-8'))
members = []
for member in parsed.getElementsByTagName('member'):
try:
name = member.firstChild.firstChild
except AttributeError:
continue
if name is None:
continue
name = name.toxml().strip().capitalize()
class_ = member.childNodes[1].firstChild.toxml().upper()
m = Member.gql('WHERE name = :1', name).get()
if not m:
new_member = Member(name=name, class_=class_)
new_member.put()
else:
m.class_=class_
m.put()
members.append(name)
items = []
for item in parsed.getElementsByTagName('item'):
try:
name = item.firstChild.firstChild.toxml()
except AttributeError:
continue
time = item.childNodes[1].firstChild.toxml()
looter = item.childNodes[2].firstChild.toxml()
pt = item.childNodes[3].firstChild.toxml()
items.append(parse_item(name) + (time, looter, pt))
render_to(self.response, 'admin/parseadd.html',
members=set(members), all_members=all_members, events=Event.all().order('name'),
datetime=parsed.getElementsByTagName('start')[0].firstChild.toxml(),
items=items)
class RaidHandler(webapp.RequestHandler):
def get(self):
pass
def post(self):
pass
class EventHandler(webapp.RequestHandler):
def get(self):
render_to(self.response, 'admin/events.html', events=Event.all())
def post(self):
batch = self.request.get('batch')
batch = batch.split('\n')
for line in batch:
event, pt = line.split('\t')
Event(name=cgi.escape(event), default_pt = int(float(pt.strip()))).put()
self.get()
class AjaxHandler(webapp.RequestHandler):
def post(self):
action = self.request.get('action')
if action == 'addevent':
event = Event(name=self.request.get('name'),
default_pt=int(self.request.get('pt')))
event.put()
elif action == 'geteventpt':
event = Event.get(db.Key(self.request.get('key')))
if event:
self.response.out.write(simplejson.dumps({'pt':event.default_pt}))
elif action == 'addraid':
date = datetime.strptime(self.request.get('date'), '%Y.%m.%d %H:%M')
pt = int(self.request.get('pt'))
note = self.request.get('note')
members = self.request.get('members').split('|')[0:-1]
loot = self.request.get('loot').split('|')[0:-1]
memcache = {}
for m in Member.all():
memcache[m.name] = m
key = self.request.get('key')
if key:
encounter = Encounter.get(db.Key(key))
else:
encounter = None
if encounter:
delta = 0
oldpt = encounter.pt
if not encounter.pt == pt:
delta = pt - encounter.pt
encounter.pt = pt
encounter.note = note
encounter.datetime = date
old_members = set([m.name for m in encounter.attending_members()])
members = set([member.strip().capitalize() for member in members])
remaining = old_members & members
newly_added = members - old_members
removed = old_members - members
for m in remaining:
member = memcache[m]
member.earned += delta
member.balance += delta
member.put()
for m in newly_added:
nm = memcache.get(m.strip().capitalize())
if not nm:
nm = Member(name=m)
memcache[m] = nm
nm.earned += pt
nm.balance += pt
nm.put()
encounter.attendees.append(nm.key())
for m in removed:
dm = memcache[m]
dm.earned -= oldpt
dm.balance -= oldpt
dm.put()
encounter.attendees.remove(dm.key())
encounter.put()
Member.recalculate_attendance()
lset = {}
for l in encounter.loots:
lset[str(l.key())] = l
plset = set(lset.keys())
for piece in loot:
_, name, time, looter, cost, lkey = piece.split(';')
looter = looter.strip().capitalize()
cost = int(cost)*(-1)
time = datetime.strptime(time, '%Y.%m.%d %H:%M')
if lkey in lset:
plset.remove(lkey)
l = lset[lkey]
if not l.looter.name == looter or not l.cost == cost:
m = l.looter
m.spent -= l.cost
m.balance -= l.cost
m.put()
ltr = memcache[looter]
ltr.spent += cost
ltr.balance += cost
ltr.put()
l.looter = ltr
l.cost = cost
l.put()
else:
item = Item.gql('WHERE name = :1', name).get()
if not item:
item = Item(name=name, gid=0, default_cost=cost)
item.put()
looter = memcache[looter]
looter.spent += cost
looter.balance += cost
looter.put()
loot = Loot(encounter=encounter, cost=cost, looter=looter,
datetime=time, item=item)
loot.put()
for rkey in plset:
l = lset[rkey]
m = l.looter
m.spent -= l.cost
m.balance -= l.cost
m.put()
l.delete()
else:
event = Event.get(db.Key(self.request.get('event')))
attendees = []
for member in members:
m = memcache.get(member.strip().capitalize())
if not m:
m = Member(name=member)
memcache[member.strip().capitalize()] = m
m.earned += pt
m.balance += pt
m.put()
attendees.append(m.key())
encounter = Encounter(event=event, note=note, pt=pt, datetime=date,
attendees=attendees)
encounter.put()
Member.recalculate_attendance()
for piece in loot:
logging.info(piece.encode('utf-8'))
id, name, time, looter, cost, _ = piece.split(';')
looter = looter.strip().capitalize()
try:
id = int(id)
except ValueError:
id = 0
time = datetime.strptime(time, '%Y.%m.%d %H:%M')
looter = memcache[looter]
cost = int(cost)*(-1)
item = Item.gql('WHERE name = :1', name).get()
if item:
if id:
item.gid = id
item.put()
else:
item = Item(name=name, gid=id, default_cost=cost)
item.put()
looter.spent += cost
looter.balance += cost
looter.put()
loot = Loot(encounter=encounter, cost=cost, looter=looter,
datetime=time, item=item)
loot.put()
self.response.out.write(simplejson.dumps({'key': str(encounter.key())}))
elif action == "deladjustment":
aid = self.request.get('aid')
adj = Adjustment.get(db.Key(aid))
m = adj.member
m.balance -= adj.pt
m.adjusted -= adj.pt
m.put()
adj.delete()
self.response.out.write(simplejson.dumps({}))
class ImportHandler(webapp.RequestHandler):
def get(self):
render_to(self.response, 'admin/import.html')
def post(self):
text = self.request.get('import')
for line in text.split('\n'):
line = line.split('\t')
name = line[2].capitalize()
earned = int(float(line[6]))
spent = (-1)*int(float(line[7]))
adjusted = int(float(line[8]))
balance = int(float(line[9]))
m = Member.gql('WHERE name = :1', name).get()
if m:
m.earned = earned
m.spent = spent
m.balance = balance
m.adjusted = adjusted
else:
m = Member(name=name, spent=spent, earned=earned,
balance=balance, adjusted=adjusted)
m.put()
self.get()
class AdjustmentHandler(webapp.RequestHandler):
def get(self):
render_to(self.response, 'admin/adjust.html', members=Member.all(),
adjustments=Adjustment.all())
def post(self):
member = Member.gql('WHERE name = :1', self.request.get('member').capitalize()).get()
if member:
pt = int(self.request.get('pt'))
reason = self.request.get('reason')
dt = datetime.now()
Adjustment(pt=pt, member=member, reason=reason, datetime=dt).put()
member.adjusted += pt
member.balance += pt
member.usable = min(member.balance, member.attendance * member.balance / 100)
member.put()
self.get()
class YamlHandler(webapp.RequestHandler):
def get(self):
render_to(self.response, 'dump.html',
dump='<form action="/o/yaml" method="POST"><input type="submit" /> </form>')
def post(self):
stream = file('rps.yaml', 'r')
data = yaml.load(stream)
id_to_key = {}
items = defaultdict(list)
att = defaultdict(list)
memcache = {}
for m in Member.all():
memcache[m.name] = m.key()
for entry in data:
if 'adjustment_id' in entry:
continue
# member = Member.gql('WHERE name = :1', entry['member_name']).get()
# adj = Adjustment(pt=int(entry['adjustment_value']),
# reason=entry['adjustment_reason'],
# datetime=datetime.fromtimestamp(entry['adjustment_date']),
# member=member)
# adj.put()
# elif 'item_id' in entry:
if 'item_id' in entry:
items[entry['raid_id']].append((entry['item_name'],
entry['item_buyer'],
entry['item_value'],
entry['item_date']))
elif 'raid_added_by' in entry:
event = Event.gql('WHERE name = :1', entry['raid_name']).get()
if event:
if not entry['raid_note']:
entry['raid_note'] = ''
raid = Encounter(event=event, note=entry['raid_note'],
pt=int(entry['raid_value']),
datetime=datetime.fromtimestamp(entry['raid_date']))
raid.put()
id_to_key[entry['raid_id']] = raid.key()
else:
logging.error(entry)
elif 'member_lastraid' in entry:
continue
else:
try:
att[entry['raid_id']].append(entry['member_name'])
except KeyError:
logging.error(entry)
for rid, key in id_to_key.items():
r = Encounter.get(key)
for member in att[rid]:
m = memcache[member.capitalize()]
if m:
r.attendees.append(m)
else:
logging.error(member)
r.put()
for name, buyer, value, date in items[rid]:
try:
value = int(float(value))*(-1)
except UnicodeEncodeError:
logging.error(name)
item = Item.gql('WHERE name = :1', name).get()
if not item:
item = Item(name=name, default_cost=value)
item.put()
loot = Loot(item=item, encounter=key, cost=value,
looter=memcache[buyer],
datetime=datetime.fromtimestamp(date))
loot.put()
render_to(self.response, 'dump.html', dump=data)
class EditRaidHandler(webapp.RequestHandler):
def get(self, key):
raid = Encounter.get(db.Key(key))
if raid:
render_to(self.response, 'admin/parseadd.html', key=key,
members=set([m.name for m in raid.attending_members()]),
all_members=Member.all(), events=Event.all(),
datetime=raid.datetime.strftime('%Y.%m.%d %H:%M'),
items=[(i.item.gid, i.item.name, i.datetime.strftime('%Y.%m.%d %H:%M'), i.looter.name, (-1)*i.cost, str(i.key())) for i in raid.loots],
raid=raid)
def main():
application = webapp.WSGIApplication([('/o/', AdminHandler),
('/o/parse', ParseHandler),
('/o/events', EventHandler),
('/o/ajax', AjaxHandler),
('/o/import', ImportHandler),
('/o/adjust', AdjustmentHandler),
('/o/yaml', YamlHandler),
('/o/editraid/(.+)', EditRaidHandler)],
debug=True)
util.run_wsgi_app(application)
if __name__ == '__main__':
main()
|
mit
| -3,077,891,950,705,445,000
| 38.987245
| 157
| 0.45008
| false
| 4.490117
| false
| false
| false
|
googleads/google-ads-python
|
google/ads/googleads/v7/services/types/campaign_budget_service.py
|
1
|
6329
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v7.enums.types import (
response_content_type as gage_response_content_type,
)
from google.ads.googleads.v7.resources.types import (
campaign_budget as gagr_campaign_budget,
)
from google.protobuf import field_mask_pb2 as field_mask # type: ignore
from google.rpc import status_pb2 as status # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v7.services",
marshal="google.ads.googleads.v7",
manifest={
"GetCampaignBudgetRequest",
"MutateCampaignBudgetsRequest",
"CampaignBudgetOperation",
"MutateCampaignBudgetsResponse",
"MutateCampaignBudgetResult",
},
)
class GetCampaignBudgetRequest(proto.Message):
r"""Request message for
[CampaignBudgetService.GetCampaignBudget][google.ads.googleads.v7.services.CampaignBudgetService.GetCampaignBudget].
Attributes:
resource_name (str):
Required. The resource name of the campaign
budget to fetch.
"""
resource_name = proto.Field(proto.STRING, number=1,)
class MutateCampaignBudgetsRequest(proto.Message):
r"""Request message for
[CampaignBudgetService.MutateCampaignBudgets][google.ads.googleads.v7.services.CampaignBudgetService.MutateCampaignBudgets].
Attributes:
customer_id (str):
Required. The ID of the customer whose
campaign budgets are being modified.
operations (Sequence[google.ads.googleads.v7.services.types.CampaignBudgetOperation]):
Required. The list of operations to perform
on individual campaign budgets.
partial_failure (bool):
If true, successful operations will be
carried out and invalid operations will return
errors. If false, all operations will be carried
out in one transaction if and only if they are
all valid. Default is false.
validate_only (bool):
If true, the request is validated but not
executed. Only errors are returned, not results.
response_content_type (google.ads.googleads.v7.enums.types.ResponseContentTypeEnum.ResponseContentType):
The response content type setting. Determines
whether the mutable resource or just the
resource name should be returned post mutation.
"""
customer_id = proto.Field(proto.STRING, number=1,)
operations = proto.RepeatedField(
proto.MESSAGE, number=2, message="CampaignBudgetOperation",
)
partial_failure = proto.Field(proto.BOOL, number=3,)
validate_only = proto.Field(proto.BOOL, number=4,)
response_content_type = proto.Field(
proto.ENUM,
number=5,
enum=gage_response_content_type.ResponseContentTypeEnum.ResponseContentType,
)
class CampaignBudgetOperation(proto.Message):
r"""A single operation (create, update, remove) on a campaign
budget.
Attributes:
update_mask (google.protobuf.field_mask_pb2.FieldMask):
FieldMask that determines which resource
fields are modified in an update.
create (google.ads.googleads.v7.resources.types.CampaignBudget):
Create operation: No resource name is
expected for the new budget.
update (google.ads.googleads.v7.resources.types.CampaignBudget):
Update operation: The campaign budget is
expected to have a valid resource name.
remove (str):
Remove operation: A resource name for the removed budget is
expected, in this format:
``customers/{customer_id}/campaignBudgets/{budget_id}``
"""
update_mask = proto.Field(
proto.MESSAGE, number=4, message=field_mask.FieldMask,
)
create = proto.Field(
proto.MESSAGE,
number=1,
oneof="operation",
message=gagr_campaign_budget.CampaignBudget,
)
update = proto.Field(
proto.MESSAGE,
number=2,
oneof="operation",
message=gagr_campaign_budget.CampaignBudget,
)
remove = proto.Field(proto.STRING, number=3, oneof="operation",)
class MutateCampaignBudgetsResponse(proto.Message):
r"""Response message for campaign budget mutate.
Attributes:
partial_failure_error (google.rpc.status_pb2.Status):
Errors that pertain to operation failures in the partial
failure mode. Returned only when partial_failure = true and
all errors occur inside the operations. If any errors occur
outside the operations (e.g. auth errors), we return an RPC
level error.
results (Sequence[google.ads.googleads.v7.services.types.MutateCampaignBudgetResult]):
All results for the mutate.
"""
partial_failure_error = proto.Field(
proto.MESSAGE, number=3, message=status.Status,
)
results = proto.RepeatedField(
proto.MESSAGE, number=2, message="MutateCampaignBudgetResult",
)
class MutateCampaignBudgetResult(proto.Message):
r"""The result for the campaign budget mutate.
Attributes:
resource_name (str):
Returned for successful operations.
campaign_budget (google.ads.googleads.v7.resources.types.CampaignBudget):
The mutated campaign budget with only mutable fields after
mutate. The field will only be returned when
response_content_type is set to "MUTABLE_RESOURCE".
"""
resource_name = proto.Field(proto.STRING, number=1,)
campaign_budget = proto.Field(
proto.MESSAGE, number=2, message=gagr_campaign_budget.CampaignBudget,
)
__all__ = tuple(sorted(__protobuf__.manifest))
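# Illustrative usage sketch (an assumption, not part of the generated file;
# the CampaignBudget field names are taken from the v7 resources module):
#   op = CampaignBudgetOperation(
#       create=gagr_campaign_budget.CampaignBudget(
#           name="Interplanetary budget", amount_micros=5000000,
#       )
#   )
#   request = MutateCampaignBudgetsRequest(
#       customer_id="1234567890", operations=[op],
#   )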
|
apache-2.0
| -8,298,692,412,461,880,000
| 36.229412
| 128
| 0.681624
| false
| 4.070096
| false
| false
| false
|
amonapp/amon
|
amon/apps/_account/forms.py
|
1
|
6520
|
from django import forms
from django.contrib.auth import authenticate
from django.conf import settings
from django.contrib.auth import get_user_model
# from amon.apps.notifications.models import notifications_model
# from amon.apps.alerts.models import alerts_model
# from amon.apps.account.models import user_preferences_model, forgotten_pass_tokens_model
# from amon.apps.api.models import api_key_model
from timezone_field import TimeZoneFormField
from amon.apps.account.mailer import send_email_forgotten_password
User = get_user_model()
class LoginForm(forms.Form):
email = forms.EmailField(required=True, widget=forms.TextInput(attrs={'placeholder': 'Email'}))
password = forms.CharField(required=True, widget=forms.PasswordInput(render_value=False, attrs={'placeholder': 'Password'}))
remember_me = forms.BooleanField(widget=forms.CheckboxInput(), label='Remember Me', required=False)
def clean(self):
email = self.cleaned_data.get('email')
password = self.cleaned_data.get('password')
if email and password:
user = authenticate(email=email, password=password)
if user:
return self.cleaned_data
raise forms.ValidationError("Invalid login details")
def clean_remember_me(self):
remember_me = self.cleaned_data.get('remember_me')
if not remember_me:
settings.SESSION_EXPIRE_AT_BROWSER_CLOSE = True
else:
settings.SESSION_EXPIRE_AT_BROWSER_CLOSE = False
return remember_me
class AdminUserForm(forms.Form):
email = forms.EmailField(required=True, widget=forms.TextInput(attrs={'placeholder': 'Email'}))
password = forms.CharField(required=True, widget=forms.PasswordInput(render_value=False, attrs={'placeholder': 'Password'}))
def clean(self):
email = self.cleaned_data.get('email')
password = self.cleaned_data.get('password')
if email and password:
user = User.objects.filter(email=email).count()
if user:
raise forms.ValidationError("User already exists")
return self.cleaned_data
def save(self):
email = self.cleaned_data.get('email')
password = self.cleaned_data.get('password')
user = User.objects.create_user(email, password)
user.is_admin = True
user.is_staff = True
user.is_superuser = True
user.save()
# notifications_model.save(data={"email": email}, provider_id='email')
# api_key_model.add_initial_data()
class ProfileForm(forms.Form):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user', None)
        # NOTE: the user_preferences_model import is commented out above, so
        # this lookup would raise a NameError if ProfileForm were instantiated.
        user_preferences = user_preferences_model.get_preferences(user_id=self.user.id)
user_timezone = user_preferences.get('timezone', 'UTC')
super(ProfileForm, self).__init__(*args, **kwargs)
self.fields['timezone'].widget.attrs.update({'select2-dropdown': '', 'data-size': 360})
self.fields['timezone'].initial = user_timezone
self.fields['email'].initial = self.user.email
email = forms.EmailField(required=True, widget=forms.TextInput(attrs={'placeholder': 'Email'}))
timezone = TimeZoneFormField()
# Check email uniqueness
def clean_email(self):
email = self.cleaned_data.get('email')
if email:
if self.user.email != email:
unique = User.objects.filter(email__iexact=email).count()
if unique > 0:
                    raise forms.ValidationError(u'A user with this email address already exists.')
return email
def save(self):
data = {'timezone': str(self.cleaned_data['timezone'])}
# user_preferences_model.save_preferences(user_id=self.user.id, data=data)
self.user.email = self.cleaned_data['email']
self.user.save()
class ChangePasswordForm(forms.Form):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user', None)
super(ChangePasswordForm, self).__init__(*args, **kwargs)
current_password = forms.CharField(required=True, widget=(forms.PasswordInput(attrs={'placeholder': 'Password'})))
new_password = forms.CharField(required=True, widget=(forms.PasswordInput(attrs={'placeholder': 'Password'})))
def clean_current_password(self):
password = self.cleaned_data.get('current_password')
if self.user.check_password(password):
return self.cleaned_data
raise forms.ValidationError("Your current password is not correct")
def save(self):
password = self.cleaned_data.get('new_password')
self.user.set_password(password)
self.user.save()
return True
class ForgottenPasswordForm(forms.Form):
def __init__(self, *args, **kwargs):
super(ForgottenPasswordForm, self).__init__(*args, **kwargs)
email = forms.EmailField(required=True, widget=(forms.TextInput(attrs={'placeholder': 'Your Login Email'})))
def clean(self):
email = self.cleaned_data.get('email')
if email:
user = User.objects.filter(email=email).count()
if user == 0:
raise forms.ValidationError("User does not exists")
return self.cleaned_data
def save(self):
email = self.cleaned_data.get('email')
        # NOTE: the forgotten_pass_tokens_model import is commented out above,
        # so this call would raise a NameError at runtime as written.
        token = forgotten_pass_tokens_model.set_token(email=email)
send_email_forgotten_password(token=token, recipients=[email])
return True
class ResetPasswordForm(forms.Form):
password = forms.CharField(
required=True,
label='Your new password',
widget=forms.PasswordInput(render_value=False, attrs={'placeholder': 'Password'})
)
repeat_password = forms.CharField(
required=True,
label='Confirm it',
widget=forms.PasswordInput(render_value=False, attrs={'placeholder': 'Repeat Password'})
)
def clean(self):
repeat_password = self.cleaned_data.get('repeat_password')
password = self.cleaned_data.get('password')
if repeat_password and password:
if repeat_password != password:
raise forms.ValidationError("Passwords does not match")
return self.cleaned_data
def save(self, user=None):
password = self.cleaned_data.get('password')
user.set_password(password)
user.save()
|
agpl-3.0
| 8,258,274,880,384,959,000
| 29.905213
| 128
| 0.639724
| false
| 4.137056
| false
| false
| false
|
Micronaet/micronaet-production
|
production_accounting_external_closed/report/production_parser.py
|
1
|
2846
|
# -*- coding: utf-8 -*-
###############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2001-2015 Micronaet S.r.l. (<http://www.micronaet.it>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import os
import sys
import logging
import openerp
import openerp.netsvc as netsvc
import openerp.addons.decimal_precision as dp
from openerp.report import report_sxw
from openerp.osv import fields, osv, expression, orm
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from openerp import SUPERUSER_ID, api
from openerp import tools
from openerp.tools.translate import _
from openerp.tools.float_utils import float_round as round
from openerp.tools import (DEFAULT_SERVER_DATE_FORMAT,
DEFAULT_SERVER_DATETIME_FORMAT,
DATETIME_FORMATS_MAP,
float_compare)
_logger = logging.getLogger(__name__)
class Parser(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(Parser, self).__init__(cr, uid, name, context)
self.localcontext.update({
'get_closed_object': self.get_closed_object,
'get_date': self.get_date,
})
def get_date(self, ):
''' For report time
'''
return datetime.now().strftime(DEFAULT_SERVER_DATETIME_FORMAT)
def get_closed_object(self, ):
''' List of order
'''
sol_pool = self.pool.get('sale.order.line')
sol_ids = sol_pool.search(self.cr, self.uid, [
('mrp_id.state', 'not in', ('cancel', 'done')),
('mrp_id', '!=', False),
('go_in_production', '=', True),
('mx_closed', '=', True),
])
items = []
for item in sorted(sol_pool.browse(
self.cr, self.uid, sol_ids),
key=lambda x: (x.mrp_id.name,x.mrp_sequence)):
if item.product_uom_qty > item.product_uom_maked_sync_qty:
items.append(item)
return items
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
| -6,230,255,985,330,158,000
| 35.961039
| 79
| 0.608925
| false
| 4.136628
| false
| false
| false
|
PU-Crypto/AES
|
Rijndael/KeySchedule.py
|
1
|
1376
|
# -*- coding: utf-8 -*-
#KeySchedule
from Rijndael.SubBytes import *
from Rijndael.Tables import RijndaelRcon
import math
def RotWord(Spalte):
    # Rotate the byte positions in the column (a one-byte left rotation)
output = list()
output.append(Spalte[1])
output.append(Spalte[2])
output.append(Spalte[3])
output.append(Spalte[0])
return output
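# Example (illustrative): RotWord(['0x09', '0xcf', '0x4f', '0x3c'])
# returns ['0xcf', '0x4f', '0x3c', '0x09'].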
def XorRcon(Spalte, SpalteVor4, RconCount):
    # XOR the special cases (always the first column of a round key) step by step, including the Rcon table
output = list()
Rcon = RijndaelRcon.Rcon[RconCount]
for i in range(0,4):
output.append(format(int(Spalte[i],16)^int(SpalteVor4[i], 16)^int(format(Rcon[i], '#04x'),16), '#04x'))
return output
def Xor(Spalte, SpalteVor4):
    # XOR the two columns value by value
output = list()
for i in range(0,4):
        output.append(format(int(Spalte[i], 16)^int(SpalteVor4[i], 16), '#04x'))  # hexadecimal
return output
def KeySchedule(Key):
    # Expand the key with 10 further round keys, each depending on the one before
roundCounter = 0
for i in range(4,41,4):
Key.append(RotWord(Key[i-1]))
Key[i] = TranslateToSBox(Key[i])
Key[i] = XorRcon(Key[i],Key[i-4],roundCounter)
roundCounter += 1
for j in range(i+1,i+4):
Key.append(Xor(Key[j-1],Key[j-4]))
return Key
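# Illustrative shape note (assumption): `Key` enters as a list of 4 columns,
# each a list of 4 hex-string bytes (e.g. '0x2b'), and leaves with 44 columns
# covering the initial key plus the 10 derived round keys
# (the loop appends columns 4..43 in groups of four).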
|
lgpl-3.0
| 7,545,850,936,786,939,000
| 28.913043
| 124
| 0.653343
| false
| 2.656371
| false
| false
| false
|
sbt9uc/osf.io
|
tests/api_tests/users/test_views.py
|
1
|
32030
|
# -*- coding: utf-8 -*-
import urlparse
from nose.tools import * # flake8: noqa
from website.models import Node
from website.util.sanitize import strip_html
from tests.base import ApiTestCase
from tests.factories import AuthUserFactory, DashboardFactory, FolderFactory, ProjectFactory
from api.base.settings.defaults import API_BASE
class TestUsers(ApiTestCase):
def setUp(self):
super(TestUsers, self).setUp()
self.user_one = AuthUserFactory()
self.user_two = AuthUserFactory()
def tearDown(self):
super(TestUsers, self).tearDown()
def test_returns_200(self):
res = self.app.get('/{}users/'.format(API_BASE))
assert_equal(res.status_code, 200)
assert_equal(res.content_type, 'application/vnd.api+json')
def test_find_user_in_users(self):
url = "/{}users/".format(API_BASE)
res = self.app.get(url)
user_son = res.json['data']
ids = [each['id'] for each in user_son]
assert_in(self.user_two._id, ids)
def test_all_users_in_users(self):
url = "/{}users/".format(API_BASE)
res = self.app.get(url)
user_son = res.json['data']
ids = [each['id'] for each in user_son]
assert_in(self.user_one._id, ids)
assert_in(self.user_two._id, ids)
def test_find_multiple_in_users(self):
url = "/{}users/?filter[fullname]=fred".format(API_BASE)
res = self.app.get(url)
user_json = res.json['data']
ids = [each['id'] for each in user_json]
assert_in(self.user_one._id, ids)
assert_in(self.user_two._id, ids)
def test_find_single_user_in_users(self):
url = "/{}users/?filter[fullname]=my".format(API_BASE)
self.user_one.fullname = 'My Mom'
self.user_one.save()
res = self.app.get(url)
user_json = res.json['data']
ids = [each['id'] for each in user_json]
assert_in(self.user_one._id, ids)
assert_not_in(self.user_two._id, ids)
def test_find_no_user_in_users(self):
url = "/{}users/?filter[fullname]=NotMyMom".format(API_BASE)
res = self.app.get(url)
user_json = res.json['data']
ids = [each['id'] for each in user_json]
assert_not_in(self.user_one._id, ids)
assert_not_in(self.user_two._id, ids)
def test_users_list_takes_profile_image_size_param(self):
size = 42
url = "/{}users/?profile_image_size={}".format(API_BASE, size)
res = self.app.get(url)
user_json = res.json['data']
for user in user_json:
profile_image_url = user['attributes']['profile_image_url']
query_dict = urlparse.parse_qs(urlparse.urlparse(profile_image_url).query)
assert_equal(int(query_dict.get('size')[0]), size)
class TestUserDetail(ApiTestCase):
def setUp(self):
super(TestUserDetail, self).setUp()
self.user_one = AuthUserFactory()
self.user_one.social['twitter'] = 'howtopizza'
self.user_one.save()
self.user_two = AuthUserFactory()
def tearDown(self):
super(TestUserDetail, self).tearDown()
def test_gets_200(self):
url = "/{}users/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url)
assert_equal(res.status_code, 200)
assert_equal(res.content_type, 'application/vnd.api+json')
def test_get_correct_pk_user(self):
url = "/{}users/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url)
user_json = res.json['data']
assert_equal(user_json['attributes']['fullname'], self.user_one.fullname)
assert_equal(user_json['attributes']['twitter'], 'howtopizza')
def test_get_incorrect_pk_user_logged_in(self):
url = "/{}users/{}/".format(API_BASE, self.user_two._id)
res = self.app.get(url)
user_json = res.json['data']
assert_not_equal(user_json['attributes']['fullname'], self.user_one.fullname)
def test_get_incorrect_pk_user_not_logged_in(self):
url = "/{}users/{}/".format(API_BASE, self.user_two._id)
res = self.app.get(url, auth=self.user_one.auth)
user_json = res.json['data']
assert_not_equal(user_json['attributes']['fullname'], self.user_one.fullname)
assert_equal(user_json['attributes']['fullname'], self.user_two.fullname)
def test_user_detail_takes_profile_image_size_param(self):
size = 42
url = "/{}users/{}/?profile_image_size={}".format(API_BASE, self.user_one._id, size)
res = self.app.get(url)
user_json = res.json['data']
profile_image_url = user_json['attributes']['profile_image_url']
query_dict = urlparse.parse_qs(urlparse.urlparse(profile_image_url).query)
assert_equal(int(query_dict.get('size')[0]), size)
class TestUserNodes(ApiTestCase):
def setUp(self):
super(TestUserNodes, self).setUp()
self.user_one = AuthUserFactory()
self.user_one.social['twitter'] = 'howtopizza'
self.user_one.save()
self.user_two = AuthUserFactory()
self.public_project_user_one = ProjectFactory(title="Public Project User One",
is_public=True,
creator=self.user_one)
self.private_project_user_one = ProjectFactory(title="Private Project User One",
is_public=False,
creator=self.user_one)
self.public_project_user_two = ProjectFactory(title="Public Project User Two",
is_public=True,
creator=self.user_two)
self.private_project_user_two = ProjectFactory(title="Private Project User Two",
is_public=False,
creator=self.user_two)
self.deleted_project_user_one = FolderFactory(title="Deleted Project User One",
is_public=False,
creator=self.user_one,
is_deleted=True)
self.folder = FolderFactory()
self.deleted_folder = FolderFactory(title="Deleted Folder User One",
is_public=False,
creator=self.user_one,
is_deleted=True)
self.dashboard = DashboardFactory()
def tearDown(self):
super(TestUserNodes, self).tearDown()
def test_authorized_in_gets_200(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
assert_equal(res.content_type, 'application/vnd.api+json')
def test_anonymous_gets_200(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url)
assert_equal(res.status_code, 200)
assert_equal(res.content_type, 'application/vnd.api+json')
def test_get_projects_logged_in(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth)
node_json = res.json['data']
ids = [each['id'] for each in node_json]
assert_in(self.public_project_user_one._id, ids)
assert_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
def test_get_projects_not_logged_in(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url)
node_json = res.json['data']
ids = [each['id'] for each in node_json]
assert_in(self.public_project_user_one._id, ids)
assert_not_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
def test_get_projects_logged_in_as_different_user(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_two._id)
res = self.app.get(url, auth=self.user_one.auth)
node_json = res.json['data']
ids = [each['id'] for each in node_json]
assert_in(self.public_project_user_two._id, ids)
assert_not_in(self.public_project_user_one._id, ids)
assert_not_in(self.private_project_user_one._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
class TestUserRoutesNodeRoutes(ApiTestCase):
def setUp(self):
super(TestUserRoutesNodeRoutes, self).setUp()
self.user_one = AuthUserFactory()
self.user_one.social['twitter'] = 'howtopizza'
self.user_two = AuthUserFactory()
self.public_project_user_one = ProjectFactory(title="Public Project User One", is_public=True, creator=self.user_one)
self.private_project_user_one = ProjectFactory(title="Private Project User One", is_public=False, creator=self.user_one)
self.public_project_user_two = ProjectFactory(title="Public Project User Two", is_public=True, creator=self.user_two)
self.private_project_user_two = ProjectFactory(title="Private Project User Two", is_public=False, creator=self.user_two)
self.deleted_project_user_one = FolderFactory(title="Deleted Project User One", is_public=False, creator=self.user_one, is_deleted=True)
self.folder = FolderFactory()
self.deleted_folder = FolderFactory(title="Deleted Folder User One", is_public=False, creator=self.user_one, is_deleted=True)
self.dashboard = DashboardFactory()
def tearDown(self):
super(TestUserRoutesNodeRoutes, self).tearDown()
Node.remove()
def test_get_200_path_users_me_userone_logged_in(self):
url = "/{}users/me/".format(API_BASE)
res = self.app.get(url, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
def test_get_200_path_users_me_usertwo_logged_in(self):
url = "/{}users/me/".format(API_BASE)
res = self.app.get(url, auth=self.user_two.auth)
assert_equal(res.status_code, 200)
def test_get_403_path_users_me_no_user(self):
# TODO: change expected exception from 403 to 401 for unauthorized users
url = "/{}users/me/".format(API_BASE)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 401)
def test_get_404_path_users_user_id_me_user_logged_in(self):
url = "/{}users/{}/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_users_user_id_me_no_user(self):
url = "/{}users/{}/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_users_user_id_me_unauthorized_user(self):
url = "/{}users/{}/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_200_path_users_user_id_user_logged_in(self):
url = "/{}users/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
def test_get_200_path_users_user_id_no_user(self):
url = "/{}users/{}/".format(API_BASE, self.user_two._id)
res = self.app.get(url)
assert_equal(res.status_code, 200)
def test_get_200_path_users_user_id_unauthorized_user(self):
url = "/{}users/{}/".format(API_BASE, self.user_two._id)
res = self.app.get(url, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
assert_equal(res.json['data']['id'], self.user_two._id)
def test_get_200_path_users_me_nodes_user_logged_in(self):
url = "/{}users/me/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
ids = {each['id'] for each in res.json['data']}
assert_in(self.public_project_user_one._id, ids)
assert_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
def test_get_403_path_users_me_nodes_no_user(self):
# TODO: change expected exception from 403 to 401 for unauthorized users
url = "/{}users/me/nodes/".format(API_BASE)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 401)
def test_get_200_path_users_user_id_nodes_user_logged_in(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
ids = {each['id'] for each in res.json['data']}
assert_in(self.public_project_user_one._id, ids)
assert_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
def test_get_200_path_users_user_id_nodes_no_user(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url)
assert_equal(res.status_code, 200)
# an anonymous/unauthorized user can only see the public projects user_one contributes to.
ids = {each['id'] for each in res.json['data']}
assert_in(self.public_project_user_one._id, ids)
assert_not_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
def test_get_200_path_users_user_id_nodes_unauthorized_user(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_two.auth)
assert_equal(res.status_code, 200)
# an anonymous/unauthorized user can only see the public projects user_one contributes to.
ids = {each['id'] for each in res.json['data']}
assert_in(self.public_project_user_one._id, ids)
assert_not_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
def test_get_404_path_users_user_id_nodes_me_user_logged_in(self):
url = "/{}users/{}/nodes/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_users_user_id_nodes_me_unauthorized_user(self):
url = "/{}users/{}/nodes/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_users_user_id_nodes_me_no_user(self):
url = "/{}users/{}/nodes/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_nodes_me_user_logged_in(self):
url = "/{}nodes/me/".format(API_BASE)
res = self.app.get(url, auth=self.user_one.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_nodes_me_no_user(self):
url = "/{}nodes/me/".format(API_BASE)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_nodes_user_id_user_logged_in(self):
url = "/{}nodes/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_nodes_user_id_unauthorized_user(self):
url = "/{}nodes/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_nodes_user_id_no_user(self):
url = "/{}nodes/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 404)
class TestUserUpdate(ApiTestCase):
def setUp(self):
super(TestUserUpdate, self).setUp()
self.user_one = AuthUserFactory.build(
fullname='Martin Luther King Jr.',
given_name='Martin',
family_name='King',
suffix='Jr.',
social=dict(
github='userOneGithub',
scholar='userOneScholar',
personal='http://www.useronepersonalwebsite.com',
twitter='userOneTwitter',
linkedIn='userOneLinkedIn',
impactStory='userOneImpactStory',
orcid='userOneOrcid',
researcherId='userOneResearcherId'
)
)
self.user_one.save()
self.user_one_url = "/v2/users/{}/".format(self.user_one._id)
self.user_two = AuthUserFactory()
self.user_two.save()
self.new_user_one_data = {
'id': self.user_one._id,
'fullname': 'el-Hajj Malik el-Shabazz',
'given_name': 'Malcolm',
'middle_names': 'Malik el-Shabazz',
'family_name': 'X',
'suffix': 'Sr.',
'gitHub': 'newGitHub',
'scholar': 'newScholar',
'personal_website': 'http://www.newpersonalwebsite.com',
'twitter': 'newTwitter',
'linkedIn': 'newLinkedIn',
'impactStory': 'newImpactStory',
'orcid': 'newOrcid',
'researcherId': 'newResearcherId',
}
def tearDown(self):
super(TestUserUpdate, self).tearDown()
def test_patch_user_logged_out(self):
res = self.app.patch_json_api(self.user_one_url, {
'fullname': self.new_user_one_data['fullname'],
}, expect_errors=True)
assert_equal(res.status_code, 401)
def test_patch_user_without_required_field(self):
# PATCH does not require required fields
res = self.app.patch_json_api(self.user_one_url, {
'family_name': self.new_user_one_data['family_name'],
}, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
assert_equal(res.json['data']['attributes']['family_name'], self.new_user_one_data['family_name'])
self.user_one.reload()
assert_equal(self.user_one.family_name, self.new_user_one_data['family_name'])
def test_put_user_without_required_field(self):
# PUT requires all required fields
res = self.app.put_json_api(self.user_one_url, {
'family_name': self.new_user_one_data['family_name'],
}, auth=self.user_one.auth, expect_errors=True)
assert_equal(res.status_code, 400)
def test_partial_patch_user_logged_in_with_social_field(self):
# Patched fields (including the gitHub social field) are updated and old fields stay the same
res = self.app.patch_json_api(self.user_one_url, {
'id': self.user_one._id,
'fullname': 'new_fullname',
'gitHub': 'even_newer_github',
'suffix': 'The Millionth'
}, auth=self.user_one.auth)
self.user_one.reload()
assert_equal(res.status_code, 200)
assert_equal(res.json['data']['attributes']['fullname'], 'new_fullname')
assert_equal(res.json['data']['attributes']['suffix'], 'The Millionth')
assert_equal(res.json['data']['attributes']['gitHub'], 'even_newer_github')
assert_equal(res.json['data']['attributes']['given_name'], self.user_one.given_name)
assert_equal(res.json['data']['attributes']['middle_names'], self.user_one.middle_names)
assert_equal(res.json['data']['attributes']['family_name'], self.user_one.family_name)
assert_equal(res.json['data']['attributes']['personal_website'], self.user_one.social['personal'])
assert_equal(res.json['data']['attributes']['twitter'], self.user_one.social['twitter'])
assert_equal(res.json['data']['attributes']['linkedIn'], self.user_one.social['linkedIn'])
assert_equal(res.json['data']['attributes']['impactStory'], self.user_one.social['impactStory'])
assert_equal(res.json['data']['attributes']['orcid'], self.user_one.social['orcid'])
assert_equal(res.json['data']['attributes']['researcherId'], self.user_one.social['researcherId'])
assert_equal(self.user_one.fullname, 'new_fullname')
assert_equal(self.user_one.suffix, 'The Millionth')
assert_equal(self.user_one.social['github'], 'even_newer_github')
def test_partial_patch_user_logged_in(self):
# Test to make sure new fields are patched and old fields stay the same
res = self.app.patch_json_api(self.user_one_url, {
'id': self.user_one._id,
'fullname': 'new_fullname',
'suffix': 'The Millionth'
}, auth=self.user_one.auth)
self.user_one.reload()
assert_equal(res.status_code, 200)
assert_equal(res.json['data']['attributes']['fullname'], 'new_fullname')
assert_equal(res.json['data']['attributes']['suffix'], 'The Millionth')
assert_equal(res.json['data']['attributes']['gitHub'], self.user_one.social['github'])
assert_equal(res.json['data']['attributes']['given_name'], self.user_one.given_name)
assert_equal(res.json['data']['attributes']['middle_names'], self.user_one.middle_names)
assert_equal(res.json['data']['attributes']['family_name'], self.user_one.family_name)
assert_equal(res.json['data']['attributes']['personal_website'], self.user_one.social['personal'])
assert_equal(res.json['data']['attributes']['twitter'], self.user_one.social['twitter'])
assert_equal(res.json['data']['attributes']['linkedIn'], self.user_one.social['linkedIn'])
assert_equal(res.json['data']['attributes']['impactStory'], self.user_one.social['impactStory'])
assert_equal(res.json['data']['attributes']['orcid'], self.user_one.social['orcid'])
assert_equal(res.json['data']['attributes']['researcherId'], self.user_one.social['researcherId'])
assert_equal(self.user_one.fullname, 'new_fullname')
assert_equal(self.user_one.suffix, 'The Millionth')
assert_equal(self.user_one.social['github'], 'userOneGithub')  # unchanged by this patch
def test_partial_put_user_logged_in(self):
# Test to make sure new fields are patched and old fields stay the same
res = self.app.put_json_api(self.user_one_url, {
'id': self.user_one._id,
'fullname': 'new_fullname',
'gitHub': 'even_newer_github',
'suffix': 'The Millionth'
}, auth=self.user_one.auth)
self.user_one.reload()
assert_equal(res.status_code, 200)
assert_equal(res.json['data']['attributes']['fullname'], 'new_fullname')
assert_equal(res.json['data']['attributes']['suffix'], 'The Millionth')
assert_equal(res.json['data']['attributes']['gitHub'], 'even_newer_github')
assert_equal(res.json['data']['attributes']['given_name'], self.user_one.given_name)
assert_equal(res.json['data']['attributes']['middle_names'], self.user_one.middle_names)
assert_equal(res.json['data']['attributes']['family_name'], self.user_one.family_name)
assert_equal(self.user_one.fullname, 'new_fullname')
assert_equal(self.user_one.suffix, 'The Millionth')
assert_equal(self.user_one.social['github'], 'even_newer_github')
def test_put_user_logged_in(self):
# Logged in user updates their user information via put
res = self.app.put_json_api(self.user_one_url, self.new_user_one_data, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
assert_equal(res.json['data']['attributes']['fullname'], self.new_user_one_data['fullname'])
assert_equal(res.json['data']['attributes']['given_name'], self.new_user_one_data['given_name'])
assert_equal(res.json['data']['attributes']['middle_names'], self.new_user_one_data['middle_names'])
assert_equal(res.json['data']['attributes']['family_name'], self.new_user_one_data['family_name'])
assert_equal(res.json['data']['attributes']['suffix'], self.new_user_one_data['suffix'])
assert_equal(res.json['data']['attributes']['gitHub'], self.new_user_one_data['gitHub'])
assert_equal(res.json['data']['attributes']['personal_website'], self.new_user_one_data['personal_website'])
assert_equal(res.json['data']['attributes']['twitter'], self.new_user_one_data['twitter'])
assert_equal(res.json['data']['attributes']['linkedIn'], self.new_user_one_data['linkedIn'])
assert_equal(res.json['data']['attributes']['impactStory'], self.new_user_one_data['impactStory'])
assert_equal(res.json['data']['attributes']['orcid'], self.new_user_one_data['orcid'])
assert_equal(res.json['data']['attributes']['researcherId'], self.new_user_one_data['researcherId'])
self.user_one.reload()
assert_equal(self.user_one.fullname, self.new_user_one_data['fullname'])
assert_equal(self.user_one.given_name, self.new_user_one_data['given_name'])
assert_equal(self.user_one.middle_names, self.new_user_one_data['middle_names'])
assert_equal(self.user_one.family_name, self.new_user_one_data['family_name'])
assert_equal(self.user_one.suffix, self.new_user_one_data['suffix'])
assert_equal(self.user_one.social['github'], self.new_user_one_data['gitHub'])
assert_equal(self.user_one.social['personal'], self.new_user_one_data['personal_website'])
assert_equal(self.user_one.social['twitter'], self.new_user_one_data['twitter'])
assert_equal(self.user_one.social['linkedIn'], self.new_user_one_data['linkedIn'])
assert_equal(self.user_one.social['impactStory'], self.new_user_one_data['impactStory'])
assert_equal(self.user_one.social['orcid'], self.new_user_one_data['orcid'])
assert_equal(self.user_one.social['researcherId'], self.new_user_one_data['researcherId'])
def test_put_user_logged_out(self):
res = self.app.put_json_api(self.user_one_url, self.new_user_one_data, expect_errors=True)
assert_equal(res.status_code, 401)
def test_put_wrong_user(self):
# User tries to update someone else's user information via put
res = self.app.put_json_api(self.user_one_url, self.new_user_one_data, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 403)
def test_patch_wrong_user(self):
# User tries to update someone else's user information via patch
res = self.app.patch_json_api(self.user_one_url, {
'fullname': self.new_user_one_data['fullname'],
}, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 403)
self.user_one.reload()
assert_not_equal(self.user_one.fullname, self.new_user_one_data['fullname'])
def test_update_user_sanitizes_html_properly(self):
"""Post request should update resource, and any HTML in fields should be stripped"""
bad_fullname = 'Malcolm <strong>X</strong>'
bad_family_name = 'X <script>alert("is")</script> a cool name'
res = self.app.patch_json_api(self.user_one_url, {
'fullname': bad_fullname,
'family_name': bad_family_name,
}, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
assert_equal(res.json['data']['attributes']['fullname'], strip_html(bad_fullname))
assert_equal(res.json['data']['attributes']['family_name'], strip_html(bad_family_name))
class TestDeactivatedUser(ApiTestCase):
def setUp(self):
super(TestDeactivatedUser, self).setUp()
self.user = AuthUserFactory()
def test_deactivated_user_returns_410_response(self):
url = '/{}users/{}/'.format(API_BASE, self.user._id)
res = self.app.get(url, auth=self.user.auth, expect_errors=False)
assert_equal(res.status_code, 200)
self.user.is_disabled = True
self.user.save()
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 410)
class TestExceptionFormatting(ApiTestCase):
def setUp(self):
super(TestExceptionFormatting, self).setUp()
self.user = AuthUserFactory.build(
fullname='Martin Luther King Jr.',
given_name='Martin',
family_name='King',
suffix='Jr.',
social=dict(
github='userOneGithub',
scholar='userOneScholar',
personal='http://www.useronepersonalwebsite.com',
twitter='userOneTwitter',
linkedIn='userOneLinkedIn',
impactStory='userOneImpactStory',
orcid='userOneOrcid',
researcherId='userOneResearcherId'
)
)
self.url = '/{}users/{}/'.format(API_BASE, self.user._id)
self.user_two = AuthUserFactory()
def test_updates_user_with_no_fullname(self):
res = self.app.put_json_api(self.url, auth=self.user.auth, expect_errors=True)
errors = res.json['errors']
assert(isinstance(errors, list))
assert('fullname' in res.json['errors'][0]['meta']['field'])
assert('This field is required.' in res.json['errors'][0]['detail'])
def test_updates_user_unauthorized(self):
res = self.app.put_json_api(self.url, expect_errors=True)
errors = res.json['errors']
assert(isinstance(errors, list))
assert_equal(errors[0], {'detail': "Authentication credentials were not provided."})
def test_updates_user_forbidden(self):
res = self.app.put_json_api(self.url, auth=self.user_two.auth, expect_errors=True)
errors = res.json['errors']
assert(isinstance(errors, list))
assert_equal(errors[0], {'detail': 'You do not have permission to perform this action.'})
def test_user_does_not_exist_formatting(self):
url = '/{}users/{}/'.format(API_BASE, '12345')
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
errors = res.json['errors']
assert(isinstance(errors, list))
assert_equal(errors[0], {'detail': 'Not found.'})
def test_basic_auth_me_wrong_password(self):
url = '/{}users/{}/'.format(API_BASE, 'me')
res = self.app.get(url, auth=(self.user.username, 'nottherightone'), expect_errors=True)
assert_equal(res.status_code, 401)
|
apache-2.0
| 1,204,231,969,254,728,200
| 46.949102
| 144
| 0.613862
| false
| 3.383332
| true
| false
| false
|
griffy/Pyap
|
pyap/library/db.py
|
1
|
2688
|
# Pyap - The Python Audio Player Library
#
# Copyright (c) 2012 Joel Griffith
# Copyright (c) 2005 Joe Wreschnig
# Copyright (c) 2002 David I. Lehn
# Copyright (c) 2005-2011 the SQLAlchemy authors and contributors
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
from sqlalchemy import create_engine
from sqlalchemy import Table, Column, Integer, Unicode, MetaData
from sqlalchemy.schema import ForeignKey
from sqlalchemy.orm import mapper, relationship, sessionmaker
from pyap.audio import Audio
from pyap.playlist import Playlist
def setup(uri):
# TODO: echo should be false
if uri is None:
engine = create_engine('sqlite:///:memory:', echo=True)
else:
engine = create_engine('sqlite:///' + uri, echo=True)
metadata = MetaData()
#audio_types_table = Table('audio_types', metadata,
# Column('id', Integer, primary_key=True),
# Column('type', Unicode, unique=True)
#)
audio_table = Table('audio', metadata,
Column('id', Integer, primary_key=True),
Column('uri', Unicode, unique=True, index=True),
Column('type', Integer, nullable=False),
Column('artist', Unicode),
Column('title', Unicode),
Column('album', Unicode),
Column('track', Integer),
Column('length', Integer)
)
playlist_table = Table('playlists', metadata,
Column('id', Integer, primary_key=True),
Column('name', Unicode, unique=True, index=True)
)
# many-to-many junction table for audio and playlists
audio_playlist_table = Table('audio_playlists', metadata,
Column('audio_id', Integer, ForeignKey('audio.id')),
Column('playlist_id', Integer, ForeignKey('playlists.id'))
)
metadata.create_all(engine)
mapper(Audio, audio_table)
mapper(Playlist, playlist_table, properties={
'audio': relationship(Audio, secondary=audio_playlist_table,
backref='playlists')}
)
return sessionmaker(bind=engine)
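# Illustrative usage of setup() (the Playlist constructor arguments below are
# assumptions; see pyap.playlist for the real signature):
#
#   Session = setup(None)          # None selects the in-memory database
#   session = Session()
#   session.add(Playlist(name=u'favourites'))
#   session.commit()
#   print(session.query(Playlist).count())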
|
gpl-2.0
| 4,940,832,737,651,426,000
| 34.84
| 68
| 0.682292
| false
| 4.097561
| false
| false
| false
|
magchips/labalyzer
|
setup.py
|
1
|
3470
|
#!/usr/bin/env python
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
### BEGIN LICENSE
# Copyright (C) 2010 <Atreju Tauschinsky> <Atreju.Tauschinsky@gmx.de>
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
### END LICENSE
###################### DO NOT TOUCH THIS (HEAD TO THE SECOND PART) ######################
import os
import sys
try:
import DistUtilsExtra.auto
except ImportError:
print >> sys.stderr, 'To build labalyzer you need https://launchpad.net/python-distutils-extra'
sys.exit(1)
assert DistUtilsExtra.auto.__version__ >= '2.18', 'needs DistUtilsExtra.auto >= 2.18'
def update_config(values = {}):
oldvalues = {}
try:
fin = file('labalyzer_lib/labalyzerconfig.py', 'r')
fout = file(fin.name + '.new', 'w')
for line in fin:
fields = line.split(' = ') # Separate variable from value
if fields[0] in values:
oldvalues[fields[0]] = fields[1].strip()
line = "%s = %s\n" % (fields[0], values[fields[0]])
fout.write(line)
fout.flush()
fout.close()
fin.close()
os.rename(fout.name, fin.name)
except (OSError, IOError), e:
print ("ERROR: Can't find labalyzer_lib/labalyzerconfig.py")
sys.exit(1)
return oldvalues
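# Illustrative example (values are hypothetical): calling
#   update_config({'__version__': "'0.2'"})
# rewrites a line such as
#   __version__ = '0.1'
# in labalyzer_lib/labalyzerconfig.py to
#   __version__ = '0.2'
# and returns {'__version__': "'0.1'"} so the old value can be restored later.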
def update_desktop_file(datadir):
try:
fin = file('labalyzer.desktop.in', 'r')
fout = file(fin.name + '.new', 'w')
for line in fin:
if 'Icon=' in line:
line = "Icon=%s\n" % (datadir + 'media/labalyzer.svg')
fout.write(line)
fout.flush()
fout.close()
fin.close()
os.rename(fout.name, fin.name)
except (OSError, IOError), e:
print ("ERROR: Can't find labalyzer.desktop.in")
sys.exit(1)
class InstallAndUpdateDataDirectory(DistUtilsExtra.auto.install_auto):
def run(self):
values = {'__labalyzer_data_directory__': "'%s'" % (self.prefix + '/share/labalyzer/'),
'__version__': "'%s'" % self.distribution.get_version()}
previous_values = update_config(values)
update_desktop_file(self.prefix + '/share/labalyzer/')
DistUtilsExtra.auto.install_auto.run(self)
update_config(previous_values)
##################################################################################
###################### YOU SHOULD MODIFY ONLY WHAT IS BELOW ######################
##################################################################################
DistUtilsExtra.auto.setup(
name='labalyzer',
version='0.1',
license='GPL-3',
#author='Your Name',
#author_email='email@ubuntu.com',
#description='UI for managing …',
#long_description='Here a longer description',
#url='https://launchpad.net/labalyzer',
cmdclass={'install': InstallAndUpdateDataDirectory}
)
|
gpl-3.0
| 3,448,645,533,441,506,300
| 34.030303
| 99
| 0.580161
| false
| 3.72103
| false
| false
| false
|
olga-perederieieva/pyDEA
|
pyDEA/main.py
|
1
|
3347
|
''' This module contains methods for running pyDEA from terminal.
'''
import sys
from pyDEA.core.data_processing.parameters import parse_parameters_from_file
from pyDEA.core.utils.run_routine import RunMethodTerminal
from pyDEA.core.utils.dea_utils import clean_up_pickled_files, get_logger
def main(filename, output_format='xlsx', output_dir='', sheet_name_usr=''):
''' Main function to run DEA models from terminal.
Args:
filename (str): path to file with parameters.
output_format (str, optional): file format of the solution file.
This value is used only if OUTPUT_FILE in parameters is empty
or set to auto. Defaults to xlsx.
output_dir (str, optional): directory where the solution must be
written. If it is not given, the solution is written to the
current folder. This value is used only if OUTPUT_FILE in
parameters is empty or set to auto.
sheet_name_usr (str, optional): name of the sheet in the xls- or
xlsx-file with input data from which data will be read. If the
input data file is in csv format, this value is ignored.
'''
print('Params file', filename, 'output_format', output_format,
'output_dir', output_dir, 'sheet_name_usr', sheet_name_usr)
logger = get_logger()
logger.info('Params file "%s", output format "%s", output directory "%s", sheet name "%s".',
filename, output_format, output_dir, sheet_name_usr)
params = parse_parameters_from_file(filename)
params.print_all_parameters()
run_method = RunMethodTerminal(params, sheet_name_usr, output_format,
output_dir)
run_method.run(params)
clean_up_pickled_files()
logger.info('pyDEA exited.')
if __name__ == '__main__':
args = sys.argv[1:]
logger = get_logger()
logger.info('pyDEA started as a console application.')
print('args = {0}'.format(args))
if len(args) < 1 or len(args) > 4:
logger.error('Invalid number of input arguments. At least one '
'argument must be given, no more than 4 arguments, but %d were given.',
len(args))
raise ValueError('Invalid number of input arguments. At least one '
'argument must be given, no more than 4 arguments'
' are expected. Input arguments are:\n (1) path to'
' file with parameters (compulsory)\n'
'(2) output file format, possible values: xls, xlsx'
' and csv, default value is xlsx (optional), this'
' value is used only if auto or empty string was set'
' for OUTPUT_FILE in parameters file \n'
'(3) output directory (optional, if not specified,'
' output is written to current directory)\n'
'(4) sheet name from which data should be read '
'(optional, if not specified, data is read from'
' the first sheet)')
try:
main(*args)
except Exception as excinfo:
logger.error(excinfo)
raise
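# Example invocation (file and directory names are hypothetical):
#   python main.py params.txt csv /tmp/results "Sheet1"
# This runs the models described in params.txt and, if OUTPUT_FILE in the
# parameters is empty or set to auto, writes a csv solution file to
# /tmp/results, reading input data from the sheet named "Sheet1".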
|
mit
| 2,119,575,361,617,346,300
| 45.486111
| 96
| 0.574544
| false
| 4.456724
| false
| false
| false
|
wireservice/csvkit
|
setup.py
|
1
|
2897
|
#!/usr/bin/env python
import sys
from setuptools import setup
install_requires = [
'agate>=1.6.1',
'agate-excel>=0.2.2',
'agate-dbf>=0.2.0',
'agate-sql>=0.5.3',
'six>=1.6.1',
'setuptools',
]
if sys.version_info < (2, 7):
install_requires.append('argparse>=1.2.1')
install_requires.append('ordereddict>=1.1')
install_requires.append('simplejson>=3.6.3')
setup(
name='csvkit',
version='1.0.6',
description='A suite of command-line tools for working with CSV, the king of tabular file formats.',
long_description=open('README.rst').read(),
author='Christopher Groskopf',
author_email='chrisgroskopf@gmail.com',
url='https://github.com/wireservice/csvkit',
project_urls={
'Documentation': 'https://csvkit.readthedocs.io/en/latest/',
},
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities'
],
packages=[
'csvkit',
'csvkit.convert',
'csvkit.utilities'
],
entry_points={
'console_scripts': [
'csvclean = csvkit.utilities.csvclean:launch_new_instance',
'csvcut = csvkit.utilities.csvcut:launch_new_instance',
'csvformat = csvkit.utilities.csvformat:launch_new_instance',
'csvgrep = csvkit.utilities.csvgrep:launch_new_instance',
'csvjoin = csvkit.utilities.csvjoin:launch_new_instance',
'csvjson = csvkit.utilities.csvjson:launch_new_instance',
'csvlook = csvkit.utilities.csvlook:launch_new_instance',
'csvpy = csvkit.utilities.csvpy:launch_new_instance',
'csvsort = csvkit.utilities.csvsort:launch_new_instance',
'csvsql = csvkit.utilities.csvsql:launch_new_instance',
'csvstack = csvkit.utilities.csvstack:launch_new_instance',
'csvstat = csvkit.utilities.csvstat:launch_new_instance',
'in2csv = csvkit.utilities.in2csv:launch_new_instance',
'sql2csv = csvkit.utilities.sql2csv:launch_new_instance'
]
},
install_requires=install_requires
)
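# After installation (e.g. `pip install .`), each console_scripts entry above
# becomes a command-line tool, for example:
#   in2csv data.xlsx > data.csv
#   csvstat data.csv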
|
mit
| 7,940,495,066,647,459,000
| 37.626667
| 104
| 0.622023
| false
| 3.752591
| false
| false
| false
|
ZachMassia/platformio
|
platformio/builder/scripts/nordicnrf51.py
|
1
|
2079
|
# Copyright 2014-2016 Ivan Kravets <me@ikravets.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Builder for Nordic nRF51 series ARM microcontrollers.
"""
from os.path import join
from SCons.Script import (COMMAND_LINE_TARGETS, AlwaysBuild, Default,
DefaultEnvironment, SConscript)
env = DefaultEnvironment()
SConscript(env.subst(join("$PIOBUILDER_DIR", "scripts", "basearm.py")))
if env.subst("$BOARD") == "rfduino":
env.Append(
CPPFLAGS=["-fno-builtin"],
LINKFLAGS=["--specs=nano.specs"]
)
env.Replace(
UPLOADER=join("$PIOPACKAGES_DIR", "tool-rfdloader", "rfdloader"),
UPLOADERFLAGS=["-q", "$UPLOAD_PORT"],
UPLOADCMD='"$UPLOADER" $UPLOADERFLAGS $SOURCES'
)
#
# Target: Build executable and linkable firmware
#
target_elf = env.BuildProgram()
#
# Target: Build the .bin file
#
if "uploadlazy" in COMMAND_LINE_TARGETS:
target_firm = join("$BUILD_DIR", "firmware.hex")
else:
target_firm = env.ElfToHex(join("$BUILD_DIR", "firmware"), target_elf)
#
# Target: Print binary size
#
target_size = env.Alias("size", target_elf, "$SIZEPRINTCMD")
AlwaysBuild(target_size)
#
# Target: Upload by default .bin file
#
if env.subst("$BOARD") == "rfduino":
upload = env.Alias(
["upload", "uploadlazy"], target_firm,
[lambda target, source, env: env.AutodetectUploadPort(), "$UPLOADCMD"])
else:
upload = env.Alias(["upload", "uploadlazy"], target_firm, env.UploadToDisk)
AlwaysBuild(upload)
#
# Target: Define targets
#
Default([target_firm, target_size])
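# This script is normally driven by the PlatformIO CLI rather than executed
# directly; a typical build-and-upload invocation (shown for context) is:
#   platformio run --target upload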
|
apache-2.0
| 3,195,199,141,947,326,000
| 26
| 79
| 0.685426
| false
| 3.391517
| false
| false
| false
|
Oli76/rwslib
|
rwslib/builders.py
|
1
|
77124
|
# -*- coding: utf-8 -*-
__author__ = 'isparks'
import uuid
from xml.etree import cElementTree as ET
from datetime import datetime
from string import ascii_letters
from rwslib.builder_constants import *
"""
builders.py provides convenience classes for building ODM documents for clinical data and metadata post messages.
"""
# -----------------------------------------------------------------------------------------------------------------------
# Constants
VALID_ID_CHARS = ascii_letters + '_'
# -----------------------------------------------------------------------------------------------------------------------
# Utilities
def now_to_iso8601():
"""Returns NOW date/time as a UTC date/time formated as iso8601 string"""
utc_date = datetime.utcnow()
return dt_to_iso8601(utc_date)
def dt_to_iso8601(dt):
"""Turn a datetime into an ISO8601 formatted string"""
return dt.strftime("%Y-%m-%dT%H:%M:%S")
def bool_to_yes_no(val):
"""Convert True/False to Yes/No"""
return 'Yes' if val else 'No'
def bool_to_true_false(val):
"""Convert True/False to TRUE / FALSE"""
return 'TRUE' if val else 'FALSE'
def indent(elem, level=0):
"""Indent a elementree structure"""
i = "\n" + level * " "
if len(elem) > 0:
if not elem.text or not elem.text.strip():
elem.text = i + " "
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
indent(elem, level + 1)
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
def make_element(builder, tag, content):
"""Make an element with this tag and text content"""
builder.start(tag, {})
builder.data(content) # Must be UTF-8 encoded
builder.end(tag)
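# Illustrative example of the helpers above (not part of the library API):
#
#   builder = ET.TreeBuilder()
#   builder.start("Example", {})
#   make_element(builder, "Stamp", now_to_iso8601())
#   builder.end("Example")
#   root = builder.close()
#   indent(root)  # pretty-print the tree in place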
# -----------------------------------------------------------------------------------------------------------------------
# Classes
class ODMElement(object):
"""Base class for ODM XML element classes"""
def __call__(self, *args):
"""Collect all children passed in call"""
for child in args:
self << child
return self
def __lshift__(self, other):
"""__lshift__ should be overridden in descendant classes to accept child elements and incorporate them.
By default takes no child elements
"""
raise ValueError("%s takes no child elements" % self.__class__.__name__)
def add(self, *args):
"""Like call but adds a set of args"""
for child in args:
self << child
return self
def set_single_attribute(self, other, trigger_klass, property_name):
"""Used to set guard the setting of an attribute which is singular and can't be set twice"""
if isinstance(other, trigger_klass):
# Check property exists
if not hasattr(self, property_name):
raise AttributeError("%s has no property %s" % (self.__class__.__name__, property_name))
if getattr(self, property_name) is None:
setattr(self, property_name, other)
else:
raise ValueError(
'%s already has a %s element set.' % (self.__class__.__name__, other.__class__.__name__,))
def set_list_attribute(self, other, trigger_klass, property_name):
"""Used to set guard the setting of a list attribute, ensuring the same element is not added twice."""
# Check property exists
if isinstance(other, trigger_klass):
if not hasattr(self, property_name):
raise AttributeError("%s has no property %s" % (self.__class__.__name__, property_name))
val = getattr(self, property_name, [])
if other in val:
raise ValueError("%s already exists in %s" % (other.__class__.__name__, self.__class__.__name__))
else:
val.append(other)
setattr(self, property_name, val)
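# Illustrative sketch of the guard helpers above, using classes defined later
# in this module (values are hypothetical):
#
#   record = AuditRecord()
#   record << UserRef("jsmith")
#   record << UserRef("asmith")   # raises ValueError: already has a UserRef
#
# set_list_attribute, by contrast, accepts many children but rejects the same
# object being added twice.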
class UserRef(ODMElement):
def __init__(self, oid):
self.oid = oid
def build(self, builder):
builder.start("UserRef", dict(UserOID=self.oid))
builder.end("UserRef")
class LocationRef(ODMElement):
def __init__(self, oid):
self.oid = oid
def build(self, builder):
builder.start("LocationRef", dict(LocationOID=self.oid))
builder.end("LocationRef")
class ReasonForChange(ODMElement):
def __init__(self, reason):
self.reason = reason
def build(self, builder):
builder.start("ReasonForChange", {})
builder.data(self.reason)
builder.end("ReasonForChange")
class DateTimeStamp(ODMElement):
def __init__(self, date_time):
self.date_time = date_time
def build(self, builder):
builder.start("DateTimeStamp", {})
if isinstance(self.date_time, datetime):
builder.data(dt_to_iso8601(self.date_time))
else:
builder.data(self.date_time)
builder.end("DateTimeStamp")
class AuditRecord(ODMElement):
"""AuditRecord is supported only by ItemData in Rave"""
EDIT_MONITORING = 'Monitoring'
EDIT_DATA_MANAGEMENT = 'DataManagement'
EDIT_DB_AUDIT = 'DBAudit'
EDIT_POINTS = [EDIT_MONITORING, EDIT_DATA_MANAGEMENT, EDIT_DB_AUDIT]
def __init__(self, edit_point=None, used_imputation_method=None, identifier=None, include_file_oid=None):
self._edit_point = None
self.edit_point = edit_point
self.used_imputation_method = used_imputation_method
self._id = None
self.id = identifier
self.include_file_oid = include_file_oid
self.user_ref = None
self.location_ref = None
self.reason_for_change = None
self.date_time_stamp = None
@property
def id(self):
return self._id
@id.setter
def id(self, value):
if value not in [None, ''] and str(value).strip() != '':
val = str(value).strip()[0]
if val not in VALID_ID_CHARS:
raise AttributeError('%s id cannot start with "%s" character' % (self.__class__.__name__, val,))
self._id = value
@property
def edit_point(self):
return self._edit_point
@edit_point.setter
def edit_point(self, value):
if value is not None:
if value not in self.EDIT_POINTS:
raise AttributeError('%s edit_point must be one of %s not %s' % (
self.__class__.__name__, ','.join(self.EDIT_POINTS), value,))
self._edit_point = value
def build(self, builder):
params = {}
if self.edit_point is not None:
params["EditPoint"] = self.edit_point
if self.used_imputation_method is not None:
params['UsedImputationMethod'] = bool_to_yes_no(self.used_imputation_method)
if self.id is not None:
params['ID'] = str(self.id)
if self.include_file_oid is not None:
params['mdsol:IncludeFileOID'] = bool_to_yes_no(self.include_file_oid)
builder.start("AuditRecord", params)
if self.user_ref is None:
raise ValueError("User Reference not set.")
self.user_ref.build(builder)
if self.location_ref is None:
raise ValueError("Location Reference not set.")
self.location_ref.build(builder)
if self.date_time_stamp is None:
raise ValueError("DateTime not set.")
self.date_time_stamp.build(builder)
# Optional
if self.reason_for_change is not None:
self.reason_for_change.build(builder)
builder.end("AuditRecord")
def __lshift__(self, other):
if not isinstance(other, (UserRef, LocationRef, DateTimeStamp, ReasonForChange,)):
raise ValueError("AuditRecord cannot accept a child element of type %s" % other.__class__.__name__)
# Order is important, apparently
self.set_single_attribute(other, UserRef, 'user_ref')
self.set_single_attribute(other, LocationRef, 'location_ref')
self.set_single_attribute(other, DateTimeStamp, 'date_time_stamp')
self.set_single_attribute(other, ReasonForChange, 'reason_for_change')
return other
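# Example (illustrative values): build() requires a UserRef, LocationRef and
# DateTimeStamp to have been set; ReasonForChange is optional.
#
#   audit = AuditRecord(edit_point=AuditRecord.EDIT_MONITORING)
#   audit << UserRef("jsmith")
#   audit << LocationRef("SITE1")
#   audit << DateTimeStamp(datetime(2015, 1, 1, 12, 0, 0))
#   audit << ReasonForChange("Data entry error")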
class TransactionalElement(ODMElement):
"""Models an ODM Element that is allowed a transaction type. Different elements have different
allowed transaction types"""
ALLOWED_TRANSACTION_TYPES = []
def __init__(self, transaction_type):
self._transaction_type = None
self.transaction_type = transaction_type
@property
def transaction_type(self):
return self._transaction_type
@transaction_type.setter
def transaction_type(self, value):
if value is not None:
if value not in self.ALLOWED_TRANSACTION_TYPES:
raise AttributeError('%s transaction_type element must be one of %s not %s' % (
self.__class__.__name__, ','.join(self.ALLOWED_TRANSACTION_TYPES), value,))
self._transaction_type = value
class MdsolQuery(ODMElement):
"""MdsolQuery extension element for Queries at item level only"""
def __init__(self, value=None, query_repeat_key=None, recipient=None, status=None, requires_response=None,
response=None):
self.value = value
self.query_repeat_key = query_repeat_key
self.recipient = recipient
self._status = None
self.status = status
self.requires_response = requires_response
self.response = response
@property
def status(self):
return self._status
@status.setter
def status(self, value):
if value is not None:
if not isinstance(value, QueryStatusType):
raise AttributeError("%s action type is invalid in mdsol:Query." % (value,))
self._status = value
def build(self, builder):
params = {}
if self.value is not None:
params['Value'] = str(self.value)
if self.query_repeat_key is not None:
params['QueryRepeatKey'] = str(self.query_repeat_key)
if self.recipient is not None:
params['Recipient'] = str(self.recipient)
if self.status is not None:
params['Status'] = self.status.value
if self.requires_response is not None:
params['RequiresResponse'] = bool_to_yes_no(self.requires_response)
# When closing a query
if self.response is not None:
params['Response'] = str(self.response)
builder.start("mdsol:Query", params)
builder.end("mdsol:Query")
class ItemData(TransactionalElement):
"""Models the ODM ItemData object"""
ALLOWED_TRANSACTION_TYPES = ['Insert', 'Update', 'Upsert', 'Context', 'Remove']
def __init__(self, itemoid, value, specify_value=None, transaction_type=None, lock=None, freeze=None, verify=None):
super(ItemData, self).__init__(transaction_type)  # avoid super(self.__class__, ...), which recurses under subclassing
self.itemoid = itemoid
self.value = value
self.specify_value = specify_value
self.lock = lock
self.freeze = freeze
self.verify = verify
self.audit_record = None
self.queries = []
self.measurement_unit_ref = None
def build(self, builder):
"""Build XML by appending to builder
<ItemData ItemOID="MH_DT" Value="06 Jan 2009" TransactionType="Insert">
"""
params = dict(ItemOID=self.itemoid)
if self.transaction_type is not None:
params["TransactionType"] = self.transaction_type
if self.value in [None, '']:
params['IsNull'] = 'Yes'
else:
params['Value'] = str(self.value)
if self.specify_value is not None:
params['mdsol:SpecifyValue'] = self.specify_value
if self.lock is not None:
params['mdsol:Lock'] = bool_to_yes_no(self.lock)
if self.freeze is not None:
params['mdsol:Freeze'] = bool_to_yes_no(self.freeze)
if self.verify is not None:
params['mdsol:Verify'] = bool_to_yes_no(self.verify)
builder.start("ItemData", params)
if self.audit_record is not None:
self.audit_record.build(builder)
# Measurement unit ref must be after audit record or RWS complains
if self.measurement_unit_ref is not None:
self.measurement_unit_ref.build(builder)
for query in self.queries:
query.build(builder)
builder.end("ItemData")
def __lshift__(self, other):
if not isinstance(other, (MeasurementUnitRef, AuditRecord, MdsolQuery,)):
raise ValueError("ItemData object can only receive MeasurementUnitRef, AuditRecord or MdsolQuery objects")
self.set_single_attribute(other, MeasurementUnitRef, 'measurement_unit_ref')
self.set_single_attribute(other, AuditRecord, 'audit_record')
self.set_list_attribute(other, MdsolQuery, 'queries')
return other
class ItemGroupData(TransactionalElement):
"""Models the ODM ItemGroupData object.
Note no name for the ItemGroupData element is required. This is built automatically by the form.
"""
ALLOWED_TRANSACTION_TYPES = ['Insert', 'Update', 'Upsert', 'Context']
def __init__(self, transaction_type=None, item_group_repeat_key=None, whole_item_group=False):
super(ItemGroupData, self).__init__(transaction_type)
self.item_group_repeat_key = item_group_repeat_key
self.whole_item_group = whole_item_group
self.items = {}
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, ItemData):
raise ValueError("ItemGroupData object can only receive ItemData object")
if other.itemoid in self.items:
raise ValueError("ItemGroupData object with that itemoid is already in the ItemGroupData object")
self.items[other.itemoid] = other
return other
def build(self, builder, formname):
"""Build XML by appending to builder
"""
params = dict(ItemGroupOID=formname)
if self.transaction_type is not None:
params["TransactionType"] = self.transaction_type
if self.item_group_repeat_key is not None:
params["ItemGroupRepeatKey"] = str(
self.item_group_repeat_key) # may be @context for transaction type upsert or context
params["mdsol:Submission"] = "WholeItemGroup" if self.whole_item_group else "SpecifiedItemsOnly"
builder.start("ItemGroupData", params)
# Ask children
for item in self.items.values():
item.build(builder)
builder.end("ItemGroupData")
class FormData(TransactionalElement):
"""Models the ODM FormData object"""
ALLOWED_TRANSACTION_TYPES = ['Insert', 'Update']
def __init__(self, formoid, transaction_type=None, form_repeat_key=None):
super(FormData, self).__init__(transaction_type)
self.formoid = formoid
self.form_repeat_key = form_repeat_key
self.itemgroups = []
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, ItemGroupData):
raise ValueError("FormData object can only receive ItemGroupData object")
self.set_list_attribute(other, ItemGroupData, 'itemgroups')
return other
def build(self, builder):
"""Build XML by appending to builder
<FormData FormOID="MH" TransactionType="Update">
"""
params = dict(FormOID=self.formoid)
if self.transaction_type is not None:
params["TransactionType"] = self.transaction_type
if self.form_repeat_key is not None:
params["FormRepeatKey"] = str(self.form_repeat_key)
builder.start("FormData", params)
# Ask children
for itemgroup in self.itemgroups:
itemgroup.build(builder, self.formoid)
builder.end("FormData")
class StudyEventData(TransactionalElement):
"""Models the ODM StudyEventData object"""
ALLOWED_TRANSACTION_TYPES = ['Insert', 'Update', 'Remove', 'Context']
def __init__(self, study_event_oid, transaction_type="Update", study_event_repeat_key=None):
super(StudyEventData, self).__init__(transaction_type)
self.study_event_oid = study_event_oid
self.study_event_repeat_key = study_event_repeat_key
self.forms = []
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, FormData):
raise ValueError("StudyEventData object can only receive FormData object")
self.set_list_attribute(other, FormData, 'forms')
return other
def build(self, builder):
"""Build XML by appending to builder
<StudyEventData StudyEventOID="SCREENING" StudyEventRepeatKey="1" TransactionType="Update">
"""
params = dict(StudyEventOID=self.study_event_oid)
if self.transaction_type is not None:
params["TransactionType"] = self.transaction_type
if self.study_event_repeat_key is not None:
params["StudyEventRepeatKey"] = self.study_event_repeat_key
builder.start("StudyEventData", params)
# Ask children
for form in self.forms:
form.build(builder)
builder.end("StudyEventData")
class SubjectData(TransactionalElement):
"""Models the ODM SubjectData and ODM SiteRef objects"""
ALLOWED_TRANSACTION_TYPES = ['Insert', 'Update', 'Upsert']
def __init__(self, sitelocationoid, subject_key, subject_key_type="SubjectName", transaction_type="Update"):
super(SubjectData, self).__init__(transaction_type)
self.sitelocationoid = sitelocationoid
self.subject_key = subject_key
self.subject_key_type = subject_key_type
self.study_events = [] # Can have collection
self.audit_record = None
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, (StudyEventData, AuditRecord,)):
raise ValueError("SubjectData object can only receive StudyEventData or AuditRecord object")
self.set_list_attribute(other, StudyEventData, 'study_events')
self.set_single_attribute(other, AuditRecord, 'audit_record')
return other
def build(self, builder):
"""Build XML by appending to builder"""
params = dict(SubjectKey=self.subject_key)
params['mdsol:SubjectKeyType'] = self.subject_key_type
if self.transaction_type is not None:
params["TransactionType"] = self.transaction_type
builder.start("SubjectData", params)
# Ask children
if self.audit_record is not None:
self.audit_record.build(builder)
builder.start("SiteRef", {'LocationOID': self.sitelocationoid})
builder.end("SiteRef")
for event in self.study_events:
event.build(builder)
builder.end("SubjectData")
class ClinicalData(ODMElement):
"""Models the ODM ClinicalData object"""
def __init__(self, projectname, environment, metadata_version_oid="1"):
self.projectname = projectname
self.environment = environment
self.metadata_version_oid = metadata_version_oid
self.subject_data = None
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, SubjectData):
raise ValueError("ClinicalData object can only receive SubjectData object")
self.set_single_attribute(other, SubjectData, 'subject_data')
return other
def build(self, builder):
"""Build XML by appending to builder"""
params = dict(MetaDataVersionOID=self.metadata_version_oid,
StudyOID="%s (%s)" % (self.projectname, self.environment,),
)
builder.start("ClinicalData", params)
# Ask children
if self.subject_data is not None:
self.subject_data.build(builder)
builder.end("ClinicalData")
class ODM(ODMElement):
"""Models the ODM object"""
FILETYPE_TRANSACTIONAL = 'Transactional'
FILETYPE_SNAPSHOT = 'Snapshot'
def __init__(self, originator, description="", creationdatetime=now_to_iso8601(), fileoid=None, filetype=None):
self.originator = originator # Required
self.description = description
self.creationdatetime = creationdatetime
# filetype will always be "Transactional"
# ODM version will always be 1.3
# Granularity="SingleSubject"
# AsOfDateTime always OMITTED (it's optional)
self.clinical_data = None
self.study = None
self.filetype = ODM.FILETYPE_TRANSACTIONAL if filetype is None else filetype
# Create unique fileoid if none given
self.fileoid = str(uuid.uuid4()) if fileoid is None else fileoid
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, (ClinicalData, Study,)):
raise ValueError("ODM object can only receive ClinicalData or Study object")
self.set_single_attribute(other, ClinicalData, 'clinical_data')
self.set_single_attribute(other, Study, 'study')
return other
def getroot(self):
"""Build XML object, return the root"""
builder = ET.TreeBuilder()
params = dict(ODMVersion="1.3",
FileType=self.filetype,
CreationDateTime=self.creationdatetime,
Originator=self.originator,
FileOID=self.fileoid,
xmlns="http://www.cdisc.org/ns/odm/v1.3",
)
params['xmlns:mdsol'] = "http://www.mdsol.com/ns/odm/metadata"
if self.description:
params['Description'] = self.description
builder.start("ODM", params)
# Ask the children
if self.study is not None:
self.study.build(builder)
if self.clinical_data is not None:
self.clinical_data.build(builder)
builder.end("ODM")
return builder.close()
def __str__(self):
doc = self.getroot()
indent(doc)
header = '<?xml version="1.0" encoding="utf-8" ?>\n'
return header + ET.tostring(doc, encoding='utf-8').decode('utf-8')
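# Illustrative end-to-end example (all OIDs and values are hypothetical):
#
#   odm = ODM("test system")(
#       ClinicalData("Mediflex", "DEV")(
#           SubjectData("MDSOL", "SUBJECT001")(
#               StudyEventData("SCREENING")(
#                   FormData("DM", transaction_type="Update")(
#                       ItemGroupData()(
#                           ItemData("SUBJINIT", "AAA"),
#                       )
#                   )
#               )
#           )
#       )
#   )
#   print(str(odm))  # serialize the assembled ODM document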
# -----------------------------------------------------------------------------------------------------------------------
# Metadata Objects
class GlobalVariables(ODMElement):
"""GlobalVariables Metadata element"""
def __init__(self, protocol_name, name=None, description=''):
"""Name and description are not important. protocol_name maps to the Rave project name"""
self.protocol_name = protocol_name
self.name = name if name is not None else protocol_name
self.description = description
def build(self, builder):
"""Build XML by appending to builder"""
builder.start("GlobalVariables", {})
make_element(builder, 'StudyName', self.name)
make_element(builder, 'StudyDescription', self.description)
make_element(builder, 'ProtocolName', self.protocol_name)
builder.end("GlobalVariables")
class TranslatedText(ODMElement):
"""Represents a language and a translated text for that language"""
def __init__(self, text, lang=None):
self.text = text
self.lang = lang
def build(self, builder):
"""Build XML by appending to builder"""
params = {}
if self.lang is not None:
params['xml:lang'] = self.lang
builder.start("TranslatedText", params)
builder.data(self.text)
builder.end("TranslatedText")
class Symbol(ODMElement):
def __init__(self):
self.translations = []
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, TranslatedText):
raise ValueError("Symbol can only accept TranslatedText objects as children")
self.set_list_attribute(other, TranslatedText, 'translations')
return other
def build(self, builder):
"""Build XML by appending to builder"""
builder.start("Symbol", {})
for child in self.translations:
child.build(builder)
builder.end("Symbol")
class MeasurementUnit(ODMElement):
"""A measurement unit"""
def __init__(self,
oid,
name,
unit_dictionary_name=None,
constant_a=1,
constant_b=1,
constant_c=0,
constant_k=0,
standard_unit=False):
self.symbols = []
self.oid = oid
self.name = name
self.unit_dictionary_name = unit_dictionary_name
self.constant_a = constant_a
self.constant_b = constant_b
self.constant_c = constant_c
self.constant_k = constant_k
self.standard_unit = standard_unit
def build(self, builder):
"""Build XML by appending to builder"""
params = dict(OID=self.oid,
Name=self.name)
if self.unit_dictionary_name:
params['mdsol:UnitDictionaryName'] = self.unit_dictionary_name
for suffix in ['A', 'B', 'C', 'K']:
val = getattr(self, 'constant_{0}'.format(suffix.lower()))
params['mdsol:Constant{0}'.format(suffix)] = str(val)
if self.standard_unit:
params['mdsol:StandardUnit'] = 'Yes'
builder.start("MeasurementUnit", params)
for child in self.symbols:
child.build(builder)
builder.end("MeasurementUnit")
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, Symbol):
raise ValueError("MeasurementUnits object can only receive Symbol object")
self.set_list_attribute(other, Symbol, 'symbols')
return other
class BasicDefinitions(ODMElement):
"""Container for Measurement units"""
def __init__(self):
self.measurement_units = []
def build(self, builder):
"""Build XML by appending to builder"""
builder.start("BasicDefinitions", {})
for child in self.measurement_units:
child.build(builder)
builder.end("BasicDefinitions")
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, MeasurementUnit):
raise ValueError("BasicDefinitions object can only receive MeasurementUnit object")
self.measurement_units.append(other)
return other
class StudyEventRef(ODMElement):
def __init__(self, oid, order_number, mandatory):
self.oid = oid
self.order_number = order_number
self.mandatory = mandatory
def build(self, builder):
"""Build XML by appending to builder"""
params = dict(StudyEventOID=self.oid,
OrderNumber=str(self.order_number),
Mandatory=bool_to_yes_no(self.mandatory))
builder.start("StudyEventRef", params)
builder.end("StudyEventRef")
class Protocol(ODMElement):
"""Protocol child of MetaDataVersion, holder of StudyEventRefs"""
def __init__(self):
self.study_event_refs = []
def build(self, builder):
"""Build XML by appending to builder"""
builder.start("Protocol", {})
for child in self.study_event_refs:
child.build(builder)
builder.end("Protocol")
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, (StudyEventRef,)):
raise ValueError('Protocol cannot accept a {0} as a child element'.format(other.__class__.__name__))
self.set_list_attribute(other, StudyEventRef, 'study_event_refs')
return other
class FormRef(ODMElement):
def __init__(self, oid, order_number, mandatory):
self.oid = oid
self.order_number = order_number
self.mandatory = mandatory
def build(self, builder):
params = dict(FormOID=self.oid,
OrderNumber=str(self.order_number),
Mandatory=bool_to_yes_no(self.mandatory)
)
builder.start('FormRef', params)
builder.end('FormRef')
class StudyEventDef(ODMElement):
# Event types
SCHEDULED = 'Scheduled'
UNSCHEDULED = 'Unscheduled'
COMMON = 'Common'
def __init__(self, oid, name, repeating, event_type,
category=None,
access_days=None,
start_win_days=None,
target_days=None,
end_win_days=None,
overdue_days=None,
close_days=None
):
self.oid = oid
self.name = name
self.repeating = repeating
self.event_type = event_type
self.category = category
self.access_days = access_days
self.start_win_days = start_win_days
self.target_days = target_days
self.end_win_days = end_win_days
self.overdue_days = overdue_days
self.close_days = close_days
self.formrefs = []
def build(self, builder):
"""Build XML by appending to builder"""
params = dict(OID=self.oid, Name=self.name,
Repeating=bool_to_yes_no(self.repeating),
Type=self.event_type)
if self.category is not None:
params['Category'] = self.category
if self.access_days is not None:
params['mdsol:AccessDays'] = str(self.access_days)
if self.start_win_days is not None:
params['mdsol:StartWinDays'] = str(self.start_win_days)
if self.target_days is not None:
params['mdsol:TargetDays'] = str(self.target_days)
if self.end_win_days is not None:
params['mdsol:EndWinDays'] = str(self.end_win_days)
if self.overdue_days is not None:
params['mdsol:OverDueDays'] = str(self.overdue_days)
if self.close_days is not None:
params['mdsol:CloseDays'] = str(self.close_days)
builder.start("StudyEventDef", params)
for formref in self.formrefs:
formref.build(builder)
builder.end("StudyEventDef")
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, (FormRef,)):
raise ValueError('StudyEventDef cannot accept a {0} as a child element'.format(other.__class__.__name__))
self.set_list_attribute(other, FormRef, 'formrefs')
return other
class ItemGroupRef(ODMElement):
def __init__(self, oid, order_number, mandatory=True):
self.oid = oid
self.order_number = order_number
self.mandatory = mandatory
def build(self, builder):
params = dict(ItemGroupOID=self.oid,
OrderNumber=str(self.order_number),
Mandatory=bool_to_yes_no(self.mandatory),
)
builder.start("ItemGroupRef", params)
builder.end("ItemGroupRef")
class MdsolHelpText(ODMElement):
"""Help element for FormDefs and ItemDefs"""
def __init__(self, lang, content):
self.lang = lang
self.content = content
def build(self, builder):
builder.start('mdsol:HelpText', {'xml:lang': self.lang})
builder.data(self.content)
builder.end('mdsol:HelpText')
class MdsolViewRestriction(ODMElement):
"""ViewRestriction for FormDefs and ItemDefs"""
def __init__(self, rolename):
self.rolename = rolename
def build(self, builder):
builder.start('mdsol:ViewRestriction', {})
builder.data(self.rolename)
builder.end('mdsol:ViewRestriction')
class MdsolEntryRestriction(ODMElement):
"""EntryRestriction for FormDefs and ItemDefs"""
def __init__(self, rolename):
self.rolename = rolename
def build(self, builder):
builder.start('mdsol:EntryRestriction', {})
builder.data(self.rolename)
builder.end('mdsol:EntryRestriction')
class FormDef(ODMElement):
LOG_PORTRAIT = 'Portrait'
LOG_LANDSCAPE = 'Landscape'
DDE_MUSTNOT = 'MustNotDDE'
DDE_MAY = 'MayDDE'
DDE_MUST = 'MustDDE'
NOLINK = 'NoLink'
LINK_NEXT = 'LinkNext'
LINK_CUSTOM = 'LinkCustom'
def __init__(self, oid, name,
repeating=False,
order_number=None,
active=True,
template=False,
signature_required=False,
log_direction=LOG_PORTRAIT,
double_data_entry=DDE_MUSTNOT,
confirmation_style=NOLINK,
link_study_event_oid=None,
link_form_oid=None
):
self.oid = oid
self.name = name
self.order_number = order_number
self.repeating = repeating # Not actually used by Rave.
self.active = active
self.template = template
self.signature_required = signature_required
self.log_direction = log_direction
self.double_data_entry = double_data_entry
self.confirmation_style = confirmation_style
self.link_study_event_oid = link_study_event_oid
self.link_form_oid = link_form_oid
self.itemgroup_refs = []
        self.helptexts = []  # not clear from the docs that Rave accepts multiple help texts
self.view_restrictions = []
self.entry_restrictions = []
def build(self, builder):
params = dict(OID=self.oid,
Name=self.name,
Repeating=bool_to_yes_no(self.repeating)
)
if self.order_number is not None:
params['mdsol:OrderNumber'] = str(self.order_number)
if self.active is not None:
params['mdsol:Active'] = bool_to_yes_no(self.active)
params['mdsol:Template'] = bool_to_yes_no(self.template)
params['mdsol:SignatureRequired'] = bool_to_yes_no(self.signature_required)
params['mdsol:LogDirection'] = self.log_direction
params['mdsol:DoubleDataEntry'] = self.double_data_entry
params['mdsol:ConfirmationStyle'] = self.confirmation_style
if self.link_study_event_oid:
params['mdsol:LinkStudyEventOID'] = self.link_study_event_oid
if self.link_form_oid:
params['mdsol:LinkFormOID'] = self.link_form_oid
builder.start("FormDef", params)
for itemgroup_ref in self.itemgroup_refs:
itemgroup_ref.build(builder)
for helptext in self.helptexts:
helptext.build(builder)
for view_restriction in self.view_restrictions:
view_restriction.build(builder)
for entry_restriction in self.entry_restrictions:
entry_restriction.build(builder)
builder.end("FormDef")
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, (ItemGroupRef, MdsolHelpText, MdsolViewRestriction, MdsolEntryRestriction,)):
            raise ValueError('FormDef cannot accept a {0} as a child element'.format(other.__class__.__name__))
self.set_list_attribute(other, ItemGroupRef, 'itemgroup_refs')
self.set_list_attribute(other, MdsolHelpText, 'helptexts')
self.set_list_attribute(other, MdsolViewRestriction, 'view_restrictions')
self.set_list_attribute(other, MdsolEntryRestriction, 'entry_restrictions')
return other
class MdsolLabelRef(ODMElement):
"""A reference to a label on a form"""
def __init__(self, oid, order_number):
self.oid = oid
self.order_number = order_number
def build(self, builder):
params = dict(LabelOID=self.oid,
OrderNumber=str(self.order_number),
)
builder.start('mdsol:LabelRef', params)
builder.end('mdsol:LabelRef')
class MdsolAttribute(ODMElement):
def __init__(self, namespace, name, value, transaction_type='Insert'):
self.namespace = namespace
self.name = name
self.value = value
self.transaction_type = transaction_type
def build(self, builder):
params = dict(Namespace=self.namespace,
Name=self.name,
Value=self.value,
TransactionType=self.transaction_type,
)
builder.start('mdsol:Attribute', params)
builder.end('mdsol:Attribute')
class ItemRef(ODMElement):
def __init__(self, oid, order_number, mandatory=False, key_sequence=None,
imputation_method_oid=None, role=None, role_codelist_oid=None):
self.oid = oid
self.order_number = order_number
self.mandatory = mandatory
self.key_sequence = key_sequence
self.imputation_method_oid = imputation_method_oid
self.role = role
self.role_codelist_oid = role_codelist_oid
self.attributes = []
def build(self, builder):
params = dict(ItemOID=self.oid,
OrderNumber=str(self.order_number),
Mandatory=bool_to_yes_no(self.mandatory)
)
if self.key_sequence is not None:
params['KeySequence'] = str(self.key_sequence)
if self.imputation_method_oid is not None:
params['ImputationMethodOID'] = self.imputation_method_oid
if self.role is not None:
params['Role'] = self.role
if self.role_codelist_oid is not None:
params['RoleCodeListOID'] = self.role_codelist_oid
builder.start('ItemRef', params)
for attribute in self.attributes:
attribute.build(builder)
builder.end('ItemRef')
def __lshift__(self, other):
"""ItemRef can accept MdsolAttribute(s)"""
        if not isinstance(other, (MdsolAttribute,)):
raise ValueError('ItemRef cannot accept a {0} as a child element'.format(other.__class__.__name__))
self.set_list_attribute(other, MdsolAttribute, 'attributes')
return other
class ItemGroupDef(ODMElement):
def __init__(self, oid, name, repeating=False, is_reference_data=False, sas_dataset_name=None,
domain=None, origin=None, role=None, purpose=None, comment=None):
self.oid = oid
self.name = name
self.repeating = repeating
self.is_reference_data = is_reference_data
self.sas_dataset_name = sas_dataset_name
self.domain = domain
self.origin = origin
self.role = role
self.purpose = purpose
self.comment = comment
self.item_refs = []
self.label_refs = []
def build(self, builder):
params = dict(OID=self.oid,
Name=self.name,
Repeating=bool_to_yes_no(self.repeating),
IsReferenceData=bool_to_yes_no(self.is_reference_data)
)
if self.sas_dataset_name is not None:
params['SASDatasetName'] = self.sas_dataset_name
if self.domain is not None:
params['Domain'] = self.domain
if self.origin is not None:
params['Origin'] = self.origin
if self.role is not None:
params['Role'] = self.role
if self.purpose is not None:
params['Purpose'] = self.purpose
if self.comment is not None:
params['Comment'] = self.comment
builder.start('ItemGroupDef', params)
for itemref in self.item_refs:
itemref.build(builder)
# Extensions always listed AFTER core elements
for labelref in self.label_refs:
labelref.build(builder)
builder.end('ItemGroupDef')
def __lshift__(self, other):
"""ItemGroupDef can accept ItemRef and LabelRef"""
if not isinstance(other, (ItemRef, MdsolLabelRef)):
raise ValueError('ItemGroupDef cannot accept a {0} as a child element'.format(other.__class__.__name__))
self.set_list_attribute(other, ItemRef, 'item_refs')
self.set_list_attribute(other, MdsolLabelRef, 'label_refs')
return other
class Question(ODMElement):
def __init__(self):
self.translations = []
def __lshift__(self, other):
"""Override << operator"""
        if not isinstance(other, (TranslatedText,)):
raise ValueError('Question cannot accept a {0} as a child element'.format(other.__class__.__name__))
self.set_list_attribute(other, TranslatedText, 'translations')
return other
def build(self, builder):
"""Questions can contain translations"""
builder.start('Question', {})
for translation in self.translations:
translation.build(builder)
builder.end('Question')
class MeasurementUnitRef(ODMElement):
def __init__(self, oid, order_number=None):
self.oid = oid
self.order_number = order_number
def build(self, builder):
params = dict(MeasurementUnitOID=self.oid)
if self.order_number is not None:
params['mdsol:OrderNumber'] = str(self.order_number)
builder.start('MeasurementUnitRef', params)
builder.end('MeasurementUnitRef')
class MdsolHeaderText(ODMElement):
"""Header text for ItemDef when showed in grid"""
def __init__(self, content, lang=None):
self.content = content
self.lang = lang
def build(self, builder):
params = {}
if self.lang is not None:
params['xml:lang'] = self.lang
builder.start('mdsol:HeaderText', params)
builder.data(self.content)
builder.end('mdsol:HeaderText')
class CodeListRef(ODMElement):
"""CodeListRef: a reference a codelist within an ItemDef"""
def __init__(self, oid):
self.oid = oid
def build(self, builder):
builder.start('CodeListRef', {'CodeListOID': self.oid})
builder.end('CodeListRef')
class MdsolLabelDef(ODMElement):
"""Label definition"""
def __init__(self, oid, name, field_number=None):
self.oid = oid
self.name = name
self.field_number = field_number
self.help_texts = []
self.translations = []
self.view_restrictions = []
def build(self, builder):
params = dict(OID=self.oid, Name=self.name)
if self.field_number is not None:
params['FieldNumber'] = str(self.field_number)
builder.start("mdsol:LabelDef", params)
for translation in self.translations:
translation.build(builder)
for view_restriction in self.view_restrictions:
view_restriction.build(builder)
builder.end("mdsol:LabelDef")
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, (MdsolViewRestriction, TranslatedText)):
raise ValueError('MdsolLabelDef cannot accept a {0} as a child element'.format(other.__class__.__name__))
self.set_list_attribute(other, TranslatedText, 'translations')
self.set_list_attribute(other, MdsolViewRestriction, 'view_restrictions')
return other
class MdsolReviewGroup(ODMElement):
"""Maps to Rave review groups for an Item"""
def __init__(self, name):
self.name = name
def build(self, builder):
builder.start('mdsol:ReviewGroup', {})
builder.data(self.name)
builder.end('mdsol:ReviewGroup')
class CheckValue(ODMElement):
"""A value in a RangeCheck"""
def __init__(self, value):
self.value = value
def build(self, builder):
builder.start('CheckValue', {})
builder.data(str(self.value))
builder.end('CheckValue')
class RangeCheck(ODMElement):
"""
Rangecheck in Rave relates to QueryHigh QueryLow and NonConformandHigh and NonComformanLow
for other types of RangeCheck, need to use an EditCheck (part of Rave's extensions to ODM)
"""
def __init__(self, comparator, soft_hard):
self._comparator = None
self.comparator = comparator
self._soft_hard = None
self.soft_hard = soft_hard
self.check_value = None
self.measurement_unit_ref = None
@property
def comparator(self):
return self._comparator
@comparator.setter
def comparator(self, value):
if not isinstance(value, RangeCheckComparatorType):
raise AttributeError("%s comparator is invalid in RangeCheck." % (value,))
self._comparator = value
@property
def soft_hard(self):
return self._soft_hard
@soft_hard.setter
def soft_hard(self, value):
if not isinstance(value, RangeCheckType):
raise AttributeError("%s soft_hard invalid in RangeCheck." % (value,))
self._soft_hard = value
def build(self, builder):
params = dict(SoftHard=self.soft_hard.value, Comparator=self.comparator.value)
builder.start("RangeCheck", params)
if self.check_value is not None:
self.check_value.build(builder)
if self.measurement_unit_ref is not None:
self.measurement_unit_ref.build(builder)
builder.end("RangeCheck")
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, (CheckValue, MeasurementUnitRef,)):
raise ValueError('RangeCheck cannot accept a {0} as a child element'.format(other.__class__.__name__))
self.set_single_attribute(other, CheckValue, 'check_value')
        self.set_single_attribute(other, MeasurementUnitRef, 'measurement_unit_ref')
        return other
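def _example_range_check():
    """Hedged usage sketch (added annotation, not in the original source): shows how
    the << operator assembles a RangeCheck. RangeCheckComparatorType.LessThanEqualTo
    and RangeCheckType.Soft are assumed member names of the enums imported earlier in
    this module, and 'MU_KG' is a hypothetical MeasurementUnit OID."""
    check = RangeCheck(comparator=RangeCheckComparatorType.LessThanEqualTo,
                       soft_hard=RangeCheckType.Soft)
    check << CheckValue(70)               # single bound for the check
    check << MeasurementUnitRef('MU_KG')  # unit the bound is expressed in
    return check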
class ItemDef(ODMElement):
VALID_DATATYPES = [DataType.Text, DataType.Integer, DataType.Float, DataType.Date,
DataType.DateTime, DataType.Time]
def __init__(self, oid, name, datatype, length,
significant_digits=None,
sas_field_name=None,
sds_var_name=None,
origin=None, # Not mapped in Rave
comment=None,
active=True,
control_type=None,
acceptable_file_extensions=None,
indent_level=0,
source_document_verify=False,
default_value=None,
sas_format=None,
sas_label=None,
query_future_date=False,
visible=True,
translation_required=False,
query_non_conformance=False,
other_visits=False,
can_set_item_group_date=False,
can_set_form_date=False,
can_set_study_event_date=False,
can_set_subject_date=False,
visual_verify=False,
does_not_break_signature=False,
date_time_format=None,
field_number=None,
variable_oid=None
):
self.oid = oid
self.name = name
if datatype not in ItemDef.VALID_DATATYPES:
raise AttributeError('{0} is not a valid datatype!'.format(datatype))
if control_type is not None:
if not isinstance(control_type, ControlType):
raise AttributeError("{0} is not a valid Control Type".format(control_type))
self.datatype = datatype
self.length = length
self.significant_digits = significant_digits
self.sas_field_name = sas_field_name
self.sds_var_name = sds_var_name
self.origin = origin
self.comment = comment
self.active = active
self.control_type = control_type
self.acceptable_file_extensions = acceptable_file_extensions
self.indent_level = indent_level
self.source_document_verify = source_document_verify
self.default_value = default_value
self.sas_format = sas_format
self.sas_label = sas_label
self.query_future_date = query_future_date
self.visible = visible
self.translation_required = translation_required
self.query_non_conformance = query_non_conformance
self.other_visits = other_visits
self.can_set_item_group_date = can_set_item_group_date
self.can_set_form_date = can_set_form_date
self.can_set_study_event_date = can_set_study_event_date
self.can_set_subject_date = can_set_subject_date
self.visual_verify = visual_verify
self.does_not_break_signature = does_not_break_signature
self.date_time_format = date_time_format
self.field_number = field_number
self.variable_oid = variable_oid
self.question = None
self.codelistref = None
self.measurement_unit_refs = []
self.help_texts = []
self.view_restrictions = []
self.entry_restrictions = []
self.header_text = None
self.review_groups = []
self.range_checks = []
def build(self, builder):
"""Build XML by appending to builder"""
params = dict(OID=self.oid,
Name=self.name,
DataType=self.datatype.value,
Length=str(self.length),
)
if self.date_time_format is not None:
params['mdsol:DateTimeFormat'] = self.date_time_format
params['mdsol:Active'] = bool_to_yes_no(self.active)
if self.significant_digits is not None:
params['SignificantDigits'] = str(self.significant_digits)
if self.sas_field_name is not None:
params['SASFieldName'] = self.sas_field_name
if self.sds_var_name is not None:
params['SDSVarName'] = self.sds_var_name
if self.origin is not None:
params['Origin'] = self.origin
if self.comment is not None:
params['Comment'] = self.comment
if self.control_type is not None:
params['mdsol:ControlType'] = self.control_type.value
if self.acceptable_file_extensions is not None:
params['mdsol:AcceptableFileExtensions'] = self.acceptable_file_extensions
if self.default_value is not None:
params['mdsol:DefaultValue'] = str(self.default_value)
params['mdsol:SourceDocument'] = bool_to_yes_no(self.source_document_verify)
params['mdsol:IndentLevel'] = str(self.indent_level)
if self.sas_format is not None:
params['mdsol:SASFormat'] = self.sas_format
if self.sas_label is not None:
params['mdsol:SASLabel'] = self.sas_label
params['mdsol:QueryFutureDate'] = bool_to_yes_no(self.query_future_date)
params['mdsol:Visible'] = bool_to_yes_no(self.visible)
params['mdsol:TranslationRequired'] = bool_to_yes_no(self.translation_required)
params['mdsol:QueryNonConformance'] = bool_to_yes_no(self.query_non_conformance)
params['mdsol:OtherVisits'] = bool_to_yes_no(self.other_visits)
params['mdsol:CanSetItemGroupDate'] = bool_to_yes_no(self.can_set_item_group_date)
params['mdsol:CanSetFormDate'] = bool_to_yes_no(self.can_set_form_date)
params['mdsol:CanSetStudyEventDate'] = bool_to_yes_no(self.can_set_study_event_date)
params['mdsol:CanSetSubjectDate'] = bool_to_yes_no(self.can_set_subject_date)
params['mdsol:VisualVerify'] = bool_to_yes_no(self.visual_verify)
params['mdsol:DoesNotBreakSignature'] = bool_to_yes_no(self.does_not_break_signature)
if self.field_number is not None:
params['mdsol:FieldNumber'] = self.field_number
if self.variable_oid is not None:
params['mdsol:VariableOID'] = self.variable_oid
builder.start("ItemDef", params)
if self.question is not None:
self.question.build(builder)
if self.codelistref is not None:
self.codelistref.build(builder)
for mur in self.measurement_unit_refs:
mur.build(builder)
for range_check in self.range_checks:
range_check.build(builder)
if self.header_text is not None:
self.header_text.build(builder)
for view_restriction in self.view_restrictions:
view_restriction.build(builder)
for entry_restriction in self.entry_restrictions:
entry_restriction.build(builder)
for help_text in self.help_texts:
help_text.build(builder)
for review_group in self.review_groups:
review_group.build(builder)
builder.end("ItemDef")
def __lshift__(self, other):
"""Override << operator"""
        # ODM content model for ItemDef (only part of it is accepted below):
        # Question?, ExternalQuestion?, MeasurementUnitRef*, RangeCheck*, CodeListRef?,
        # Role*, Alias*, mdsol:HeaderText?, mdsol:HelpText?, mdsol:ViewRestriction*,
        # mdsol:EntryRestriction*, mdsol:ReviewGroup*, mdsol:Label?
if not isinstance(other, (MdsolHelpText, MdsolEntryRestriction, MdsolViewRestriction, Question,
MeasurementUnitRef, CodeListRef, MdsolHeaderText, MdsolReviewGroup, RangeCheck)):
raise ValueError('ItemDef cannot accept a {0} as a child element'.format(other.__class__.__name__))
self.set_single_attribute(other, Question, 'question')
self.set_single_attribute(other, CodeListRef, 'codelistref')
self.set_single_attribute(other, MdsolHeaderText, 'header_text')
self.set_list_attribute(other, RangeCheck, 'range_checks')
self.set_list_attribute(other, MeasurementUnitRef, 'measurement_unit_refs')
self.set_list_attribute(other, MdsolHelpText, 'help_texts')
self.set_list_attribute(other, MdsolViewRestriction, 'view_restrictions')
self.set_list_attribute(other, MdsolEntryRestriction, 'entry_restrictions')
self.set_list_attribute(other, MdsolReviewGroup, 'review_groups')
return other
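def _example_item_def():
    """Hedged usage sketch (added annotation, not in the original source): builds an
    ItemDef carrying a Question and a CodeListRef. DataType.Text is one of the
    VALID_DATATYPES above; TranslatedText's constructor signature (text, then an
    optional language) is an assumption about the class defined earlier in this
    module, and all OIDs are made up."""
    item = ItemDef('I_SEX', 'Sex', DataType.Text, 1)
    question = Question()
    question << TranslatedText('Sex at birth')
    item << question
    item << CodeListRef('CL_SEX')
    return item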
class Decode(ODMElement):
def __init__(self):
self.translations = []
def build(self, builder):
builder.start("Decode", {})
for translation in self.translations:
translation.build(builder)
builder.end("Decode")
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, TranslatedText):
raise ValueError('Decode cannot accept child of type {0}'.format(other.__class__.__name__))
self.translations.append(other)
return other
class CodeListItem(ODMElement):
def __init__(self, coded_value, order_number=None, specify=False):
self.coded_value = coded_value
self.order_number = order_number
self.specify = specify
self.decode = None
def build(self, builder):
params = dict(CodedValue=self.coded_value)
if self.order_number is not None:
params['mdsol:OrderNumber'] = str(self.order_number)
if self.specify:
params['mdsol:Specify'] = "Yes"
builder.start("CodeListItem", params)
if self.decode is not None:
self.decode.build(builder)
builder.end("CodeListItem")
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, Decode):
            raise ValueError('CodeListItem cannot accept child of type {0}'.format(other.__class__.__name__))
self.set_single_attribute(other, Decode, 'decode')
return other
class CodeList(ODMElement):
"""A container for CodeListItems equivalent of Rave Dictionary"""
VALID_DATATYPES = [DataType.Integer, DataType.Text, DataType.Float, DataType.String]
def __init__(self, oid, name, datatype, sas_format_name=None):
self.oid = oid
self.name = name
if datatype not in CodeList.VALID_DATATYPES:
raise ValueError("{0} is not a valid CodeList datatype".format(datatype))
self.datatype = datatype
self.sas_format_name = sas_format_name
self.codelist_items = []
def build(self, builder):
params = dict(OID=self.oid,
Name=self.name,
DataType=self.datatype.value)
if self.sas_format_name is not None:
params['SASFormatName'] = self.sas_format_name
builder.start("CodeList", params)
for item in self.codelist_items:
item.build(builder)
builder.end("CodeList")
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, CodeListItem):
            raise ValueError('CodeList cannot accept child of type {0}'.format(other.__class__.__name__))
self.set_list_attribute(other, CodeListItem, 'codelist_items')
return other
class MdsolConfirmationMessage(ODMElement):
"""Form is saved confirmation message"""
def __init__(self, message, lang=None):
self.message = message
self.lang = lang
def build(self, builder):
params = {}
if self.lang:
params['xml:lang'] = self.lang
builder.start('mdsol:ConfirmationMessage', params)
builder.data(self.message)
builder.end('mdsol:ConfirmationMessage')
class MdsolDerivationStep(ODMElement):
"""A derivation step modeled after the Architect Loader definition.
Do not use directly, use appropriate subclasses.
"""
VALID_STEPS = VALID_DERIVATION_STEPS
def __init__(self,
variable_oid=None,
data_format=None,
form_oid=None,
folder_oid=None,
field_oid=None,
value=None,
function=None,
custom_function=None,
record_position=None,
form_repeat_number=None,
folder_repeat_number=None,
logical_record_position=None
):
self.variable_oid = variable_oid
self.data_format = data_format
self.form_oid = form_oid
self.folder_oid = folder_oid
self.field_oid = field_oid
self.value = value
self._function = None
self.function = function
self.custom_function = custom_function
self.record_position = record_position
self.form_repeat_number = form_repeat_number
self.folder_repeat_number = folder_repeat_number
self.logical_record_position = logical_record_position
@property
def function(self):
return self._function
@function.setter
def function(self, value):
if value is not None:
if value not in MdsolDerivationStep.VALID_STEPS:
raise AttributeError("Invalid derivation function %s" % value)
self._function = value
def build(self, builder):
params = dict()
if self.variable_oid is not None:
params['VariableOID'] = self.variable_oid
if self.data_format is not None:
params['DataFormat'] = self.data_format
if self.folder_oid is not None:
params['FolderOID'] = self.folder_oid
if self.field_oid is not None:
params['FieldOID'] = self.field_oid
if self.form_oid is not None:
params['FormOID'] = self.form_oid
if self.value is not None:
params['Value'] = self.value
if self.function is not None:
params['Function'] = self.function.value
if self.custom_function is not None:
params['CustomFunction'] = self.custom_function
if self.record_position is not None:
params['RecordPosition'] = str(self.record_position)
if self.form_repeat_number is not None:
params['FormRepeatNumber'] = str(self.form_repeat_number)
if self.folder_repeat_number is not None:
params['FolderRepeatNumber'] = str(self.folder_repeat_number)
if self.logical_record_position is not None:
params['LogicalRecordPosition'] = self.logical_record_position
builder.start("mdsol:DerivationStep", params)
builder.end("mdsol:DerivationStep")
class MdsolCheckStep(ODMElement):
"""A check step modeled after the Architect Loader definition.
Do not use directly, use appropriate subclasses.
"""
VALID_STEPS = ALL_STEPS
def __init__(self,
variable_oid=None,
data_format=None,
form_oid=None,
folder_oid=None,
field_oid=None,
static_value=None,
function=None,
custom_function=None,
record_position=None,
form_repeat_number=None,
folder_repeat_number=None,
logical_record_position=None
):
self.variable_oid = variable_oid
self.data_format = data_format
self.form_oid = form_oid
self.folder_oid = folder_oid
self.field_oid = field_oid
self.static_value = static_value
self._function = None
self.function = function
self.custom_function = custom_function
self.record_position = record_position
self.form_repeat_number = form_repeat_number
self.folder_repeat_number = folder_repeat_number
self.logical_record_position = logical_record_position
@property
def function(self):
return self._function
@function.setter
def function(self, value):
if value is not None:
if value not in MdsolCheckStep.VALID_STEPS:
raise AttributeError("Invalid function %s" % value)
self._function = value
def build(self, builder):
params = dict()
if self.variable_oid is not None:
params['VariableOID'] = self.variable_oid
if self.data_format is not None:
params['DataFormat'] = self.data_format
if self.folder_oid is not None:
params['FolderOID'] = self.folder_oid
if self.field_oid is not None:
params['FieldOID'] = self.field_oid
if self.form_oid is not None:
params['FormOID'] = self.form_oid
if self.static_value is not None:
params['StaticValue'] = self.static_value
if self.function is not None:
params['Function'] = self.function.value
if self.custom_function is not None:
params['CustomFunction'] = self.custom_function
if self.record_position is not None:
params['RecordPosition'] = str(self.record_position)
if self.form_repeat_number is not None:
params['FormRepeatNumber'] = str(self.form_repeat_number)
if self.folder_repeat_number is not None:
params['FolderRepeatNumber'] = str(self.folder_repeat_number)
if self.logical_record_position is not None:
params['LogicalRecordPosition'] = self.logical_record_position
builder.start("mdsol:CheckStep", params)
builder.end("mdsol:CheckStep")
class MdsolCheckAction(ODMElement):
"""
Check Action modeled after check action in Architect Loader spreadsheet.
Do not use directly, use appropriate sub-class.
"""
def __init__(self,
variable_oid=None,
field_oid=None,
form_oid=None,
folder_oid=None,
record_position=None,
form_repeat_number=None,
folder_repeat_number=None,
check_action_type=None,
check_string=None,
check_options=None,
check_script=None
):
self.variable_oid = variable_oid
self.folder_oid = folder_oid
self.field_oid = field_oid
self.form_oid = form_oid
self.record_position = record_position
self.form_repeat_number = form_repeat_number
self.folder_repeat_number = folder_repeat_number
self._check_action_type = None
self.check_action_type = check_action_type
self.check_string = check_string
self.check_options = check_options
self.check_script = check_script
@property
def check_action_type(self):
return self._check_action_type
@check_action_type.setter
def check_action_type(self, value):
if value is not None:
if not isinstance(value, ActionType):
raise AttributeError("Invalid check action %s" % value)
self._check_action_type = value
def build(self, builder):
params = dict()
if self.variable_oid is not None:
params['VariableOID'] = self.variable_oid
if self.field_oid is not None:
params['FieldOID'] = self.field_oid
if self.form_oid is not None:
params['FormOID'] = self.form_oid
if self.folder_oid is not None:
params['FolderOID'] = self.folder_oid
if self.record_position is not None:
params['RecordPosition'] = str(self.record_position)
if self.form_repeat_number is not None:
params['FormRepeatNumber'] = str(self.form_repeat_number)
if self.folder_repeat_number is not None:
params['FolderRepeatNumber'] = str(self.folder_repeat_number)
if self.check_action_type is not None:
params['Type'] = self.check_action_type.value
if self.check_string is not None:
params['String'] = self.check_string
if self.check_options is not None:
params['Options'] = self.check_options
if self.check_script is not None:
params['Script'] = self.check_script
builder.start("mdsol:CheckAction", params)
builder.end("mdsol:CheckAction")
class MdsolEditCheckDef(ODMElement):
"""Extension for Rave edit checks"""
def __init__(self, oid, active=True, bypass_during_migration=False, needs_retesting=False):
self.oid = oid
self.active = active
self.bypass_during_migration = bypass_during_migration
self.needs_retesting = needs_retesting
self.check_steps = []
self.check_actions = []
def build(self, builder):
params = dict(OID=self.oid,
Active=bool_to_true_false(self.active),
BypassDuringMigration=bool_to_true_false(self.bypass_during_migration),
NeedsRetesting=bool_to_true_false(self.needs_retesting)
)
builder.start('mdsol:EditCheckDef', params)
for step in self.check_steps:
step.build(builder)
for action in self.check_actions:
action.build(builder)
builder.end('mdsol:EditCheckDef')
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, (MdsolCheckStep, MdsolCheckAction,)):
raise ValueError('EditCheck cannot accept a {0} as a child element'.format(other.__class__.__name__))
self.set_list_attribute(other, MdsolCheckStep, 'check_steps')
        self.set_list_attribute(other, MdsolCheckAction, 'check_actions')
        return other
class MdsolDerivationDef(ODMElement):
"""Extension for Rave derivations"""
def __init__(self, oid, active=True,
bypass_during_migration=False,
needs_retesting=False,
variable_oid=None,
field_oid=None,
form_oid=None,
folder_oid=None,
record_position=None,
form_repeat_number=None,
folder_repeat_number=None,
logical_record_position=None,
all_variables_in_folders=None,
all_variables_in_fields=None
):
self.oid = oid
self.active = active
self.bypass_during_migration = bypass_during_migration
self.needs_retesting = needs_retesting
self.variable_oid = variable_oid
self.field_oid = field_oid
self.form_oid = form_oid
self.folder_oid = folder_oid
self.record_position = record_position
self.form_repeat_number = form_repeat_number
self.folder_repeat_number = folder_repeat_number
self.logical_record_position = logical_record_position
self.all_variables_in_folders = all_variables_in_folders
self.all_variables_in_fields = all_variables_in_fields
self.derivation_steps = []
def build(self, builder):
params = dict(
OID=self.oid,
Active=bool_to_true_false(self.active),
BypassDuringMigration=bool_to_true_false(self.bypass_during_migration),
NeedsRetesting=bool_to_true_false(self.needs_retesting)
)
if self.variable_oid is not None:
params['VariableOID'] = self.variable_oid
if self.field_oid is not None:
params['FieldOID'] = self.field_oid
if self.form_oid is not None:
params['FormOID'] = self.form_oid
if self.folder_oid is not None:
params['FolderOID'] = self.folder_oid
if self.record_position is not None:
params['RecordPosition'] = str(self.record_position)
if self.form_repeat_number is not None:
params['FormRepeatNumber'] = str(self.form_repeat_number)
if self.folder_repeat_number is not None:
params['FolderRepeatNumber'] = str(self.folder_repeat_number)
if self.all_variables_in_folders is not None:
params['AllVariablesInFolders'] = bool_to_true_false(self.all_variables_in_folders)
if self.all_variables_in_fields is not None:
params['AllVariablesInFields'] = bool_to_true_false(self.all_variables_in_fields)
if self.logical_record_position is not None:
params['LogicalRecordPosition'] = self.logical_record_position
builder.start('mdsol:DerivationDef', params)
for step in self.derivation_steps:
step.build(builder)
builder.end('mdsol:DerivationDef')
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, MdsolDerivationStep):
raise ValueError('Derivation cannot accept a {0} as a child element'.format(other.__class__.__name__))
        self.set_list_attribute(other, MdsolDerivationStep, 'derivation_steps')
        return other
class MdsolCustomFunctionDef(ODMElement):
"""Extension for Rave Custom functions"""
VB = "VB" # VB was deprecated in later Rave versions.
C_SHARP = "C#"
SQL = "SQ"
VALID_LANGUAGES = [C_SHARP, SQL, VB]
def __init__(self, oid, code, language="C#"):
self.oid = oid
self.code = code
self.language = language
def build(self, builder):
params = dict(OID=self.oid, Language=self.language)
builder.start('mdsol:CustomFunctionDef', params)
builder.data(self.code)
builder.end('mdsol:CustomFunctionDef')
class MetaDataVersion(ODMElement):
"""MetaDataVersion, child of study"""
def __init__(self, oid, name,
description=None,
primary_formoid=None,
default_matrix_oid=None,
delete_existing=False,
signature_prompt=None):
self.oid = oid
self.name = name
self.description = description
self.primary_formoid = primary_formoid
self.default_matrix_oid = default_matrix_oid
self.delete_existing = delete_existing
self.signature_prompt = signature_prompt
self.confirmation_message = None
self.protocol = None
self.codelists = []
self.item_defs = []
self.label_defs = []
self.item_group_defs = []
self.form_defs = []
self.study_event_defs = []
self.edit_checks = []
self.derivations = []
self.custom_functions = []
def build(self, builder):
"""Build XML by appending to builder"""
params = dict(OID=self.oid, Name=self.name)
if self.description is not None:
params['Description'] = self.description
if self.signature_prompt is not None:
params['mdsol:SignaturePrompt'] = self.signature_prompt
if self.primary_formoid is not None:
params['mdsol:PrimaryFormOID'] = self.primary_formoid
if self.default_matrix_oid is not None:
params['mdsol:DefaultMatrixOID'] = self.default_matrix_oid
params['mdsol:DeleteExisting'] = bool_to_yes_no(self.delete_existing)
builder.start("MetaDataVersion", params)
if self.protocol:
self.protocol.build(builder)
for event in self.study_event_defs:
event.build(builder)
for formdef in self.form_defs:
formdef.build(builder)
for itemgroupdef in self.item_group_defs:
itemgroupdef.build(builder)
for itemdef in self.item_defs:
itemdef.build(builder)
for codelist in self.codelists:
codelist.build(builder)
# Extensions must always come after core elements
if self.confirmation_message:
self.confirmation_message.build(builder)
for labeldef in self.label_defs:
labeldef.build(builder)
for edit_check in self.edit_checks:
edit_check.build(builder)
for derivation in self.derivations:
derivation.build(builder)
for custom_function in self.custom_functions:
custom_function.build(builder)
builder.end("MetaDataVersion")
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, (Protocol, StudyEventDef, FormDef, ItemGroupDef, ItemDef, MdsolLabelDef, CodeList,
MdsolConfirmationMessage, MdsolEditCheckDef, MdsolDerivationDef,
MdsolCustomFunctionDef)):
raise ValueError('MetaDataVersion cannot accept a {0} as a child element'.format(other.__class__.__name__))
self.set_single_attribute(other, Protocol, 'protocol')
self.set_single_attribute(other, MdsolConfirmationMessage, 'confirmation_message')
self.set_list_attribute(other, StudyEventDef, 'study_event_defs')
self.set_list_attribute(other, FormDef, 'form_defs')
self.set_list_attribute(other, ItemGroupDef, 'item_group_defs')
self.set_list_attribute(other, MdsolLabelDef, 'label_defs')
self.set_list_attribute(other, ItemDef, 'item_defs')
self.set_list_attribute(other, CodeList, 'codelists')
self.set_list_attribute(other, MdsolEditCheckDef, 'edit_checks')
self.set_list_attribute(other, MdsolDerivationDef, 'derivations')
self.set_list_attribute(other, MdsolCustomFunctionDef, 'custom_functions') # NB. Current schema limits to 1
return other
class Study(ODMElement):
"""ODM Study Metadata element"""
PROJECT = 'Project'
GLOBAL_LIBRARY = 'GlobalLibrary Volume'
PROJECT_TYPES = [PROJECT, GLOBAL_LIBRARY]
def __init__(self, oid, project_type=None):
self.oid = oid
self.global_variables = None
self.basic_definitions = None
self.metadata_version = None
self.studyevent_defs = []
if project_type is None:
self.project_type = "Project"
else:
if project_type in Study.PROJECT_TYPES:
self.project_type = project_type
else:
                raise ValueError('Project type "{0}" not valid. Expected one of {1}'.format(project_type, ','.join(Study.PROJECT_TYPES)))
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, (GlobalVariables, BasicDefinitions, MetaDataVersion)):
raise ValueError('Study cannot accept a {0} as a child element'.format(other.__class__.__name__))
self.set_single_attribute(other, GlobalVariables, 'global_variables')
self.set_single_attribute(other, BasicDefinitions, 'basic_definitions')
self.set_single_attribute(other, MetaDataVersion, 'metadata_version')
return other
def build(self, builder):
"""Build XML by appending to builder"""
params = dict(OID=self.oid)
params['mdsol:ProjectType'] = self.project_type
builder.start("Study", params)
# Ask children
if self.global_variables is not None:
self.global_variables.build(builder)
if self.basic_definitions is not None:
self.basic_definitions.build(builder)
if self.metadata_version is not None:
self.metadata_version.build(builder)
builder.end("Study")
|
mit
| -2,991,630,257,282,767,400
| 33.787551
| 121
| 0.603444
| false
| 4.042562
| false
| false
| false
|
toddheitmann/PetroPy
|
setup.py
|
1
|
1590
|
"""Setup script for PetroPy"""
from setuptools import setup
from os import path
from petropy import __version__
with open(path.join(path.dirname(__file__), "requirements.txt"), "r") as f:
requirements = f.read().splitlines()
with open(path.join(path.dirname(__file__), "README.rst"), "r") as f:
long_description = f.read()
setup(
name = 'petropy',
packages=["petropy", ],
version = __version__,
description = 'A package to calculate petrophysical properties for formation evaluation.',
long_description = long_description,
author = 'Todd Heitmann',
author_email = 'toddheitmann@protonmail.com',
url = 'https://github.com/toddheitmann/petropy',
keywords = ['petrophysics', 'formation evaluation', 'reservoir characterization', 'Oil and Gas'],
classifiers=[
"Intended Audience :: Customer Service",
"Intended Audience :: Developers",
"Intended Audience :: Education",
"Intended Audience :: End Users/Desktop",
"Intended Audience :: Other Audience",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Scientific/Engineering",
"Topic :: System :: Filesystems",
"Topic :: Scientific/Engineering :: Information Analysis",
],
install_requires = requirements,
package_data = {'petropy': ['data/*.csv', 'data/*.xml', 'data/*.las']}
)
|
mit
| 8,575,491,469,597,915,000
| 37.780488
| 101
| 0.637107
| false
| 4.025316
| false
| false
| false
|
saisankargochhayat/algo_quest
|
leetcode/115. Distinct Subsequences/soln.py
|
1
|
1479
|
from functools import lru_cache
class Solution:
def numDistinct(self, s: str, t: str) -> int:
@lru_cache(maxsize=None)
def helper(i, j):
M, N = len(s), len(t)
            # base case: t is fully matched, s is exhausted, or too few
            # characters remain in s to cover the rest of t
            if i == M or j == N or M-i < N-j:
                return int(j == N)
            # whether or not s[i] matches t[j], we can always skip s[i]
            ans = helper(i+1, j)
            # if the characters match, we can also consume both at once
            if s[i] == t[j]:
                ans += helper(i+1, j+1)
return ans
res = helper(0,0)
return res
class Solution:
def numDistinct(self, s: str, t: str) -> int:
# Dictionary for memoization
mem = {}
def helper(i, j):
M, N = len(s), len(t)
# Base case
if i == M or j == N or M - i < N - j:
return int(j == len(t))
# Check if the result is already cached
if (i, j) in mem:
return mem[i,j]
# Always make this recursive call
ans = helper(i + 1, j)
# If the characters match, make the other
# one and add the result to "ans"
if s[i] == t[j]:
ans += helper(i + 1, j + 1)
# Cache the answer and return
mem[i, j] = ans
return ans
return helper(0, 0)
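# Hedged usage sketch (added annotation, not part of the original solution):
# quick check of the memoized solution against the classic LeetCode examples.
if __name__ == '__main__':
    # "rabbbit" contains "rabbit" as a subsequence in 3 distinct ways
    assert Solution().numDistinct("rabbbit", "rabbit") == 3
    assert Solution().numDistinct("babgbag", "bag") == 5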
|
apache-2.0
| 3,884,121,225,222,017,000
| 27.461538
| 74
| 0.408384
| false
| 4.019022
| false
| false
| false
|
benob/chainer
|
chainer/functions/evaluation/accuracy.py
|
1
|
2436
|
import numpy
import six
from chainer import cuda
from chainer import function
from chainer.utils import type_check
class Accuracy(function.Function):
def __init__(self, ignore_label=None):
self.ignore_label = ignore_label
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 2)
x_type, t_type = in_types
type_check.expect(
x_type.dtype.kind == 'f',
t_type.dtype == numpy.int32
)
t_ndim = t_type.ndim.eval()
type_check.expect(
x_type.ndim >= t_type.ndim,
x_type.shape[0] == t_type.shape[0],
x_type.shape[2: t_ndim + 1] == t_type.shape[1:]
)
for i in six.moves.range(t_ndim + 1, x_type.ndim.eval()):
type_check.expect(x_type.shape[i] == 1)
def forward(self, inputs):
xp = cuda.get_array_module(*inputs)
y, t = inputs
if self.ignore_label is not None:
mask = (t == self.ignore_label)
ignore_cnt = mask.sum()
            # pred == t always holds where the true label is ignore_label
            # (pred is forced to ignore_label there), so ignore_cnt is
            # subtracted from the match count below
# TODO(henry0312)
# If cupy.where returns indexes, we could make the code better.
# Also, we would need Advanced Indexing.
pred = xp.where(mask, self.ignore_label,
y.argmax(axis=1).reshape(t.shape))
count = (pred == t).sum() - ignore_cnt
total = t.size - ignore_cnt
if total == 0:
return xp.asarray(0.0, dtype=y.dtype),
else:
return xp.asarray(float(count) / total, dtype=y.dtype),
else:
pred = y.argmax(axis=1).reshape(t.shape)
return xp.asarray((pred == t).mean(dtype=y.dtype)),
def accuracy(y, t, ignore_label=None):
"""Computes muticlass classification accuracy of the minibatch.
Args:
y (Variable): Variable holding a matrix whose (i, j)-th element
indicates the score of the class j at the i-th example.
t (Variable): Variable holding an int32 vector of ground truth labels.
ignore_label (int or None): Skip calculating accuracy
            if the true label is ``ignore_label``.
Returns:
Variable: A variable holding a scalar array of the accuracy.
.. note:: This function is non-differentiable.
"""
return Accuracy(ignore_label=ignore_label)(y, t)
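def _example_accuracy():
    """Hedged usage sketch (added annotation, not part of the original module):
    evaluates accuracy on a tiny NumPy batch. Passing raw arrays relies on
    Function.__call__ wrapping them into Variables, which is an assumption
    about this version of chainer."""
    y = numpy.array([[0.1, 0.9], [0.8, 0.2]], dtype=numpy.float32)
    t = numpy.array([1, 1], dtype=numpy.int32)
    return accuracy(y, t)  # Variable holding 0.5: argmax picks classes [1, 0]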
|
mit
| -7,014,396,633,179,930,000
| 31.918919
| 78
| 0.573481
| false
| 3.747692
| false
| false
| false
|
zakirovandrey/cfmaxwell
|
src/genConeFold.py
|
1
|
21709
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
from types import *
from operator import *
def makeCombinations(lst, res=['']):
if len(lst)<=0: return res
else: return makeCombinations(lst[:-1], [c+n for n in res for c in lst[-1]])
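# Hedged example (added annotation, not in the original script):
#   makeCombinations(['ab', 'cd']) -> ['ac', 'bc', 'ad', 'bd']
# i.e. every result string takes its i-th character from lst[i], with the
# first list's characters varying fastest.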
class CFact:
def __init__(self, Gen, actN):
self.Gen=Gen; self.actN=actN
self.podDatas,self.podDatasShift = ['',],[0,]
for s in xrange(Gen.dim):
#datas = self.pars4actN[actN[s]]
datas = Gen.Rules2act['pars'][actN[s]]
for k in xrange(1<<s):
self.podDatas.append(self.podDatas[k] + datas[1])
self.podDatas[k] += datas[0]
                self.podDatasShift.append(self.podDatasShift[k] + (0,1<<s)[datas[1] == 'p'])
pass
pass
pass
def PodActList(self, tC='A'):
tier0=makeCombinations([self.Gen.Rules2act['subacts'][a][:2] for a in self.actN])
tier1=makeCombinations([self.Gen.Rules2act['subacts'][a][2:] for a in self.actN])
#tier0=makeCombinations([self.decomp4actN[a][:2] for a in self.actN])
#tier1=makeCombinations([self.decomp4actN[a][2:] for a in self.actN])
tier2=reduce(add,zip(tier0,tier1))
tier0=filter(lambda (a,n,t):'-' not in a, zip(tier0,xrange(len(tier0)),['B']*len(tier0))); tier0.reverse()
tier1=filter(lambda (a,n,t):'-' not in a, zip(tier1,xrange(len(tier1)),['T']*len(tier1))); tier1.reverse()
tier2=filter(lambda (a,n,t):'-' not in a, zip(tier2,xrange(len(tier2)),['B','T']*len(tier0))); tier2.reverse()
        if tC == 'B': return tier0
        if tC == 'T': return tier1
        if tC == 'BT': return tier2
        return tier0+tier1
def PodActList_mp(self):
tier=makeCombinations([self.Gen.Rules2act['subacts_mp'][a] for a in self.actN])
tier=filter(lambda (a,n):'-' not in a, zip(tier,xrange(len(tier)))); tier.reverse()
return tier
def getIsh(self, datI, shI):
return filter(len, [('',self.Gen.rulesShift[self.podDatas[datI]][s])[(shI&(1<<s))>0] for s in xrange(self.Gen.dim)])
def getPodActPar(self, par, parI, cntI, tC):
        '''Returns parameter parI (0..2^d) for a sub-act whose base datas is cntI (0..2^d), derived from the template par'''
        if tC == 'B': datI,poddatI = cntI&parI,cntI^parI
        if tC == 'T': datI,poddatI = cntI|parI,((1<<self.Gen.dim)-1)&(~(cntI^parI))
        if tC == 'X':
parIl,cntIl=map(lambda v:(0,1,-1)[v],self.Gen.num2list(parI,3)),self.Gen.num2list(cntI)
datI = self.Gen.list2num([(p+n)>=1 for (p,n) in zip(parIl,cntIl)])
poddatI = self.Gen.list2num([1&~((p&1)^n) for (p,n) in zip(parIl,cntIl)])
        # datI, poddatI --- indices of the datas and of its sub-datas for parameter parI
poddatIx = 0
for s in xrange(self.Gen.dim-1,-1,-1):
i2s = 1<<s
if self.Gen.Rules2dim[self.podDatas[datI][s]]: poddatIx = 2*poddatIx+((poddatI&i2s)!=0)
        # poddatIx --- offset of the parameter within the sub-datas array of datas datI
#print par, parI, cntI, tC,self.podDatas[datI],"->",datI,poddatI,poddatIx
if par.find('dat')>=0:
dat_shift = self.podDatasShift[datI]
return (self.Gen.datTmpl%datI+'->',self.Gen.datTmpl%(datI-dat_shift)+'[%s].'%('+'.join(self.getIsh(datI-dat_shift,dat_shift))))[dat_shift>0]+'datas'+('','+%d'%poddatIx)[poddatIx>0]
if par.find('const int _I')>=0:
sh = par[par.find('const int _I')+len('const int _I'):]
sI = sh.find('p')
if sI >= 0:
if poddatI&(1<<sI): return '%d'%(1<<self.Gen.get_dim(sh[:sI]))
return 'I'+sh
sI = sh.find('m')
if sI >= 0:
if poddatI&(1<<sI): return '-I'+sh.replace('m','p')
return '-%d'%(1<<self.Gen.get_dim(sh[:sI]))
return '?'+sh
        return '======== not implemented yet: ' + par
class CFact_mp(CFact):
def __init__(self, Gen, actN):
CFact.__init__(self, Gen, actN)
self.podDatas_mp,self.podDatasShift_mp = ['',],[0,]
for s in xrange(Gen.dim):
datas = Gen.Rules2act_mp['pars'][actN[s]]
for k in xrange(3**s): self.podDatas_mp.append(self.podDatas_mp[k] + datas[2])
for k in xrange(3**s): self.podDatas_mp.append(self.podDatas_mp[k] + datas[0])
for k in xrange(3**s): self.podDatas_mp[k] += datas[1]
            for k in xrange(3**s): self.podDatasShift_mp.append(self.podDatasShift_mp[k] + (0,3**s)[datas[2] == 'p'])
            for k in xrange(3**s): self.podDatasShift_mp.append(self.podDatasShift_mp[k] + 2*(0,3**s)[datas[0] == 'm'])
pass
pass
def PodActList_mp(self):
#tier=makeCombinations([self.decomp4actNmp[a] for a in self.actN])
tier=makeCombinations([self.Gen.Rules2act_mp['subacts'][a] for a in self.actN])
tier=filter(lambda (a,n):'-' not in a, zip(tier,xrange(len(tier)))); tier.reverse()
return tier
def PodActList_mpPIC(self):
#tier=makeCombinations([self.decomp4actNmpPIC[a] for a in self.actN])
tier=makeCombinations([self.Gen.Rules2act_mp['subactsPIC'][a] for a in self.actN])
tier=filter(lambda (a,n):'-' not in a, zip(tier,xrange(len(tier)))); tier.reverse()
return tier
def getIsh_mp(self, datI, shI):
        '''computes the shift of a datas'''
if '_' in self.podDatas_mp[datI]: return ('<no-data>',)
#print datI, self.podDatas_mp[datI], self.Gen.rulesShift[self.podDatas_mp[datI]]
shIl = self.Gen.num2list(shI,3)
Ish=[]
for s,sh in zip(xrange(self.Gen.dim), self.Gen.num2list(shI,3)):
rul = self.Gen.rulesShift[self.podDatas_mp[datI]][s]
Ish.append(('',rul, rul.replace('p','m'))[shIl[s]])
return filter(len, Ish)
def getPodActPar(self, par, parI, cntIl, tC):
        '''Returns parameter parI (0..3^d) for a sub-act whose base datas is cntI (0..2^d), derived from the template par'''
        if tC == 'X':
            parIl=map(lambda v:(0,1,-1)[v],self.Gen.num2list(parI,3))
            datIl = [(-1,-1,0,0,1,1)[p+n+2] for (p,n) in zip(parIl,cntIl)]  # equivalently (p+n)/2; index (offset) of the datas relative to the base cntI
            poddatIl = [(0,1,0,1,0,1)[p+n+2] for (p,n) in zip(parIl,cntIl)]  # equivalently (p+n)%2; index of the poddatas within its datas
datI = self.Gen.list2num([((p+n)/2)%3 for (p,n) in zip(parIl,cntIl)], 3)
poddatI = self.Gen.list2num([((p+n)%2)&3 for (p,n) in zip(parIl,cntIl)])
poddatIx = 0
for s in xrange(self.Gen.dim-1,-1,-1):
i2s = 1<<s
if self.Gen.Rules2dim[self.podDatas_mp[datI][s]]: poddatIx = 2*poddatIx+((poddatI&i2s)!=0)
if par.find('dat')>=0:
dat_shift = self.podDatasShift_mp[datI]
return (self.Gen.datTmpl%datI+'->',self.Gen.datTmpl%(datI-dat_shift)+'[%s].'%('+'.join(self.getIsh_mp(datI-dat_shift,dat_shift))))[dat_shift!=0]+'datas'+('','+%d'%poddatIx)[poddatIx>0]
if par.find('const int _I')>=0:
sh = par[par.find('const int _I')+len('const int _I'):]
sI = sh.find('p')
if sI >= 0:
if poddatIl[sI]==1: return '%d'%(1<<self.Gen.get_dim(sh[:sI]))
if datIl[sI]<=0: return '-I'+sh.replace('p','m')
return 'I'+sh
sI = sh.find('m')
if sI >= 0:
if poddatIl[sI]==0: return '-%d'%(1<<self.Gen.get_dim(sh[:sI]))
if datIl[sI]>=0: return '-I'+sh.replace('m','p')
return 'I'+sh
return '?'+sh
        return '======== not implemented yet: ' + par
class CFpodact(CFact):
def __init__(self, act, actN):
CFact.__init__(self, act.Gen, actN)
self.nadact = act
pass
def getParsList(self, n, tC):
pars_list, full_pars_list = self.Gen.get_pars(self.podDatas)
fakt_pars = [self.nadact.getPodActPar(par, full_pars_list.index(par), n,tC) for par in pars_list]
return fakt_pars
class CFpodact_mp(CFact_mp):
def __init__(self, act, actN):
CFact_mp.__init__(self, act.Gen, actN)
self.nadact = act
def getParsList(self, n, tC):
pars_list, full_pars_list = self.Gen.get_pars_mp(self.podDatas_mp)
fakt_pars = [self.nadact.getPodActPar(par, full_pars_list.index(par), n, tC) for par in pars_list]
return fakt_pars
class Generator:
def __init__(self, dim, types):
self.Rules2act = {
'pars': {'D':'dp', 'S':'dp', 'I':'_d', 'J':'dp', 'X':'dx', 'Y':'d_', 'P':'dd', 'Q':'dd', 'L':'ld', 'M':'dp', 'R':'dr'},
'subacts':{'D':'DDDD', 'S':'SSSS', 'I':'-IID', 'X':'DXX-', 'Y':'DYY-', 'P':'DPPD', 'L':'-LLD', 'R':'DRR-'},
'subacts_mp': {'D':'DD', 'I':'-J', 'J':'JD', 'X':'X-', 'Y':'Y-', 'P':'PQ', 'L':'-M', 'R':'R-'},
'LR':{'L':'IL-', 'R':'-RY'},
}
self.Rules2act_mp = {
#'pars': {'D':'mdp', 'S':'msp', 'J':'_dp', 'X':'mdp', 'P':'mdd', 'Q':'ddp', 'M':'ldp', 'R':'mdr'},
'pars': {'D':'mdp', 'S':'msp', 'J':'_dp', 'X':'mdx', 'P':'mdd', 'Q':'ddp', 'M':'ldp', 'R':'mdr'},
'subacts': {'D':'DD', 'I':'JD', 'J':'JD', 'X':'DX', 'P':'DP', 'Q': 'QD', 'L':'MD', 'M':'MD', 'R':'DR'},
'subactsPIC': {'D':'DDDD', 'J':'-JDD', 'X':'DDX-', 'P':'DDPQ', 'Q': 'PQDD', 'M':'-MDD', 'R':'DDR-'},
}
self.Rules2rank = {}
self.Rules2dim = {}
for s in 'dmps': self.Rules2dim[s] = 1
for s in 'xlr': self.Rules2dim[s] = 0
self.dim=dim
self.par_name_start=-(dim+2)-(dim==1)
self.datTmpl='datas_____'[:2+(dim+1)/2:]+'%0'+'%dd'%((dim+1)/2)
self.types=types
print '// acts: %d'%len(makeCombinations([types]*dim))
pass
def num2list(self, num, bas=2):
numLst = []
for s in xrange(self.dim):
numLst.append(num%bas)
num /= bas
return numLst
def list2num(self, numLst, bas=2):
numLstR = numLst[:]
numLstR.reverse()
return reduce(lambda r,v: r*bas+v, numLstR, 0)
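    # Hedged example (added annotation, not in the original script): num2list and
    # list2num are inverse little-endian digit codecs, e.g. with dim=3:
    #   num2list(6, 2) -> [0, 1, 1]   and   list2num([0, 1, 1], 2) -> 6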
def get_dim(self, pd):
return reduce(lambda r,s: r+self.Rules2dim[s], pd, 0)
def add2rules(self, pd):
self.rules[pd] = 'cubeLR<%d,T%%(Npd)d,%s>'%(self.get_dim(pd),self.Rules2rank.get(pd,self.rank)) + '* const '+self.datTmpl.replace('%','%(Npd)')
self.add2rulesShift(pd)
pass
def add2rulesShift(self, pd):
self.rulesShift[pd] = []
#self.rulesShiftM[pd] = []
for s in xrange(len(pd)):
sh=pd[:s]+'p'+pd[s+1:]
self.rules[sh] = 'const int _I'+sh
self.rulesShift[pd].append('_I'+sh)
shM=pd[:s]+'m'+pd[s+1:]
self.rules[shM] = 'const int _I'+shM
#self.rulesShiftM[pd].append('_I'+shM)
pass
pass
def get_pars(self, datas, shift=0):
full_pars_list = [self.rules.get(pd,'')%{'Npd':i+shift} for (i,pd) in zip(xrange(1<<self.dim),datas)]
pars_list = filter(len, full_pars_list)
pars_list = map(lambda i: pars_list[i], filter(lambda i: pars_list.index(pars_list[i])==i, xrange(len(pars_list))))
return pars_list, full_pars_list
def get_pars_mp(self, datas):
full_pars_list = [self.rules.get(pd,'')%{'Npd':i} for (i,pd) in zip(xrange(3**self.dim),datas)]
pars_list = filter(len, full_pars_list)
pars_list = map(lambda i: pars_list[i], filter(lambda i: pars_list.index(pars_list[i])==i, xrange(len(pars_list))))
return pars_list, full_pars_list
def getTmplPars(self, formal_pars):
template_pars = ','.join(map(lambda s: 'class '+s, filter(lambda s: s[0] is 'T', ''.join(formal_pars).split(','))))
        rank_par = ('','int %s'%self.rank)[self.rank == 'rank']
return 'template <%s>'%(', '.join(filter(len,(rank_par,template_pars))))
def makeAct(self, actN):
act = CFact(self, actN)
formal_pars = self.get_pars(act.podDatas)[0]
shift_pars = map(lambda fp: fp[10:], filter(lambda fp: fp[:12] == 'const int _I', formal_pars))
shift_line = ', '.join(map(lambda p: '%s=(%s<<%d)-%d'%(p[1:],p,self.get_dim(p[2:]),1<<self.get_dim(p[2:][:p[2:].index('p')])), shift_pars))
        # compute sub-datas shifts from the name of the datas shift (e.g. from _Ixpd we get Ixpd=(_Ixpd<<2)-1, where 2=dim(xpd) and 1=1<<dim(x))
print self.getTmplPars(formal_pars)+' inline void %s(%s) {'%(self.actTmpl%actN,', '.join(formal_pars))
if len(shift_line)>0:
if ''.join(self.subactTmpl.keys()) in 'SF': print '//',
print ' const int %s;'%shift_line
if 'B' in self.subactTmpl.keys():
for (a,n,tC) in act.PodActList('B'):
print ' %s(%s);'%(self.subactTmpl['B']%a, ', '.join(CFpodact(act,a).getParsList(n,tC)))
if 'F' in self.subactTmpl.keys():
print ' %s(%s);'%(self.subactTmpl['F']%actN,', '.join(map(lambda p: p[self.par_name_start:], formal_pars)))
if 'S' in self.subactTmpl.keys():
tier=filter(lambda a:'-' not in a, makeCombinations([self.Rules2act['LR'].get(a, '-%c-'%a) for a in actN])); tier.reverse()
for tactN in tier:
tact = CFact(self, tactN)
tformal_pars = self.get_pars(tact.podDatas, shift=self.list2num([{'I':-1,'Y':1}.get(c,0) for c in tactN]))[0]
print ' %s(%s);'%(self.subactTmpl['S']%tactN,', '.join(map(lambda p: p[self.par_name_start:], tformal_pars)))
if 'X' in self.subactTmpl.keys():
for (a,n) in act.PodActList_mp():
print ' %s(%s);'%(self.subactTmpl['X']%a, ', '.join(CFpodact_mp(act,a).getParsList(n,'X')))
if 'T' in self.subactTmpl.keys():
for (a,n,tC) in act.PodActList('T'): print ' %s(%s);'%(self.subactTmpl['T']%a, ', '.join(CFpodact(act,a).getParsList(n,tC)))
print '}'
pass
def makeAct_mp(self, actN):
actT = CFact_mp(self, 'D'*self.dim)
act = CFact_mp(self, actN)
formal_pars = self.get_pars_mp(act.podDatas_mp)[0]
shift_pars = map(lambda fp: fp[10:], filter(lambda fp: fp[:12] == 'const int _I', formal_pars))
shift_line = ', '.join(map(lambda p: '%s=(%s<<%d)%c%d'%(p[1:],p,self.get_dim(p[2:]),"-+-+"[p[2:].count('m')],1<<self.get_dim(p[2:][:p[2:].replace('m','p').index('p')])), shift_pars))
        # compute sub-datas shifts from the name of the datas shift (e.g. from _Ixmd we get Ixmd=(_Ixmd<<2)+1, where 2=dim(xmd) and 1=1<<dim(x))
print self.getTmplPars(formal_pars)+' inline void %s(%s) {'%(self.actTmpl%actN,', '.join(formal_pars))
if len(shift_line)>0: print ' const int %s;'%shift_line
if 'B' in self.subactTmpl.keys():
for (a,n,tC) in act.PodActList('B'): print ' %s(%s);'%(self.subactTmpl['B']%a, ', '.join(CFpodact(act,a).getParsList(n,tC)))
#if 'F' in self.subactTmpl.keys():
# print ' %s(%s);'%(self.subactTmpl['F']%actN,', '.join(map(lambda p: p[self.par_name_start:], formal_pars)))
# #for (a,n,tC) in act.PodActList('F'): print ' %s(%s);'%(self.subactTmpl['F']%a, ', '.join(CFpodact(act,a).getParsList(n,tC)))
if 'J' in self.subactTmpl.keys():
caseNshift = self.list2num([1]*self.dim, 4)
print ' for(int ic=0; ic<4; ic++) {'
print 'if(dat0->datas[ic].Npts>NptsMax) {\n T0& datT=dat0->datas[ic];\n NptsMax = datT.Npts;\n printf("'+'===%s:'%actN+' inc NptsMax to %d in xyt: %.3g %.3g %d\\n", NptsMax, datT.x, datT.y, datT.it);\n}'
print ' int ip=dat0->datas[ic].Nexch;\n while(ip < dat0->datas[ic].Npts) {\n pts& pt=dat0->datas[ic].ptslist[ip];\n double dstep=1.0;\n do {\n switch(pt.ix+4*pt.iy) {'
for (a,n) in act.PodActList_mpPIC():
nL = map(lambda nt: nt-1, self.num2list(n,4))
parList = CFpodact_mp(act,a).getParsList(nL,'X')
print ' case %d: dstep=pt.%s(dstep, %s); break;'%(n-caseNshift,self.subactTmpl['J']%a, ', '.join(parList))
print ' }\n } while(dstep<1.0);\n if((pt.ix&2)|(pt.iy&2)) {\n int swk=pt.ix+4*pt.iy;\n if(pt.ix<0) pt.ix += 2; else if(pt.ix>1) pt.ix -= 2;\n if(pt.iy<0) pt.iy += 2; else if(pt.iy>1) pt.iy -= 2;\n switch(swk) {'
for (a,n) in act.PodActList_mpPIC():
nL = map(lambda nt: nt-1, self.num2list(n,4))
if len(filter(lambda _n: _n in (-1,2), nL))==0: continue;
datPtr = '(%s)'%CFpodact_mp(act,a).getParsList(nL,'X')[0]
oldPtr = 'dat0->datas[ic]'
Npts = datPtr+'->Npts'
Nxch = datPtr+'->Nexch'
case_dict = {'ptN': datPtr, 'ptO': oldPtr,'Np': Npts,'Nx':Nxch}
print ' case %(n)d:'%{'n':n-caseNshift},
#print 'if(%(ptO)s.it > %(ptN)s->it) printf("Illegal Exch!\\n"); else'%case_dict,
print 'if(%(ptO)s.it > %(ptN)s->it) printf("Illegal Exch!\\n"); else if(%(ptO)s.it < %(ptN)s->it) %(ptN)s->ptslist[%(Np)s].copyfrom(pt); else { %(ptN)s->ptslist[%(Np)s].copyfrom(%(ptN)s->ptslist[%(Nx)s]); %(ptN)s->ptslist[%(Nx)s].copyfrom(pt); %(Nx)s++; } %(Np)s++; break;'%case_dict
#print 'if(%(ptO)s.it < %(ptN)s->it) %(ptN)s->ptslist[%(Np)s].copyfrom(pt); else { %(ptN)s->ptslist[%(Np)s].copyfrom(%(ptN)s->ptslist[%(Nx)s]); %(ptN)s->ptslist[%(Nx)s].copyfrom(pt); %(Nx)s++; } %(Np)s++; break;'%case_dict
print ' }\n dat0->datas[ic].Npts--;\n if(ip<dat0->datas[ic].Npts) pt.copyfrom(dat0->datas[ic].ptslist[dat0->datas[ic].Npts]);\n } else ip++;\n }\n dat0->datas[ic].Nexch=0; dat0->datas[ic].it++;\n }'
if 'X' in self.subactTmpl.keys():
for (a,n) in act.PodActList_mp():
print ' for(int ip=0; ip<Nz; ip++) dat0->datas[%d].ptslist[ip].%s(1.0, %s);'%(n,self.subactTmpl['X']%a, ', '.join(CFpodact_mp(act,a).getParsList(n,'X')))
if 'T' in self.subactTmpl.keys():
for (a,n,tC) in act.PodActList('T'): print ' %s(%s);'%(self.subactTmpl['T']%a, ', '.join(CFpodact(act,a).getParsList(n,tC)))
print '}'
pass
def genConeFold(self, rank='rank', actTmpl=r'%sactCF', subactTmpl=None, knot='p', exclude=[], acts4gen=[]):
        '''Prints a ConeFold of the given rank, name and type:
        rank --- the rank name (or range), a string;
        actTmpl --- the name template, a string onto which the act name is stamped;
        subactTmpl --- the decomposition rules and the names of the (lower-rank) sub-ConeFolds the ConeFold is split into,
        a dict whose keys are the time tiers B/T/F/X --- bottom/top/flat/flat with ..., the tiers of sub-ConeFolds'''
if len(exclude): print '// exclude up to: %d'%len(exclude)
self.rank=rank
self.actTmpl=actTmpl
if subactTmpl is None: subactTmpl = { 'BT' : actTmpl }
if type(subactTmpl) is str: subactTmpl = { subactTmpl : actTmpl }
self.subactTmpl={}; map(lambda k: self.subactTmpl.update(dict(zip(k,(subactTmpl[k],)*len(k)))), subactTmpl.keys())
self.rules,self.rulesShift={},{}
datasTypes = reduce(lambda r,t: r+filter(lambda c: c not in r+'_mp', self.Rules2act['pars'][t]), self.types, '')
for pd in makeCombinations([datasTypes]*self.dim): self.add2rules(pd)
if len(acts4gen) == 0: acts4gen = makeCombinations([self.types]*self.dim)
for a in acts4gen:
if a in exclude: continue
if knot == 'p': self.makeAct(a)
elif knot == 'mp': self.makeAct_mp(a)
pass
pass
#stdout, sys.stdout = sys.stdout,open('Test.inc.hpp', 'w')
#gPJ = Generator(dim=2, types='JDX')
#gPJ.genConeFold(rank="FFRank-1", actTmpl=r'PIC2update%s', subactTmpl={'J': r'PIC2update%s'}, knot='mp')
dim=3
incpath = sys.argv[0][:sys.argv[0].find(sys.argv[0].split('/')[-1])]
stdout, sys.stdout = sys.stdout,open(incpath+'CF2Dpic.inc.hpp', 'w')
g = Generator(dim=dim, types='IDX')
g.genConeFold(actTmpl=r'update%s')
g.genConeFold(actTmpl=r'FLDupdate%s')
#g.genConeFold(actTmpl=r'FLDupdate%s', subactTmpl={'B': r'FLDupdate%s'})
sys.stdout.close(); sys.stdout = stdout
stdout, sys.stdout = sys.stdout,open(incpath+'CF2Dpic.inc.hpp', 'w')
print 'int NptsMax=0;'
gP = Generator(dim=dim, types='IDX')
gPJ = Generator(dim=dim, types='JDX')
g.genConeFold(actTmpl=r'picNfld%s', subactTmpl={'B': r'picNfld%s', 'T':r'FLDupdate%s'})
gP.genConeFold(rank="PicRank+1", actTmpl=r'update%s', subactTmpl={'B': r'picNfld%s', 'T':r'FLDupdate%s'})
gP.genConeFold(rank="FFRank-1", actTmpl=r'PIC1update%s', subactTmpl={'X': r'PIC1update%s'})
gPJ.genConeFold(rank="FFRank-1", actTmpl=r'PIC2update%s', subactTmpl={'J': r'PIC2update%s'}, knot='mp')
gP.genConeFold(rank="FFRank-1", actTmpl=r'PIC3update%s', subactTmpl={'X': r'PIC3update%s'})
gP.genConeFold(rank="FFRank", actTmpl=r'pic%s', subactTmpl={'B': r'PIC1update%s', 'X':r'PIC2update%s', 'T':r'PIC3update%s'})
gP.genConeFold(rank="FFRank+1", actTmpl=r'picNfld%s', subactTmpl={'B': r'pic%s', 'T':r'FLDupdate%s'})
sys.stdout.close(); sys.stdout = stdout
#---------------PML-------------------------------------------
stdout, sys.stdout = sys.stdout,open(incpath+'CF2Dpic.inc.hpp', 'w')
#============= Non-PML ConeFold
g = Generator(dim=dim, types='DX')
#acts = ['DD','DX']
acts = ['D'*dim]
#============= BC ConeFold for rank>PMLrank
gBC = Generator(dim=dim, types='LDRX')
gBC.Rules2rank['d'*dim] = 'rank+PMLrank'
#gBC.Rules2rank['dx'] = 'rank+PMLrank'
actsBC = makeCombinations(['LDR','LDR','LDX'])
#============= PML ConeFold for rank<=PMLrank
gPML = Generator(dim=dim, types='ILDSRYX')
gPML.Rules2act['subacts'].update({'I':'-IIS', 'Y':'SYY-', 'S':'SSSS', 'L':'SLLD', 'R':'DRRS'})
gPML.Rules2act['pars'].update({'S':'sp', 'L':'sd', 'R':'ds', 'Y':'s_', 'I':'_s', 'J':'sp'})
for s in 'lr': gPML.Rules2dim[s] = 1
actsPML = makeCombinations(['ILDSRY','ILDSRY','ILDSX'])
print '//===========any rank==============DX'
g.genConeFold(actTmpl=r'%sact', acts4gen=acts)
print '//=========rank>PMLrank============LDR/X'
gBC.genConeFold(actTmpl=r'%sact', acts4gen=actsBC, exclude=acts)
print '//=========rank<=PMLrank============DX'
g.genConeFold(actTmpl=r'%sactPML', subactTmpl={'F':r'%sact'}, acts4gen=acts)
print '//=========rank<PMLrank============ILDSRY'
gPML.genConeFold(actTmpl=r'%sactPML', acts4gen=actsPML, exclude=acts)
print '//=========rank=PMLrank============LDR/X'
gPML.genConeFold(rank='PMLrank', actTmpl=r'%sact', subactTmpl={'S':r'%sactPML'}, acts4gen=actsBC, exclude=acts)
sys.stdout.close(); sys.stdout = stdout
|
gpl-2.0
| -776,956,531,473,899,800
| 54.430446
| 291
| 0.592831
| false
| 2.347338
| false
| false
| false
|
atom-bomb/drill_from_image
|
drill_from_image.py
|
1
|
11733
|
#!/usr/bin/python
#
# Hey, here's a thing:
#
# You can use this bit of python script to generate GCode to drill a PCB based on an image file that you used
# to etch the board.
#
# This script makes GCode to drill the center of sections of an image that are a given color or brightness.
#
# All you need to do is load the image file that you used to etch and color the things you want drilled.
# This should be easy since all of your drills are probably surrounded by traces and all of your traces are
# probably colored black. Just use your favorite graphic editor (such as gimp) to flood fill parts of the board
# that aren't traces or drills, leaving the drills as the only things that are white.
#
# Run this script on your edited image and you'll get some GCode.
#
# Before you run the GCode, jog the spindle over where you want the topmost, leftmost hole to be drilled and
# zero your machine.
# The GCode will begin by moving over where the bottommost, rightmost hole would be drilled.
# Move your workpiece, return to zero, rewind and restart the GCode until your machine lines up with both drills,
# then you can allow the machine to continue to drill your board.
#
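#
# A hypothetical invocation (the file names and numeric values here are
# illustrative; the flags match the argparse setup at the bottom of this
# script):
#
#   python drill_from_image.py --inches --mach3 --safe 0.25 --drill -0.2 board.png > drills.ngc
#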
from __future__ import print_function
import sys
import math
from PIL import Image
import subprocess
import re
import argparse
class BoundingBox:
def __init__(self):
self.coord = [[0, 0], [0, 0]]
self.empty = 1
def intersects(self, box):
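        # (1 ^ empty) flips the 0/1 emptiness flag: both boxes must be
        # non-empty and must overlap on both axes for the test to pass.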
return (((1 ^ self.empty) and (1 ^ box.empty)) and
((self.coord[0][0] < box.coord[1][0]) and
(self.coord[0][1] < box.coord[1][1]) and
(self.coord[1][0] > box.coord[0][0]) and
(self.coord[1][1] > box.coord[0][1])))
def center(self):
return [self.coord[0][0] + ((self.coord[1][0] - self.coord[0][0]) / 2),
self.coord[0][1] + ((self.coord[1][1] - self.coord[0][1]) / 2)]
def boundCoord(self, coord):
if (self.empty):
self.coord[0][0] = coord[0]
self.coord[0][1] = coord[1]
self.coord[1][0] = coord[0]
self.coord[1][1] = coord[1]
self.empty = 0
else:
if (coord[0] < self.coord[0][0]):
self.coord[0][0] = coord[0]
if (coord[1] < self.coord[0][1]):
self.coord[0][1] = coord[1]
if (coord[0] > self.coord[1][0]):
self.coord[1][0] = coord[0]
if (coord[1] > self.coord[1][1]):
self.coord[1][1] = coord[1]
class BoundingBoxList:
def __init__(self):
self.boxes = []
def addBox(self, box):
for oldBox in self.boxes:
if (oldBox.intersects(box)):
return
self.boxes.append(box)
# use ImageMagick to figure out how many pixels per inch or cm in the image file
def getDensity(filename, units = "PixelsPerInch"):
pipe = subprocess.Popen(["identify", "-format", "%x,%y", "-units", units, filename],
stdout=subprocess.PIPE)
res = re.sub('[\t\r\n"]', '', pipe.communicate()[0]).split(',')
xres = float(res[0].split(' ')[0])
yres = float(res[1].split(' ')[0])
return [xres, yres]
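# Note: with '-format "%x,%y"' some ImageMagick 6 builds print the unit name
# after each density (e.g. "300 PixelsPerInch,300 PixelsPerInch" -- the values
# are illustrative), which is why each half is split on the space and only the
# leading number is kept.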
# make a list of drill points from an image map
class DrillMap:
def __init__(self, filename, units = 'Inches', density = [], rgbThresh = 127 * 3):
self.image = Image.open(filename)
self.pixmap = self.image.load()
if (len(density) == 0):
if (units == 'Inches'):
self.density = getDensity(filename)
else:
cmDensity = getDensity(filename, units = 'PixelsPerCentimeter')
self.density = [float(cmDensity[0]) / 10, float(cmDensity[1]) / 10]
        else:
            self.density = density
        self.rgbThresh = rgbThresh
self.boxlist = BoundingBoxList()
self.drillList = []
self.findBoxes()
self.makeDrillList()
def coordOffset(self, coord):
return [float(coord[0]) / float(self.density[0]), float(coord[1]) / float(self.density[1])]
def isCoordOn(self, coord):
pixel = self.pixmap[coord[0], coord[1]]
if (self.image.mode == "RGB"):
sum = pixel[0] + pixel[1] + pixel[2]
return (sum > self.rgbThresh)
if (self.image.mode == "1"):
return pixel
def scanLeftToBox(self, coord, box):
y = coord[1]
x = coord[0]
while ((x >= 0) and self.isCoordOn([x, y])):
box.boundCoord([x, y])
x = x - 1
return (x != coord[0])
def scanRightToBox(self, coord, box):
y = coord[1]
x = coord[0]
        while ((x <= self.image.size[0] - 1) and self.isCoordOn([x, y])):
box.boundCoord([x, y])
x = x + 1
return (x != coord[0])
def scanLineToBox(self, coord, box):
return (self.scanLeftToBox(coord, box) or self.scanRightToBox(coord, box))
def scanUpperLineToBox(self, coord, box):
if (coord[1] > 0):
upperCoord = [int(box.center()[0]), coord[1] - 1]
if (self.scanLineToBox(upperCoord, box)):
self.scanUpperLineToBox(upperCoord, box)
def scanLowerLineToBox(self, coord, box):
if (coord[1] < self.image.size[1] - 1):
lowerCoord = [box.center()[0], coord[1] + 1]
if (self.scanLineToBox(lowerCoord, box)):
self.scanLowerLineToBox(lowerCoord, box)
def scanToBox(self, coord):
box = BoundingBox()
if (self.scanRightToBox(coord, box)):
self.scanUpperLineToBox(coord, box)
self.scanLowerLineToBox(coord, box)
return box
def findBoxes(self):
y = 0
while (y < self.image.size[1] - 1):
x = 0
while (x < self.image.size[0] - 1):
if (self.isCoordOn([x, y])):
newBox = self.scanToBox([x, y])
if (not newBox.empty):
self.boxlist.addBox(newBox)
x = newBox.coord[1][0] + 1
else:
x += 1
else:
x += 1
y += 1
def makeDrillList(self):
for eachBox in self.boxlist.boxes:
self.drillList.append(self.coordOffset(eachBox.center()))
class GCode:
GCodeCommands = {'Mach3': {
'Message': '(',
'Stop': 'M0',
'Sleep': 'M01',
'SpindleCW': 'M03',
'SpindleCCW': 'M04',
'SpindleStop': 'M05',
'ToolChange': 'M06',
'Pause': 'M60',
'FastMove': 'G0',
'SlowMove': 'G1',
'Dwell': 'G4',
'InchesMode': 'G20',
'MillimetersMode': 'G21',
'MoveToOrigin': 'G28',
        'ClearToolOffset': 'G49',
'Drill': 'G81',
'DrillWithDwell': 'G82',
'AbsoluteMode': 'G90',
'RelativeMode': 'G91',
'SetPosition': 'G92',
},
'EMC': {
'Message': '(MSG,',
'Stop': 'M0',
'Sleep': 'M01',
'SpindleCW': 'M03',
'SpindleCCW': 'M04',
'SpindleStop': 'M05',
'ToolChange': 'M06',
'Pause': 'M60',
'FastMove': 'G0',
'SlowMove': 'G1',
'Dwell': 'G4',
'InchesMode': 'G20',
'MillimetersMode': 'G21',
'MoveToOrigin': 'G28',
        'ClearToolOffset': 'G49',
'Drill': 'G81',
'DrillWithDwell': 'G82',
'AbsoluteMode': 'G90',
'RelativeMode': 'G91',
'SetPosition': 'G92',
}}
def __init__(self, theGCodeType):
self.variant = theGCodeType
def Comment(self, string):
return " ; " + string
def Message(self, string):
return self.GCodeCommands[self.variant]['Message'] + string + " )"
def Pause(self):
return self.GCodeCommands[self.variant]['Pause']
def Spindle(self, Mode):
SpindleModes = {'Stop': 'SpindleStop', 'CW': 'SpindleCW', 'CCW': 'SpindleCCW'}
return self.GCodeCommands[self.variant][SpindleModes[Mode]]
def Units(self, theUnits):
if (theUnits == 'Inches'):
return self.GCodeCommands[self.variant]['InchesMode']
else:
return self.GCodeCommands[self.variant]['MillimetersMode']
def Absolute(self, isAbsolute = True):
if (isAbsolute):
return self.GCodeCommands[self.variant]['AbsoluteMode']
else:
return self.GCodeCommands[self.variant]['RelativeMode']
def _CommonArgs(self, X = None, Y = None, Z = None, rate = None):
OutStr = ''
if (X != None):
OutStr += ' X' + format(X, ".4f")
if (Y != None):
OutStr += ' Y' + format(Y, ".4f")
if (Z != None):
OutStr += ' Z' + format(Z, ".4f")
if (rate != None):
OutStr += ' F' + format(rate, ".4f")
return OutStr
def Move(self, X = None, Y = None, Z = None, rate = None, speed='Fast'):
OutStr = self.GCodeCommands[self.variant][speed + 'Move']
OutStr += self._CommonArgs(X = X, Y = Y, Z = Z, rate = rate)
return OutStr
    def Dwell(self, seconds = 1):
        OutStr = self.GCodeCommands[self.variant]['Dwell'] + ' P' + str(seconds)
        return OutStr
    def Drill(self, X = None, Y = None, Z = None, retract = None, seconds = None, rate = None):
        if (seconds != None):
            OutStr = self.GCodeCommands[self.variant]['DrillWithDwell']
            OutStr += ' P' + str(seconds)
        else:
            OutStr = self.GCodeCommands[self.variant]['Drill']
        OutStr += self._CommonArgs(X = X, Y = Y, Z = Z, rate = rate)
        if (retract != None):
            OutStr += ' R' + str(retract)
        return OutStr
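    # e.g. (illustrative values) GCode('Mach3').Drill(X=1.0, Y=2.0, Z=-0.2,
    # retract=0.25, seconds=0.5) yields
    # "G82 P0.5 X1.0000 Y2.0000 Z-0.2000 R0.25".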
# -------- execution starts here
# parse parameters
# TODO: add density parameter & drill color parameter & check for ImageMagick
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--verbose', action='store_true', help='spew possibly useless output')
parser.add_argument('-m', '--millimeters',
action='store_const', dest='units', const='Millimeters', help='set units to millimeters')
parser.add_argument('-i', '--inches',
action='store_const', dest='units', const='Inches', help='set units to inches')
parser.add_argument('-a', '--mach3',
action='store_const', dest='gcode', const='Mach3', help='set gcode type to mach3')
parser.add_argument('-e', '--emc',
action='store_const', dest='gcode', const='EMC', help='set gcode type to emc')
parser.add_argument('-s', '--safe',
                    default=0.25, type=float, help='safe height')
parser.add_argument('-d', '--drill',
                    default=-0.2, type=float, help='drill depth')
parser.add_argument('-p', '--dwell',
                    default=0.5, type=float, help='drill dwell')
parser.add_argument('-f', '--feed',
                    default=100.0, type=float, help='feed rate')
parser.add_argument('input')
args = parser.parse_args()
if (args.gcode == None):
args.gcode = 'Mach3'
if (args.units == None):
args.units = 'Inches'
theMap = DrillMap(args.input, args.units)
# make drill coordinates relative to first drill
if (not theMap.drillList):
    sys.exit("No drill locations found in image")
firstCoord = theMap.drillList[0]
relativeDrillList = []
for drill in theMap.drillList:
    newCoord = [drill[0] - firstCoord[0], drill[1] - firstCoord[1]]
    relativeDrillList.append(newCoord)
# output gcode for the list of drills
# init machine, set units, zero axes
gc = GCode(args.gcode)
print(gc.Spindle('Stop'))
print(gc.Units(args.units))
print(gc.Absolute())
print(gc.Pause(), gc.Comment('Check that tool is aligned with first drill'))
print(gc.Move(Z = args.safe))
# move to last drill position and pause
lastDrill = len(relativeDrillList) - 1
print(gc.Move(X = relativeDrillList[lastDrill][0], Y = relativeDrillList[lastDrill][1]))
print(gc.Pause())
print(gc.Pause(), gc.Comment('Check that tool is aligned with last drill'))
print(gc.Spindle('CW'))
print(gc.Dwell(3))
print(gc.Message('Drilling'))
# move to each drill position and drill
for eachDrill in relativeDrillList:
print(gc.Drill(X = eachDrill[0], Y = eachDrill[1], Z = args.drill, retract = args.safe, seconds = args.dwell))
# end of GCode program
print(gc.Spindle('Stop'))
print(gc.Pause())
|
unlicense
| 695,087,722,139,142,000
| 32.618911
| 112
| 0.594903
| false
| 3.143048
| false
| false
| false
|
735tesla/SkypeDump
|
skypedump.py
|
1
|
6469
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import re
import unicodedata
import webbrowser
import hashlib
import sqlite3 as sqlite
import xml.etree.ElementTree as ET
import platform
import sys
class ChatMessage(object):
def __init__(self):
super(ChatMessage, self).__init__()
self.from_username = '[data not available]'
self.to_username = '[data not available]'
self.message_body = '[data not available]'
@property
def from_username(self):
return self._from_username
@from_username.setter
def from_username(self, value):
self._from_username = value
@property
def to_username(self):
return self._to_username
@to_username.setter
def to_username(self, value):
self._to_username = value
@property
def message_body(self):
return self._message_body
@message_body.setter
def message_body(self, value):
self._message_body = value
def to_html(self):
html = """
<tr>
<td>__from_username__</td>
<td>__to_username__</td>
<td>__msg_body__</td>
</tr>
"""
html = html.replace('__from_username__', self.from_username)
html = html.replace('__to_username__', self.to_username)
html = html.replace('__msg_body__', self.message_body)
return html
class SkypeUser(object):
def __init__(self):
super(SkypeUser, self).__init__()
self.actual_name = '[data not available]'
self.username = '[data not available]'
self.birthday = '[data not available]'
self.phone_home = '[data not available]'
self.phone_mobile = '[data not available]'
self.email = '[data not available]'
@property
def actual_name(self):
return self._actual_name
@actual_name.setter
def actual_name(self, value):
self._actual_name = value
@property
def username(self):
return self._username
@username.setter
def username(self, value):
self._username = value
@property
def birthday(self):
return self._birthday
@birthday.setter
def birthday(self, value):
self._birthday = str(value)
@property
def phone_home(self):
return self._phone_home
@phone_home.setter
def phone_home(self, value):
self._phone_home = value
@property
def phone_mobile(self):
return self._phone_mobile
@phone_mobile.setter
def phone_mobile(self, value):
self._phone_mobile = value
@property
def email(self):
return self._email
@email.setter
def email(self, value):
self._email = value
def to_html(self):
html = """
<tr>
<td>__username__</td>
<td>__fullname__</td>
<td>__birthday__</td>
<td>__homphone__</td>
<td>__mobphone__</td>
<td>__theemail__</td>
</tr>
"""
html = html.replace('__username__', self.username)
html = html.replace('__fullname__', self.actual_name)
html = html.replace('__birthday__', self.birthday)
html = html.replace('__homphone__', self.phone_home)
html = html.replace('__mobphone__', self.phone_mobile)
html = html.replace('__theemail__', self.email)
return html
def process_skype_database(db_file):
messages = []
user = None
database_connection = sqlite.connect(db_file)
database_cursor = database_connection.cursor()
database_cursor.execute('SELECT author,dialog_partner,body_xml FROM Messages')
for from_username,to_username,body_xml in database_cursor.fetchall():
chatmessage = ChatMessage()
if from_username:
chatmessage.from_username = from_username
if to_username:
chatmessage.to_username = to_username
if body_xml:
chatmessage.message_body = body_xml
messages.append(chatmessage)
database_cursor.execute('SELECT skypename,fullname,birthday,phone_home,phone_mobile,emails from Accounts')
xml_root = ET.parse('/'.join(db_file.split('/')[:-1])+'/config.xml').getroot()
auth_data = xml_root[0][0][0].text # TODO: find out how to decrypt this
user = SkypeUser()
user_data = database_cursor.fetchone()
if user_data[0]:
user.username = user_data[0]
if user_data[1]:
user.actual_name = user_data[1]
if user_data[2]:
user.birthday = user_data[2]
if user_data[3]:
user.phone_home = user_data[3]
if user_data[4]:
user.phone_mobile = user_data[4]
if user_data[5]:
user.email = user_data[5]
return (user, messages)
def verify_os_type():
if platform.system() != 'Darwin':
sys.stderr.write('[!] Incompatible operating system\n')
exit(-1)
def get_db_list():
db_files = []
home_dir = os.path.expanduser("~")
db_dir = home_dir+'/Library/Application Support/Skype'
for the_dir in os.listdir(db_dir):
if os.path.isdir(db_dir+'/'+the_dir) and the_dir not in ('DataRv', 'EmoticonCache.bundle', 'shared_dynco', 'shared_httpfe'):
db_files.append(db_dir+'/'+the_dir+'/main.db')
return db_files
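# Each per-account Skype profile directory holds one database, so the paths
# returned look like (placeholders illustrative):
#   <home>/Library/Application Support/Skype/<skypename>/main.db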
def main(args):
html = """
<!DOCTYPE html>
<html>
<head>
<meta charset='utf-8'>
<title>SkypeDump Output Table</title>
<link rel="stylesheet" href="http://maxcdn.bootstrapcdn.com/bootstrap/3.2.0/css/bootstrap.min.css">
<link rel="stylesheet" href="http://maxcdn.bootstrapcdn.com/bootstrap/3.2.0/css/bootstrap-theme.min.css">
<script src="http://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script src="http://maxcdn.bootstrapcdn.com/bootstrap/3.2.0/js/bootstrap.min.js"></script>
<style type="text/css">
.sd-table{
margin: 20px;
}
</style>
</head>
<body>
<div class="sd-table">
<table class="table">
<thead>
<tr>
<th>Skype Username:</th>
<th>Real Name:</th>
<th>Birthday:</th>
<th>Home Phone #:</th>
<th>Cell Phone #:</th>
<th>Email:</th>
</tr>
</thead>
<tbody>
__USER_DATA__
</tbody>
</table>
<table class="table">
<thead>
<tr>
<th>From:</th>
<th>To:</th>
<th>Message:</th>
</tr>
</thead>
<tbody>
__MESSAGE_DATA__
</tbody>
</table>
</div>
</body>
</html>
"""
user_html = ''
message_html = ''
for db_file in get_db_list():
print "[*] Processing database: %s\n" % (db_file)
user_info, messages_info = process_skype_database(db_file)
user_html += user_info.to_html()
for message in messages_info:
message_html += message.to_html()
html = html.replace('__USER_DATA__', user_html)
html = html.replace('__MESSAGE_DATA__', message_html)
html = unicodedata.normalize('NFKD', html).encode('ascii', 'ignore')
html = re.sub(r'[^\x00-\x7F]+', '', html)
with open('/tmp/skype_db.html', 'w') as f:
f.write(html)
webbrowser.open_new_tab('/tmp/skype_db.html')
if __name__ == '__main__':
main(sys.argv)
|
gpl-2.0
| 962,441,054,399,818,200
| 27.004329
| 126
| 0.64554
| false
| 2.893113
| false
| false
| false
|
jtauber/czerny
|
prototypes/process_hanon_21.py
|
1
|
2417
|
#!/usr/bin/env python
from align import nw_align
def load_score(filename):
score = []
for line in open(filename):
note, duration_64 = line.strip().split()
note = int(note)
duration_64 = int(duration_64)
score.append((note, duration_64))
return score
def load_performance(filename):
performance = []
# dictionary mapping pitch to offset and velocity of event when that pitch
# was started
note_started = {}
for line in open(filename):
offset, note, velocity = line.strip().split()
offset = int(float(offset) * 1000000)
note = int(note)
velocity = int(velocity)
if velocity > 0:
if note in note_started:
# new note at that pitch started before previous finished
# not sure it should happen but let's handle it anyway
(start_offset, start_velocity) = note_started.pop(note)
duration = offset - start_offset
performance.append(
(start_offset, note, start_velocity, duration))
note_started[note] = (offset, velocity)
else: # note end
if note not in note_started:
# note was never started so ignore
pass
else:
(start_offset, start_velocity) = note_started.pop(note)
duration = offset - start_offset
performance.append(
(start_offset, note, start_velocity, duration))
return performance
# similarity measure used by Needleman-Wunsch algorithm
def note_similarity(score_note, performance_note):
# at the moment we just give a 1 if the pitch matches, 0.5 if it's
# within a tone and 0 if more
# over time this can be tweaked to include velocity, duration, etc
if score_note[0] == performance_note[1]:
return 1
elif abs(score_note[0] - performance_note[1]) < 3:
return 0.5
else:
return 0
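# e.g. score pitch 60 against performed pitch 60 scores 1; 60 against 61 or 62
# (within a whole tone) scores 0.5; 60 against 65 scores 0.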
if __name__ == "__main__":
score = load_score("../examples/scores/hanon_21_rh.txt")
performance = load_performance("../examples/recordings/hanon_21_rh.txt")
# align score and performance using above similarity function and a penalty
# of -1 for insertions and deletions @@@ might need a lot of tweaking
for i in nw_align(score, performance, note_similarity, -1, -1):
print i
|
mit
| 829,367,193,271,788,400
| 29.2125
| 79
| 0.599917
| false
| 4.196181
| false
| false
| false
|
Farthen/OTFBot
|
otfbot/plugins/ircClient/seen.py
|
1
|
2384
|
# This file is part of OtfBot.
#
# OtfBot is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# OtfBot is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with OtfBot; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
# (c) 2009 by Thomas Wiegart
#
import pickle,time,os
from otfbot.lib import chatMod
class Plugin(chatMod.chatMod):
def __init__(self, bot):
self.bot = bot
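        # `datadir` (used below) is not defined in this module; it is expected
        # to be provided by the surrounding OtfBot plugin environment.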
try:
os.mkdir(datadir)
except OSError:
pass
try:
f = file(datadir + "/users", "rb")
self.userdata = pickle.load(f)
f.close()
except IOError:
self.userdata = [{}]
self.bot.root.getServiceNamed('scheduler').callLater(60, self.save_data) #TODO: call this only on exit
def joined(self,channel):
try:
self.userdata[0][channel]
except KeyError:
self.userdata[0][channel] = {}
def msg(self, user, channel, msg):
if channel[0] == "#":
self.userdata[0][channel][user.split("!")[0].lower()] = {'msg':msg, 'time':time.time()}
def command(self, user, channel, command, options):
if command == "seen":
try:
zeit = self.userdata[0][channel][options.lower()]['time']
msg = self.userdata[0][channel][options.lower()]['msg']
self.bot.sendmsg(channel,"user " + options + " was last seen on " + str(time.strftime("%a, %d %b %Y %H:%M:%S",time.localtime(zeit))) + " saying '" + msg + "'.")
except:
self.bot.sendmsg(channel,"user " + options + " is unknown")
def stop(self):
self.save_data()
def save_data(self):
f = file(datadir + "/users", "wb")
pickle.dump(self.userdata, f)
f.close()
self.bot.root.getServiceNamed('scheduler').callLater(60, self.save_data)
|
gpl-2.0
| -1,188,863,002,668,787,700
| 36.25
| 176
| 0.599832
| false
| 3.701863
| false
| false
| false
|
QuantiModo/QuantiModo-SDK-Python
|
SwaggerPetstore/models/connector.py
|
1
|
3170
|
#!/usr/bin/env python
# coding: utf-8
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class Connector(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
Swagger model
:param dict swaggerTypes: The key is attribute name and the value is attribute type.
:param dict attributeMap: The key is attribute name and the value is json key in definition.
"""
self.swagger_types = {
'id': 'int',
'name': 'str',
'display_name': 'str',
'image': 'str',
'get_it_url': 'str',
'connected': 'str',
'connect_instructions': 'str',
'last_update': 'int',
'latest_data': 'int',
'no_data_yet': 'bool'
}
self.attribute_map = {
'id': 'id',
'name': 'name',
'display_name': 'displayName',
'image': 'image',
'get_it_url': 'getItUrl',
'connected': 'connected',
'connect_instructions': 'connectInstructions',
'last_update': 'lastUpdate',
'latest_data': 'latestData',
'no_data_yet': 'noDataYet'
}
# Connector ID number
self.id = None # int
# Connector lowercase system name
self.name = None # str
# Connector pretty display name
self.display_name = None # str
# URL to the image of the connector logo
self.image = None # str
# URL to a site where one can get this device or application
self.get_it_url = None # str
# True if the authenticated user has this connector enabled
self.connected = None # str
# URL and parameters used when connecting to a service
self.connect_instructions = None # str
# Epoch timestamp of last sync
self.last_update = None # int
# Number of measurements obtained during latest update
self.latest_data = None # int
# True if user has no measurements for this connector
self.no_data_yet = None # bool
def __repr__(self):
properties = []
for p in self.__dict__:
if p != 'swaggerTypes' and p != 'attributeMap':
properties.append('{prop}={val!r}'.format(prop=p, val=self.__dict__[p]))
return '<{name} {props}>'.format(name=__name__, props=' '.join(properties))
|
gpl-2.0
| -8,626,761,504,861,406,000
| 31.346939
| 100
| 0.561199
| false
| 4.408901
| false
| false
| false
|
mvsaha/blahb
|
blahb/label.py
|
1
|
15233
|
import numba
from .utils import exponential_search
from .strgen import *
def _split_init_into_coords_init_str(dim):
return "coords_{} = loc[:, {}]".format(dim, dim)
def split_init_into_coords_init_str(ndim):
return '\n'.join([_split_init_into_coords_init_str(dim)
for dim in range(ndim)])
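# e.g. (for illustration) split_init_into_coords_init_str(2) emits:
#   coords_0 = loc[:, 0]
#   coords_1 = loc[:, 1]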
update_cursor_dim_0_base_string = """
if shift_0:
left_edge_0 = c0 - r0
if shift_0 >= {P_shape_0}:
# Re-initialize first cursor when all spans are invalidated
cursors_0[0] = exponential_search(
coords_0, left_edge_0, start=ends_0[-1])
ends_0[0] = exponential_search(
coords_0, left_edge_0 + 1, start=cursors_0[0])
else:
# Shift the spans that are still valid, but cursors must be reset
for sh in range({P_shape_0} - shift_0): #
cursors_0[sh] = ends_0[sh + shift_0 - 1]
ends_0[sh] = ends_0[sh + shift_0]
# Initialize cursors/ends for positions that are not shifted
shift_0 = min(shift_0, {P_shape_0})
for sh in range({P_shape_0} - shift_0, {P_shape_0}):
cursors_0[sh] = exponential_search(
coords_0, left_edge_0 + sh, ends_0[sh - 1])
ends_0[sh] = exponential_search(
coords_0, left_edge_0 + sh + 1, start=cursors_0[sh])
if shift_0:
shift_1 = np.int64({P_shape_1})
shift_0 = np.int64(coords_0[i_coord + 1] - c0)
c0 = coords_0[i_coord + 1]
"""
def update_cursor_section_dim_0(neigh_shape):
"""Propagate shift should be true if there is more than one dimension"""
if not len(neigh_shape) > 1:
raise ValueError(
"Use specialized 1d labeling function for 1d pixelsets.")
return update_cursor_dim_0_base_string.format(
P_shape_0=neigh_shape[0],
P_shape_1=neigh_shape[1],
)
init_loop_base_string = """
start = cursors_{dim_minus_1}[{lower_dim_index}]
stop = ends_{dim_minus_1}[{lower_dim_index}]
cursors_{dim}[{lower_dim_index}, 0] = exponential_search(
coords_{dim}, left_edge_{dim}, start=start, stop=stop)
ends_{dim}[{lower_dim_index}, 0] = exponential_search(
coords_{dim}, left_edge_{dim} + 1, start=cursors_{dim}[{lower_dim_index}, 0], stop=stop)
"""
def param_init_loop(shp, dim):
assert dim <= len(shp)
lower_dim_index = ', '.join(
[i_(low_dim) for low_dim in range(dim)]) # 'i0, i1, ...'
body = init_loop_base_string.format(
dim=dim,
dim_minus_1=dim - 1,
lower_dim_index=lower_dim_index,
)
return loop_over_shape(shp[:dim], body)
shift_loop_base_string = """
for sh in range({dim_shape} - shift_{dim}): #
cursors_{dim}[{lower_dim_index}, sh] = ends_{dim}[{lower_dim_index}, sh + shift_{dim} - 1]
ends_{dim}[{lower_dim_index}, sh] = ends_{dim}[{lower_dim_index}, sh + shift_{dim}]
"""
def param_shift_loop(shp, dim):
assert len(shp) > dim
lower_dim_index = ', '.join(
[i_(low_dim) for low_dim in range(dim)]) # 'i0, i1, ...'
body = shift_loop_base_string.format(
dim=dim,
dim_shape=shp[dim],
lower_dim_index=lower_dim_index,
)
return loop_over_shape(shp[:dim], body)
set_higher_shift_string = """shift_{dim_plus_1} = {dim_plus_1_shape}"""
def param_set_higher_shift(shp, dim):
if len(shp) - dim < 2:
return ''
else:
return set_higher_shift_string.format(
dim_plus_1=dim + 1, dim_plus_1_shape=shp[dim + 1])
set_new_cursor_loop_base_exponential_search_string = """
start = cursors_{dim_minus_1}[{lower_dim_index}]
stop = ends_{dim_minus_1}[{lower_dim_index}]
for sh in range({dim_shape} - shift_{dim}, {dim_shape}):
start = max(start, ends_{dim}[{lower_dim_index}, sh - 1])
cursors_{dim}[{lower_dim_index}, sh] = exponential_search(
coords_{dim}, left_edge_{dim} + sh, start=start, stop=stop)
ends_{dim}[{lower_dim_index}, sh] = exponential_search(
coords_{dim}, left_edge_{dim} + sh + 1,
start=cursors_{dim}[{lower_dim_index}, sh], stop=stop)
"""
set_new_cursor_loop_base_linear_search_string = """
start = cursors_{dim_minus_1}[{lower_dim_index}]
stop = ends_{dim_minus_1}[{lower_dim_index}]
for sh in range({dim_shape} - shift_{dim}, {dim_shape}):
start = max(start, ends_{dim}[{lower_dim_index}, sh - 1])
for i in range(start, stop + 1):
if coords_{dim}[i] >= left_edge_{dim} + sh or i == stop:
cursors_{dim}[{lower_dim_index}, sh] = i
break
start = cursors_{dim}[{lower_dim_index}, sh]
for i in range(start, stop + 1):
if coords_{dim}[i] > left_edge_{dim} + sh or i == stop:
ends_{dim}[{lower_dim_index}, sh] = i
break
"""
def param_set_new_cursor_loop(shp, dim):
assert len(shp) > dim
lower_dim_index = ', '.join(
[i_(low_dim) for low_dim in range(dim)]) # 'i0, i1, ...'
if dim < 2:
base_str = set_new_cursor_loop_base_exponential_search_string
else:
base_str = set_new_cursor_loop_base_linear_search_string
body = base_str.format(
dim=dim,
dim_shape=shp[dim],
dim_minus_1=dim - 1,
lower_dim_index=lower_dim_index
)
return loop_over_shape(shp[:dim], body)
minimize_shift_string = """shift_{dim} = min(shift_{dim}, {dim_shape})"""
def minimize_shift(dim, dim_shape):
return minimize_shift_string.format(dim=dim, dim_shape=dim_shape)
cursor_loops_string = """
if shift_{dim}:
left_edge_{dim} = c{dim} - r{dim}
right_edge_{dim} = c{dim} + r{dim}
if shift_{dim} >= {dim_shape}:
{init_loop}
else:
{shift_loop}
{minimize_shift}
{set_new_cursor_loop}
{set_higher_shift}
shift_{dim} = np.int64(coords_{dim}[i_coord + 1] - c{dim})
c{dim} = coords_{dim}[i_coord + 1]
"""
def param_cursor_loops(shp, dim):
return cursor_loops_string.format(
dim=dim,
dim_shape=shp[dim],
init_loop=indent_block(param_init_loop(shp, dim), 2, first_line=0),
shift_loop=indent_block(param_shift_loop(shp, dim), 2, first_line=0),
minimize_shift=minimize_shift(dim, shp[dim]),
set_new_cursor_loop=indent_block(param_set_new_cursor_loop(shp, dim),
1, first_line=0),
set_higher_shift=param_set_higher_shift(shp, dim)
)
last_dim_loop_string = """
c{dim} = coords_{dim}[i_coord]
left_edge_{dim} = c{dim} - r{dim}
right_edge_{dim} = c{dim} + r{dim}
{do_something_with_central_pixel}
{low_dim_loop}"""
last_dim_loop_body_string_hyperrect = """
cursor = cursors_{dim_minus_1}[{lower_dim_index}]
while cursor < ends_{dim_minus_1}[{lower_dim_index}] and coords_{dim}[cursor] < left_edge_{dim}:
cursor += 1
cursors_{dim_minus_1}[{lower_dim_index}] = cursor # Save the position we reached along the shard
while cursor < ends_{dim_minus_1}[{lower_dim_index}] and coords_{dim}[cursor] <= right_edge_{dim}:
{do_something_with_neighbors}
cursor += 1"""
last_dim_loop_body_string_struct_el = """
cursor = cursors_{dim_minus_1}[{lower_dim_index}]
while cursor < ends_{dim_minus_1}[{lower_dim_index}] and coords_{dim}[cursor] < left_edge_{dim}:
cursor += 1
cursors_{dim_minus_1}[{lower_dim_index}] = cursor # Save the position we reached along the shard
_end = ends_{dim_minus_1}[{lower_dim_index}]
for i_final in range({last_dim_shape}):
while cursor < _end and coords_{dim}[cursor] < left_edge_{dim} + i_final:
cursor += 1
if cursor == _end:
break
elif coords_{dim}[cursor] == left_edge_{dim} + i_final and struct_el[{lower_dim_index}, i_final]:
{do_something_with_neighbors}"""
def param_last_dim_loop(shp, struct_el):
"""
shp : Shape of the hyperrect around the central pixel to search for neighbors
struct_el: True/False on whether a structuring element of shape shp will be used."""
assert len(shp)
last_dim = len(shp) - 1
lower_dim_index = ', '.join(
[i_(low_dim) for low_dim in range(last_dim)]) # 'i0, i1, ...'
if struct_el:
loop_body = last_dim_loop_body_string_struct_el.format(
dim=last_dim,
dim_minus_1=last_dim - 1,
last_dim_shape=shp[-1],
lower_dim_index=lower_dim_index,
do_something_with_neighbors="{do_something_with_neighbors}"
)
else:
loop_body = last_dim_loop_body_string_hyperrect.format(
dim=last_dim,
dim_minus_1=last_dim - 1,
lower_dim_index=lower_dim_index,
do_something_with_neighbors="{do_something_with_neighbors}"
)
loop = loop_over_shape(shp[:-1], loop_body)
return last_dim_loop_string.format(
dim=last_dim,
low_dim_loop=loop,
do_something_with_central_pixel="{do_something_with_central_pixel}",
)
# Find the ancestors of neighbor index
find_central_ancestor_string = """
central_ancestor = labels[i_coord]
while labels[central_ancestor] != central_ancestor:
prev_central_ancestor = central_ancestor
central_ancestor = labels[central_ancestor]
labels[prev_central_ancestor] = central_ancestor"""
find_neighbor_ancestor_string = """
#central_ancestor = labels[i_coord]
neighbor_ancestor = labels[cursor]
if neighbor_ancestor == central_ancestor:
break
#while labels[central_ancestor] != central_ancestor:
# prev_central_ancestor = central_ancestor
# central_ancestor = labels[central_ancestor]
# labels[prev_central_ancestor] = central_ancestor
while labels[neighbor_ancestor] != neighbor_ancestor:
prev_neighbor_ancestor = neighbor_ancestor
neighbor_ancestor = labels[neighbor_ancestor]
labels[prev_neighbor_ancestor] = neighbor_ancestor
if neighbor_ancestor == central_ancestor:
labels[cursor] = central_ancestor
labels[i_coord] = central_ancestor
if neighbor_ancestor < central_ancestor:
labels[cursor] = neighbor_ancestor
labels[i_coord] = neighbor_ancestor
labels[central_ancestor] = neighbor_ancestor
central_ancestor = neighbor_ancestor
else: # neighbor_ancestor > central_ancestor:
labels[cursor] = central_ancestor
labels[i_coord] = central_ancestor
labels[neighbor_ancestor] = central_ancestor"""
finalize_labels_str = """
for i in range(labels.size-1, -1, -1):
i = numba.int_(i)
anc = i
while anc != labels[anc]:
anc = numba.int_(labels[anc])
while labels[i] != anc:
i_prev = i
labels[i_prev] = anc
i = numba.int_(labels[i])
"""
label_func_string = """
def label(loc, labels, {struct_el}):
{split_loc_to_coords}
# Number of coordinates
n = coords_0.size
{shift_init_strings}
{cursors_init_strings}
{ends_init_strings}
{coord_init_strings}
{range_init_strings}
for i_coord in range(n):
{coord_loop_body}
{finish_up}
return labels"""
def find_neighbors_func(neigh_shape, use_struct_el):
""" Build a nopython function to label locations.
Arguments
---------
neigh_shape : ndim-tuple of ints
Should all be odd numbers so that the central pixel remains well
defined
use_struct_el : bool
Flag indicating that the structuring element is not a perfect
        hyperrect neighborhood (i.e. np.all(struct_el) == False)
Returns
-------
Numba nopython function that labels IndexSet locations that are neighbors.
"""
ndim = len(neigh_shape)
fn = label_func_string.format(
struct_el='struct_el' if use_struct_el else '',
split_loc_to_coords = indent_block(
split_init_into_coords_init_str(ndim), 1, first_line=0),
coord_dim_names=coord_dim_names(ndim),
coord_init_strings=indent_block(coord_init_strings(ndim),
first_line=0),
shift_init_strings=indent_block(shift_init_strings(neigh_shape), 1,
first_line=0),
cursors_init_strings=indent_block(
cursors_init_strings(neigh_shape, np.int64), first_line=0),
ends_init_strings=indent_block(
ends_init_strings(neigh_shape, np.int64), first_line=0),
range_init_strings=indent_block(range_init_strings(neigh_shape),
first_line=0),
coord_loop_body=''.join(
[indent_block(update_cursor_section_dim_0(neigh_shape), 2)] +
[indent_block(param_cursor_loops(neigh_shape, i), 2) for i in
range(1, ndim - 1)] +
[indent_block(param_last_dim_loop(neigh_shape, use_struct_el), 2)]
),
finish_up=indent_block(finalize_labels_str, 1, first_line=0),
)
indent_amount = ndim + 3 if use_struct_el else ndim + 2
fn = fn.format(
do_something_with_central_pixel=indent_block(
find_central_ancestor_string, 2, first_line=0),
do_something_with_neighbors=indent_block(find_neighbor_ancestor_string,
indent_amount, first_line=0),
)
return fn
__saved_neighbor_funcs = dict()
def build_label_func(shape, use_struct_el):
if (shape, use_struct_el) in __saved_neighbor_funcs:
return __saved_neighbor_funcs[(shape, use_struct_el)]
fn_string = find_neighbors_func(shape, use_struct_el)
_loc = dict()
exec(fn_string, globals(), _loc)
fn = numba.jit(_loc['label'], nopython=True, nogil=True)
__saved_neighbor_funcs[(shape, use_struct_el)] = fn
return fn
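# Illustrative use (the (3, 3) neighborhood shape is an assumption): builds --
# or fetches from the cache -- a jitted `label(loc, labels)` for that shape.
#   label3x3 = build_label_func((3, 3), use_struct_el=False)
#   label3x3(loc, labels)  # labels: preallocated integer array, one per row of loc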
@numba.njit
def merge_chunked_labels(master_labels, chunk_labels, overlap_start,
overlap_stop):
n_overlapping = overlap_stop - overlap_start
for i_chunk, i_master in enumerate(range(overlap_start, overlap_stop)):
# print(i_chunk, i_master)
anc_master = master_labels[i_master]
while master_labels[anc_master] != anc_master:
anc_master_prev = anc_master
anc_master = master_labels[anc_master]
master_labels[anc_master_prev] = anc_master
anc_chunk = chunk_labels[i_chunk] + overlap_start
while master_labels[anc_chunk] != anc_chunk:
anc_chunk_prev = anc_chunk
anc_chunk = master_labels[anc_chunk]
master_labels[anc_chunk_prev] = anc_chunk
if anc_chunk < anc_master:
master_labels[anc_master] = anc_chunk
elif anc_master < anc_chunk:
master_labels[anc_chunk] = anc_master
fin = overlap_stop + chunk_labels.size - n_overlapping
master_labels[overlap_stop:fin] = (
chunk_labels[n_overlapping:] + overlap_start)
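# Illustrative merge (values are assumptions): with overlap_start=10 and
# overlap_stop=12, chunk entries 0..1 describe the same points as master
# entries 10..11; their roots are unioned toward the smaller label, and the
# remaining chunk labels are appended shifted by +overlap_start.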
@numba.njit([numba.void(numba.uint8[:]), numba.void(numba.uint16[:]),
numba.void(numba.uint32[:]), numba.void(numba.uint64[:])],
nogil=True)
def finalize_labels(labels):
"""Ensure that labels are root or point to a root."""
for i in range(labels.size - 1, -1, -1):
i = numba.int_(i)
anc = i
while anc != labels[anc]:
anc = numba.int_(labels[anc])
while labels[i] != anc:
i_prev = i
labels[i_prev] = anc
i = numba.int_(labels[i])
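# e.g. a parent chain labels = [0, 0, 1, 2] collapses to [0, 0, 0, 0]: after
# the reverse sweep every entry points directly at its root.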
|
mit
| 230,391,288,093,049,820
| 31.551282
| 101
| 0.601589
| false
| 3.208298
| false
| false
| false
|
istio/tools
|
perf/docker/rabbitmq/client.py
|
1
|
2647
|
# Copyright Istio Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import time
import logging
import prom_client
import pika
import sys
password = os.environ["PASSWORD"]
username = os.environ["USERNAME"]
address = os.environ["ADDRESS"]
queue = 'queue'
def with_metrics(f, valid=None):
return prom_client.attempt_request(
f,
source='rabbitmq-client',
destination='rabbitmq',
valid=valid
)
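# with_metrics returns a (result, succeeded) pair while recording the attempt
# via prom_client, e.g.:
#   channel, ok = with_metrics(setup_client)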
def with_metrics_or_fail(f, valid=None):
r, success = with_metrics(f, valid)
if not success:
raise Exception("Function failed")
return r, success
def setup_client():
credentials = pika.PlainCredentials(username, password)
connection = pika.BlockingConnection(
pika.ConnectionParameters(address, credentials=credentials))
channel = connection.channel()
channel.queue_declare(queue=queue)
return channel
def send(channel, message):
with_metrics_or_fail(
lambda: channel.basic_publish(
exchange='',
routing_key=queue,
body=message
),
valid=None
)
def attempt_decode(s):
if s is None:
return ""
return s.decode('utf-8')
def receive(channel, expected):
with_metrics_or_fail(
lambda: attempt_decode(
next(channel.consume(queue, inactivity_timeout=1))[2]),
valid=lambda resp: resp == expected
)
def run_test():
pub, succeeded = with_metrics(setup_client)
if not succeeded:
logging.error("Failed to setup client")
sys.exit(1)
sub, succeeded = with_metrics(setup_client)
if not succeeded:
logging.error("Failed to setup client")
sys.exit(1)
while True:
message = "a message"
send(pub, message)
receive(sub, message)
time.sleep(.5)
if __name__ == "__main__":
prom_client.report_metrics()
prom_client.report_running('rabbitmq')
time.sleep(10) # Wait for server
while True:
try:
run_test()
except Exception:
logging.warning("Rerunning test due to exception")
time.sleep(.5)
|
apache-2.0
| 3,253,971,791,620,770,300
| 23.738318
| 74
| 0.649037
| false
| 3.944858
| false
| false
| false
|
HybridF5/jacket
|
jacket/api/compute/openstack/compute/legacy_v2/contrib/floating_ip_pools.py
|
1
|
2154
|
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from jacket.api.compute.openstack import extensions
from jacket.compute import network
authorize = extensions.extension_authorizer('compute', 'floating_ip_pools')
def _translate_floating_ip_view(pool_name):
return {
'name': pool_name,
}
def _translate_floating_ip_pools_view(pools):
return {
'floating_ip_pools': [_translate_floating_ip_view(pool_name)
for pool_name in pools]
}
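# e.g. (pool name illustrative) _translate_floating_ip_pools_view(['nova'])
# returns {'floating_ip_pools': [{'name': 'nova'}]}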
class FloatingIPPoolsController(object):
"""The Floating IP Pool API controller for the OpenStack API."""
def __init__(self):
self.network_api = network.API()
super(FloatingIPPoolsController, self).__init__()
def index(self, req):
"""Return a list of pools."""
context = req.environ['compute.context']
authorize(context)
pools = self.network_api.get_floating_ip_pools(context)
return _translate_floating_ip_pools_view(pools)
class Floating_ip_pools(extensions.ExtensionDescriptor):
"""Floating IPs support."""
name = "FloatingIpPools"
alias = "os-floating-ip-pools"
namespace = ("http://docs.openstack.org/compute/ext/"
"floating_ip_pools/api/v1.1")
updated = "2012-01-04T00:00:00Z"
def get_resources(self):
resources = []
res = extensions.ResourceExtension('os-floating-ip-pools',
FloatingIPPoolsController(),
member_actions={})
resources.append(res)
return resources
|
apache-2.0
| 1,853,800,950,264,657,400
| 31.149254
| 78
| 0.65506
| false
| 4.018657
| false
| false
| false
|
pekkosk/hotbit
|
hotbit/containers/chiralwedge.py
|
1
|
5906
|
from __future__ import division
import numpy as np
from box.mix import phival
from math import sin,cos
from weakref import proxy
import warnings
class ChiralWedge:
def __init__(self,atoms,type):
'''
Class for chiral+wedge boundary conditions.
@param: atoms hotbit.Atoms instance
        @param: type Must equal "ChiralWedge"
More documentation for the methods can be found from hotbit.Atoms -class.
'''
self.type='ChiralWedge'
assert type==self.type
self.atoms = proxy(atoms)
self.par = {'height':(1,0),'twist':(0,1),'angle':(2,0),'physical':(1,1)}
self.atoms.set_pbc((True,False,True))
#self._set_table()
def get_type(self):
return self.type
def __repr__(self):
twist, angle, height, physical = self.get('twist'), self.get('angle'), self.get('height'), self.get('physical')
x='ChiralWedge: angle=%.4f (2*pi/%.2f, ' %(angle,2*np.pi/angle)
if physical:
x+='physical), '
else:
x+='not physical), '
x+='height=%.4f Ang ' %height
x+='twist angle %.4f' %twist
return x
def get_table(self):
M = int( round(2*np.pi/self.get('angle')) )
return [{'M':M},{'M':1},{'M':np.Inf}]
def get(self,key):
"""
Get container parameters
key: 'angle','height','twist','physical'
"""
x = self.atoms.get_cell()[self.par[key]]
if key in ['angle','height','twist']:
return x
else:
return bool(np.round(x))
def _set(self,**kwargs):
assert len(kwargs)==1
for key in kwargs:
cell = self.atoms.get_cell()
cell[self.par[key]] = kwargs[key]
self.atoms.set_cell(cell)
def set(self, angle=None, height=None, M=None, physical=True, twist=None, scale_atoms=False, container=None):
"""
parameters:
===========
angle angle (in radians) of the wedge (and M=None)
height Height of the primitive cell in z-direction
M set angle to 2*pi/M (and angle=None)
        physical    (only if M=None) if angle is small, it need not be
                    exactly 2*pi/integer, i.e. the situation has no physical meaning
(use for calculating stuff continuously)
twist The twist angle for z-translation
scale_atoms Scale atoms according to changes in parameters
"""
if container!=None:
assert angle==None and height==None and M==None and twist==None
self.set(angle=container.get('angle'),height=container.get('height'),\
physical=container.get('physical'), twist=container.get('twist'))
if angle!=None or M!=None:
#assert not scale_atoms
assert not (angle!=None and M!=None)
old_angle = self.get('angle')
if M != None:
assert isinstance(M,int)
self._set(angle=2*np.pi/M)
elif angle != None:
M = np.abs(int( round(2*np.pi/angle) ))
self._set(angle=angle)
# check parameters
self._set( physical=float(physical) )
if np.abs(self.get('angle'))<1E-6:
raise Warning('Too small angle (%f) may bring numerical problems.' %self.get('angle'))
if self.get('angle')>np.pi:
raise AssertionError('angle>pi')
if np.abs(M-2*np.pi/np.abs(self.get('angle')))>1E-12 and self.get('physical'):
raise AssertionError('angle not physical: angle != 2*pi/M')
if not self.get('physical') and M<20:
warnings.warn('Quite large, non-physical angle 2*pi/%.4f.' %(2*np.pi/self.get('angle')) )
if scale_atoms:
if abs(old_angle)<1E-10:
raise ValueError('Atoms cannot be scaled; old wedge angle too small.')
newr = []
for r in self.atoms.get_positions():
x,y = r[0],r[1]
rad = np.sqrt( x**2+y**2 )
newphi = phival(x,y)*(self.get('angle')/old_angle)
newr.append( [rad*np.cos(newphi),rad*np.sin(newphi),r[2]] )
self.atoms.set_positions(newr)
if height!=None:
if scale_atoms:
r = self.atoms.get_positions()
r[:,2] = r[:,2] * height/self.get('height')
self.atoms.set_positions(r)
self._set(height=height)
if twist!=None:
if scale_atoms:
raise NotImplementedError('Atom rescale with twist not implemented.')
self._set(twist=twist)
#self._set_table()
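        # Illustrative call (values are assumptions): set(M=6, height=5.0,
        # twist=0.05) gives a 2*pi/6 wedge, a 5 Ang axial period, and a
        # 0.05 rad twist per axial translation.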
def __eq__(self,other):
return self.atoms == other.atoms
def get_symmetry_operation_ranges(self):
""" Return ranges for symmetry operations. """
M = int( round(2*np.pi/np.abs(self.get('angle'))) )
i = M//2
zi = 0
if np.mod(M,2)==1:
ranges = np.array([[-i,i],[0,0],[-np.Inf,np.Inf]])
else:
ranges = np.array([[-i+1,i],[0,0],[-np.Inf,np.Inf]])
return ranges
def transform(self,r,n):
""" Rotate around z r by (n2*angle+n0*twist) and translate by n0*height. """
R = self.rotation(n)
trans = np.zeros((3))
trans = n[2]*np.array([0,0,self.get('height')])
return np.dot(R,r) + np.array(trans)
def rotation(self,n,angles=False):
""" Active rotation matrix of given angle wrt. z-axis."""
angle = n[0]*self.get('angle') + n[2]*self.get('twist')
R = np.array([[cos(angle),-sin(angle),0],[sin(angle),cos(angle),0],[0,0,1]])
if angles:
raise NotImplementedError('angles not implemented for ChiralWedge')
else:
return R
|
gpl-2.0
| -158,940,071,351,519,900
| 35.45679
| 119
| 0.532509
| false
| 3.612232
| false
| false
| false
|
asidev/aybu-core
|
aybu/core/models/user.py
|
1
|
7972
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Copyright 2010-2012 Asidev s.r.l.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import ast
from aybu.core.models.base import Base
import collections
import crypt
import re
import requests
import urllib
import json
from logging import getLogger
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Unicode
from sqlalchemy import Table
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import (relationship,
object_session,
joinedload)
from sqlalchemy.orm.exc import NoResultFound
__all__ = []
log = getLogger(__name__)
RemoteGroup = collections.namedtuple('Group', ['name'])
class RemoteUser(object):
""" This class is used in place of the User class when
remote API login management is used in place of local
database """
def __init__(self, url, username, crypted_password, cleartext_password,
remote, groups, verify_ssl):
self.url = url
self.username = username
self.crypted_password = crypted_password
self.cleartext_password = cleartext_password
self._groups = groups
self.remote = remote
self.verify_ssl = verify_ssl
@property
def groups(self):
return [RemoteGroup(name=g) for g in self._groups]
@property
def password(self):
return self.crypted_password
@password.setter
def password(self, password):
url = "{}/{}".format(self.remote, self.username)
try:
response = requests.put(
url,
auth=(self.username, self.cleartext_password),
data=dict(password=password),
verify=self.verify_ssl
)
response.raise_for_status()
content = json.loads(response.content)
except requests.exceptions.RequestException as e:
log.critical("Error connection to API: {} - {}"\
.format(type(e).__name__, e))
raise ValueError('Cannot connect to API')
except Exception:
log.exception('Invalid login: %s', response.status_code)
raise ValueError('Invalid login, upstream returned {}'\
.format(response.status_code))
else:
log.info("Updated password for %s", self.username)
self.crypted_password = content['crypted_password']
self.cleartext_password = password
@classmethod
def check(cls, request, username, password):
remote = request.registry.settings.get('remote_login_url')
log.info("Using API server at %s", remote)
try:
verify_ssl = ast.literal_eval(
request.registry.settings.get('remote_login_verify_ssl'))
except:
log.exception('Error in ast.literal_eval')
verify_ssl = False
url = "{}/{}".format(remote, username)
params = dict(
domain=request.host,
action="login"
)
try:
query = "?{}".format(urllib.urlencode(params))
query = "{}{}".format(url, query)
log.debug("GET %s", query)
response = requests.get(query, auth=(username, password),
verify=verify_ssl)
response.raise_for_status()
log.debug("Response: %s", response)
content = json.loads(response.content)
except requests.exceptions.RequestException as e:
log.critical("Error connection to API: {} - {}"\
.format(type(e).__name__, e))
raise ValueError('Cannot connect to API')
except ValueError:
log.exception("Cannot decode JSON")
raise
except Exception:
log.error('Invalid login: %s', response.status_code)
            raise ValueError('Invalid login, upstream returned {}'
                             .format(response.status_code))
else:
return RemoteUser(url=url, username=username,
crypted_password=content['crypted_password'],
cleartext_password=password,
groups=content['groups'],
remote=remote, verify_ssl=verify_ssl)
def has_permission(self, perm):
return bool(set((perm, 'admin')) & set(self._groups))
def check_password(self, password):
if not self.cleartext_password == password:
raise ValueError('Invalid username or password')
def __repr__(self):
return "<RemoteUser {}>".format(self.username)
users_groups = Table('users_groups',
Base.metadata,
Column('users_username',
Unicode(255),
ForeignKey('users.username',
onupdate="cascade",
ondelete="cascade")),
Column('groups_name',
Unicode(32),
ForeignKey('groups.name',
onupdate="cascade",
ondelete="cascade")),
mysql_engine='InnoDB')
class User(Base):
__tablename__ = 'users'
__table_args__ = ({'mysql_engine': 'InnoDB'})
hash_re = re.compile(r'(\$[1,5-6]\$|\$2a\$)')
salt = "$6$"
username = Column(Unicode(255), primary_key=True)
crypted_password = Column("password", Unicode(128), nullable=False)
groups = relationship('Group', secondary=users_groups, backref='users')
@classmethod
def get(cls, session, pkey):
# FIXME this should raise NoResultFound if query returns None!
user = session.query(cls).options(joinedload('groups')).get(pkey)
if user is None:
raise NoResultFound("No obj with key {} in class {}"\
.format(pkey, cls.__name__))
return user
@classmethod
def check(cls, session, username, password):
try:
user = cls.get(session, username)
salt = cls.hash_re.match(user.password)
length = len(salt.group()) if salt else 2
enc_password = crypt.crypt(password, user.password[0:length])
assert user.password == enc_password
except (AssertionError, NoResultFound):
log.warn('Invalid login for %s', username)
raise ValueError('invalid username or password')
else:
return user
@hybrid_property
def password(self):
return self.crypted_password
@password.setter
def password(self, value):
self.crypted_password = crypt.crypt(value, self.salt)
def check_password(self, password):
return self.__class__.check(object_session(self), self.username,
password)
def has_permission(self, perm):
return bool(set((perm, 'admin')) & set(g.name for g in self.groups))
def __repr__(self):
return "<User {}>".format(self.username)
class Group(Base):
__tablename__ = 'groups'
__table_args__ = ({'mysql_engine': 'InnoDB'})
name = Column(Unicode(32), primary_key=True)
def __repr__(self):
return "<Group {}>".format(self.name)
|
apache-2.0
| 5,875,499,489,000,414,000
| 33.214592
| 81
| 0.568239
| false
| 4.608092
| false
| false
| false
|
uw-it-aca/sqlshare-rest
|
sqlshare_rest/views/download.py
|
1
|
1410
|
from oauth2_provider.decorators import protected_resource
from django.views.decorators.csrf import csrf_exempt
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from sqlshare_rest.views import get_oauth_user, get403, get404, get400, get405
from sqlshare_rest.util.db import get_backend
from sqlshare_rest.views.sql import response_for_query
from sqlshare_rest.models import DownloadToken
from sqlshare_rest.dao.user import get_user
import json
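# Flow: init() (POST) stores the submitted SQL under a one-time token;
# run() (GET) redeems the token and streams the results as query_results.csv.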
@csrf_exempt
def run(request, token):
if request.META['REQUEST_METHOD'] != "GET":
return get405()
get_oauth_user(request)
try:
dt = DownloadToken().validate_token(token)
except DownloadToken.DoesNotExist:
return get404()
sql = dt.sql
backend = get_backend()
user = dt.original_user
return response_for_query(sql, user, download_name="query_results.csv")
@csrf_exempt
@protected_resource()
def init(request):
if request.META['REQUEST_METHOD'] != "POST":
return get405()
get_oauth_user(request)
values = json.loads(request.body.decode("utf-8"))
sql = values["sql"]
user = get_user(request)
dt = DownloadToken()
dt.store_token_for_sql(sql, user)
url = reverse("sqlshare_view_run_download", kwargs={"token": dt.token})
response = HttpResponse(json.dumps({'token': dt.token}))
response["Location"] = url
return response
|
apache-2.0
| 3,505,049,341,933,977,000
| 27.77551
| 78
| 0.70922
| false
| 3.615385
| false
| false
| false
|
yakky/djangocms-text-ckeditor
|
tests/test_field.py
|
1
|
2818
|
# -*- coding: utf-8 -*-
from django.template import Context, Template
from django.utils.safestring import SafeData
from djangocms_helper.base_test import BaseTestCase
from tests.test_app.forms import SimpleTextForm
from tests.test_app.models import SimpleText
from djangocms_text_ckeditor.fields import HTMLFormField
class HtmlFieldTestCase(BaseTestCase):
def test_html_form_field(self):
html_field = HTMLFormField()
self.assertTrue(isinstance(html_field.clean('some text'), SafeData))
class FieldTestCase(BaseTestCase):
text_normal = '<p>some non malicious text</p>'
text_with_iframe = ('<p>some non malicious text</p>'
'<iframe src="http://www.w3schools.com"></iframe>')
text_with_iframe_escaped = ('<p>some non malicious text</p><iframe '
'src="http://www.w3schools.com"></iframe>')
text_with_script = ('<p>some non malicious text</p> '
'<script>alert("Hello! I am an alert box!");</script>')
text_with_script_escaped = (u'<p>some non malicious text</p> <script>'
u'alert("Hello! I am an alert box!");</script>')
def test_model_field_text_is_safe(self):
original = 'Hello <h2>There</h2>'
template = Template('{{ obj.text }}')
        text = SimpleText.objects.create(text=original)
# Fetching a new instance should now have the string marked
# as safe.
text = SimpleText.objects.get(pk=text.pk)
rendered = template.render(Context({'obj': text}))
self.assertEqual(original, rendered)
def test_model_field_sanitized(self):
obj = SimpleText(text=self.text_normal)
obj.full_clean()
obj.save()
obj = SimpleText.objects.get(pk=obj.pk)
self.assertEqual(obj.text, self.text_normal)
obj = SimpleText(text=self.text_with_iframe)
obj.full_clean()
obj.save()
self.assertEqual(obj.text, self.text_with_iframe_escaped)
obj = SimpleText(text=self.text_with_script)
obj.full_clean()
obj.save()
self.assertEqual(obj.text, self.text_with_script_escaped)
def test_form_field_sanitized(self):
form = SimpleTextForm(data={'text': self.text_normal})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['text'], self.text_normal)
form = SimpleTextForm(data={'text': self.text_with_iframe})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['text'], self.text_with_iframe_escaped)
form = SimpleTextForm(data={'text': self.text_with_script})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['text'], self.text_with_script_escaped)
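# --- Behaviour sketch (hedged; not part of the original tests) ---
# The tests above pin down the contract: clean() returns SafeData and
# dangerous markup comes back entity-escaped, roughly:
#
# field = HTMLFormField()
# field.clean('<script>alert(1)</script>')  # -> '&lt;script&gt;...' SafeData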
|
bsd-3-clause
| -3,695,870,091,653,139,500
| 36.078947
| 86
| 0.635912
| false
| 3.580686
| true
| false
| false
|
evansde77/cirrus
|
src/cirrus/delegate.py
|
1
|
2385
|
#!/usr/bin/env python
"""
_delegate_
Main cirrus command that delegates the call to
the sub command verb enabling
git cirrus do_a_thing to be routed to the appropriate
command call for do_a_thing
"""
import os
import os.path
import pkg_resources
import sys
import signal
import subprocess
import cirrus.environment as env
def install_signal_handlers():
"""
Need to catch SIGINT to allow the command to be CTRL-C'ed
"""
    def signal_handler(signum, frame):
        sys.exit(0)
signal.signal(signal.SIGINT, signal_handler)
def run_command(cmd):
"""
run the delegated command with the CTRL-C signal handler
in place
"""
install_signal_handlers()
return subprocess.call(cmd, shell=False)
HELP = \
"""
Cirrus commands available are:
{0}
Do git cirrus <command> -h for more information on a
particular command
"""
def format_help(command_list):
subs = '\n'.join(
[c for c in command_list if c != 'cirrus']
)
return HELP.format(subs)
def main():
"""
_main_
response to the cirrus <verb> command
Extracts the available verbs that are installed as
entry points by setup.py as cirrus_commands
"""
home = env.virtualenv_home()
commands = []
for script in pkg_resources.iter_entry_points(group="cirrus_commands"):
comm = str(script).split(" = ", 1)[0]
commands.append(comm)
commands.sort()
# switch to the current GIT_PREFIX working dir
old_dir = os.getcwd()
os.chdir(os.path.abspath(os.environ.get('GIT_PREFIX', '.')))
try:
args = sys.argv[1:]
if len(args) == 0 or args[0] == '-h':
# missing command or help
print(format_help(commands))
exit_code = 0
else:
command_path = "{0}/bin/{1}".format(home, args[0])
if not os.path.exists(command_path):
msg = "Unknown command: {}".format(args[0])
print(msg)
print(format_help(commands))
exit_code = 127
else:
exit_code = run_command([command_path, ] + args[1:])
except Exception as ex:
msg = "Exception Details:\n{}".format(ex)
print(msg)
raise
finally:
# always return to previous dir
os.chdir(old_dir)
return exit_code
if __name__ == "__main__":
sys.exit(main())
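# --- Registration sketch (assumption; not part of the original module) ---
# main() discovers verbs through the "cirrus_commands" entry point group, so
# a plugin would register its command in setup.py roughly like this (the
# command and module names are illustrative):
#
# setup(
#     ...,
#     entry_points={
#         "cirrus_commands": [
#             "do_a_thing = cirrus.do_a_thing:main",
#         ],
#     },
# )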
|
apache-2.0
| -2,823,482,492,811,789,000
| 21.932692
| 75
| 0.600419
| false
| 3.61912
| false
| false
| false
|
moreati/revelation
|
epiphany/test/test_execute_bitwise.py
|
1
|
7492
|
from pydgin.utils import trim_32
from epiphany.instruction import Instruction
from epiphany.isa import decode
from epiphany.machine import RESET_ADDR
from epiphany.test.machine import StateChecker, new_state
import opcode_factory
import pytest
@pytest.mark.parametrize('rn,rm,is16bit', [(-1, 28, True),
(-1, 28, False),
( 1, 28, True),
( 1, 28, False)])
def test_execute_logical_shift_right(rn, rm, is16bit):
rd = 2
state = new_state(rf0=trim_32(rn), rf1=trim_32(rm))
instr = (opcode_factory.lsr16(rd=rd, rn=0, rm=1) if is16bit
else opcode_factory.lsr32(rd=rd, rn=0, rm=1))
name, executefn = decode(instr)
executefn(state, Instruction(instr, None))
    expected_state = StateChecker(AZ=(False if rn < 0 else True), # 1 >> 28 == 0
AV=0, AC=0,
pc=((2 if is16bit else 4) + RESET_ADDR),
rf2=(0b1111 if rn < 0 else 0))
expected_state.check(state)
@pytest.mark.parametrize('rn,imm,is16bit', [(-1, 28, True),
(-1, 28, False),
( 1, 28, True),
( 1, 28, False)])
def test_execute_logical_shift_right_imm(rn, imm, is16bit):
rd = 2
state = new_state(rf0=trim_32(rn))
instr = (opcode_factory.lsr16_immediate(rd=rd, rn=0, imm=imm) if is16bit
else opcode_factory.lsr32_immediate(rd=rd, rn=0, imm=imm))
name, executefn = decode(instr)
executefn(state, Instruction(instr, None))
    expected_state = StateChecker(AZ=(False if rn < 0 else True), # 1 >> 28 == 0
AV=0, AC=0,
pc=((2 if is16bit else 4) + RESET_ADDR),
rf2=(0b1111 if rn < 0 else 0))
expected_state.check(state)
@pytest.mark.parametrize('rn,rm,is16bit', [(-1, 5, True),
(-1, 5, False),
( 1, 5, True),
( 1, 5, False)])
def test_execute_arith_shift_right(rn, rm, is16bit):
rd = 2
state = new_state(rf0=trim_32(rn), rf1=trim_32(rm))
instr = (opcode_factory.asr16(rd=rd, rn=0, rm=1) if is16bit
else opcode_factory.asr32(rd=rd, rn=0, rm=1))
name, executefn = decode(instr)
executefn(state, Instruction(instr, None))
expected_state = StateChecker(AZ=(False if rn < 0 else True), # 1 >> 5 == 0
AV=0, AC=0,
pc=((2 if is16bit else 4) + RESET_ADDR),
rf2=(trim_32(-1) if rn < 0 else 0))
expected_state.check(state)
@pytest.mark.parametrize('rn,imm,is16bit', [(-1, 5, True),
(-1, 5, False),
( 1, 5, True),
( 1, 5, False)])
def test_execute_arith_shift_right_imm(rn, imm, is16bit):
rd = 2
state = new_state(rf0=trim_32(rn))
instr = (opcode_factory.asr16_immediate(rd=rd, rn=0, imm=imm) if is16bit
else opcode_factory.asr32_immediate(rd=rd, rn=0, imm=imm))
name, executefn = decode(instr)
executefn(state, Instruction(instr, None))
expected_state = StateChecker(AZ=(False if rn < 0 else True), # 1 >> 5 == 0
AV=0, AC=0,
pc=((2 if is16bit else 4) + RESET_ADDR),
rf2=(trim_32(-1) if rn < 0 else 0))
expected_state.check(state)
@pytest.mark.parametrize('factory,is16bit',
[(opcode_factory.lsl16, True),
(opcode_factory.lsl32, False)
])
def test_execute_shift_left(factory, is16bit):
state = new_state(rf0=5, rf1=7)
instr = factory(rd=2, rn=1, rm=0)
name, executefn = decode(instr)
executefn(state, Instruction(instr, None))
expected_state = StateChecker(AZ=0, AN=0, AC=0, AV=0,
pc=((2 if is16bit else 4) + RESET_ADDR),
rf2=7 << 5)
expected_state.check(state)
@pytest.mark.parametrize('factory,is16bit',
[(opcode_factory.lsl16_immediate, True),
(opcode_factory.lsl32_immediate, False)
])
def test_execute_shift_left_immediate(factory, is16bit):
state = new_state(rf1=7)
instr = factory(rd=2, rn=1, imm=5)
name, executefn = decode(instr)
executefn(state, Instruction(instr, None))
expected_state = StateChecker(AZ=0, AN=0, AC=0, AV=0,
pc=((2 if is16bit else 4) + RESET_ADDR),
rf2=7 << 5)
expected_state.check(state)
@pytest.mark.parametrize('bits,expected,is16bit',
[(0b10101010101010101010101010101010,
0b01010101010101010101010101010101,
True),
(0b01010101010101010101010101010101,
0b10101010101010101010101010101010,
True),
(0b10101010101010101010101010101010,
0b01010101010101010101010101010101,
False),
(0b01010101010101010101010101010101,
0b10101010101010101010101010101010,
False),
])
def test_execute_bitr(bits, expected, is16bit):
state = new_state(rf0=0, rf1=bits)
instr = (opcode_factory.bitr16_immediate(rd=2, rn=1, imm=0) if is16bit
else opcode_factory.bitr32_immediate(rd=2, rn=1, imm=0))
name, executefn = decode(instr)
executefn(state, Instruction(instr, None))
expected_state = StateChecker(AZ=0, AC=0, AV=0,
pc=((2 if is16bit else 4) + RESET_ADDR),
rf2=expected)
expected_state.check(state)
@pytest.mark.parametrize('factory,expected', [(opcode_factory.and32, 5 & 7),
(opcode_factory.orr32, 5 | 7),
(opcode_factory.eor32, 5 ^ 7),
])
def test_execute_bitwise32(factory, expected):
state = new_state(rf0=5, rf1=7)
instr = factory(rd=2, rn=1, rm=0)
name, executefn = decode(instr)
executefn(state, Instruction(instr, None))
expected_state = StateChecker(AZ=0, AV=0, AC=0, pc=(4 + RESET_ADDR),
rf2=expected)
expected_state.check(state)
@pytest.mark.parametrize('factory,expected', [(opcode_factory.and16, 5 & 7),
(opcode_factory.orr16, 5 | 7),
(opcode_factory.eor16, 5 ^ 7),
])
def test_execute_bitwise16(factory, expected):
state = new_state(rf0=5, rf1=7)
instr = factory(rd=2, rn=1, rm=0)
name, executefn = decode(instr)
executefn(state, Instruction(instr, None))
expected_state = StateChecker(AZ=0, AV=0, AC=0, pc=(2 + RESET_ADDR),
rf2=expected)
expected_state.check(state)
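# --- Why the shift expectations above differ (runnable aside) ---
# With rn = -1 (0xFFFFFFFF after trim_32), a *logical* shift right by 28
# leaves 0b1111, while an *arithmetic* shift drags the sign bit along, so the
# 32-bit result stays all ones. _trim32 re-implements trim_32 here as an
# assumption, purely to keep the check self-contained:
def _trim32(x):
    return x & 0xFFFFFFFF
assert _trim32(-1) >> 28 == 0b1111        # logical: zero-fill from the left
assert _trim32(-1 >> 28) == _trim32(-1)   # arithmetic: Python's >> keeps the sign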
|
bsd-3-clause
| -798,977,002,079,429,200
| 44.406061
| 79
| 0.49693
| false
| 3.701581
| true
| false
| false
|
nbari/my-sandbox
|
python/email/server.py
|
1
|
1444
|
import smtplib
import smtpd
import asyncore
import email.utils
from email.mime.text import MIMEText
import threading
class SMTPReceiver(smtpd.SMTPServer):
def process_message(self, peer, mailfrom, rcpttos, data):
print 'Receiving message from:', peer
print 'Message addressed from:', mailfrom
print 'Message addressed to :', rcpttos
print 'Message length :', len(data)
print data
def send_response():
msg = MIMEText('Hello world!')
msg['To'] = email.utils.formataddr(('Recipient', mailfrom))
msg['From'] = email.utils.formataddr(
('Author', 'jsternberg@example.org'))
msg['Subject'] = ''
print 'Connecting to mail server'
server = smtplib.SMTP()
server.set_debuglevel(1)
server.connect()
print 'Attempting to send message'
try:
server.sendmail(
'jsternberg@example.org',
[mailfrom],
msg.as_string())
except Exception as ex:
print 'Could not send mail', ex
finally:
server.quit()
print 'Finished sending message'
threading.Thread(target=send_response).start()
return
def main():
server = SMTPReceiver(('', 2025), None)
asyncore.loop()
if __name__ == '__main__':
main()
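# --- Usage sketch (assumption; not part of the original) ---
# With the server running (python server.py), exercise it from another shell
# by sending a message to localhost:2025:
#
# import smtplib
# from email.mime.text import MIMEText
# msg = MIMEText('ping')
# msg['From'] = 'tester@example.org'
# msg['To'] = 'anyone@example.org'
# s = smtplib.SMTP('localhost', 2025)
# s.sendmail('tester@example.org', ['anyone@example.org'], msg.as_string())
# s.quit()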
|
bsd-3-clause
| -5,075,651,557,787,795,000
| 28.469388
| 71
| 0.549169
| false
| 4.415902
| false
| false
| false
|
laurentb/weboob
|
modules/onlinenet/module.py
|
1
|
2721
|
# -*- coding: utf-8 -*-
# Copyright(C) 2016 Edouard Lambert
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from weboob.capabilities.bill import DocumentTypes, CapDocument, Subscription, Document, SubscriptionNotFound, DocumentNotFound
from weboob.capabilities.base import find_object, NotAvailable
from weboob.tools.backend import Module, BackendConfig
from weboob.tools.value import ValueBackendPassword, Value
from .browser import OnlinenetBrowser
__all__ = ['OnlinenetModule']
class OnlinenetModule(Module, CapDocument):
NAME = 'onlinenet'
DESCRIPTION = 'Online.net'
MAINTAINER = 'Edouard Lambert'
EMAIL = 'elambert@budget-insight.com'
LICENSE = 'LGPLv3+'
VERSION = '2.1'
CONFIG = BackendConfig(
Value('login', label='Identifiant'),
ValueBackendPassword('password', label='Mot de passe'),
)
BROWSER = OnlinenetBrowser
accepted_document_types = (DocumentTypes.BILL, DocumentTypes.OTHER,)
def create_default_browser(self):
return self.create_browser(self.config['login'].get(), self.config['password'].get())
def iter_subscription(self):
return self.browser.get_subscription_list()
def get_subscription(self, _id):
return find_object(self.iter_subscription(), id=_id, error=SubscriptionNotFound)
def get_document(self, _id):
subid = _id.rsplit('_', 1)[0]
subscription = self.get_subscription(subid)
return find_object(self.iter_documents(subscription), id=_id, error=DocumentNotFound)
def iter_documents(self, subscription):
if not isinstance(subscription, Subscription):
subscription = self.get_subscription(subscription)
return self.browser.iter_documents(subscription)
def download_document(self, document):
if not isinstance(document, Document):
document = self.get_document(document)
if document._url is NotAvailable:
return
return self.browser.open(document._url).content
|
lgpl-3.0
| -5,601,386,455,273,830,000
| 35.77027
| 127
| 0.714076
| false
| 4.091729
| false
| false
| false
|
kanairen/RegularIcosahedronDict
|
src/map/factory/base_shape_map_factory.py
|
1
|
4162
|
#!/usr/bin/env python
# coding: utf-8
import numpy as np
from src.obj.obj3d import Obj3d
from src.obj.grid.base_grid import BaseGrid
class BaseShapeMapFactory(object):
DIST_UNDEFINED = -1
def __init__(self, model_id, obj3d, grid, n_div, cls, grid_scale):
"""
:type model_id: int or long:
:param model_id: 対象3DモデルID
:type obj3d: Obj3d
:param obj3d: 形状マップ生成対象の3Dオブジェクト
:type grid: TriangleGrid
:param grid: 形状マップを生成するための正三角形からなるグリッド
:type n_div: int or long
:param n_div: グリッド分割数
:type cls: int or long
:param cls: クラスラベル
:type grid_scale: float
:param grid_scale: グリッドのスケール率
"""
assert isinstance(model_id, (int, long))
assert isinstance(obj3d, Obj3d)
assert isinstance(grid, BaseGrid)
assert isinstance(cls, (int, long))
assert isinstance(grid_scale, float)
self.model_id = model_id
        # 3D model: centered at the origin of the coordinate system and normalized
self.obj3d = obj3d.center().normal()
        # Icosahedral grid: expanded so that it completely contains the 3D model
self.grid = grid.center().scale(grid_scale).divide_face(n_div)
        # If the farthest vertex of the 3D model is farther from the center
        # than the nearest grid vertex, the model sticks out of the grid and
        # is not supported (as a rule, grid_scale should be set to 1 or more).
if np.linalg.norm(self.grid.vertices, axis=1).min() < np.linalg.norm(
self.obj3d.vertices, axis=1).max():
raise NotImplementedError()
        # class label
self.cls = cls
@staticmethod
def tomas_moller(origin, end, v0, v1, v2):
"""
Tomas-Mollerのアルゴリズム
線分と三角形の交点を返す
交差しない場合、Noneを返す
行列式を、外積/内積に置き換えている
:type origin: np.ndarray
:param origin: 線分の始点
:type end: np.ndarray
:param end: 線分の終点
:type v0 : np.ndarray
:param v0: 三角形の頂点その1
:type v1: np.ndarray
:param v1: 三角形の頂点その2
:type v2: np.ndarray
:param v2: 三角形の頂点その3
:rtype: np.ndarray
:return: 交点ベクトル
"""
edge1 = v1 - v0
edge2 = v2 - v0
ray = end - origin
P = np.cross(ray, edge2)
        # denominator
denominator = np.dot(P, edge1)
if denominator > np.finfo(float).eps:
T = origin - v0
u = np.dot(P, T)
if 0 <= u <= denominator:
Q = np.cross(T, edge1)
v = np.dot(Q, ray)
if 0 <= v <= denominator and (u + v) <= denominator:
t = np.dot(Q, edge2) / denominator
return origin + ray * t
return None
def create(self):
raise NotImplementedError
def _distances(self):
"""
グリッド頂点に対応した距離情報のマップを取得する
"""
grid_center = np.zeros(shape=(3,))
        # Distance map; indices correspond to the grid's vertices.
        # DIST_UNDEFINED is stored where no distance is defined (e.g. cavities).
distance_map = np.full(shape=(len(self.grid.vertices)),
fill_value=BaseShapeMapFactory.DIST_UNDEFINED,
dtype=np.float64)
for i, g_vertex in enumerate(self.grid.vertices):
for f0, f1, f2 in self.obj3d.vertices[self.obj3d.face_vertices]:
p_cross = self.tomas_moller(grid_center, g_vertex, f0, f1, f2)
if p_cross is not None:
distance_map[i] = np.linalg.norm(p_cross - grid_center)
break
return distance_map
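# --- Usage sketch of tomas_moller (illustrative; not part of the original) ---
# The method is static, so it can be exercised directly. Note the denominator
# test culls one winding: the segment must approach from the side where
# dot(cross(ray, edge2), edge1) is positive.
#
# import numpy as np
# origin = np.array([0.25, 0.25, 1.0])
# end = np.array([0.25, 0.25, -1.0])
# v0 = np.array([0.0, 0.0, 0.0])
# v1 = np.array([1.0, 0.0, 0.0])
# v2 = np.array([0.0, 1.0, 0.0])
# BaseShapeMapFactory.tomas_moller(origin, end, v0, v1, v2)
# # -> array([0.25, 0.25, 0.0]); swapping origin and end returns None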
|
mit
| -6,878,317,469,387,684,000
| 24.382353
| 78
| 0.548378
| false
| 2.353102
| false
| false
| false
|
ikben/troposphere
|
examples/WAF_Common_Attacks_Sample.py
|
1
|
5554
|
# Converted from AWS WAF Sample located at:
# https://s3.amazonaws.com/cloudformation-examples/community/common-attacks.json
from troposphere import (
Template,
Parameter,
Join,
Ref
)
from troposphere.waf import (
Rule,
SqlInjectionMatchSet,
WebACL,
SizeConstraintSet,
IPSet,
XssMatchSet,
Predicates,
SqlInjectionMatchTuples,
FieldToMatch,
Action,
Rules,
SizeConstraint,
XssMatchTuple
)
t = Template()
t.add_version("2010-09-09")
t.set_description(
"Creates an AWS WAF configuration that protects against common attacks"
)
WebACLName = t.add_parameter(Parameter(
"WebACLName",
Default="CommonAttackProtection",
Type="String",
Description="Enter the name you want to use for the WebACL. "
"This value is also added as a prefix for the names of the rules, "
"conditions, and CloudWatch metrics created by this template.",
))
SqliMatchSet = t.add_resource(SqlInjectionMatchSet(
"SqliMatchSet",
Name=Join("", [Ref(WebACLName), "SqliMatch"]),
SqlInjectionMatchTuples=[
SqlInjectionMatchTuples(
FieldToMatch=FieldToMatch(
Type="QUERY_STRING"
),
TextTransformation="URL_DECODE"
),
SqlInjectionMatchTuples(
FieldToMatch=FieldToMatch(
Type="QUERY_STRING"
),
TextTransformation="HTML_ENTITY_DECODE"
),
SqlInjectionMatchTuples(
FieldToMatch=FieldToMatch(
Type="BODY"
),
TextTransformation="URL_DECODE"
),
SqlInjectionMatchTuples(
FieldToMatch=FieldToMatch(
Type="BODY"
),
TextTransformation="HTML_ENTITY_DECODE"
),
SqlInjectionMatchTuples(
FieldToMatch=FieldToMatch(
Type="URI"
),
TextTransformation="URL_DECODE"
)
]
))
SqliRule = t.add_resource(Rule(
"SqliRule",
Predicates=[
Predicates(
DataId=Ref(SqliMatchSet),
Type="SqlInjectionMatch",
Negated=False
)
],
Name=Join("", [Ref(WebACLName), "SqliRule"]),
MetricName=Join("", [Ref(WebACLName), "SqliRule"]),
))
XssMatchSet = t.add_resource(XssMatchSet(
"XssMatchSet",
Name=Join("", [Ref(WebACLName), "XssMatch"]),
XssMatchTuples=[
XssMatchTuple(
FieldToMatch=FieldToMatch(
Type="QUERY_STRING",
),
TextTransformation="URL_DECODE"
),
XssMatchTuple(
FieldToMatch=FieldToMatch(
Type="QUERY_STRING",
),
TextTransformation="HTML_ENTITY_DECODE"
),
XssMatchTuple(
FieldToMatch=FieldToMatch(
Type="BODY",
),
TextTransformation="URL_DECODE"
),
XssMatchTuple(
FieldToMatch=FieldToMatch(
Type="BODY",
),
TextTransformation="HTML_ENTITY_DECODE"
),
XssMatchTuple(
FieldToMatch=FieldToMatch(
Type="URI",
),
TextTransformation="URL_DECODE"
)
]
))
XssRule = t.add_resource(Rule(
"XssRule",
Name=Join("", [Ref(WebACLName), "XssRule"]),
Predicates=[
Predicates(
DataId=Ref(XssMatchSet),
Type="XssMatch",
Negated=False
)
],
MetricName=Join("", [Ref(WebACLName), "XssRule"]),
))
WAFManualIPBlockSet = t.add_resource(IPSet(
"WAFManualIPBlockSet",
Name="Manual IP Block Set",
))
ManualIPBlockRule = t.add_resource(Rule(
"ManualIPBlockRule",
Name=Join("", [Ref(WebACLName), "ManualIPBlockRule"]),
MetricName=Join("", [Ref(WebACLName), "ManualIPBlockRule"]),
Predicates=[
Predicates(
DataId=Ref(WAFManualIPBlockSet),
Type="IPMatch",
Negated=False
)
]
))
SizeMatchSet = t.add_resource(SizeConstraintSet(
"SizeMatchSet",
Name=Join("", [Ref(WebACLName), "LargeBodyMatch"]),
SizeConstraints=[
SizeConstraint(
ComparisonOperator="GT",
TextTransformation="NONE",
FieldToMatch=FieldToMatch(
Type="BODY"
),
Size="8192"
)
]
))
SizeMatchRule = t.add_resource(Rule(
"SizeMatchRule",
Name=Join("", [Ref(WebACLName), "LargeBodyMatchRule"]),
MetricName=Join("", [Ref(WebACLName), "DetectLargeBody"]),
Predicates=[
Predicates(
DataId=Ref(SizeMatchSet),
Type="SizeConstraint",
Negated=False
)
]
))
MyWebACL = t.add_resource(WebACL(
"MyWebACL",
Name=Ref(WebACLName),
DefaultAction=Action(
Type="ALLOW"
),
Rules=[
Rules(
Action=Action(
Type="BLOCK"
),
Priority=1,
RuleId=Ref(ManualIPBlockRule)
),
Rules(
Action=Action(
Type="COUNT"
),
Priority=2,
RuleId=Ref(SizeMatchRule)
),
Rules(
Action=Action(
Type="BLOCK"
),
Priority=3,
RuleId=Ref(SqliRule)
),
Rules(
Action=Action(
Type="BLOCK"
),
Priority=4,
RuleId=Ref(XssRule)
)
],
MetricName=Ref(WebACLName),
))
print(t.to_json())
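# --- Deployment sketch (assumption; not part of the original example) ---
# The template prints to stdout, so a typical round trip is:
#
#   python WAF_Common_Attacks_Sample.py > common-attacks.json
#   aws cloudformation create-stack --stack-name waf-common-attacks \
#       --template-body file://common-attacks.json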
|
bsd-2-clause
| -8,663,083,156,962,772,000
| 23.359649
| 80
| 0.541232
| false
| 3.878492
| false
| false
| false
|
jonlatorre/VideoCargador
|
video/models.py
|
1
|
1791
|
# encoding: utf-8
from django.db import models
import os
from mencoder import *
class Video(models.Model):
"""This is a small demo using just two fields. The slug field is really not
necessary, but makes the code simpler. ImageField depends on PIL or
pillow (where Pillow is easily installable in a virtualenv. If you have
problems installing pillow, use a more generic FileField instead.
"""
file = models.FileField(upload_to="uploaded_videos")
slug = models.SlugField(max_length=50, blank=True)
mp4_encoded = models.BooleanField(default=False)
mp4_file = models.FileField(upload_to="converted_videos", blank=True)
mp4_url = models.BooleanField(default=False)
flv_encoded = models.BooleanField(default=False)
flv_file = models.FileField(upload_to="converted_videos", blank=True)
flv_url = models.BooleanField(default=False)
def __unicode__(self):
return self.file.name
@models.permalink
def get_absolute_url(self):
return ('video-new', )
def save(self, *args, **kwargs):
self.slug = self.file.name
super(Video, self).save(*args, **kwargs)
def delete(self, *args, **kwargs):
"""delete -- Remove to leave file."""
self.file.delete(False)
super(Video, self).delete(*args, **kwargs)
def encode_mp4(self):
print "Vamos a convertir a mp4"
destino = self.mp4_file.storage.base_location
destino = os.path.join(destino,"converted_videos")
ret,salida = call_mencoder_mp4(self.file.path,destino)
if ret == 0:
print "Codificacion OK"
self.mp4_file.name = "converted_videos/"+salida
self.mp4_encoded = True
self.save()
def upload_mp4(self):
print "Subimos el MP4"
|
mit
| 5,213,071,513,827,455,000
| 35.55102
| 79
| 0.654941
| false
| 3.589178
| false
| false
| false
|
mrshu/scikit-learn
|
examples/plot_permutation_test_for_classification.py
|
1
|
2236
|
"""
=================================================================
Test with permutations the significance of a classification score
=================================================================
In order to test if a classification score is significant, one technique
is to repeat the classification procedure after randomizing (permuting)
the labels. The p-value is then given by the percentage of runs for
which the score obtained is greater than the classification score
obtained in the first place.
"""
# Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# License: BSD
print __doc__
import numpy as np
import pylab as pl
from sklearn.svm import SVC
from sklearn.cross_validation import StratifiedKFold, permutation_test_score
from sklearn import datasets
from sklearn.metrics import zero_one_score
##############################################################################
# Loading a dataset
iris = datasets.load_iris()
X = iris.data
y = iris.target
n_classes = np.unique(y).size
# Some noisy data not correlated
random = np.random.RandomState(seed=0)
E = random.normal(size=(len(X), 2200))
# Add noisy data to the informative features to make the task harder
X = np.c_[X, E]
svm = SVC(kernel='linear')
cv = StratifiedKFold(y, 2)
score, permutation_scores, pvalue = permutation_test_score(
svm, X, y, zero_one_score, cv=cv, n_permutations=100, n_jobs=1)
print "Classification score %s (pvalue : %s)" % (score, pvalue)
###############################################################################
# View histogram of permutation scores
pl.hist(permutation_scores, 20, label='Permutation scores')
ylim = pl.ylim()
# BUG: vlines(..., linestyle='--') fails on older versions of matplotlib
#pl.vlines(score, ylim[0], ylim[1], linestyle='--',
# color='g', linewidth=3, label='Classification Score'
# ' (pvalue %s)' % pvalue)
#pl.vlines(1.0 / n_classes, ylim[0], ylim[1], linestyle='--',
# color='k', linewidth=3, label='Luck')
pl.plot(2 * [score], ylim, '--g', linewidth=3,
label='Classification Score'
' (pvalue %s)' % pvalue)
pl.plot(2 * [1. / n_classes], ylim, '--k', linewidth=3, label='Luck')
pl.ylim(ylim)
pl.legend()
pl.xlabel('Score')
pl.show()
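# Note (hedged): permutation_test_score computes the p-value as
# (C + 1) / (n_permutations + 1), where C counts permutations scoring at
# least as well as the true score, so 100 permutations bound the smallest
# reportable p-value at about 0.0099.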
|
bsd-3-clause
| 4,989,806,373,800,379,000
| 31.882353
| 79
| 0.61449
| false
| 3.594855
| false
| false
| false
|
codebikeclimb/NASARobotComp
|
Robot2017_Master/Robot2016/motorTest.py
|
1
|
2963
|
#!/usr/bin/python
from Adafruit_MotorHAT import Adafruit_MotorHAT, Adafruit_DCMotor
import serial
import time
import atexit
#initialize i2c communication with motor shield
roboMotor = Adafruit_MotorHAT(addr=0x60)
#initialize serial communications with XBee RF receiver
xBee = serial.Serial('/dev/ttyACM1',57600)
compass = serial.Serial('/dev/ttyACM0', 9600)
def turnOffMotors():
roboMotor.getMotor(3).run(Adafruit_MotorHAT.RELEASE)
roboMotor.getMotor(4).run(Adafruit_MotorHAT.RELEASE)
atexit.register(turnOffMotors)
#create motor objects
leftFrontRear = roboMotor.getMotor(3)
rightFrontRear = roboMotor.getMotor(4)
#set speed to start ---- 0(off) - 255(Max)
#beacon navigation
def beaconNavigation():
bHeadings = []
botHeadings = []
for x in range(0,2):
botHeading = compass.readline()
botHeading = float(botHeading)
botHeadings.append(botHeading)
print(botHeading)
beaconHeading = xBee.readline()
beaconHeading = float(beaconHeading)
bHeadings.append(beaconHeading)
print(beaconHeading)
botTotal = sum(botHeadings)
botLength = len(botHeadings)
avgBotHeading = botTotal / botLength
print "avg bot heading: ", avgBotHeading
total = sum(bHeadings)
l = len(bHeadings)
avgHeading = total / l
print "avg b heading: ", avgHeading
#calculate opposite heading
x = avgHeading + 180
oppositeHeading = x % 360
oppositeHeading = float(oppositeHeading)
print "opposite beacon heading: ", oppositeHeading
# while(botHeading <= oppositeHeading or botHeading >= oppositeHeading):
while(botHeading < oppositeHeading or botHeading > oppositeHeading + 1.0):
botHeading = compass.readline()
botHeading = float(botHeading)
print botHeading
# rightRotate()
forward()
# toTheBeacon()
#for x in range(0,20):
# heading = xBee.readline()
# botBearing = compass.readline()
# print(heading)
# print(botBearing)
#drive forwards (note: the loop below runs forever, so nothing after a forward() call executes)
def forward():
# beaconNavigation()
while(True):
leftFrontRear.setSpeed(80)
rightFrontRear.setSpeed(80)
leftFrontRear.run(Adafruit_MotorHAT.FORWARD)
rightFrontRear.run(Adafruit_MotorHAT.FORWARD)
#drive backwards
def reverse():
rightFrontRear.setSpeed(150)
leftFrontRear.setSpeed(150)
rightFrontRear.run(Adafruit_MotorHAT.BACKWARD)
leftFrontRear.run(Adafruit_MotorHAT.BACKWARD)
#rotate left, rotate right
def leftRotate():
rightFrontRear.setSpeed(70)
rightFrontRear.run(Adafruit_MotorHAT.FORWARD)
def rightRotate():
leftFrontRear.setSpeed(90)
rightFrontRear.setSpeed(90)
leftFrontRear.run(Adafruit_MotorHAT.FORWARD)
rightFrontRear.run(Adafruit_MotorHAT.BACKWARD)
#turn left, turn right
def leftTurn():
rightFrontRear.setSpeed(200)
leftFrontRear.setSpeed(125)
rightFrontRear.run(Adafruit_MotorHAT.FORWARD)
leftFrontRear.run(Adafruit_MotorHAT.FORWARD)
def rightTurn():
rightFrontRear.setSpeed(150)
leftFrontRear.setSpeed(200)
leftFrontRear.run(Adafruit_MotorHAT.FORWARD)
rightFrontRear.run(Adafruit_MotorHAT.FORWARD)
beaconNavigation()
forward()
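# --- Note on the opposite-heading arithmetic in beaconNavigation() ---
# (heading + 180) % 360 wraps correctly past north, e.g.
# (350 + 180) % 360 == 170 and (90 + 180) % 360 == 270.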
|
gpl-3.0
| -6,703,005,234,935,376,000
| 21.792308
| 75
| 0.76949
| false
| 2.746061
| false
| false
| false
|
rzinkstok/skymap
|
skymap/labeling/runner.py
|
1
|
2495
|
import time
import random
from PIL import Image, ImageDraw
from skymap.labeling.common import Point, BoundingBox, evaluate, POSITION_WEIGHT
from skymap.labeling.greedy import GreedyLabeler, AdvancedGreedyLabeler
from skymap.labeling.grasp import GraspLabeler
from skymap.labeling.genetic import GeneticLabeler, CachedGeneticLabeler
from deap import creator, base
def draw(points, width, height):
SCALE = 4
im = Image.new("RGB", (SCALE * width, SCALE * height), (255, 255, 255))
d = ImageDraw.Draw(im)
for p in points:
x = p.x * SCALE
y = (height - p.y) * SCALE
r = p.radius * SCALE
if p.label is None:
color = (200, 200, 200)
else:
color = "black"
d.ellipse([x - r, y - r, x + r, y + r], fill=color)
if p.label:
x1 = p.label.minx * SCALE
x2 = p.label.maxx * SCALE
y1 = (height - p.label.miny) * SCALE
y2 = (height - p.label.maxy) * SCALE
if p.label.penalty > POSITION_WEIGHT * p.label.position:
                color = (255, 0, 0)  # red; 255 is the max 8-bit channel value
else:
color = (200, 200, 200)
d.rectangle((x1, y1, x2, y2), outline=color)
im.show()
if __name__ == "__main__":
print("Starting")
random.seed(1)
creator.create("FitnessMax", base.Fitness, weights=(1.0,))
creator.create("Individual", list, fitness=creator.FitnessMax)
npoints = 1000
nlabels = 200
mapwidth = 500
mapheight = 500
bounding_box = BoundingBox(0, 0, mapwidth, mapheight)
points = []
for i in range(npoints):
x = mapwidth * random.random()
y = mapheight * random.random()
if random.random() < float(nlabels) / npoints:
text = f"Label for point {i}"
p = Point(x, y, 1, text, 0)
else:
p = Point(x, y, 1)
points.append(p)
method = 5
if method == 1:
g = GreedyLabeler(points, bounding_box)
elif method == 2:
g = AdvancedGreedyLabeler(points, bounding_box)
elif method == 3:
g = GraspLabeler(points, bounding_box)
elif method == 4:
g = GeneticLabeler(points, bounding_box)
elif method == 5:
g = CachedGeneticLabeler(creator, points, bounding_box)
    # time.clock() was deprecated and removed in Python 3.8; perf_counter() replaces it
    t1 = time.perf_counter()
    g.run()
    t2 = time.perf_counter()
print(f"Run time: {t2 - t1}")
penalty = evaluate(g.points, g.bounding_box)
print(f"Penalty: {penalty}")
# draw(points, mapwidth, mapheight)
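# --- Note on the DEAP creator usage above (hedged) ---
# creator.create() registers FitnessMax/Individual globally inside the
# deap.creator module, which is why they are built once here and the creator
# module itself is handed to CachedGeneticLabeler; calling create() again
# with the same names makes recent DEAP versions emit a RuntimeWarning.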
|
gpl-3.0
| -6,816,814,384,139,079,000
| 27.678161
| 80
| 0.578357
| false
| 3.198718
| false
| false
| false
|
google/mannequinchallenge
|
loaders/aligned_data_loader.py
|
1
|
1933
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch.utils.data
from loaders import image_folder
class DAVISDataLoader():
def __init__(self, list_path, _batch_size):
dataset = image_folder.DAVISImageFolder(list_path=list_path)
self.data_loader = torch.utils.data.DataLoader(dataset,
batch_size=_batch_size,
shuffle=False,
num_workers=int(1))
self.dataset = dataset
def load_data(self):
return self.data_loader
def name(self):
return 'TestDataLoader'
def __len__(self):
return len(self.dataset)
class TUMDataLoader():
def __init__(self, opt, list_path, is_train, _batch_size, num_threads):
dataset = image_folder.TUMImageFolder(opt=opt, list_path=list_path)
self.data_loader = torch.utils.data.DataLoader(dataset,
batch_size=_batch_size,
shuffle=False,
num_workers=int(num_threads))
self.dataset = dataset
def load_data(self):
return self.data_loader
def name(self):
return 'TUMDataLoader'
def __len__(self):
return len(self.dataset)
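# --- Usage sketch (assumption; not part of the original) ---
# loader = DAVISDataLoader('lists/davis_test.txt', _batch_size=4)  # path is illustrative
# for batch in loader.load_data():
#     ...  # batches come from image_folder.DAVISImageFolder, in file order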
|
apache-2.0
| -2,236,632,270,222,474,800
| 34.796296
| 84
| 0.578893
| false
| 4.423341
| false
| false
| false
|
xemul/p.haul
|
phaul/connection.py
|
1
|
1171
|
#
# p.haul connection module contain logic needed to establish connection
# between p.haul and p.haul-service.
#
import logging
import socket
import util
class connection(object):
"""p.haul connection
Class encapsulate connections reqired for p.haul work, including rpc socket
(socket for RPC calls), memory socket (socket for c/r images migration) and
module specific definition of fs channel needed for disk migration.
"""
def __init__(self, rpc_sk, mem_sk, fdfs):
self.rpc_sk = rpc_sk
self.mem_sk = mem_sk
self.fdfs = fdfs
def close(self):
self.rpc_sk.close()
self.mem_sk.close()
def establish(fdrpc, fdmem, fdfs):
"""Construct required socket objects from file descriptors
	Expects that each file descriptor represents a socket opened in blocking mode
with domain AF_INET and type SOCK_STREAM.
"""
logging.info(
"Use existing connections, fdrpc=%d fdmem=%d fdfs=%s", fdrpc,
fdmem, fdfs)
# Create rpc socket
rpc_sk = socket.fromfd(fdrpc, socket.AF_INET, socket.SOCK_STREAM)
util.set_cloexec(rpc_sk)
# Create memory socket
mem_sk = socket.fromfd(fdmem, socket.AF_INET, socket.SOCK_STREAM)
return connection(rpc_sk, mem_sk, fdfs)
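# --- Usage sketch (assumption; not part of the original) ---
# The caller is expected to already hold two connected AF_INET/SOCK_STREAM
# sockets (e.g. accepted from p.haul-service) plus whatever fs-channel value
# the active fs module expects:
#
# conn = establish(rpc_sock.fileno(), mem_sock.fileno(), fdfs)
# ...  # conn.rpc_sk carries RPC calls, conn.mem_sk the c/r images
# conn.close()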
|
lgpl-2.1
| -7,130,818,278,763,077,000
| 23.914894
| 76
| 0.733561
| false
| 3.147849
| false
| false
| false
|
AQORN/thunder-engine
|
thunder_web/api/views.py
|
1
|
1791
|
from django.shortcuts import render
from rest_framework import status
from rest_framework.decorators import api_view
#
from rest_framework.response import Response
from task.models import Task
from api.serializers import TaskSerializer
#
@api_view(['GET', 'POST'])
def task_list(request):
"""
List all tasks, or create a new task.
"""
if request.method == 'GET':
tasks = Task.objects.all()
serializer = TaskSerializer(tasks, many=True)
return Response(serializer.data)
elif request.method == 'POST':
serializer = TaskSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
else:
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@api_view(['GET', 'PUT', 'DELETE'])
#@permission_classes((IsAuthenticated, ))
def task_detail(request, pk):
"""
    Get, update, or delete a specific task
"""
try:
task = Task.objects.get(pk=pk)
except Task.DoesNotExist:
return Response(status=status.HTTP_404_NOT_FOUND)
if request.method == 'GET':
serializer = TaskSerializer(task)
return Response(serializer.data)
elif request.method == 'PUT':
serializer = TaskSerializer(task, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
else:
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST)
elif request.method == 'DELETE':
task.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
# Create your views here.
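# --- Client sketch (assumption; URL routing and Task fields are illustrative) ---
# Assuming these views are routed at /api/tasks/ and /api/tasks/<pk>/:
#
# import requests
# requests.post('http://localhost:8000/api/tasks/', json={'name': 'demo'})
# requests.get('http://localhost:8000/api/tasks/1/')
# requests.delete('http://localhost:8000/api/tasks/1/')  # -> 204 No Content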
|
gpl-3.0
| -5,743,528,586,565,458,000
| 27.428571
| 76
| 0.638749
| false
| 4.174825
| false
| false
| false
|
PyBossa/pybossa
|
test/test_sched_depth_first_all.py
|
1
|
48364
|
# -*- coding: utf8 -*-
# This file is part of PYBOSSA.
#
# Copyright (C) 2015 Scifabric LTD.
#
# PYBOSSA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PYBOSSA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PYBOSSA. If not, see <http://www.gnu.org/licenses/>.
import json
import random
from mock import patch
from helper import sched
from default import Test, db, with_context
from pybossa.model.task import Task
from pybossa.model.project import Project
from pybossa.model.user import User
from pybossa.model.task_run import TaskRun
from pybossa.model.category import Category
from pybossa.sched import get_depth_first_all_task
from pybossa.core import task_repo, project_repo
from factories import TaskFactory, ProjectFactory, TaskRunFactory, UserFactory
from factories import AnonymousTaskRunFactory, ExternalUidTaskRunFactory
from factories import reset_all_pk_sequences
import pybossa
class TestSched(sched.Helper):
endpoints = ['project', 'task', 'taskrun']
def get_headers_jwt(self, project):
"""Return headesr JWT token."""
# Get JWT token
url = 'api/auth/project/%s/token' % project.short_name
res = self.app.get(url, headers={'Authorization': project.secret_key})
authorization_token = 'Bearer %s' % res.data
return {'Authorization': authorization_token}
# Tests
@with_context
def test_anonymous_01_newtask(self):
""" Test SCHED newtask returns a Task for the Anonymous User"""
project = ProjectFactory.create(info=dict(sched='depth_first_all'))
TaskFactory.create_batch(2, project=project, info='hola')
res = self.app.get('api/project/%s/newtask' %project.id)
data = json.loads(res.data)
task_id = data['id']
assert data['info'] == 'hola', data
taskrun = dict(project_id=data['project_id'], task_id=data['id'], info="hola")
res = self.app.post('api/taskrun', data=json.dumps(taskrun))
res = self.app.get('api/project/%s/newtask' %project.id)
data = json.loads(res.data)
assert data['info'] == 'hola', data
assert data['id'] != task_id, data
@with_context
def test_anonymous_01_newtask_limits(self):
""" Test SCHED newtask returns a list of Tasks for the Anonymous User"""
project = ProjectFactory.create(info=dict(sched='depth_first_all'))
TaskFactory.create_batch(100, project=project, info='hola')
url = 'api/project/%s/newtask?limit=100' % project.id
res = self.app.get(url)
data = json.loads(res.data)
assert len(data) == 100
for t in data:
assert t['info'] == 'hola', t
task_ids = [task['id'] for task in data]
task_ids = set(task_ids)
assert len(task_ids) == 100, task_ids
url = 'api/project/%s/newtask?limit=200' % project.id
res = self.app.get(url)
data = json.loads(res.data)
assert len(data) == 100
for t in data:
assert t['info'] == 'hola', t
task_ids = [task['id'] for task in data]
task_ids = set(task_ids)
assert len(task_ids) == 100, task_ids
@with_context
def test_anonymous_02_gets_different_tasks(self):
""" Test SCHED newtask returns N different Tasks for the Anonymous User"""
assigned_tasks = []
# Get a Task until scheduler returns None
project = ProjectFactory.create(info=dict(sched='depth_first_all'))
tasks = TaskFactory.create_batch(3, project=project, info={})
res = self.app.get('api/project/%s/newtask' % project.id)
data = json.loads(res.data)
while data.get('info') is not None:
# Save the assigned task
assigned_tasks.append(data)
task = db.session.query(Task).get(data['id'])
# Submit an Answer for the assigned task
tr = AnonymousTaskRunFactory.create(project=project, task=task)
res = self.app.get('api/project/%s/newtask' %project.id)
data = json.loads(res.data)
# Check if we received the same number of tasks that the available ones
assert len(assigned_tasks) == len(tasks), len(assigned_tasks)
# Check if all the assigned Task.id are equal to the available ones
err_msg = "Assigned Task not found in DB Tasks"
for at in assigned_tasks:
assert self.is_task(at['id'], tasks), err_msg
# Check that there are no duplicated tasks
err_msg = "One Assigned Task is duplicated"
for at in assigned_tasks:
assert self.is_unique(at['id'], assigned_tasks), err_msg
@with_context
def test_anonymous_02_gets_different_tasks_limits(self):
""" Test SCHED newtask returns N different list of Tasks for the Anonymous User"""
assigned_tasks = []
# Get a Task until scheduler returns None
project = ProjectFactory.create(info=dict(sched='depth_first_all'))
tasks = TaskFactory.create_batch(10, project=project, info={})
res = self.app.get('api/project/%s/newtask?limit=5' % project.id)
data = json.loads(res.data)
while len(data) > 0:
# Save the assigned task
for t in data:
assigned_tasks.append(t)
task = db.session.query(Task).get(t['id'])
# Submit an Answer for the assigned task
tr = AnonymousTaskRunFactory.create(project=project, task=task)
res = self.app.get('api/project/%s/newtask?limit=5' % project.id)
data = json.loads(res.data)
# Check if we received the same number of tasks that the available ones
assert len(assigned_tasks) == len(tasks), len(assigned_tasks)
# Check if all the assigned Task.id are equal to the available ones
err_msg = "Assigned Task not found in DB Tasks"
for at in assigned_tasks:
assert self.is_task(at['id'], tasks), err_msg
# Check that there are no duplicated tasks
err_msg = "One Assigned Task is duplicated"
for at in assigned_tasks:
assert self.is_unique(at['id'], assigned_tasks), err_msg
@with_context
def test_external_uid_02_gets_different_tasks(self):
""" Test SCHED newtask returns N different Tasks
for a external User ID."""
assigned_tasks = []
# Get a Task until scheduler returns None
project = ProjectFactory.create(info=dict(sched='depth_first_all'))
tasks = TaskFactory.create_batch(3, project=project, info={})
headers = self.get_headers_jwt(project)
url = 'api/project/%s/newtask?external_uid=%s' % (project.id, '1xa')
res = self.app.get(url, headers=headers)
data = json.loads(res.data)
while data.get('info') is not None:
# Save the assigned task
assigned_tasks.append(data)
task = db.session.query(Task).get(data['id'])
# Submit an Answer for the assigned task
tr = ExternalUidTaskRunFactory.create(project=project, task=task)
res = self.app.get(url, headers=headers)
data = json.loads(res.data)
# Check if we received the same number of tasks that the available ones
assert len(assigned_tasks) == len(tasks), len(assigned_tasks)
# Check if all the assigned Task.id are equal to the available ones
err_msg = "Assigned Task not found in DB Tasks"
for at in assigned_tasks:
assert self.is_task(at['id'], tasks), err_msg
# Check that there are no duplicated tasks
err_msg = "One Assigned Task is duplicated"
for at in assigned_tasks:
assert self.is_unique(at['id'], assigned_tasks), err_msg
# Check that there are task runs saved with the external UID
answers = task_repo.filter_task_runs_by(external_uid='1xa')
print answers
err_msg = "There should be the same amount of task_runs than tasks"
assert len(answers) == len(assigned_tasks), err_msg
assigned_tasks_ids = sorted([at['id'] for at in assigned_tasks])
task_run_ids = sorted([a.task_id for a in answers])
err_msg = "There should be an answer for each assigned task"
assert assigned_tasks_ids == task_run_ids, err_msg
@with_context
def test_external_uid_02_gets_different_tasks_limits(self):
""" Test SCHED newtask returns N different list of Tasks
for a external User ID."""
assigned_tasks = []
# Get a Task until scheduler returns None
project = ProjectFactory.create(info=dict(sched='depth_first_all'))
tasks = TaskFactory.create_batch(10, project=project, info={})
headers = self.get_headers_jwt(project)
url = 'api/project/%s/newtask?limit=5&external_uid=%s' % (project.id, '1xa')
res = self.app.get(url, headers=headers)
data = json.loads(res.data)
        while len(data) > 0:
# Save the assigned task
for t in data:
assigned_tasks.append(t)
task = db.session.query(Task).get(t['id'])
# Submit an Answer for the assigned task
tr = ExternalUidTaskRunFactory.create(project=project, task=task)
res = self.app.get(url, headers=headers)
data = json.loads(res.data)
# Check if we received the same number of tasks that the available ones
assert len(assigned_tasks) == len(tasks), len(assigned_tasks)
# Check if all the assigned Task.id are equal to the available ones
err_msg = "Assigned Task not found in DB Tasks"
for at in assigned_tasks:
assert self.is_task(at['id'], tasks), err_msg
# Check that there are no duplicated tasks
err_msg = "One Assigned Task is duplicated"
for at in assigned_tasks:
assert self.is_unique(at['id'], assigned_tasks), err_msg
# Check that there are task runs saved with the external UID
answers = task_repo.filter_task_runs_by(external_uid='1xa')
print answers
err_msg = "There should be the same amount of task_runs than tasks"
assert len(answers) == len(assigned_tasks), err_msg
assigned_tasks_ids = sorted([at['id'] for at in assigned_tasks])
task_run_ids = sorted([a.task_id for a in answers])
err_msg = "There should be an answer for each assigned task"
assert assigned_tasks_ids == task_run_ids, err_msg
@with_context
def test_anonymous_03_respects_limit_tasks(self):
""" Test SCHED newtask respects the limit of 10 TaskRuns per Task"""
assigned_tasks = []
project = ProjectFactory.create(owner=UserFactory.create(id=500),
info=dict(sched='depth_first_all'))
user = UserFactory.create()
task = TaskFactory.create(project=project, n_answers=10)
tasks = get_depth_first_all_task(project.id, user.id)
assert len(tasks) == 1, len(tasks)
assert tasks[0].id == task.id, tasks
assert tasks[0].state == 'ongoing', tasks
for i in range(10):
tr = TaskRun(project_id=project.id,
task_id=task.id,
user_ip='127.0.0.%s' % i)
db.session.add(tr)
db.session.commit()
tasks = get_depth_first_all_task(project.id, user.id)
assert len(tasks) == 1, len(tasks)
assert tasks[0].id == task.id, tasks
assert tasks[0].state == 'completed', tasks
for i in range(10):
tasks = get_depth_first_all_task(project.id,
user_id=None,
user_ip='127.0.0.%s' % i)
assert len(tasks) == 0, tasks
tr = TaskRun(project_id=project.id,
task_id=task.id,
user_id=user.id)
db.session.add(tr)
db.session.commit()
tasks = get_depth_first_all_task(project.id, user.id)
assert len(tasks) == 0, tasks
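    # --- Note on the scheduler semantics pinned down above (hedged) ---
    # depth_first_all differs from plain depth-first scheduling in that
    # completed tasks are still handed to users who have not contributed to
    # them; a task only leaves a user's queue once that user has a TaskRun
    # on it.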
@with_context
def test_anonymous_03_respects_limit_tasks_limits(self):
""" Test SCHED newtask limit respects the limit of 30 TaskRuns per Task using limits"""
assigned_tasks = []
user = UserFactory.create()
project = ProjectFactory.create(info=dict(sched='depth_first_all'))
orig_tasks = TaskFactory.create_batch(2, project=project, n_answers=5)
tasks = get_depth_first_all_task(project.id, user.id, limit=2)
assert len(tasks) == 2, len(tasks)
assert tasks[0].id == orig_tasks[0].id
assert tasks[1].id == orig_tasks[1].id
for i in range(5):
tr = TaskRun(project_id=project.id,
task_id=tasks[0].id,
user_ip='127.0.0.%s' % i)
db.session.add(tr)
db.session.commit()
# Task should be marked as completed, but as user has no
# participated it should get the completed one as well.
tasks = get_depth_first_all_task(project.id, user.id, limit=2,
orderby='id', desc=False)
assert len(tasks) == 2, len(tasks)
assert tasks[0].id == orig_tasks[0].id, tasks[0]
assert tasks[0].state == 'completed', tasks[0].state
assert len(tasks[0].task_runs) == 5
assert tasks[1].id == orig_tasks[1].id
assert tasks[1].state == 'ongoing', tasks[1].state
assert len(tasks[1].task_runs) == 0
# User contributes, so only one task should be returned
tr = TaskRun(project_id=project.id,
task_id=tasks[0].id,
user_id=user.id)
db.session.add(tr)
db.session.commit()
tasks = get_depth_first_all_task(project.id, user.id, limit=2,
orderby='id', desc=False)
assert len(tasks) == 1, len(tasks)
assert tasks[0].id == orig_tasks[1].id, tasks[0]
assert tasks[0].state == 'ongoing', tasks[0].state
assert len(tasks[0].task_runs) == 0
@with_context
def test_external_uid_03_respects_limit_tasks(self):
""" Test SCHED newtask external uid respects the limit of 30 TaskRuns per Task for
external user id"""
assigned_tasks = []
project = ProjectFactory.create(info=dict(sched='depth_first_all'),
owner=UserFactory.create(id=500))
user = UserFactory.create()
task = TaskFactory.create(project=project, n_answers=10)
uid = '1xa'
tasks = get_depth_first_all_task(project.id, external_uid=uid)
assert len(tasks) == 1, len(tasks)
assert tasks[0].id == task.id, tasks
assert tasks[0].state == 'ongoing', tasks
# Add taskruns
for i in range(10):
tr = TaskRun(project_id=project.id,
task_id=task.id,
user_ip='127.0.0.%s' % i)
db.session.add(tr)
db.session.commit()
tasks = get_depth_first_all_task(project.id, external_uid=uid)
assert len(tasks) == 1, len(tasks)
assert tasks[0].id == task.id, tasks
assert tasks[0].state == 'completed', tasks
assert len(tasks[0].task_runs) == 10, tasks
url = 'api/project/%s/newtask?external_uid=%s' % (project.id,
uid)
headers = self.get_headers_jwt(project)
res = self.app.get(url, headers=headers)
data = json.loads(res.data)
assert data['id'] == task.id
assert data['state'] == 'completed'
tr = TaskRun(project_id=project.id,
task_id=task.id,
external_uid=uid)
db.session.add(tr)
db.session.commit()
tasks = get_depth_first_all_task(project.id, external_uid=uid)
assert len(tasks) == 0, len(tasks)
res = self.app.get(url, headers=headers)
data = json.loads(res.data)
assert len(data) == 0, data
@with_context
def test_external_uid_03_respects_limit_tasks_limits(self):
""" Test SCHED newtask external uid limits respects the limit of 30 TaskRuns per list of Tasks for
external user id"""
# Get Task until scheduler returns None
project = ProjectFactory.create(info=dict(sched='depth_first_all'))
orig_tasks = TaskFactory.create_batch(2, project=project, n_answers=5)
headers = self.get_headers_jwt(project)
uid = '1xa'
url = 'api/project/%s/newtask?external_uid=%s&limit=2' % (project.id,
uid)
tasks = get_depth_first_all_task(project.id, external_uid=uid, limit=2)
assert len(tasks) == 2, len(tasks)
assert tasks[0].id == orig_tasks[0].id, tasks
assert tasks[0].state == 'ongoing', tasks
assert tasks[1].id == orig_tasks[1].id, tasks
assert tasks[1].state == 'ongoing', tasks
# Add taskruns
for i in range(5):
tr = TaskRun(project_id=project.id,
task_id=tasks[0].id,
user_ip='127.0.0.%s' % i)
db.session.add(tr)
db.session.commit()
tasks = get_depth_first_all_task(project.id, external_uid=uid, limit=2,
orderby='id', desc=False)
assert len(tasks) == 2, len(tasks)
assert tasks[0].id == orig_tasks[0].id, tasks
assert tasks[0].state == 'completed', tasks
assert len(tasks[0].task_runs) == 5, tasks
assert tasks[1].id == orig_tasks[1].id, tasks
assert tasks[1].state == 'ongoing', tasks
assert len(tasks[1].task_runs) == 0, tasks
url = 'api/project/%s/newtask?external_uid=%s&limit=2&orderby=id&desc=False' % (project.id,uid)
headers = self.get_headers_jwt(project)
res = self.app.get(url, headers=headers)
data = json.loads(res.data)
assert data[0]['id'] == orig_tasks[0].id
assert data[0]['state'] == 'completed'
assert data[1]['id'] == orig_tasks[1].id
assert data[1]['state'] == 'ongoing'
tr = TaskRun(project_id=project.id,
task_id=tasks[0].id,
external_uid=uid)
db.session.add(tr)
db.session.commit()
tasks = get_depth_first_all_task(project.id, external_uid=uid,
limit=2, orderby='id', desc=False)
assert len(tasks) == 1, len(tasks)
assert tasks[0].id == orig_tasks[1].id
assert tasks[0].state == 'ongoing'
res = self.app.get(url, headers=headers)
data = json.loads(res.data)
assert data['id'] == orig_tasks[1].id
assert data['state'] == 'ongoing'
@with_context
def test_newtask_default_orderby(self):
"""Test SCHED depth first works with orderby."""
project = ProjectFactory.create(info=dict(sched="depth_first_all"))
task1 = TaskFactory.create(project=project, fav_user_ids=None)
task2 = TaskFactory.create(project=project, fav_user_ids=[1,2,3])
url = "/api/project/%s/newtask?orderby=%s&desc=%s" % (project.id, 'id', False)
res = self.app.get(url)
data = json.loads(res.data)
assert data['id'] == task1.id, data
url = "/api/project/%s/newtask?orderby=%s&desc=%s" % (project.id, 'id', True)
res = self.app.get(url)
data = json.loads(res.data)
assert data['id'] == task2.id, data
url = "/api/project/%s/newtask?orderby=%s&desc=%s" % (project.id, 'created', False)
res = self.app.get(url)
data = json.loads(res.data)
assert data['id'] == task1.id, data
url = "/api/project/%s/newtask?orderby=%s&desc=%s" % (project.id, 'created', True)
res = self.app.get(url)
data = json.loads(res.data)
assert data['id'] == task2.id, data
url = "/api/project/%s/newtask?orderby=%s&desc=%s" % (project.id, 'fav_user_ids', False)
res = self.app.get(url)
data = json.loads(res.data)
assert data['id'] == task1.id, data
url = "/api/project/%s/newtask?orderby=%s&desc=%s" % (project.id, 'fav_user_ids', True)
res = self.app.get(url)
data = json.loads(res.data)
assert data['id'] == task2.id, data
assert data['fav_user_ids'] == task2.fav_user_ids, data
@with_context
def test_user_01_newtask(self):
""" Test SCHED newtask returns a Task for John Doe User"""
project = ProjectFactory.create(info=dict(sched='depth_first_all'),
owner=UserFactory.create(id=500))
TaskFactory.create_batch(2, project=project, n_answers=2)
# Register
self.register()
self.signin()
url = 'api/project/%s/newtask' % project.id
res = self.app.get(url)
data = json.loads(res.data)
task_id = data['id']
assert data['id'], data
taskrun = dict(project_id=data['project_id'], task_id=data['id'], info="hola")
res = self.app.post('api/taskrun', data=json.dumps(taskrun))
res = self.app.get(url)
data = json.loads(res.data)
assert data['id'], data
assert data['id'] != task_id, data
self.signout()
@with_context
def test_user_01_newtask_limits(self):
""" Test SCHED newtask returns a Task for John Doe User with limits"""
self.register()
self.signin()
project = ProjectFactory.create(info=dict(sched='depth_first_all'),
owner=UserFactory.create(id=500))
tasks = TaskFactory.create_batch(10, project=project, info=dict(foo=1))
# Register
url = 'api/project/%s/newtask?limit=2' % project.id
res = self.app.get(url)
data = json.loads(res.data)
assert len(data) == 2, data
for t in data:
assert t['info']['foo'] == 1, t
self.signout()
@with_context
def test_user_02_gets_different_tasks(self):
""" Test SCHED newtask returns N different Tasks for John Doe User"""
project = ProjectFactory.create(info=dict(sched='depth_first_all'),
owner=UserFactory.create(id=500))
TaskFactory.create_batch(10, project=project)
# Register
self.register()
self.signin()
assigned_tasks = []
# Get Task until scheduler returns None
url = 'api/project/%s/newtask' % project.id
res = self.app.get(url)
data = json.loads(res.data)
while data.get('id') is not None:
# Check that we received a Task
assert data.get('id'), data
# Save the assigned task
assigned_tasks.append(data)
# Submit an Answer for the assigned task
tr = dict(project_id=data['project_id'], task_id=data['id'],
info={'answer': 'No'})
tr = json.dumps(tr)
self.app.post('/api/taskrun', data=tr)
res = self.app.get(url)
data = json.loads(res.data)
# Check if we received the same number of tasks that the available ones
tasks = db.session.query(Task).filter_by(project_id=1).all()
assert len(assigned_tasks) == len(tasks), assigned_tasks
# Check if all the assigned Task.id are equal to the available ones
tasks = db.session.query(Task).filter_by(project_id=1).all()
err_msg = "Assigned Task not found in DB Tasks"
for at in assigned_tasks:
assert self.is_task(at['id'], tasks), err_msg
# Check that there are no duplicated tasks
err_msg = "One Assigned Task is duplicated"
for at in assigned_tasks:
assert self.is_unique(at['id'], assigned_tasks), err_msg
@with_context
def test_user_02_gets_different_tasks_limit(self):
""" Test SCHED newtask returns N different list of Tasks for John Doe User"""
# Register
self.register()
self.signin()
project = ProjectFactory.create(info=dict(sched='depth_first_all'),
owner=UserFactory.create(id=500))
TaskFactory.create_batch(10, project=project)
assigned_tasks = []
# Get Task until scheduler returns None
url = 'api/project/%s/newtask?limit=5' % project.id
res = self.app.get(url)
data = json.loads(res.data)
while len(data) > 0:
# Check that we received a Task
for t in data:
assert t.get('id'), t
# Save the assigned task
assigned_tasks.append(t)
# Submit an Answer for the assigned task
tr = dict(project_id=t['project_id'], task_id=t['id'],
info={'answer': 'No'})
tr = json.dumps(tr)
self.app.post('/api/taskrun', data=tr)
res = self.app.get(url)
data = json.loads(res.data)
# Check if we received the same number of tasks that the available ones
tasks = db.session.query(Task).filter_by(project_id=1).all()
assert len(assigned_tasks) == len(tasks), assigned_tasks
# Check if all the assigned Task.id are equal to the available ones
tasks = db.session.query(Task).filter_by(project_id=1).all()
err_msg = "Assigned Task not found in DB Tasks"
for at in assigned_tasks:
assert self.is_task(at['id'], tasks), err_msg
# Check that there are no duplicated tasks
err_msg = "One Assigned Task is duplicated"
for at in assigned_tasks:
assert self.is_unique(at['id'], assigned_tasks), err_msg
@with_context
def test_user_03_respects_limit_tasks(self):
""" Test SCHED newtask respects the limit of 30 TaskRuns per Task"""
project = ProjectFactory.create(info=dict(sched='depth_first_all'),
owner=UserFactory.create(id=500))
orig_tasks = TaskFactory.create_batch(1, project=project, n_answers=10)
user = UserFactory.create()
tasks = get_depth_first_all_task(project.id, user.id)
assert len(tasks) == 1, len(tasks)
assert tasks[0].id == orig_tasks[0].id, tasks
assert tasks[0].state == 'ongoing', tasks
for i in range(10):
tr = TaskRun(project_id=project.id,
task_id=orig_tasks[0].id,
user_ip='127.0.0.%s' % i)
db.session.add(tr)
db.session.commit()
tasks = get_depth_first_all_task(project.id, user.id)
assert len(tasks) == 1, len(tasks)
assert tasks[0].id == orig_tasks[0].id, tasks
assert tasks[0].state == 'completed', tasks
assert len(tasks[0].task_runs) == 10, tasks
tr = TaskRun(project_id=project.id,
task_id=orig_tasks[0].id,
user_id=user.id)
db.session.add(tr)
db.session.commit()
tasks = get_depth_first_all_task(project.id, user.id)
assert len(tasks) == 0, tasks
@with_context
def test_user_03_respects_limit_tasks_limit(self):
""" Test SCHED limit arg newtask respects the limit of 30 TaskRuns per list of Tasks"""
assigned_tasks = []
project = ProjectFactory.create(info=dict(sched='depth_first_all'),
owner=UserFactory.create(id=500))
user = UserFactory.create()
orig_tasks = TaskFactory.create_batch(2, project=project, n_answers=10)
tasks = get_depth_first_all_task(project.id, user.id,
limit=2, orderby='id',
desc=False)
assert len(tasks) == 2, len(tasks)
assert tasks[0].id == orig_tasks[0].id, tasks
assert tasks[0].state == 'ongoing', tasks
assert tasks[1].id == orig_tasks[1].id, tasks
assert tasks[1].state == 'ongoing', tasks
for i in range(10):
tr = TaskRun(project_id=project.id,
task_id=tasks[0].id,
user_ip='127.0.0.%s' % i)
db.session.add(tr)
db.session.commit()
tasks = get_depth_first_all_task(project.id, user.id,
limit=2, orderby='id',
desc=False)
assert len(tasks) == 2, len(tasks)
assert tasks[0].id == orig_tasks[0].id, tasks
assert tasks[0].state == 'completed', tasks
assert len(tasks[0].task_runs) == 10, tasks
assert tasks[1].id == orig_tasks[1].id, tasks
assert tasks[1].state == 'ongoing', tasks
assert len(tasks[1].task_runs) == 0, tasks
tr = TaskRun(project_id=project.id,
task_id=tasks[0].id,
user_id=user.id)
db.session.add(tr)
db.session.commit()
tasks = get_depth_first_all_task(project.id, user.id,
limit=2, orderby='id',
desc=False)
assert len(tasks) == 1, tasks
assert tasks[0].id == orig_tasks[1].id
assert tasks[0].state == 'ongoing'
@with_context
def test_task_preloading(self):
"""Test TASK Pre-loading works"""
project = ProjectFactory.create(info=dict(sched='depth_first_all'),
owner=UserFactory.create(id=500))
TaskFactory.create_batch(10, project=project)
# Register
self.register()
self.signin()
assigned_tasks = []
# Get Task until scheduler returns None
url = 'api/project/%s/newtask' % project.id
res = self.app.get(url)
task1 = json.loads(res.data)
# Check that we received a Task
assert task1.get('id'), task1
# Pre-load the next task for the user
res = self.app.get(url + '?offset=1')
task2 = json.loads(res.data)
# Check that we received a Task
assert task2.get('id'), task2
# Check that both tasks are different
assert task1.get('id') != task2.get('id'), "Tasks should be different"
## Save the assigned task
assigned_tasks.append(task1)
assigned_tasks.append(task2)
# Submit an Answer for the assigned and pre-loaded task
for t in assigned_tasks:
tr = dict(project_id=t['project_id'], task_id=t['id'], info={'answer': 'No'})
tr = json.dumps(tr)
self.app.post('/api/taskrun', data=tr)
# Get two tasks again
res = self.app.get(url)
task3 = json.loads(res.data)
# Check that we received a Task
assert task3.get('id'), task3
# Pre-load the next task for the user
res = self.app.get(url + '?offset=1')
task4 = json.loads(res.data)
# Check that we received a Task
assert task4.get('id'), task4
# Check that both tasks are different
assert task3.get('id') != task4.get('id'), "Tasks should be different"
assert task1.get('id') != task3.get('id'), "Tasks should be different"
assert task2.get('id') != task4.get('id'), "Tasks should be different"
# Check that a big offset returns None
res = self.app.get(url + '?offset=11')
assert json.loads(res.data) == {}, res.data
@with_context
def test_task_preloading_limit(self):
"""Test TASK Pre-loading with limit works"""
# Register
project = ProjectFactory.create(info=dict(sched='depth_first_all'),
owner=UserFactory.create(id=500))
TaskFactory.create_batch(10, project=project)
self.register()
self.signin()
assigned_tasks = []
url = 'api/project/%s/newtask?limit=2' % project.id
res = self.app.get(url)
tasks1 = json.loads(res.data)
# Check that we received a Task
for t in tasks1:
assert t.get('id'), t
# Pre-load the next tasks for the user
res = self.app.get(url + '&offset=2')
tasks2 = json.loads(res.data)
# Check that we received a Task
for t in tasks2:
assert t.get('id'), t
# Check that both tasks are different
tasks1_ids = set([t['id'] for t in tasks1])
tasks2_ids = set([t['id'] for t in tasks2])
assert len(tasks1_ids.union(tasks2_ids)) == 4, "Tasks should be different"
## Save the assigned task
for t in tasks1:
assigned_tasks.append(t)
for t in tasks2:
assigned_tasks.append(t)
# Submit an Answer for the assigned and pre-loaded task
for t in assigned_tasks:
tr = dict(project_id=t['project_id'], task_id=t['id'], info={'answer': 'No'})
tr = json.dumps(tr)
self.app.post('/api/taskrun', data=tr)
# Get two tasks again
res = self.app.get(url)
tasks3 = json.loads(res.data)
# Check that we received a Task
for t in tasks3:
assert t.get('id'), t
# Pre-load the next task for the user
res = self.app.get(url + '&offset=2')
tasks4 = json.loads(res.data)
# Check that we received a Task
for t in tasks4:
assert t.get('id'), t
# Check that both tasks are different
tasks3_ids = set([t['id'] for t in tasks3])
tasks4_ids = set([t['id'] for t in tasks4])
assert len(tasks3_ids.union(tasks4_ids)) == 4, "Tasks should be different"
# Check that a big offset returns None
res = self.app.get(url + '&offset=11')
assert json.loads(res.data) == {}, res.data
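# Note (illustrative summary of the behaviour exercised above): newtask
# accepts both limit and offset, so GET api/project/<id>/newtask?limit=2&offset=2
# pre-loads the next two tasks after the two currently assigned ones.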
@with_context
def test_task_preloading_external_uid(self):
"""Test TASK Pre-loading for external user IDs works"""
project = ProjectFactory.create(info=dict(sched='depth_first_all'),
owner=UserFactory.create(id=500))
TaskFactory.create_batch(10, project=project)
assigned_tasks = []
# Get Task until scheduler returns None
project = project_repo.get(1)
headers = self.get_headers_jwt(project)
url = 'api/project/%s/newtask?external_uid=2xb' % project.id
res = self.app.get(url, headers=headers)
task1 = json.loads(res.data)
# Check that we received a Task
assert task1.get('id'), task1
# Pre-load the next task for the user
res = self.app.get(url + '&offset=1', headers=headers)
task2 = json.loads(res.data)
# Check that we received a Task
assert task2.get('id'), task2
# Check that both tasks are different
assert task1.get('id') != task2.get('id'), "Tasks should be different"
## Save the assigned task
assigned_tasks.append(task1)
assigned_tasks.append(task2)
# Submit an Answer for the assigned and pre-loaded task
for t in assigned_tasks:
tr = dict(project_id=t['project_id'],
task_id=t['id'], info={'answer': 'No'},
external_uid='2xb')
tr = json.dumps(tr)
res = self.app.post('/api/taskrun?external_uid=2xb',
data=tr, headers=headers)
# Get two tasks again
res = self.app.get(url, headers=headers)
task3 = json.loads(res.data)
# Check that we received a Task
assert task3.get('id'), task3
# Pre-load the next task for the user
res = self.app.get(url + '&offset=1', headers=headers)
task4 = json.loads(res.data)
# Check that we received a Task
assert task4.get('id'), task4
# Check that both tasks are different
assert task3.get('id') != task4.get('id'), "Tasks should be different"
assert task1.get('id') != task3.get('id'), "Tasks should be different"
assert task2.get('id') != task4.get('id'), "Tasks should be different"
# Check that a big offset returns None
res = self.app.get(url + '&offset=11', headers=headers)
assert json.loads(res.data) == {}, res.data
@with_context
def test_task_preloading_external_uid_limit(self):
"""Test TASK Pre-loading for external user IDs works with limit"""
project = ProjectFactory.create(info=dict(sched='depth_first_all'),
owner=UserFactory.create(id=500))
TaskFactory.create_batch(10, project=project)
assigned_tasks = []
# Get Task until scheduler returns None
headers = self.get_headers_jwt(project)
url = 'api/project/%s/newtask?external_uid=2xb&limit=2' % project.id
res = self.app.get(url, headers=headers)
tasks1 = json.loads(res.data)
# Check that we received a Task
for t in tasks1:
assert t.get('id'), t
# Pre-load the next task for the user
res = self.app.get(url + '&offset=2', headers=headers)
tasks2 = json.loads(res.data)
# Check that we received a Task
for t in tasks2:
assert t.get('id'), t
# Check that both tasks are different
tasks1_ids = set([task['id'] for task in tasks1])
tasks2_ids = set([task['id'] for task in tasks2])
assert len(tasks1_ids.union(tasks2_ids)) == 4, "Tasks should be different"
## Save the assigned task
for t in tasks1:
assigned_tasks.append(t)
for t in tasks2:
assigned_tasks.append(t)
# Submit an Answer for the assigned and pre-loaded task
for t in assigned_tasks:
tr = dict(project_id=t['project_id'],
task_id=t['id'], info={'answer': 'No'},
external_uid='2xb')
tr = json.dumps(tr)
res = self.app.post('/api/taskrun?external_uid=2xb',
data=tr, headers=headers)
# Get two tasks again
res = self.app.get(url, headers=headers)
tasks3 = json.loads(res.data)
# Check that we received a Task
for t in tasks3:
assert t.get('id'), t
# Pre-load the next task for the user
res = self.app.get(url + '&offset=2', headers=headers)
tasks4 = json.loads(res.data)
# Check that we received a Task
for t in tasks4:
assert t.get('id'), t
# Check that both tasks are different
tasks3_ids = set([task['id'] for task in tasks3])
tasks4_ids = set([task['id'] for task in tasks4])
assert len(tasks3_ids.union(tasks4_ids)) == 4, "Tasks should be different"
# Check that a big offset returns None
res = self.app.get(url + '&offset=11', headers=headers)
assert json.loads(res.data) == {}, res.data
@with_context
def test_task_priority(self):
"""Test SCHED respects priority_0 field"""
project = ProjectFactory.create(info=dict(sched='depth_first_all'),
owner=UserFactory.create(id=500))
TaskFactory.create_batch(10, project=project)
# Register
self.register()
self.signin()
# By default, tasks without priority should be ordered by task.id (FIFO)
tasks = db.session.query(Task).filter_by(project_id=1).order_by('id').all()
url = 'api/project/%s/newtask' % project.id
res = self.app.get(url)
task1 = json.loads(res.data)
# Check that we received a Task
err_msg = "Task.id should be the same"
assert task1.get('id') == tasks[0].id, err_msg
# Now let's change the priority to a random task
import random
t = random.choice(tasks)
# Increase priority to maximum
t.priority_0 = 1
db.session.add(t)
db.session.commit()
# Request again a new task
res = self.app.get(url + '?orderby=priority_0&desc=true')
task1 = json.loads(res.data)
# Check that we received a Task
err_msg = "Task.id should be the same"
assert task1.get('id') == t.id, err_msg
err_msg = "Task.priority_0 should be the 1"
assert task1.get('priority_0') == 1, err_msg
@with_context
def test_task_priority_limit(self):
"""Test SCHED respects priority_0 field with limit"""
project = ProjectFactory.create(info=dict(sched='depth_first_all'),
owner=UserFactory.create(id=500))
TaskFactory.create_batch(10, project=project)
# Register
self.register()
self.signin()
# By default, tasks without priority should be ordered by task.id (FIFO)
tasks = db.session.query(Task).filter_by(project_id=project.id).order_by('id').all()
url = 'api/project/%s/newtask?limit=2' % project.id
res = self.app.get(url)
tasks1 = json.loads(res.data)
# Check that we received a Task
err_msg = "Task.id should be the same"
assert tasks1[0].get('id') == tasks[0].id, err_msg
# Now let's change the priority to a random task
import random
t = random.choice(tasks)
# Increase priority to maximum
t.priority_0 = 1
db.session.add(t)
db.session.commit()
# Request again a new task
res = self.app.get(url + '&orderby=priority_0&desc=true')
tasks1 = json.loads(res.data)
# Check that we received a Task
err_msg = "Task.id should be the same"
assert tasks1[0].get('id') == t.id, (err_msg, tasks1[0])
err_msg = "Task.priority_0 should be the 1"
assert tasks1[0].get('priority_0') == 1, err_msg
@with_context
def test_task_priority_external_uid(self):
"""Test SCHED respects priority_0 field for externa uid"""
project = ProjectFactory.create(info=dict(sched='depth_first_all'),
owner=UserFactory.create(id=500))
TaskFactory.create_batch(10, project=project)
# By default, tasks without priority should be ordered by task.id (FIFO)
tasks = db.session.query(Task).filter_by(project_id=1).order_by('id').all()
project = project_repo.get(1)
headers = self.get_headers_jwt(project)
url = 'api/project/%s/newtask?external_uid=342' % project.id
res = self.app.get(url, headers=headers)
task1 = json.loads(res.data)
# Check that we received a Task
err_msg = "Task.id should be the same"
assert task1.get('id') == tasks[0].id, err_msg
# Now let's change the priority to a random task
import random
t = random.choice(tasks)
# Increase priority to maximum
t.priority_0 = 1
db.session.add(t)
db.session.commit()
# Request again a new task
res = self.app.get(url + '&orderby=priority_0&desc=true', headers=headers)
task1 = json.loads(res.data)
# Check that we received a Task
err_msg = "Task.id should be the same"
assert task1.get('id') == t.id, (err_msg, task1, t)
err_msg = "Task.priority_0 should be the 1"
assert task1.get('priority_0') == 1, err_msg
@with_context
def test_task_priority_external_uid_limit(self):
"""Test SCHED respects priority_0 field for externa uid with limit"""
project = ProjectFactory.create(info=dict(sched='depth_first_all'),
owner=UserFactory.create(id=500))
TaskFactory.create_batch(10, project=project)
# By default, tasks without priority should be ordered by task.id (FIFO)
tasks = db.session.query(Task).filter_by(project_id=project.id).order_by('id').all()
headers = self.get_headers_jwt(project)
url = 'api/project/%s/newtask?external_uid=342&limit=2' % project.id
res = self.app.get(url, headers=headers)
tasks1 = json.loads(res.data)
# Check that we received a Task
err_msg = "Task.id should be the same"
assert tasks1[0].get('id') == tasks[0].id, err_msg
# Now let's change the priority to a random task
import random
t = random.choice(tasks)
# Increase priority to maximum
t.priority_0 = 1
db.session.add(t)
db.session.commit()
# Request again a new task
res = self.app.get(url + '&orderby=priority_0&desc=true', headers=headers)
tasks1 = json.loads(res.data)
# Check that we received a Task
err_msg = "Task.id should be the same"
assert tasks1[0].get('id') == t.id, err_msg
err_msg = "Task.priority_0 should be the 1"
assert tasks1[0].get('priority_0') == 1, err_msg
def _add_task_run(self, app, task, user=None):
tr = AnonymousTaskRunFactory.create(project=app, task=task)
@with_context
def test_no_more_tasks(self):
"""Test that a users gets always tasks"""
owner = UserFactory.create()
project = ProjectFactory.create(info=dict(sched='depth_first_all'),
owner=owner,
short_name='egil',
name='egil',
description='egil')
project_id = project.id
all_tasks = TaskFactory.create_batch(20, project=project, n_answers=10)
for t in all_tasks[0:10]:
TaskRunFactory.create_batch(10, task=t, project=project)
tasks = db.session.query(Task).filter_by(project_id=project.id, state='ongoing').all()
assert tasks[0].n_answers == 10
url = 'api/project/%s/newtask' % project.id
res = self.app.get(url)
data = json.loads(res.data)
err_msg = "User should get a task"
assert 'project_id' in data.keys(), err_msg
assert data['project_id'] == project_id, err_msg
assert data['id'] == all_tasks[0].id, err_msg
assert data['state'] == 'completed', err_msg
@with_context
def test_no_more_tasks_limit(self):
"""Test that a users gets always tasks with limit"""
owner = UserFactory.create()
project = ProjectFactory.create(info=dict(sched='depth_first_all'),
owner=owner,
short_name='egil',
name='egil',
description='egil')
project_id = project.id
all_tasks = TaskFactory.create_batch(20, project=project, n_answers=10)
for t in all_tasks[0:10]:
TaskRunFactory.create_batch(10, task=t, project=project)
tasks = db.session.query(Task).filter_by(project_id=project.id, state='ongoing').all()
assert tasks[0].n_answers == 10
url = 'api/project/%s/newtask?limit=2&orderby=id' % project_id
res = self.app.get(url)
data = json.loads(res.data)
err_msg = "User should get a task"
i = 0
for t in data:
assert 'project_id' in t.keys(), err_msg
assert t['project_id'] == project_id, err_msg
assert t['id'] == all_tasks[i].id, (err_msg, t, all_tasks[i].id)
assert t['state'] == 'completed', err_msg
i += 1
|
agpl-3.0
| 3,603,119,875,285,052,000
| 39.744735
| 106
| 0.57549
| false
| 3.753512
| true
| false
| false
|
alxnov/ansible-modules-core
|
cloud/amazon/ec2_vol.py
|
1
|
22132
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: ec2_vol
short_description: create and attach a volume, return volume id and device map
description:
- creates an EBS volume and optionally attaches it to an instance. If both an instance ID and a device name are given and the instance already has a device at that name, then no volume is created and no attachment is made. This module has a dependency on python-boto.
version_added: "1.1"
options:
instance:
description:
- instance ID if you wish to attach the volume. Since 1.9 you can set to None to detach.
required: false
default: null
name:
description:
- volume Name tag if you wish to attach an existing volume (requires instance)
required: false
default: null
version_added: "1.6"
id:
description:
- volume id if you wish to attach an existing volume (requires instance) or remove an existing volume
required: false
default: null
version_added: "1.6"
volume_size:
description:
- size of volume (in GB) to create.
required: false
default: null
volume_type:
description:
- Type of EBS volume; standard (magnetic), gp2 (SSD), io1 (Provisioned IOPS). "Standard" is the old EBS default
and remains the Ansible default for backwards compatibility.
required: false
default: standard
version_added: "1.9"
iops:
description:
- the provisioned IOPs you want to associate with this volume (integer).
required: false
default: 100
version_added: "1.3"
encrypted:
description:
- Enable encryption at rest for this volume.
default: false
version_added: "1.8"
device_name:
description:
- device id to override device mapping. Assumes /dev/sdf for Linux/UNIX and /dev/xvdf for Windows. Can figure out a free device_name if device_name is a string containing the {}, {X} or {N} templates. Template syntax: {} and {X} stand for a character in the [f-p] range, {N} for a character in the [1-6] range, according to the EBS attachment notation docs here: http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/device_naming.html .
required: false
default: null
delete_on_termination:
description:
- When set to "yes", the volume will be deleted upon instance termination.
required: false
default: "no"
choices: ["yes", "no"]
version_added: "2.1"
zone:
description:
- zone in which to create the volume, if unset uses the zone the instance is in (if set)
required: false
default: null
aliases: ['aws_zone', 'ec2_zone']
snapshot:
description:
- snapshot ID on which to base the volume
required: false
default: null
version_added: "1.5"
validate_certs:
description:
- When set to "no", SSL certificates will not be validated for boto versions >= 2.6.0.
required: false
default: "yes"
choices: ["yes", "no"]
version_added: "1.5"
state:
description:
- whether to ensure the volume is present or absent, or to list existing volumes (The C(list) option was added in version 1.8).
required: false
default: present
choices: ['absent', 'present', 'list']
version_added: "1.6"
author: "Lester Wade (@lwade)"
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Simple attachment action
- ec2_vol:
instance: XXXXXX
volume_size: 5
device_name: sdd
# Example using custom iops params
- ec2_vol:
instance: XXXXXX
volume_size: 5
iops: 100
device_name: sdd
# Example using snapshot id
- ec2_vol:
instance: XXXXXX
snapshot: "{{ snapshot }}"
# Playbook example combined with instance launch
- ec2:
keypair: "{{ keypair }}"
image: "{{ image }}"
wait: yes
count: 3
register: ec2
- ec2_vol:
instance: "{{ item.id }} "
volume_size: 5
with_items: ec2.instances
register: ec2_vol
# Example: Launch an instance and then add a volume if not already attached
# * Volume will be created with the given name if not already created.
# * Nothing will happen if the volume is already attached.
# * Requires Ansible 2.0
- ec2:
keypair: "{{ keypair }}"
image: "{{ image }}"
zone: YYYYYY
id: my_instance
wait: yes
count: 1
register: ec2
- ec2_vol:
instance: "{{ item.id }}"
name: my_existing_volume_Name_tag
device_name: /dev/xvdf
with_items: ec2.instances
register: ec2_vol
# Example: Launch an instance and then add a volume if not already attached
# * Volume will be created with the given name if not already created.
# * Volume will pick the first free /dev/xvd* slot according to template.
# * Nothing will happen if the volume is already attached.
# * Requires Ansible 2.0
- ec2_vol:
instance: "{{ item.id }}"
name: my_existing_volume_Name_tag
device_name: /dev/xvd{}
with_items: ec2.instances
register: ec2_vol
# Remove a volume
- ec2_vol:
id: vol-XXXXXXXX
state: absent
# Detach a volume (since 1.9)
- ec2_vol:
id: vol-XXXXXXXX
instance: None
# List volumes for an instance
- ec2_vol:
instance: i-XXXXXX
state: list
# Create new volume using SSD storage
- ec2_vol:
instance: XXXXXX
volume_size: 50
volume_type: gp2
device_name: /dev/xvdf
# Attach an existing volume to instance. The volume will be deleted upon instance termination.
- ec2_vol:
instance: XXXXXX
id: XXXXXX
device_name: /dev/sdf
delete_on_termination: yes
'''
RETURN = '''
device:
description: device name of attached volume
returned: when success
type: string
sample: "/def/sdf"
volume_id:
description: the id of volume
returned: when success
type: string
sample: "vol-35b333d9"
volume_type:
description: the volume type
returned: when success
type: string
sample: "standard"
volume:
description: a dictionary containing detailed attributes of the volume
returned: when success
type: string
sample: {
"attachment_set": {
"attach_time": "2015-10-23T00:22:29.000Z",
"deleteOnTermination": "false",
"device": "/dev/sdf",
"instance_id": "i-8356263c",
"status": "attached"
},
"create_time": "2015-10-21T14:36:08.870Z",
"encrypted": false,
"id": "vol-35b333d9",
"iops": null,
"size": 1,
"snapshot_id": "",
"status": "in-use",
"tags": {
"env": "dev"
},
"type": "standard",
"zone": "us-east-1b"
}
'''
import time
from distutils.version import LooseVersion
try:
import boto.ec2
from boto.exception import BotoServerError
from boto.ec2.blockdevicemapping import BlockDeviceType, BlockDeviceMapping
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def get_volume(module, ec2):
name = module.params.get('name')
id = module.params.get('id')
zone = module.params.get('zone')
filters = {}
volume_ids = None
# If no name or id supplied, just try volume creation based on module parameters
if id is None and name is None:
return None
if zone:
filters['availability_zone'] = zone
if name:
filters = {'tag:Name': name}
if id:
volume_ids = [id]
try:
vols = ec2.get_all_volumes(volume_ids=volume_ids, filters=filters)
except boto.exception.BotoServerError, e:
module.fail_json(msg = "%s: %s" % (e.error_code, e.error_message))
if not vols:
if id:
msg = "Could not find the volume with id: %s" % id
if name:
msg += (" and name: %s" % name)
module.fail_json(msg=msg)
else:
return None
if len(vols) > 1:
module.fail_json(msg="Found more than one volume in zone (if specified) with name: %s" % name)
return vols[0]
def get_volumes(module, ec2):
instance = module.params.get('instance')
try:
if not instance:
vols = ec2.get_all_volumes()
else:
vols = ec2.get_all_volumes(filters={'attachment.instance-id': instance})
except boto.exception.BotoServerError, e:
module.fail_json(msg = "%s: %s" % (e.error_code, e.error_message))
return vols
def delete_volume(module, ec2):
volume_id = module.params['id']
try:
ec2.delete_volume(volume_id)
module.exit_json(changed=True)
except boto.exception.EC2ResponseError as ec2_error:
if ec2_error.code == 'InvalidVolume.NotFound':
module.exit_json(changed=False)
module.fail_json(msg=ec2_error.message)
def boto_supports_volume_encryption():
"""
Check if Boto library supports encryption of EBS volumes (added in 2.29.0)
Returns:
True if the installed boto version is at least 2.29.0 (the release that added volume encryption support), else False
"""
return hasattr(boto, 'Version') and LooseVersion(boto.Version) >= LooseVersion('2.29.0')
def create_volume(module, ec2, zone):
changed = False
name = module.params.get('name')
iops = module.params.get('iops')
encrypted = module.params.get('encrypted')
volume_size = module.params.get('volume_size')
volume_type = module.params.get('volume_type')
snapshot = module.params.get('snapshot')
# If custom iops is defined we use volume_type "io1" rather than the default of "standard"
if iops:
volume_type = 'io1'
volume = get_volume(module, ec2)
if volume is None:
try:
if boto_supports_volume_encryption():
volume = ec2.create_volume(volume_size, zone, snapshot, volume_type, iops, encrypted)
changed = True
else:
volume = ec2.create_volume(volume_size, zone, snapshot, volume_type, iops)
changed = True
while volume.status != 'available':
time.sleep(3)
volume.update()
if name:
ec2.create_tags([volume.id], {"Name": name})
except boto.exception.BotoServerError, e:
module.fail_json(msg = "%s: %s" % (e.error_code, e.error_message))
return volume, changed
# See: http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/device_naming.html
# http://docs.aws.amazon.com/AWSEC2/latest/WindowsGuide/device_naming.html
#
DEVICE_LETTER_RANGE = tuple('fghijklmnop')
DEVICE_NUMBER_RANGE = tuple('123456')
ALL_DEVICE_NAME_TEMPLATES = ('{}', '{X}', '{N}')
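# Illustrative expansion (a sketch, not used by the module itself): with the
# ranges above,
#   '/dev/xvd{}'.replace('{}', '{X}').format(X='f', N='1') -> '/dev/xvdf'
#   '/dev/sd{X}{N}'.format(X='f', N='1')                   -> '/dev/sdf1'
# get_next_device_name_from_template() below enumerates every such expansion
# and picks the first one that is not already attached.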
def get_default_device_name_template(instance, ec2):
"""
Try to infer whether to use the Windows or Linux device name pattern.
Use instance.platform and password_data as indicators that the instance
is a Windows machine.
"""
instance.update()
if (instance.platform or '').lower() == 'windows':
device_name = '/dev/xvd{}'
elif ec2.get_password_data(instance.id):
device_name = '/dev/xvd{}'
else:
device_name = '/dev/sd{}'
return device_name
def is_device_name_templated(device_name):
return any(t in device_name for t in ALL_DEVICE_NAME_TEMPLATES)
def get_next_device_name_from_template(device_name, module, ec2):
"""
Look at already attached volumes and device_name template,
and return the next free device name in alphabetical order
"""
volumes = get_volumes(module, ec2)
# python 2.6 str.format does not like unnamed items in templates
device_name = device_name.replace('{}', '{X}')
dev_choice_set = set(
device_name.format(X=c, N=n)
for c in DEVICE_LETTER_RANGE
for n in DEVICE_NUMBER_RANGE
)
dev_busy_set = set(v.attach_data.device for v in volumes)
dev_choices_left = sorted(dev_choice_set.difference(dev_busy_set))
if not dev_choices_left:
# Every device name matching the template is already attached
module.fail_json(msg="Cannot find a free device name for template %s: "
"all matching /dev/ EBS device names are busy" % device_name)
device_name = dev_choices_left[0]
return device_name
def attach_volume(module, ec2, volume, instance):
device_name = module.params.get('device_name')
delete_on_termination = module.params.get('delete_on_termination')
changed = False
# If device_name isn't set, make a choice based on best practices here:
# http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/block-device-mapping-concepts.html
# In future this needs to be more dynamic but combining block device mapping best practices
# (bounds for devices, as above) with instance.block_device_mapping data would be tricky. For me ;)
# Use password data attribute to tell whether the instance is Windows or Linux
if device_name is None:
try:
device_name = get_default_device_name_template(instance, ec2)
except boto.exception.BotoServerError, e:
module.fail_json(msg = "%s: %s" % (e.error_code, e.error_message))
if volume.attachment_state() is not None:
adata = volume.attach_data
if adata.instance_id != instance.id:
module.fail_json(msg = "Volume %s is already attached to another instance: %s"
% (volume.id, adata.instance_id))
else:
# If device_name is a template to grab an available spot,
# bring it into consistency with actual attachment data
device_name = adata.device
# Volume is already attached to right instance
changed = modify_dot_attribute(module, ec2, instance, device_name)
else:
if is_device_name_templated(device_name):
t = device_name
device_name = get_next_device_name_from_template(t, module, ec2)
try:
volume.attach(instance.id, device_name)
while volume.attachment_state() != 'attached':
time.sleep(3)
volume.update()
changed = True
except boto.exception.BotoServerError, e:
module.fail_json(msg = "%s: %s" % (e.error_code, e.error_message))
modify_dot_attribute(module, ec2, instance, device_name)
return volume, changed
def modify_dot_attribute(module, ec2, instance, device_name):
""" Modify delete_on_termination attribute """
delete_on_termination = module.params.get('delete_on_termination')
changed = False
try:
instance.update()
dot = instance.block_device_mapping[device_name].delete_on_termination
except boto.exception.BotoServerError, e:
module.fail_json(msg = "%s: %s" % (e.error_code, e.error_message))
if delete_on_termination != dot:
try:
bdt = BlockDeviceType(delete_on_termination=delete_on_termination)
bdm = BlockDeviceMapping()
bdm[device_name] = bdt
ec2.modify_instance_attribute(instance_id=instance.id, attribute='blockDeviceMapping', value=bdm)
while instance.block_device_mapping[device_name].delete_on_termination != delete_on_termination:
time.sleep(3)
instance.update()
changed = True
except boto.exception.BotoServerError, e:
module.fail_json(msg = "%s: %s" % (e.error_code, e.error_message))
return changed
def detach_volume(module, ec2, volume):
changed = False
if volume.attachment_state() is not None:
adata = volume.attach_data
volume.detach()
while volume.attachment_state() is not None:
time.sleep(3)
volume.update()
changed = True
return volume, changed
def get_volume_info(volume, state):
# If we're just listing volumes then do nothing, else get the latest update for the volume
if state != 'list':
volume.update()
attachment = volume.attach_data
volume_info = {
'create_time': volume.create_time,
'encrypted': volume.encrypted,
'id': volume.id,
'iops': volume.iops,
'size': volume.size,
'snapshot_id': volume.snapshot_id,
'status': volume.status,
'type': volume.type,
'zone': volume.zone,
'attachment_set': {
'attach_time': attachment.attach_time,
'device': attachment.device,
'instance_id': attachment.instance_id,
'status': attachment.status
},
'tags': volume.tags
}
if hasattr(attachment, 'deleteOnTermination'):
volume_info['attachment_set']['deleteOnTermination'] = attachment.deleteOnTermination
return volume_info
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
instance = dict(),
id = dict(),
name = dict(),
volume_size = dict(),
volume_type = dict(choices=['standard', 'gp2', 'io1'], default='standard'),
iops = dict(),
encrypted = dict(type='bool', default=False),
device_name = dict(),
delete_on_termination = dict(type='bool', default=False),
zone = dict(aliases=['availability_zone', 'aws_zone', 'ec2_zone']),
snapshot = dict(),
state = dict(choices=['absent', 'present', 'list'], default='present')
)
)
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
id = module.params.get('id')
name = module.params.get('name')
instance = module.params.get('instance')
volume_size = module.params.get('volume_size')
encrypted = module.params.get('encrypted')
device_name = module.params.get('device_name')
zone = module.params.get('zone')
snapshot = module.params.get('snapshot')
state = module.params.get('state')
# Ensure we have the zone or can get the zone
if instance is None and zone is None and state == 'present':
module.fail_json(msg="You must specify either instance or zone")
# Set volume detach flag
if instance == 'None' or instance == '':
instance = None
detach_vol_flag = True
else:
detach_vol_flag = False
# Set changed flag
changed = False
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
if region:
try:
ec2 = connect_to_aws(boto.ec2, region, **aws_connect_params)
except (boto.exception.NoAuthHandlerFound, AnsibleAWSError), e:
module.fail_json(msg=str(e))
else:
module.fail_json(msg="region must be specified")
if state == 'list':
returned_volumes = []
vols = get_volumes(module, ec2)
for v in vols:
returned_volumes.append(get_volume_info(v, state))
module.exit_json(changed=False, volumes=returned_volumes)
if encrypted and not boto_supports_volume_encryption():
module.fail_json(msg="You must use boto >= v2.29.0 to use encrypted volumes")
# Here we need to get the zone info for the instance. This covers situation where
# instance is specified but zone isn't.
# Useful for playbooks chaining instance launch with volume create + attach and where the
# zone doesn't matter to the user.
inst = None
if instance:
try:
reservation = ec2.get_all_instances(instance_ids=instance)
except BotoServerError as e:
module.fail_json(msg=e.message)
inst = reservation[0].instances[0]
zone = inst.placement
# Check if there is a volume already mounted there.
if device_name:
if device_name in inst.block_device_mapping:
module.exit_json(msg="Volume mapping for %s already exists on instance %s" % (device_name, instance),
volume_id=inst.block_device_mapping[device_name].volume_id,
device=device_name,
changed=False)
# Delaying the checks until after the instance check allows us to get volume ids for existing volumes
# without needing to pass an unused volume_size
if not volume_size and not (id or name or snapshot):
module.fail_json(msg="You must specify volume_size or identify an existing volume by id, name, or snapshot")
# Cannot resize existing volumes, but can make a new volume of larger size
# from snapshot
if volume_size and id:
module.fail_json(msg="Cannot specify volume_size together with id")
if state == 'present':
volume, changed = create_volume(module, ec2, zone)
if detach_vol_flag:
volume, changed = detach_volume(module, ec2, volume)
elif inst is not None:
volume, changed = attach_volume(module, ec2, volume, inst)
# Add device, volume_id and volume_type parameters separately to maintain backward compatibility
volume_info = get_volume_info(volume, state)
module.exit_json(changed=changed, volume=volume_info, device=volume_info['attachment_set']['device'], volume_id=volume_info['id'], volume_type=volume_info['type'])
elif state == 'absent':
delete_volume(module, ec2)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
|
gpl-3.0
| -8,963,196,909,828,346,000
| 32.482602
| 414
| 0.628682
| false
| 3.838363
| false
| false
| false
|
tjcsl/cslbot
|
cslbot/commands/metar.py
|
1
|
2400
|
# -*- coding: utf-8 -*-
# Copyright (C) 2013-2018 Samuel Damashek, Peter Foley, James Forcier, Srijay Kasturi, Reed Koser, Christopher Reffett, and Tris Wilson
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from xml.etree import ElementTree
from requests import get
from ..helpers import arguments
from ..helpers.command import Command
@Command(['metar'], ['nick', 'config', 'db', 'name', 'source', 'handler'])
def cmd(send, msg, args):
"""Gets the weather.
Syntax: {command} <station> [station2...]
"""
parser = arguments.ArgParser(args['config'])
parser.add_argument('stations', nargs='*')
try:
cmdargs = parser.parse_args(msg)
except arguments.ArgumentException as e:
send(str(e))
return
if not cmdargs.stations:
send("What station?")
return
if isinstance(cmdargs.stations, list):
cmdargs.stations = ','.join(cmdargs.stations)
req = get('http://aviationweather.gov/adds/dataserver_current/httpparam',
params={
'datasource': 'metars',
'requestType': 'retrieve',
'format': 'xml',
'mostRecentForEachStation': 'constraint',
'hoursBeforeNow': '1.25',
'stationString': cmdargs.stations
})
xml = ElementTree.fromstring(req.text)
errors = xml.find('./errors')
if errors is not None and len(errors):
errstring = ','.join([error.text for error in errors])
send('Error: %s' % errstring)
return
data = xml.find('./data')
if data is None or data.attrib['num_results'] == '0':
send('No results found.')
else:
for station in data:
send(station.find('raw_text').text)
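# Illustrative usage (hypothetical stations; the command prefix depends on the
# bot config): "!metar KIAD KJFK" sends stationString=KIAD,KJFK to the
# dataserver, and one raw METAR line is sent back per station found.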
|
gpl-2.0
| 6,708,503,875,459,143,000
| 35.923077
| 135
| 0.64125
| false
| 3.896104
| false
| false
| false
|
googleapis/google-api-java-client-services
|
generator/src/googleapis/codegen/schema.py
|
1
|
18349
|
#!/usr/bin/python2.7
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""API data models - schemas and their properties.
This module handles the objects created for the "schema" section of an API.
"""
__author__ = 'aiuto@google.com (Tony Aiuto)'
import collections
import logging
from googleapis.codegen import data_types
from googleapis.codegen import template_objects
from googleapis.codegen.api_exception import ApiException
_ADDITIONAL_PROPERTIES = 'additionalProperties'
_LOGGER = logging.getLogger('codegen')
class Schema(data_types.ComplexDataType):
"""The definition of a schema."""
def __init__(self, api, default_name, def_dict, parent=None):
"""Construct a Schema object from a discovery dictionary.
Schemas represent data models in the API.
Args:
api: (Api) the Api instance owning the Schema
default_name: (str) the default name of the Schema. If there is an 'id'
member in the definition, that is used for the name instead.
def_dict: (dict) a discovery dictionary
parent: (Schema) The containing schema. To be used to establish unique
names for anonymous sub-schemas.
"""
super(Schema, self).__init__(default_name, def_dict, api, parent=parent)
name = def_dict.get('id', default_name)
_LOGGER.debug('Schema(%s)', name)
# Protect against malicious discovery
template_objects.CodeObject.ValidateName(name)
self.SetTemplateValue('wireName', name)
class_name = api.ToClassName(name, self, element_type='schema')
self.SetTemplateValue('className', class_name)
self.SetTemplateValue('isSchema', True)
self.SetTemplateValue('properties', [])
self._module = (template_objects.Module.ModuleFromDictionary(self.values)
or api.model_module)
@classmethod
def Create(cls, api, default_name, def_dict, wire_name, parent=None):
"""Construct a Schema or DataType from a discovery dictionary.
Schemas contain either object declarations, simple type declarations, or
references to other Schemas. Object declarations conceptually map to real
classes. Simple types will map to a target language built-in type.
References should effectively be replaced by the referenced Schema.
Args:
api: (Api) the Api instance owning the Schema
default_name: (str) the default name of the Schema. If there is an 'id'
member in the definition, that is used for the name instead.
def_dict: (dict) a discovery dictionary
wire_name: The name which will identify objects of this type in data on
the wire. The path of wire_names can trace an item back through
discovery.
parent: (Schema) The containing schema. To be used to establish nesting
for anonymous sub-schemas.
Returns:
A Schema or DataType.
Raises:
ApiException: If the definition dict is not correct.
"""
schema_id = def_dict.get('id')
if schema_id:
name = schema_id
else:
name = default_name
class_name = api.ToClassName(name, None, element_type='schema')
_LOGGER.debug('Create: %s, parent=%s', name,
parent.values.get('wireName', '<anon>') if parent else 'None')
# Schema objects come in several patterns.
#
# 1. Simple objects
# { type: object, properties: { "foo": {schema} ... }}
#
# 2. Maps of objects
# { type: object, additionalProperties: { "foo": {inner_schema} ... }}
#
# What we want is a data type which is Map<string, {inner_schema}>
# The schema we create here is essentially a built in type which we
# don't want to generate a class for.
#
# 3. Arrays of objects
# { type: array, items: { inner_schema }}
#
# Same kind of issue as the map, but with List<{inner_schema}>
#
# 4. Primitive data types, described by type and format.
# { type: string, format: int32 }
# { type: string, enum: ["value", ...], enumDescriptions: ["desc", ...]}
#
# 5. Refs to another schema.
# { $ref: name }
#
# 6. Variant schemas
# { type: object, variant: { discriminant: "prop", map:
# [ { 'type_value': value, '$ref': wireName }, ... ] } }
#
# What we do is map the variant schema to a schema with a single
# property for the discriminant. To that property, we attach
# the variant map which specifies which discriminator values map
# to which schema references. We also collect variant information
# in the api so we can later associate discriminator value and
# base type with the generated variant subtypes.
if 'type' in def_dict:
# The 'type' field of the schema can either be 'array', 'object', or a
# base json type.
json_type = def_dict['type']
if json_type == 'object':
# Look for variants
variant = def_dict.get('variant')
if variant:
return cls._CreateVariantType(variant, api, name,
def_dict, wire_name, parent)
# Look for full object definition. You can have properties or
# additionalProperties, but it does not do anything useful to have
# both.
# Replace properties dict with Property's
props = def_dict.get('properties')
if props:
# This case 1 from above
return cls._CreateObjectWithProperties(props, api, name,
def_dict, wire_name, parent)
# Look for case 2
additional_props = def_dict.get(_ADDITIONAL_PROPERTIES)
if additional_props:
return cls._CreateMapType(additional_props, api, name, wire_name,
class_name, parent)
# no properties
return cls._CreateSchemaWithoutProperties(api, name, def_dict,
wire_name, parent)
elif json_type == 'array':
# Case 3: Look for array definition
return cls._CreateArrayType(api, def_dict, wire_name, class_name,
schema_id, parent)
else:
# Case 4: This must be a basic type. Create a DataType for it.
return data_types.CreatePrimitiveDataType(def_dict, api, wire_name,
parent=parent)
referenced_schema = def_dict.get('$ref')
if referenced_schema:
# Case 5: Reference to another Schema.
#
# There are 3 ways you can see '$ref' in discovery.
# 1. In a property of a schema or a method request/response, pointing
# back to a previously defined schema
# 2. As above, pointing to something not defined yet.
# 3. In a method request or response or property of a schema pointing to
# something undefined.
#
# For case 1, the schema will be in the API name to schema map.
#
# For case 2, just creating this placeholder here is fine. When the
# actual schema is hit in the loop in _BuildSchemaDefinitions, we will
# replace the entry and DataTypeFromJson will resolve the to the new def.
#
# For case 3, we will end up with a dangling reference and fail later.
schema = api.SchemaByName(referenced_schema)
# The stored "schema" may not be an instance of Schema, but rather a
# data_types.PrimitiveDataType, which has no 'wireName' value.
if schema:
_LOGGER.debug('Schema.Create: %s => %s',
default_name, schema.values.get('wireName', '<unknown>'))
return schema
return data_types.SchemaReference(referenced_schema, api)
raise ApiException('Cannot decode JSON Schema for: %s' % def_dict)
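# Illustrative mapping of the cases handled above (a sketch; the exact classes
# come from data_types):
#   {'type': 'object', 'properties': {...}}            -> Schema
#   {'type': 'object', 'additionalProperties': {...}}  -> MapDataType
#   {'type': 'array', 'items': {...}}                  -> ArrayDataType
#   {'type': 'string', 'format': 'int32'}              -> primitive DataType
#   {'$ref': 'Foo'}                                    -> existing Schema or SchemaReference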
@classmethod
def _CreateObjectWithProperties(cls, props, api, name, def_dict,
wire_name, parent):
properties = []
schema = cls(api, name, def_dict, parent=parent)
if wire_name:
schema.SetTemplateValue('wireName', wire_name)
for prop_name in sorted(props):
prop_dict = props[prop_name]
_LOGGER.debug(' adding prop: %s to %s', prop_name, name)
properties.append(Property(api, schema, prop_name, prop_dict))
# Some APIs express etag directly in the response, others don't.
# Knowing that we have it explicitly makes special case code generation
# easier
if prop_name == 'etag':
schema.SetTemplateValue('hasEtagProperty', True)
schema.SetTemplateValue('properties', properties)
# check for @ clashing. E.g. No 'foo' and '@foo' in the same object.
names = set()
for p in properties:
wire_name = p.GetTemplateValue('wireName')
no_at_sign = wire_name.replace('@', '')
if no_at_sign in names:
raise ApiException(
'Property name clash in schema %s:'
' %s conflicts with another property' % (name, wire_name))
names.add(no_at_sign)
return schema
@classmethod
def _CreateVariantType(cls, variant, api, name, def_dict,
wire_name, parent):
"""Creates a variant type."""
variants = collections.OrderedDict()
schema = cls(api, name, def_dict, parent=parent)
if wire_name:
schema.SetTemplateValue('wireName', wire_name)
discriminant = variant['discriminant']
# Walk over variants building the variant map and register
# variant info on the api.
for variant_entry in variant['map']:
discriminant_value = variant_entry['type_value']
variant_schema = api.DataTypeFromJson(variant_entry, name, parent=parent)
variants[discriminant_value] = variant_schema
# Set variant info. We get the original wire name from the JSON properties
# via '$ref'; it is not currently accessible via variant_schema.
api.SetVariantInfo(variant_entry.get('$ref'), discriminant,
discriminant_value, schema)
prop = Property(api, schema, discriminant, {'type': 'string'},
key_for_variants=variants)
schema.SetTemplateValue('is_variant_base', True)
schema.SetTemplateValue('discriminant', prop)
schema.SetTemplateValue('properties', [prop])
return schema
@classmethod
def _CreateMapType(cls, additional_props, api, name, wire_name,
class_name, parent):
_LOGGER.debug('Have only additionalProps for %s, dict=%s',
name, additional_props)
# TODO(user): Remove this hack at the next large breaking change
# The "Items" added to the end is unneeded and ugly. This is for
# temporary backwards compatibility. Same for _CreateArrayType().
if additional_props.get('type') == 'array':
name = '%sItem' % name
subtype_name = additional_props.get('id', name + 'Element')
# Note: since this is an interim non-class just to hold the map,
# make the parent schema the parent passed in, not myself.
_LOGGER.debug('name:%s, wire_name:%s, subtype name %s', name, wire_name,
subtype_name)
# When there is a parent, we synthesize a wirename when none exists.
# Purpose is to avoid generating an extremely long class name, since we
# don't do so for other nested classes.
if parent and wire_name:
base_wire_name = wire_name + 'Element'
else:
base_wire_name = None
base_type = api.DataTypeFromJson(
additional_props, subtype_name, parent=parent,
wire_name=base_wire_name)
map_type = data_types.MapDataType(name, base_type, parent=parent,
wire_name=wire_name)
map_type.SetTemplateValue('className', class_name)
_LOGGER.debug(' %s is MapOf<string, %s>',
class_name, base_type.class_name)
return map_type
@classmethod
def _CreateSchemaWithoutProperties(cls, api, name, def_dict, wire_name,
parent):
if parent:
# code objects have __getitem__(), but not .get()
try:
pname = parent['id']
except KeyError:
pname = '<unknown>'
name_to_log = '%s.%s' % (pname, name)
else:
name_to_log = name
logging.warning('object without properties %s: %s',
name_to_log, def_dict)
schema = cls(api, name, def_dict, parent=parent)
if wire_name:
schema.SetTemplateValue('wireName', wire_name)
return schema
@classmethod
def _CreateArrayType(cls, api, def_dict, wire_name,
class_name, schema_id, parent):
items = def_dict.get('items')
if not items:
raise ApiException('array without items in: %s' % def_dict)
tentative_class_name = class_name
# TODO(user): We should not rename things to 'Items'.
# if we have an anonymous type within a map or array, it should be
# called 'Item', and let the namespacing sort it out.
if schema_id:
_LOGGER.debug('Top level schema %s is an array', class_name)
tentative_class_name += 'Items'
base_type = api.DataTypeFromJson(items, tentative_class_name,
parent=parent, wire_name=wire_name)
_LOGGER.debug(' %s is ArrayOf<%s>', class_name, base_type.class_name)
array_type = data_types.ArrayDataType(tentative_class_name, base_type,
wire_name=wire_name,
parent=parent)
if schema_id:
array_type.SetTemplateValue('className', schema_id)
return array_type
@property
def class_name(self):
return self.values['className']
@property
def anonymous(self):
return 'id' not in self.raw
@property
def properties(self):
return self.values['properties']
@property
def isContainerWrapper(self):
"""Is this schema just a simple wrapper around another container.
A schema is just a wrapper for another datatype if it is an object that
contains just a single container datatype and (optionally) a kind and
etag field. This may be used by language generators to create iterators
directly on the schema. E.g. You could have
SeriesList ret = api.GetSomeSeriesMethod(args).Execute();
for (series in ret) { ... }
rather than
for (series in ret->items) { ... }
Returns:
None or ContainerDataType
"""
return self._GetPropertyWhichWeWrap() is not None
@property
def containerProperty(self):
"""If isContainerWrapper, returns the propery which holds the container."""
return self._GetPropertyWhichWeWrap()
def _GetPropertyWhichWeWrap(self):
"""Returns the property which is the type we are wrapping."""
container_property = None
for p in self.values['properties']:
if p.values['wireName'] == 'kind' or p.values['wireName'] == 'etag':
continue
if p.data_type.GetTemplateValue('isContainer'):
if container_property:
return None
container_property = p
else:
return None
return container_property
def __str__(self):
return '<%s Schema {%s}>' % (self.values['wireName'], self.values)
class Property(template_objects.CodeObject):
"""The definition of a schema property.
Example property in the discovery schema:
"id": {"type": "string"}
"""
def __init__(self, api, schema, name, def_dict, key_for_variants=None):
"""Construct a Property.
A Property requires several elements in its template value dictionary which
are set here:
wireName: the string which labels this Property in the JSON serialization.
dataType: the DataType of this property.
Args:
api: (Api) The Api which owns this Property
schema: (Schema) the schema this Property is part of
name: (string) the name for this Property
def_dict: (dict) the JSON schema dictionary
key_for_variants: (dict) if given, maps discriminator values to
variant schemas.
Raises:
ApiException: If we have an array type without object definitions.
"""
super(Property, self).__init__(def_dict, api, wire_name=name)
self.ValidateName(name)
self.schema = schema
self._key_for_variants = key_for_variants
# TODO(user): find a better way to mark a schema as an array type
# so we can display schemas like BlogList in method responses
try:
if self.values['wireName'] == 'items' and self.values['type'] == 'array':
self.schema.values['isList'] = True
except KeyError:
pass
# If the schema value for this property defines a new object directly,
# rather than refering to another schema, we will have to create a class
# name for it. We create a unique name by prepending the schema we are
# in to the object name.
tentative_class_name = api.NestedClassNameForProperty(name, schema)
self._data_type = api.DataTypeFromJson(def_dict, tentative_class_name,
parent=schema, wire_name=name)
@property
def code_type(self):
if self._language_model:
self._data_type.SetLanguageModel(self._language_model)
return self._data_type.code_type
@property
def safe_code_type(self):
if self._language_model:
self._data_type.SetLanguageModel(self._language_model)
return self._data_type.safe_code_type
@property
def primitive_data_type(self):
if self._language_model:
self._data_type.SetLanguageModel(self._language_model)
return self._data_type.primitive_data_type
@property
def data_type(self):
return self._data_type
@property
def member_name_is_json_name(self):
return self.memberName == self.values['wireName']
@property
def is_variant_key(self):
return self._key_for_variants
@property
def variant_map(self):
return self._key_for_variants
|
apache-2.0
| -3,918,453,363,792,271,400
| 37.548319
| 80
| 0.642378
| false
| 4.031861
| false
| false
| false
|
dthgeek/QuickOSM
|
core/actions.py
|
1
|
4118
|
# -*- coding: utf-8 -*-
"""
/***************************************************************************
QuickOSM
A QGIS plugin
OSM Overpass API frontend
-------------------
begin : 2014-06-11
copyright : (C) 2014 by 3Liz
email : info at 3liz dot com
contributor : Etienne Trimaille
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtWebKit import QWebView
from PyQt4.QtGui import QDesktopServices
from PyQt4.QtCore import QUrl
from qgis.utils import iface
from qgis.gui import QgsMessageBar
from QuickOSM.core.utilities.tools import tr
class Actions(object):
"""
Manage actions available on layers
"""
@staticmethod
def run(field, value):
"""
Run an action with only one value as parameter
@param field:Type of the action
@type field:str
@param value:Value of the field for one entity
@type value:str
"""
if value == '':
iface.messageBar().pushMessage(
tr("QuickOSM",
u"Sorry man, this field is empty for this entity."),
level=QgsMessageBar.WARNING, duration=7)
else:
field = unicode(field, "UTF-8")
value = unicode(value, "UTF-8")
if field in ["url", "website", "wikipedia"]:
var = QDesktopServices()
url = None
if field == "url" or field == "website":
url = value
if field == "ref_UAI":
url = "http://www.education.gouv.fr/pid24302/annuaire-" \
"resultat-recherche.html?lycee_name=" + value
if field == "wikipedia":
url = "http://en.wikipedia.org/wiki/" + value
var.openUrl(QUrl(url))
elif field == "josm":
import urllib2
try:
url = "http://localhost:8111/load_object?objects=" + value
urllib2.urlopen(url).read()
except urllib2.URLError:
iface.messageBar().pushMessage(
tr("QuickOSM",
u"The JOSM remote seems to be disabled."),
level=QgsMessageBar.CRITICAL,
duration=7)
# NOT USED
elif field == "rawedit":
url = QUrl("http://rawedit.openstreetmap.fr/edit/" + value)
web_browser = QWebView(None)
web_browser.load(url)
web_browser.show()
@staticmethod
def run_sketch_line(network, ref):
"""
Run an action with two values for sketchline
@param network:network of the bus
@type network:str
@param ref:ref of the bus
@type ref:str
"""
network = unicode(network, "UTF-8")
ref = unicode(ref, "UTF-8")
if network == '' or ref == '':
iface.messageBar().pushMessage(
tr("QuickOSM",
u"Sorry man, this field is empty for this entity."),
level=QgsMessageBar.WARNING,
duration=7)
else:
var = QDesktopServices()
url = "http://www.overpass-api.de/api/sketch-line?" \
"network=" + network + "&ref=" + ref
var.openUrl(QUrl(url))
|
gpl-2.0
| 1,764,082,413,191,741,200
| 34.196581
| 78
| 0.448033
| false
| 4.805134
| false
| false
| false
|
Amber-MD/ambertools-conda-build
|
conda_tools/test/test_fix_conda_gfortran_linking_osx.py
|
1
|
1041
|
# pytest -vs .
import os
import sys
from mock import patch
import shutil
sys.path.insert(0, '..')
from fix_conda_gfortran_linking_osx import repack_conda_package, main
this_dir = os.path.dirname(__file__)
PACK_SCRIPT = os.path.join(this_dir, '..',
'pack_binary_without_conda_install.py')
FAKE_TAR = os.path.join(this_dir, 'fake_data', 'fake_osx.tar.bz2')
has_gfortran_local = os.path.exists('/usr/local/gfortran/')
def test_repack_conda_package():
class Opt():
pass
opt = Opt()
opt.tarfile = FAKE_TAR
opt.output_dir = '.'
opt.date = False
opt.dry_run = False
with patch('update_gfortran_libs_osx.main') as mock_g_main:
repack_conda_package(opt)
mock_g_main.assert_called_with(['.'])
os.remove(os.path.basename(FAKE_TAR))
def test_main():
junk = './tmp_fdasfda'
output_dir = '{}/heyhey'.format(junk)
main([FAKE_TAR, '-o', output_dir])
assert os.path.exists(os.path.join(output_dir, os.path.basename(FAKE_TAR)))
shutil.rmtree(junk)
|
mit
| 669,227,652,938,590,500
| 26.394737
| 79
| 0.635927
| false
| 2.949008
| false
| false
| false
|
brightiup/brightiup
|
brightiup/compiler/bt_lexer.py
|
1
|
1391
|
import ply.lex as lex
class BTLexerException(Exception):
def __init__(self, message):
self.message = message
def __str__(self):
return self.message
class BTLexer(object):
"""BT lexer"""
keywords = [
# "import",
"state",
]
tokens = [keyword.upper() for keyword in keywords] + [
'ID',
'VARIABLE',
]
t_ignore = " \t"
t_VARIABLE = r'''\$[A-Za-z][A-Za-z0-9_]*'''
literals = ".{};="
_keyword_map = {}
for keyword in keywords:
_keyword_map[keyword] = keyword.upper()
@staticmethod
def t_NEWLINE(t):
r'''\n+'''
t.lexer.lineno += t.value.count('\n')
@staticmethod
def t_error(t):
raise BTLexerException('Illegal character %s at line %s'%(t.value[0], t.lineno))
@staticmethod
def t_ID(t):
r'''[A-Za-z][A-Za-z0-9_]*'''
t.type = BTLexer._keyword_map.get(t.value, 'ID')
return t
def __init__(self, **kwargs):
self.lexer = lex.lex(module=self, **kwargs)
def test(self, data):
self.lexer.input(data)
while True:
tok = self.lexer.token()
if not tok:
break
print tok
if __name__ == '__main__':
lexer = BTLexer()
lexer.test(open('../script/http.bt').read())
|
gpl-2.0
| -243,517,580,238,202,750
| 21.803279
| 88
| 0.4867
| false
| 3.557545
| false
| false
| false
|
holzman/glideinwms-old
|
lib/condorMonitor.py
|
1
|
24800
|
#
# Project:
# glideinWMS
#
# File Version:
#
# Description:
# This module implements classes to query the condor daemons
# and manipulate the results
# Please notice that it also converts \" into "
#
# Author:
# Igor Sfiligoi (Aug 30th 2006)
#
import condorExe
import condorSecurity
import os
import string
import copy
import socket
import xml.parsers.expat
#
# Configuration
#
# Set path to condor binaries
def set_path(new_condor_bin_path):
global condor_bin_path
condor_bin_path = new_condor_bin_path
#
# Caching classes
#
# dummy caching class, when you don't want caching
# used as base class below, too
class NoneScheddCache:
    #returns (cmd arg schedd string, env)
def getScheddId(self,schedd_name,pool_name):
return (self.iGetCmdScheddStr(schedd_name),{})
# INTERNAL and for inheritance
def iGetCmdScheddStr(self,schedd_name):
if schedd_name is None:
schedd_str=""
else:
schedd_str = "-name %s " % schedd_name
return schedd_str
# The schedd can be found either through -name attr
# or through the local disk lookup
# Remember which one to use
class LocalScheddCache(NoneScheddCache):
def __init__(self):
self.enabled=True
# dictionary of
        #   (schedd_name,pool_name)=>(cmd arg schedd string, env)
self.cache={}
self.my_ips=socket.gethostbyname_ex(socket.gethostname())[2]
try:
self.my_ips+=socket.gethostbyname_ex('localhost')[2]
except socket.gaierror,e:
pass # localhost not defined, ignore
def enable(self):
self.enabled=True
def disable(self):
self.enabled=False
    #returns (cmd arg schedd string, env)
def getScheddId(self,schedd_name,pool_name):
if schedd_name is None: # special case, do not cache
return ("",{})
if self.enabled:
k=(schedd_name,pool_name)
if not self.cache.has_key(k): # not in cache, discover it
env=self.iGetEnv(schedd_name, pool_name)
                if env is None: # not local (or no disk info), fall back to -name
self.cache[k]=(self.iGetCmdScheddStr(schedd_name),{})
else:
self.cache[k]=("",env)
return self.cache[k]
else: # not enabled, just return the str
return (self.iGetCmdScheddStr(schedd_name),{})
#
# PRIVATE
#
# return None if not found
# Can raise exceptions
def iGetEnv(self,schedd_name, pool_name):
cs=CondorStatus('schedd',pool_name)
data=cs.fetch(constraint='Name=?="%s"'%schedd_name,format_list=[('ScheddIpAddr','s'),('SPOOL_DIR_STRING','s'),('LOCAL_DIR_STRING','s')])
if not data.has_key(schedd_name):
raise RuntimeError, "Schedd '%s' not found"%schedd_name
el=data[schedd_name]
if 'SPOOL_DIR_STRING' not in el and 'LOCAL_DIR_STRING' not in el: # not advertising, cannot use disk optimization
return None
if not el.has_key('ScheddIpAddr'): # This should never happen
raise RuntimeError, "Schedd '%s' is not advertising ScheddIpAddr"%schedd_name
schedd_ip=el['ScheddIpAddr'][1:].split(':')[0]
if schedd_ip in self.my_ips: #seems local, go for the dir
l=el.get('SPOOL_DIR_STRING', el.get('LOCAL_DIR_STRING'))
if os.path.isdir(l): # making sure the directory exists
if 'SPOOL_DIR_STRING' in el:
return {'_CONDOR_SPOOL': '%s' %l }
else: # LOCAL_DIR_STRING
return {'_CONDOR_SPOOL': '%s/spool' %l }
else: #dir does not exist, likely not relevant, revert to standard behaviour
return None
else: # not local
return None
# default global object
local_schedd_cache=LocalScheddCache()
def condorq_attrs(q_constraint, attribute_list):
"""
Retrieves a list of a single item from the all the factory queues.
"""
attr_str = ""
for attr in attribute_list:
attr_str += " -attr %s" % attr
xml_data = condorExe.exe_cmd("condor_q","-g -l %s -xml -constraint '%s'" % (attr_str, q_constraint))
classads_xml = []
tmp_list = []
for line in xml_data:
# look for the xml header
if line[:5] == "<?xml":
if len(tmp_list) > 0:
classads_xml.append(tmp_list)
tmp_list = []
tmp_list.append(line)
q_proxy_list = []
for ad_xml in classads_xml:
cred_list = xml2list(ad_xml)
q_proxy_list.extend(cred_list)
return q_proxy_list
#
# Condor monitoring classes
#
# Generic, you most probably don't want to use these
class AbstractQuery: # pure virtual, just to have a minimum set of methods defined
# returns the data, will not modify self
def fetch(self,constraint=None,format_list=None):
raise NotImplementedError,"Fetch not implemented"
# will fetch in self.stored_data
def load(self,constraint=None,format_list=None):
raise NotImplementedError,"Load not implemented"
# constraint_func is a boolean function, with only one argument (data el)
# same output as fetch, but limited to constraint_func(el)==True
#
# if constraint_func==None, return all the data
def fetchStored(self,constraint_func=None):
raise NotImplementedError,"fetchStored not implemented"
class StoredQuery(AbstractQuery): # still virtual, only fetchStored defined
stored_data = {}
def fetchStored(self,constraint_func=None):
return applyConstraint(self.stored_data,constraint_func)
#
# format_list is a list of
# (attr_name, attr_type)
# where attr_type is one of
# "s" - string
# "i" - integer
# "r" - real (float)
# "b" - bool
#
#
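# Example (a sketch; the attribute names are illustrative):
#   format_list = [('ClusterId', 'i'), ('Owner', 's'), ('RemoteWallClockTime', 'r')]
#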
# security_obj, if defined, should be a child of condorSecurity.ProtoRequest
class QueryExe(StoredQuery): # first fully implemented one, execute commands
def __init__(self,exe_name,resource_str,group_attribute,pool_name=None,security_obj=None,env={}):
self.exe_name=exe_name
self.env=env
self.resource_str=resource_str
self.group_attribute=group_attribute
self.pool_name=pool_name
if pool_name is None:
self.pool_str=""
else:
self.pool_str = "-pool %s" % pool_name
if security_obj is not None:
if security_obj.has_saved_state():
raise RuntimeError, "Cannot use a security object which has saved state."
self.security_obj=copy.deepcopy(security_obj)
else:
self.security_obj=condorSecurity.ProtoRequest()
    def require_integrity(self,requested_integrity): # if None, don't change, else force that one
if requested_integrity is None:
condor_val=None
elif requested_integrity:
condor_val="REQUIRED"
else:
# if not required, still should not fail if the other side requires it
condor_val='OPTIONAL'
self.security_obj.set('CLIENT','INTEGRITY',condor_val)
def get_requested_integrity(self):
condor_val = self.security_obj.get('CLIENT','INTEGRITY')
if condor_val is None:
return None
return (condor_val=='REQUIRED')
    def require_encryption(self,requested_encryption): # if None, don't change, else force that one
if requested_encryption is None:
condor_val=None
elif requested_encryption:
condor_val="REQUIRED"
else:
# if not required, still should not fail if the other side requires it
condor_val='OPTIONAL'
self.security_obj.set('CLIENT','ENCRYPTION',condor_val)
def get_requested_encryption(self):
condor_val = self.security_obj.get('CLIENT','ENCRYPTION')
if condor_val is None:
return None
return (condor_val=='REQUIRED')
def fetch(self,constraint=None,format_list=None):
if constraint is None:
constraint_str=""
else:
constraint_str="-constraint '%s'"%constraint
full_xml=(format_list is None)
if format_list is not None:
format_arr=[]
for format_el in format_list:
attr_name,attr_type=format_el
attr_format={'s':'%s','i':'%i','r':'%f','b':'%i'}[attr_type]
format_arr.append('-format "%s" "%s"'%(attr_format,attr_name))
format_str=string.join(format_arr," ")
# set environment for security settings
self.security_obj.save_state()
self.security_obj.enforce_requests()
if full_xml:
xml_data = condorExe.exe_cmd(self.exe_name,"%s -xml %s %s"%(self.resource_str,self.pool_str,constraint_str),env=self.env);
else:
xml_data = condorExe.exe_cmd(self.exe_name,"%s %s -xml %s %s"%(self.resource_str,format_str,self.pool_str,constraint_str),env=self.env);
# restore old values
self.security_obj.restore_state()
list_data = xml2list(xml_data)
del xml_data
dict_data = list2dict(list_data, self.group_attribute)
return dict_data
def load(self, constraint=None, format_list=None):
self.stored_data = self.fetch(constraint, format_list)
#
# Fully usable query functions
#
# condor_q
class CondorQ(QueryExe):
def __init__(self,schedd_name=None,pool_name=None,security_obj=None,schedd_lookup_cache=local_schedd_cache):
self.schedd_name=schedd_name
if schedd_lookup_cache is None:
schedd_lookup_cache=NoneScheddCache()
schedd_str,env=schedd_lookup_cache.getScheddId(schedd_name, pool_name)
QueryExe.__init__(self,"condor_q",schedd_str,["ClusterId","ProcId"],pool_name,security_obj,env)
def fetch(self, constraint=None, format_list=None):
if format_list is not None:
# check that ClusterId and ProcId are present, and if not add them
format_list = complete_format_list(format_list, [("ClusterId", 'i'), ("ProcId", 'i')])
return QueryExe.fetch(self, constraint=constraint, format_list=format_list)
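# Example usage (a sketch; the schedd/pool names and attributes are hypothetical):
#   cq = CondorQ(schedd_name='schedd.example.org', pool_name='pool.example.org')
#   jobs = cq.fetch(constraint='JobStatus==2', format_list=[('Owner', 's')])
#   # -> {(cluster_id, proc_id): {'Owner': ...}, ...}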
# condor_q, where we have only one ProcId x ClusterId
class CondorQLite(QueryExe):
def __init__(self,schedd_name=None,pool_name=None,security_obj=None,schedd_lookup_cache=local_schedd_cache):
self.schedd_name=schedd_name
if schedd_lookup_cache is None:
schedd_lookup_cache=NoneScheddCache()
schedd_str,env=schedd_lookup_cache.getScheddId(schedd_name, pool_name)
QueryExe.__init__(self,"condor_q",schedd_str,"ClusterId",pool_name,security_obj,env)
def fetch(self, constraint=None, format_list=None):
if format_list is not None:
# check that ClusterId is present, and if not add it
format_list = complete_format_list(format_list, [("ClusterId", 'i')])
return QueryExe.fetch(self, constraint=constraint, format_list=format_list)
# condor_status
class CondorStatus(QueryExe):
def __init__(self,subsystem_name=None,pool_name=None,security_obj=None):
if subsystem_name is None:
subsystem_str=""
else:
subsystem_str = "-%s" % subsystem_name
QueryExe.__init__(self,"condor_status",subsystem_str,"Name",pool_name,security_obj,{})
def fetch(self, constraint=None, format_list=None):
if format_list is not None:
# check that Name present and if not, add it
format_list = complete_format_list(format_list, [("Name",'s')])
return QueryExe.fetch(self, constraint=constraint, format_list=format_list)
#
# Subquery classes
#
# Generic, you most probably don't want to use this
class BaseSubQuery(StoredQuery):
def __init__(self, query, subquery_func):
self.query = query
self.subquery_func = subquery_func
def fetch(self, constraint=None):
indata = self.query.fetch(constraint)
        return self.subquery_func(indata)
#
# NOTE: You need to call load on the SubQuery object to use fetchStored
# and had query.load issued before
#
def load(self, constraint=None):
indata = self.query.fetchStored(constraint)
self.stored_data = self.subquery_func(indata)
#
# Fully usable subquery functions
#
class SubQuery(BaseSubQuery):
def __init__(self, query, constraint_func=None):
BaseSubQuery.__init__(self, query, lambda d:applyConstraint(d, constraint_func))
class Group(BaseSubQuery):
# group_key_func - Key extraction function
# One argument: classad dictionary
# Returns: value of the group key
# group_data_func - Key extraction function
# One argument: list of classad dictionaries
# Returns: a summary classad dictionary
def __init__(self, query, group_key_func, group_data_func):
BaseSubQuery.__init__(self, query, lambda d:doGroup(d, group_key_func, group_data_func))
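# Example (a sketch; the attribute names are hypothetical): group queued jobs
# by owner, summarizing each group into a job count
#   g = Group(CondorQ(), lambda el: el['Owner'], lambda els: {'JobCount': len(els)})
#   per_owner = g.fetch()   # e.g. {'alice': {'JobCount': 12}, ...}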
#
# Summarizing classes
#
class Summarize:
# hash_func - Hashing function
# One argument: classad dictionary
# Returns: hash value
# if None, will not be counted
# if a list, all elements will be used
def __init__(self, query, hash_func=lambda x:1):
self.query = query
self.hash_func = hash_func
# Parameters:
# constraint - string to be passed to query.fetch()
# hash_func - if !=None, use this instead of the main one
# Returns a dictionary of hash values
# Elements are counts (or more dictionaries if hash returns lists)
def count(self, constraint=None, hash_func=None):
data = self.query.fetch(constraint)
return fetch2count(data, self.getHash(hash_func))
# Use data pre-stored in query
# Same output as count
def countStored(self, constraint_func=None, hash_func=None):
data = self.query.fetchStored(constraint_func)
return fetch2count(data, self.getHash(hash_func))
# Parameters, same as count
# Returns a dictionary of hash values
# Elements are lists of keys (or more dictionaries if hash returns lists)
def list(self, constraint=None, hash_func=None):
data = self.query.fetch(constraint)
return fetch2list(data, self.getHash(hash_func))
# Use data pre-stored in query
# Same output as list
def listStored(self,constraint_func=None,hash_func=None):
data=self.query.fetchStored(constraint_func)
return fetch2list(data,self.getHash(hash_func))
### Internal
def getHash(self, hash_func):
if hash_func is None:
return self.hash_func
else:
return hash_func
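# Example usage (a sketch; the 'State' attribute is illustrative):
#   s = Summarize(CondorStatus(), hash_func=lambda el: el.get('State'))
#   counts = s.count()   # e.g. {'Claimed': 40, 'Unclaimed': 10}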
class SummarizeMulti:
def __init__(self, queries, hash_func=lambda x:1):
self.counts = []
for query in queries:
            self.counts.append(Summarize(query, hash_func))
self.hash_func=hash_func
# see Count for description
def count(self, constraint=None, hash_func=None):
out = {}
for c in self.counts:
data = c.count(constraint, hash_func)
addDict(out, data)
return out
# see Count for description
def countStored(self, constraint_func=None, hash_func=None):
out = {}
for c in self.counts:
data = c.countStored(constraint_func, hash_func)
addDict(out, data)
return out
############################################################
#
# P R I V A T E, do not use
#
############################################################
# check that req_format_els are present in in_format_list, and if not add them
# return a new format_list
def complete_format_list(in_format_list, req_format_els):
out_format_list = in_format_list[0:]
for req_format_el in req_format_els:
found = False
for format_el in in_format_list:
if format_el[0] == req_format_el[0]:
found = True
break
if not found:
out_format_list.append(req_format_el)
return out_format_list
#
# Convert Condor XML to list
#
# For Example:
#
#<?xml version="1.0"?>
#<!DOCTYPE classads SYSTEM "classads.dtd">
#<classads>
#<c>
# <a n="MyType"><s>Job</s></a>
# <a n="TargetType"><s>Machine</s></a>
# <a n="AutoClusterId"><i>0</i></a>
# <a n="ExitBySignal"><b v="f"/></a>
# <a n="TransferOutputRemaps"><un/></a>
# <a n="WhenToTransferOutput"><s>ON_EXIT</s></a>
#</c>
#<c>
# <a n="MyType"><s>Job</s></a>
# <a n="TargetType"><s>Machine</s></a>
# <a n="AutoClusterId"><i>0</i></a>
# <a n="OnExitRemove"><b v="t"/></a>
# <a n="x509userproxysubject"><s>/DC=gov/DC=fnal/O=Fermilab/OU=People/CN=Igor Sfiligoi/UID=sfiligoi</s></a>
#</c>
#</classads>
#
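# xml2list turns the XML above into a list of classad dictionaries, e.g.:
#   [{'MyType': 'Job', 'TargetType': 'Machine', 'AutoClusterId': 0,
#     'ExitBySignal': False, 'TransferOutputRemaps': None,
#     'WhenToTransferOutput': 'ON_EXIT'}, ...]
#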
# 3 xml2list XML handler functions
def xml2list_start_element(name, attrs):
global xml2list_data, xml2list_inclassad, xml2list_inattr, xml2list_intype
if name == "c":
xml2list_inclassad = {}
elif name == "a":
xml2list_inattr = {"name": attrs["n"], "val": ""}
xml2list_intype = "s"
elif name == "i":
xml2list_intype = "i"
elif name == "r":
xml2list_intype = "r"
elif name == "b":
xml2list_intype = "b"
if attrs.has_key('v'):
xml2list_inattr["val"] = (attrs["v"] in ('T', 't', '1'))
else:
# extended syntax... value in text area
xml2list_inattr["val"] = None
elif name == "un":
xml2list_intype = "un"
xml2list_inattr["val"] = None
elif name in ("s", "e"):
pass # nothing to do
elif name == "classads":
pass # top element, nothing to do
else:
raise TypeError, "Unsupported type: %s" % name
def xml2list_end_element(name):
global xml2list_data, xml2list_inclassad, xml2list_inattr, xml2list_intype
if name == "c":
xml2list_data.append(xml2list_inclassad)
xml2list_inclassad = None
elif name == "a":
xml2list_inclassad[xml2list_inattr["name"]] = xml2list_inattr["val"]
xml2list_inattr = None
elif name in ("i", "b", "un", "r"):
xml2list_intype = "s"
elif name in ("s", "e"):
pass # nothing to do
elif name == "classads":
pass # top element, nothing to do
else:
raise TypeError, "Unexpected type: %s" % name
def xml2list_char_data(data):
global xml2list_data, xml2list_inclassad, xml2list_inattr, xml2list_intype
if xml2list_inattr is None:
# only process when in attribute
return
if xml2list_intype == "i":
xml2list_inattr["val"] = int(data)
elif xml2list_intype == "r":
xml2list_inattr["val"] = float(data)
elif xml2list_intype == "b":
if xml2list_inattr["val"] is not None:
#nothing to do, value was in attribute
pass
else:
xml2list_inattr["val"] = (data[0] in ('T', 't', '1'))
elif xml2list_intype == "un":
#nothing to do, value was in attribute
pass
else:
unescaped_data = string.replace(data, '\\"', '"')
xml2list_inattr["val"] += unescaped_data
def xml2list(xml_data):
global xml2list_data, xml2list_inclassad, xml2list_inattr, xml2list_intype
xml2list_data = []
xml2list_inclassad = None
xml2list_inattr = None
xml2list_intype = None
p = xml.parsers.expat.ParserCreate()
p.StartElementHandler = xml2list_start_element
p.EndElementHandler = xml2list_end_element
p.CharacterDataHandler = xml2list_char_data
found_xml = -1
for line in range(len(xml_data)):
# look for the xml header
if xml_data[line][:5] == "<?xml":
found_xml = line
break
if found_xml >= 0:
try:
p.Parse(string.join(xml_data[found_xml:]), 1)
except TypeError, e:
raise RuntimeError, "Failed to parse XML data, TypeError: %s" % e
except:
raise RuntimeError, "Failed to parse XML data, generic error"
# else no xml, so return an empty list
return xml2list_data
#
# Convert a list to a dictionary
#
def list2dict(list_data, attr_name):
if type(attr_name) in (type([]), type((1, 2))):
attr_list = attr_name
else:
attr_list = [attr_name]
dict_data = {}
for list_el in list_data:
if type(attr_name) in (type([]), type((1, 2))):
dict_name = []
list_keys=list_el.keys()
for an in attr_name:
if an in list_keys:
dict_name.append(list_el[an])
else:
# Try lower cases
for k in list_keys:
if an.lower()==k.lower():
dict_name.append(list_el[k])
break
dict_name=tuple(dict_name)
else:
dict_name = list_el[attr_name]
# dict_el will have all the elements but those in attr_list
dict_el = {}
for a in list_el:
if not (a in attr_list):
dict_el[a] = list_el[a]
dict_data[dict_name] = dict_el
return dict_data
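# Example (a sketch):
#   list2dict([{'ClusterId': 1, 'ProcId': 0, 'Owner': 'alice'}], ['ClusterId', 'ProcId'])
#   -> {(1, 0): {'Owner': 'alice'}}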
def applyConstraint(data, constraint_func):
if constraint_func is None:
return data
else:
outdata = {}
for key in data.keys():
if constraint_func(data[key]):
outdata[key] = data[key]
return outdata
def doGroup(indata, group_key_func, group_data_func):
gdata = {}
for k in indata.keys():
inel = indata[k]
gkey = group_key_func(inel)
if gdata.has_key(gkey):
gdata[gkey].append(inel)
else:
gdata[gkey] = [inel]
outdata = {}
for k in gdata.keys():
outdata[k] = group_data_func(gdata[k])
return outdata
#
# Inputs
# data - data from a fetch()
# hash_func - Hashing function
# One argument: classad dictionary
# Returns: hash value
# if None, will not be counted
# if a list, all elements will be used
#
# Returns a dictionary of hash values
# Elements are counts (or more dictionaries if hash returns lists)
#
def fetch2count(data, hash_func):
count = {}
for k in data.keys():
el = data[k]
hid = hash_func(el)
if hid is None:
# hash tells us it does not want to count this
continue
# cel will point to the real counter
cel = count
# check if it is a list
if (type(hid) == type([])):
# have to create structure inside count
for h in hid[:-1]:
if not cel.has_key(h):
cel[h] = {}
cel = cel[h]
hid = hid[-1]
if cel.has_key(hid):
count_el = cel[hid] + 1
else:
count_el = 1
cel[hid] = count_el
return count
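# Example (a sketch; the attribute names are hypothetical): with
#   hash_func=lambda el: [el['Schedd'], el['JobStatus']]
# fetch2count returns nested counts such as:
#   {'schedd1': {2: 5, 1: 3}, 'schedd2': {2: 1}}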
#
# Inputs
# data - data from a fetch()
# hash_func - Hashing function
# One argument: classad dictionary
# Returns: hash value
# if None, will not be counted
# if a list, all elements will be used
#
# Returns a dictionary of hash values
# Elements are lists of keys (or more dictionaries if hash returns lists)
#
def fetch2list(data, hash_func):
return_list = {}
for k in data.keys():
el = data[k]
hid = hash_func(el)
if hid is None:
# hash tells us it does not want to list this
continue
# lel will point to the real list
lel = return_list
# check if it is a list
if (type(hid) == type([])):
# have to create structure inside list
for h in hid[:-1]:
if not lel.has_key(h):
lel[h] = {}
lel = lel[h]
hid = hid[-1]
if lel.has_key(hid):
            list_el = lel[hid] + [k]
else:
list_el = [k]
lel[hid] = list_el
return return_list
#
# Recursively add two dictionaries
# Do it in place, using the first one
#
def addDict(base_dict, new_dict):
for k in new_dict.keys():
new_el = new_dict[k]
if not base_dict.has_key(k):
# nothing there?, just copy
base_dict[k] = new_el
else:
if type(new_el) == type({}):
                # another dictionary, recurse
addDict(base_dict[k], new_el)
else:
base_dict[k] += new_el
|
bsd-3-clause
| 6,990,342,089,642,197,000
| 31.124352
| 148
| 0.586048
| false
| 3.561172
| false
| false
| false
|
apuigsech/emv-framework
|
iso7816.py
|
1
|
5968
|
#!/usr/bin/python
#
# Python ISO7816 (as part of EMV Framework)
# Copyrigh 2012 Albert Puigsech Galicia <albert@puigsech.com>
#
# This code is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
from smartcard.CardType import AnyCardType
from smartcard.CardRequest import CardRequest
from smartcard.CardConnection import CardConnection
from smartcard.CardConnectionObserver import ConsoleCardConnectionObserver
from smartcard.Exceptions import CardRequestTimeoutException
from tlv import *
INS_DB = (
{
'name':'READ_BINARY',
'code':0xb0
},
{
'name':'WRITE_BINARY',
'code':0xd0
},
{
'name':'UPDATE_BINARY',
'code':0xd6
},
{
'name':'ERASE_BINARY',
'code':0x0e
},
{
'name':'READ_RECORD',
'code':0xb2
},
{
'name':'WRITE_RECORD',
'code':0xd2
},
{
'name':'APPEND_RECORD',
'code':0xe2
},
{
		'name':'UPDATE_RECORD',
'code':0xdc
},
{
'name':'GET_DATA',
'code':0xca
},
{
'name':'PUT_DATA',
'code':0xda
},
{
'name':'SELECT_FILE',
'code':0xa4
},
{
'name':'VERIFY',
'code':0x20
},
{
'name':'INTERNAL_AUTHENTICATE',
'code':0x88
},
{
		'name':'EXTERNAL_AUTHENTICATE',
		'code':0x82	# 0x82 per ISO 7816-4; 0xb2 would collide with READ_RECORD
},
{
		'name':'GET_CHALLENGE',
		'code':0x84	# 0x84 per ISO 7816-4
},
{
'name':'MANAGE_CHANNEL',
'code':0x70
},
{
'name':'GET_RESPONSE',
'code':0xc0
}
)
class APDU_Command:
def __init__(self, cla=0x00, ins=0x00, p1=0x00, p2=0x00, lc=None, data=None, le=None):
self.cla = cla
self.ins = ins
self.p1 = p1
self.p2 = p2
if data != None and lc == None:
lc = len(data)
self.lc = lc
self.data = data
self.le = le
def raw(self):
apdu_cmd_raw = [self.cla, self.ins, self.p1, self.p2]
if self.data != None:
apdu_cmd_raw += [self.lc] + self.data
if self.le != None:
apdu_cmd_raw += [self.le]
return apdu_cmd_raw
def str(self):
apdu_cmd_str = '{0:02x} {1:02x} {2:02x} {3:02x}'.format(self.cla, self.ins, self.p1, self.p2)
if self.data != None:
apdu_cmd_str += ' {0:02x}'.format(self.lc)
for d in self.data:
apdu_cmd_str += ' {0:02x}'.format(d)
if self.le != None:
apdu_cmd_str += ' {0:02x}'.format(self.le)
return apdu_cmd_str
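# Example (a sketch; the data bytes are hypothetical): a SELECT FILE APDU
#   cmd = APDU_Command(cla=0x00, ins=0xa4, p1=0x04, p2=0x00, data=[0xa0, 0x00])
#   cmd.raw() -> [0x00, 0xa4, 0x04, 0x00, 0x02, 0xa0, 0x00]
#   cmd.str() -> '00 a4 04 00 02 a0 00'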
class APDU_Response:
def __init__(self, sw1=0x00, sw2=0x00, data=None):
self.sw1 = sw1
self.sw2 = sw2
self.data = data
def raw(self):
apdu_res_raw = []
if self.data != None:
apdu_res_raw += self.data
apdu_res_raw += [self.sw1, self.sw2]
return apdu_res_raw
def str(self):
apdu_res_str = ''
if self.data != None:
for d in self.data:
apdu_res_str += '{0:02x} '.format(d)
apdu_res_str += '{0:02x} {1:02x}'.format(self.sw1, self.sw2)
return apdu_res_str
class ISO7816:
def __init__(self):
cardtype = AnyCardType()
cardrequest = CardRequest(timeout=10, cardType=cardtype)
self.card = cardrequest.waitforcard()
self.card.connection.connect()
self.ins_db = []
self.ins_db_update(INS_DB)
self.log = []
self.auto_get_response = True
def ins_db_update(self, new):
self.ins_db += new
def ins_db_resolv(self, name=None, code=None):
for e in self.ins_db:
if name != None and e['name'] == name:
return e['code']
if code != None and e['code'] == code:
return e['name']
return None
def send_command(self, cmd, p1=0, p2=0, tlvparse=False, cla=0x00, data=None, le=None):
ins = self.ins_db_resolv(name=cmd)
return self.send_apdu(APDU_Command(ins=ins, p1=p1, p2=p2, cla=cla, data=data, le=le))
def send_apdu(self, apdu_cmd):
#print '>>> ' + apdu_cmd.str()
data,sw1,sw2 = self.send_apdu_raw(apdu_cmd.raw())
apdu_res = APDU_Response(sw1=sw1, sw2=sw2, data=data)
#print '<<< ' + apdu_res.str()
if self.auto_get_response == True:
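			# SW1=0x6c: wrong Le; SW2 holds the correct length, so retry with it
			# SW1=0x61: response bytes pending; fetch them with GET RESPONSE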
if sw1 == 0x6c:
apdu_cmd.le = sw2
apdu_res = self.send_apdu(apdu_cmd)
if sw1 == 0x61:
apdu_res = self.GET_RESPONSE(sw2)
return apdu_res
def send_apdu_raw(self, apdu):
return self.card.connection.transmit(apdu)
def log_add(self, log_item):
self.log.append(log_item)
def log_print(self):
return
def READ_BINARY(self, p1=0x00, p2=0x00, len=0x00):
return self.send_command('READ_BINARY', p1=p1, p2=p2, le=len)
def WRITE_BINARY(self, p1=0x00, p2=0x00, data=[]):
return self.send_command('WRITE_BINARY', p1=p1, p2=p2, data=data)
	def UPDATE_BINARY(self, p1=0x00, p2=0x00, data=[]):
		return self.send_command('UPDATE_BINARY', p1=p1, p2=p2, data=data)
def ERASE_BINARY(self, p1=0x00, p2=0x00, data=None):
return self.send_command('ERASE_BINARY', p1=p1, p2=p2, data=data)
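	# For the record commands below, P2 carries the short file identifier (SFI)
	# in its five high bits; the three low bits select the addressing mode
	# (0b100 = record number given in P1), as defined by ISO 7816-4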
def READ_RECORD(self, sfi, record=0x00, variation=0b100):
return self.send_command('READ_RECORD', p1=record, p2=(sfi<<3)+variation, le=0)
def WRITE_RECORD(self, sfi, data, record=0x00, variation=0b100):
return self.send_command('WRITE_RECORD', p1=record, p2=(sfi<<3)+variation, data=data)
	def APPEND_RECORD(self, sfi, data, variation=0b100):
		return self.send_command('APPEND_RECORD', p1=0x00, p2=(sfi<<3)+variation, data=data)
def UPDATE_RECORD(self, sfi, data, record=0x00, variation=0b100):
return self.send_command('UPDATE_RECORD', p1=record, p2=(sfi<<3)+variation, data=data)
def GET_DATA(self, data_id):
return self.send_command('GET_DATA', p1=data_id[0], p2=data_id[1])
def PUT_DATA(self, data_id, data):
return self.send_command('PUT_DATA', p1=data_id[0], p2=data_id[1], data=data)
def SELECT_FILE(self, data, p1=0x00, p2=0x00):
return self.send_command('SELECT_FILE', p1=p1, p2=p2, data=data)
def VERIFY(self):
return
def INTERNAL_AUTHENTICATE(self):
return
def EXTERNAL_AUTHENTICATE(self):
return
def GET_CHALLENGE(self):
return
def MANAGE_CHANNEL(self):
return
def GET_RESPONSE(self, le):
return self.send_command('GET_RESPONSE', le=le)
def ENVELOPPE(self):
return
def SEARCH_RECORD(self):
return
def DISABLE_CHV(self):
return
def UNBLOCK_CHV(self):
return
|
gpl-3.0
| 7,305,658,734,531,248,000
| 22.131783
| 95
| 0.649464
| false
| 2.385292
| false
| false
| false
|