code stringlengths 2 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int32 2 1.05M |
|---|---|---|---|---|---|
#!/usr/bin/python -tt
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# copyright (c) 2008 Red Hat, Inc - written by Seth Vidal and Will Woods
import yum
import sys
import os
from yum.misc import getCacheDir, checksum
import urlparse
from yum import Errors
from optparse import OptionParser
import ConfigParser
# Subclass ConfigParser so that the options don't get lowercased. This is
# important given that they are path names.
# Subclass ConfigParser so that the options don't get lowercased. This is
# important given that they are path names.
class LocalConfigParser(ConfigParser.ConfigParser):
    """A subclass of ConfigParser which does not lowercase options.

    The stock optionxform() lowercases option names; the options read from a
    .treeinfo [checksums] section are file paths, so case must be preserved.
    """
    def optionxform(self, optionstr):
        # Identity transform: keep the option name exactly as read.
        return optionstr
####
# take a file path to a repo as an option, verify all the metadata vs repomd.xml
# optionally go through packages and verify them vs the checksum in the primary
# Error values
# Error values: independent bit flags OR'ed together into the script's exit
# status, so one run can report several failure classes at once.
BAD_REPOMD = 1     # repomd.xml could not be loaded
BAD_METADATA = 2   # a metadata file is missing or fails its checksum
BAD_COMPS = 4      # comps (group) data missing, unparseable or invalid
BAD_PACKAGES = 8   # a package failed checksum verification
BAD_IMAGES = 16    # a file listed in .treeinfo is missing or corrupt
# Testopia case/plan numbers
plan_number = 13   # NOTE(review): appears unused in this file -- confirm external callers
case_numbers = {'REPODATA': 56, 'CORE_PACKAGES': 57, 'COMPS': 58,
                'BOOT_IMAGES': 59}
def get_schema_path():
    """Return the local path to the RELAX NG comps schema.

    Depending on whether the distro uses versioned or unversioned docdirs
    (the former is true for Fedora < 20, see bug 998579), yum ships the
    schema at one of two locations; the first readable one wins.
    """
    candidates = []
    for suffix in ('', '-' + yum.__version__):
        candidates.append('/usr/share/doc/yum%s/comps.rng' % suffix)
    for candidate in candidates:
        # Opening the file (rather than os.path.exists()) also proves that
        # the schema is actually readable by us.
        try:
            handle = open(candidate)
        except IOError:
            continue
        handle.close()
        return candidate
    raise IOError(candidates)
def testopia_create_run(plan):
'''Create a run of the given test plan. Returns the run ID.'''
run_id = 49 # STUB actually create the run
print "Testopia: created run %i of plan %i" % (run_id,plan)
return run_id
def testopia_report(run,case,result):
print " testopia: reporting %s for case %s in run %i" % (result,
str(case),run)
if type(case) == str:
case = case_numbers[case]
# STUB actually do the reporting
def checkfileurl(pkg):
    """Return True if the on-disk file behind *pkg* matches its repodata checksum."""
    local_path = pkg.remote_url.replace('file://', '')
    expected_type, expected_sum = pkg.returnIdSum()
    try:
        actual_sum = checksum(expected_type, local_path)
    except Errors.MiscError:
        # Unreadable/missing file -> verification failure.
        return False
    return actual_sum == expected_sum
def treeinfo_checksum(treeinfo):
# read treeinfo file into cp
# take checksums section
result = 0
cp = LocalConfigParser()
try:
cp.read(treeinfo)
except ConfigParser.MissingSectionHeaderError:
# Generally this means we failed to access the file
print " could not find sections in treeinfo file %s" % treeinfo
return BAD_IMAGES
except ConfigParser.Error:
print " could not parse treeinfo file %s" % treeinfo
return BAD_IMAGES
if not cp.has_section('checksums'):
print " no checksums section in treeinfo file %s" % treeinfo
return BAD_IMAGES
dir_path = os.path.dirname(treeinfo)
for opt in cp.options('checksums'):
fnpath = dir_path + '/%s' % opt
fnpath = os.path.normpath(fnpath)
csuminfo = cp.get('checksums', opt).split(':')
if len(csuminfo) < 2:
print " checksum information doesn't make any sense for %s." % opt
result = BAD_IMAGES
continue
if not os.path.exists(fnpath):
print " cannot find file %s listed in treeinfo" % fnpath
result = BAD_IMAGES
continue
csum = checksum(csuminfo[0], fnpath)
if csum != csuminfo[1]:
print " file %s %s does not match:\n ondisk %s vs treeinfo: %s" % (opt, csuminfo[0], csum, csuminfo[1])
result = BAD_IMAGES
continue
return result
def main():
    """Verify the repository given on the command line.

    Returns a bitmask of the BAD_* flags (0 == tree verified), which the
    __main__ guard uses as the process exit status.
    """
    parser = OptionParser()
    parser.usage = """
verifytree - verify that a local yum repository is consistent
verifytree /path/to/repo"""
    parser.add_option("-a", "--checkall", action="store_true", default=False,
                      help="Check all packages in the repo")
    parser.add_option("--nocomps", "--nogroups", action="store_true",
                      default=False,
                      help="Do not read and check comps")
    parser.add_option("--noplugins", action="store_true", default=False,
                      help="Do not load any plugins")
    parser.add_option("-t", "--testopia", action="store", type="int",
                      help="Report results to the given testopia run number")
    parser.add_option("-r", "--treeinfo", action="store_true", default=False,
                      help="check the checksums of listed files in a .treeinfo file, if available")
    opts, args = parser.parse_args()
    if not args:
        print "Must provide a file url to the repo"
        sys.exit(1)
    # FIXME: check that "args" is a valid dir before proceeding
    # (exists, isdir, contains .treeinfo, etc)
    url = args[0]
    # A bare path is promoted to a file:// URL; anything else is rejected.
    if url[0] == '/':
        url = 'file://' + url
    s = urlparse.urlsplit(url)[0]
    h, d = urlparse.urlsplit(url)[1:3]
    if s != 'file':
        print "Must be a file:// url or you will not like this"
        sys.exit(1)
    # Derive a unique repo id from the host+path components of the URL.
    repoid = '%s/%s' % (h, d)
    repoid = repoid.replace('/', '_')
    # Bad things happen if we're missing a trailing slash here
    if url[-1] != '/':
        url += '/'
    basedir = url.replace('file://', '')  # for a normal path thing
    # Build a throwaway YumBase with only our local repo enabled.
    my = yum.YumBase()
    if opts.noplugins:
        my.preconf.init_plugins = False
    my.conf.cachedir = getCacheDir()
    my.repos.disableRepo('*')
    newrepo = yum.yumRepo.YumRepository(repoid)
    newrepo.name = repoid
    newrepo.baseurl = [url]
    newrepo.basecachedir = my.conf.cachedir
    newrepo.metadata_expire = 0
    newrepo.timestamp_check = False
    newrepo.enablegroups = 1
    # we want *all* metadata
    newrepo.mdpolicy = 'group:all'
    # add our new repo
    my.repos.add(newrepo)
    # enable that repo
    my.repos.enableRepo(repoid)
    # setup the repo dirs/etc
    my.doRepoSetup(thisrepo=repoid)
    # Initialize results and reporting
    retval = 0
    if opts.testopia:
        run_id = testopia_create_run(opts.testopia)
        report = lambda case, result: testopia_report(run_id, case, result)
    else:
        # No-op reporter when no testopia run was requested.
        report = lambda case, result: None
    # Check the metadata
    print "Checking repodata:"
    try:
        md_types = newrepo.repoXML.fileTypes()
        print " verifying repomd.xml with yum"
    except yum.Errors.RepoError:
        # Without repomd.xml nothing else can be checked.
        print " failed to load repomd.xml."
        report('REPODATA', 'FAILED')
        report('CORE_PACKAGES', 'BLOCKED')
        report('COMPS', 'BLOCKED')
        return retval | BAD_REPOMD
    for md_type in md_types:
        try:
            print " verifying %s checksum" % md_type
            newrepo.retrieveMD(md_type)
        except Errors.RepoError, e:
            print " %s metadata missing or does not match checksum" % md_type
            retval = retval | BAD_METADATA
    if retval & BAD_METADATA:
        report('REPODATA', 'FAILED')
    else:
        report('REPODATA', 'PASSED')
    if not opts.nocomps:
        print "Checking groups (comps.xml):"
        try:
            print " verifying comps.xml with yum"
            b = my.comps.compscount
            comps = newrepo.getGroups()
        except (Errors.GroupsError, Errors.RepoMDError):
            print ' comps file missing or unparseable'
            report('COMPS', 'FAILED')
            retval = retval | BAD_COMPS
        if not (retval & BAD_COMPS):
            print " verifying comps.xml grammar with xmllint"
            try:
                schema = get_schema_path()
            except IOError as e:
                # get_schema_path() raises IOError(paths) with the tried list.
                print ' could not read schema file, paths tried:'
                for path in e.args[0]:
                    print ' ' + path
                print (' make sure you have the latest version of yum '
                       'properly installed')
                r = 1
            else:
                r = os.system("xmllint --noout --nowarning --relaxng %s %s" %
                              (schema, comps))
            if r != 0:
                retval = retval | BAD_COMPS
                report('COMPS', 'FAILED')
            else:
                report('COMPS', 'PASSED')
    # if we've got a .treeinfo file and we are told to check it, then do so
    tr_path = basedir + '/.treeinfo'
    if opts.treeinfo and os.path.exists(tr_path):
        print "Checking checksums of files in .treeinfo"
        tr_val = treeinfo_checksum(tr_path)
        retval = tr_val | retval
    sack = []
    packages_ok = True
    if opts.checkall:
        print "Checking all packages"
        sack = my.pkgSack
    elif not (retval & BAD_COMPS or opts.nocomps):
        # Default mode: only verify the mandatory packages of @core.
        print "Checking mandatory @core packages"
        group = my.comps.return_group('core')
        if group is not None:
            pkgs = group.mandatory_packages
        else:
            print " @core group not found"
            retval = retval | BAD_COMPS
            report('COMPS', 'FAILED')
            pkgs = []
        for pname in pkgs:
            # FIXME: this pulls from pkgSack, which (I guess) is populated
            # based on the arch etc. of the current host.. so you can't check
            # the x86_64 repo from an i386 machine, f'rinstance.
            try:
                sack.extend(my.pkgSack.searchNevra(name=pname))
            except yum.Errors.RepoError:
                print " something went wrong with the repodata."
                sack = []
                break
    for pkg in sack:
        if checkfileurl(pkg):
            print " verifying %s checksum" % pkg
        else:
            print " verifying %s checksum FAILED" % pkg
            packages_ok = False
    if sack:
        if packages_ok is True:
            report('CORE_PACKAGES', 'PASSED')
        else:
            report('CORE_PACKAGES', 'FAILED')
            retval = retval | BAD_PACKAGES
    else:
        # we couldn't test anything
        report('CORE_PACKAGES', 'BLOCKED')
    # All done!
    if retval == 0:
        print "Tree verified."
    return retval
# Script entry point: the exit status is the OR of the BAD_* flags
# (0 == tree verified cleanly).
if __name__ == "__main__":
    rc = main()
    sys.exit(rc)
| rpm-software-management/yum-utils | verifytree.py | Python | gpl-2.0 | 11,046 |
# -*- coding: utf-8 -*-
#!/usr/bin/python
#
# This is derived from a cadquery script for generating PDIP models in X3D format
#
# from https://bitbucket.org/hyOzd/freecad-macros
# author hyOzd
# This is a
# Dimensions are from Microchips Packaging Specification document:
# DS00000049BY. Body drawing is the same as QFP generator#
## requirements
## cadquery FreeCAD plugin
## https://github.com/jmwright/cadquery-freecad-module
## to run the script just do: freecad main_generator.py modelName
## e.g. c:\freecad\bin\freecad main_generator.py DIP8
## the script will generate STEP and VRML parametric models
## to be used with kicad StepUp script
#* These are a FreeCAD & cadquery tools *
#* to export generated models in STEP & VRML format. *
#* *
#* cadquery script for generating QFP/SOIC/SSOP/TSSOP models in STEP AP214 *
#* Copyright (c) 2015 *
#* Maurice https://launchpad.net/~easyw *
#* All trademarks within this guide belong to their legitimate owners. *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., *
#* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA *
#* *
#****************************************************************************
# Script metadata consumed by the FreeCAD/StepUp tooling.
__title__ = "make Valve 3D models"
__author__ = "Stefan, based on Valve script"
__Comment__ = 'make varistor 3D models exported to STEP and VRML for Kicad StepUP script'
___ver___ = "1.3.3 14/08/2015"
# maui import cadquery as cq
# maui from Helpers import show
from collections import namedtuple
import math
import sys, os
import datetime
from datetime import datetime
sys.path.append("../_tools")
import exportPartToVRML as expVRML
import shaderColors
# maui start
import FreeCAD, Draft, FreeCADGui
import ImportGui
import FreeCADGui as Gui
#from Gui.Command import *
outdir=os.path.dirname(os.path.realpath(__file__) + os.sep + '..' + os.sep + '_3Dmodels')
scriptdir=os.path.dirname(os.path.realpath(__file__))
sys.path.append(outdir)
sys.path.append(scriptdir)
if FreeCAD.GuiUp:
from PySide import QtCore, QtGui
# Licence information of the generated models.
#################################################################################################
STR_licAuthor = "kicad StepUp"
STR_licEmail = "ksu"
STR_licOrgSys = "kicad StepUp"
STR_licPreProc = "OCC"
STR_licOrg = "FreeCAD"
#################################################################################################
import cq_belfuse # modules parameters
from cq_belfuse import *
import cq_keystone # modules parameters
from cq_keystone import *
import cq_bulgin # modules parameters
from cq_bulgin import *
import cq_schurter # modules parameters
from cq_schurter import *
import cq_tme # modules parameters
from cq_tme import *
import cq_littlefuse # modules parameters
from cq_littlefuse import *
# One parameter/generator object per supported part family.  Each object is
# used below via get_list_all(), model_exist(), get_dest_3D_dir(),
# make_3D_model() and get_model_name().
different_models = [
    cq_belfuse(),
    cq_keystone(),
    cq_bulgin(),
    cq_schurter(),
    cq_tme(),
    cq_littlefuse(),
]
def make_3D_model(models_dir, model_class, modelID):
    """Generate STEP, VRML and FCStd output for *modelID* with *model_class*.

    models_dir is the output root; the family's get_dest_3D_dir() picks the
    subdirectory.  Runs inside FreeCAD with the GUI up (uses App/Gui).
    """
    LIST_license = ["",]
    # FreeCAD document names must not contain '.', '-', '(' or ')'.
    CheckedmodelName = 'A_' + modelID.replace('.', '').replace('-', '_').replace('(', '').replace(')', '')
    CheckedmodelName = CheckedmodelName  # no-op kept from the original template
    Newdoc = App.newDocument(CheckedmodelName)
    App.setActiveDocument(CheckedmodelName)
    Gui.ActiveDocument = Gui.getDocument(CheckedmodelName)
    destination_dir = model_class.get_dest_3D_dir(modelID)
    # The model class builds its geometry in the active document and returns
    # the colour/material substitution table used for the VRML export.
    material_substitutions = model_class.make_3D_model(modelID)
    modelName = model_class.get_model_name(modelID)
    doc = FreeCAD.ActiveDocument
    doc.Label = CheckedmodelName
    objs = GetListOfObjects(FreeCAD, doc)
    objs[0].Label = CheckedmodelName
    restore_Main_Tools()
    script_dir = os.path.dirname(os.path.realpath(__file__))
    expVRML.say(models_dir)
    out_dir = models_dir + os.sep + destination_dir
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    exportSTEP(doc, modelName, out_dir)
    # Fall back to the built-in licence block when none was configured above.
    if LIST_license[0] == "":
        LIST_license = Lic.LIST_int_license
        LIST_license.append("")
    Lic.addLicenseToStep(out_dir + os.sep, modelName + ".step", LIST_license,
                         STR_licAuthor, STR_licEmail, STR_licOrgSys, STR_licOrg, STR_licPreProc)
    # scale and export Vrml model
    # NOTE(review): 1/2.54 presumably converts the STEP mm units for the
    # VRML output expected by KiCad StepUp -- confirm.
    scale = 1/2.54
    #exportVRML(doc,modelName,scale,out_dir)
    del objs
    objs = GetListOfObjects(FreeCAD, doc)
    expVRML.say("######################################################################")
    expVRML.say(objs)
    expVRML.say("######################################################################")
    export_objects, used_color_keys = expVRML.determineColors(Gui, objs, material_substitutions)
    export_file_name = out_dir + os.sep + modelName + '.wrl'
    colored_meshes = expVRML.getColoredMesh(Gui, export_objects, scale)
    #expVRML.writeVRMLFile(colored_meshes, export_file_name, used_color_keys)# , LIST_license
    expVRML.writeVRMLFile(colored_meshes, export_file_name, used_color_keys, LIST_license)
    #scale=0.3937001
    #exportVRML(doc,modelName,scale,out_dir)
    # Save the doc in Native FC format
    saveFCdoc(App, Gui, doc, modelName, out_dir)
    #display BBox
    Gui.activateWorkbench("PartWorkbench")
    Gui.SendMsgToActiveView("ViewFit")
    Gui.activeDocument().activeView().viewAxometric()
    #FreeCADGui.ActiveDocument.activeObject.BoundingBox = True
def run():
    """Placeholder entry point; the actual work happens in the module-level
    __main__ block below."""
    ## # get variant names from command line
    return
#import step_license as L
import add_license as Lic
# when run from command line
if __name__ == "__main__" or __name__ == "main_generator":
    FreeCAD.Console.PrintMessage('\r\nRunning...\r\n')
    full_path = os.path.realpath(__file__)
    expVRML.say(full_path)
    scriptdir = os.path.dirname(os.path.realpath(__file__))
    expVRML.say(scriptdir)
    sub_path = full_path.split(scriptdir)
    expVRML.say(sub_path)
    # Directory containing this generator (second-to-last path component).
    sub_dir_name = full_path.split(os.sep)[-2]
    expVRML.say(sub_dir_name)
    sub_path = full_path.split(sub_dir_name)[0]
    expVRML.say(sub_path)
    models_dir = sub_path + "_3Dmodels"
    model_to_build = ''
    # The model selector is expected in sys.argv[2]: a model ID, 'all' or 'list'.
    if len(sys.argv) < 3:
        FreeCAD.Console.PrintMessage('No variant name is given, add a valid model name as an argument or the argument "all"\r\n')
        sys.exit()
    else:
        model_to_build = sys.argv[2]
    found_one = False
    if len(model_to_build) > 0:
        if model_to_build == 'all' or model_to_build == 'All' or model_to_build == 'ALL':
            # Build every model of every family.
            found_one = True
            for n in different_models:
                listall = n.get_list_all()
                for i in listall:
                    make_3D_model(models_dir, n, i)
        elif model_to_build == 'list':
            # Only print the available model IDs.
            found_one = True
            FreeCAD.Console.PrintMessage('\r\n')
            for n in different_models:
                listall = n.get_list_all()
                for i in listall:
                    FreeCAD.Console.PrintMessage(i + '\r\n')
        else:
            # Build the single requested model in whichever family has it.
            for n in different_models:
                if n.model_exist(model_to_build):
                    found_one = True
                    make_3D_model(models_dir, n, model_to_build)
    if not found_one:
        print("Parameters for %s doesn't exist, skipping. " % model_to_build)
| easyw/kicad-3d-models-in-freecad | cadquery/FCAD_script_generator/Fuse/main_generator.py | Python | gpl-2.0 | 8,654 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#-----------------------------------------------------------------------#
# #
# This file is part of the Horus Project #
# #
# Copyright (C) 2014-2015 Mundo Reader S.L. #
# #
# Date: August, November 2014 #
# Author: Jesús Arroyo Torrens <jesus.arroyo@bq.com> #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 2 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
#-----------------------------------------------------------------------#
__author__ = "Jesús Arroyo Torrens <jesus.arroyo@bq.com>"
__license__ = "GNU General Public License v2 http://www.gnu.org/licenses/gpl.html"
import time
import serial
import threading
class Error(Exception):
    """Base class for scanner board errors; carries a human readable message."""
    def __init__(self, msg):
        self.msg = msg

    def __str__(self):
        return repr(self.msg)

class WrongFirmware(Error):
    """Raised when the board answers with an unexpected firmware banner."""
    def __init__(self, msg="WrongFirmware"):
        # Bug fix: the original called super(Error, self).__init__(msg),
        # which skips Error.__init__ entirely (it resolves to Exception in
        # the MRO), so self.msg was never set and str()/print on the
        # exception raised AttributeError.
        super(WrongFirmware, self).__init__(msg)

class BoardNotConnected(Error):
    """Raised when the serial port cannot be opened or the board is absent."""
    def __init__(self, msg="BoardNotConnected"):
        # Same fix as WrongFirmware: call our own super, not Error's.
        super(BoardNotConnected, self).__init__(msg)
class Board:
    """Board class. For accessing to the scanner board"""
    """
    Gcode commands:
        G1 Fnnn : feed rate
        G1 Xnnn : move motor
        M70 Tn : switch off laser n
        M71 Tn : switch on laser n
    """
    def __init__(self, parent=None, serialName='/dev/ttyUSB0', baudRate=115200):
        self.parent = parent
        self.serialName = serialName
        self.baudRate = baudRate
        self.serialPort = None
        self.isConnected = False
        self.unplugCallback = None
        self._position = 0   # absolute position sent with G1 X
        self._direction = 1  # +1/-1 sign applied to moves, see setInvertMotor()
        self._n = 0  # Check if command fails

    def setSerialName(self, serialName):
        self.serialName = serialName

    def setBaudRate(self, baudRate):
        self.baudRate = baudRate

    def setInvertMotor(self, invertMotor):
        # Flips the sign applied to every relative move in moveMotor().
        if invertMotor:
            self._direction = -1
        else:
            self._direction = +1

    def setUnplugCallback(self, unplugCallback=None):
        self.unplugCallback = unplugCallback

    def connect(self):
        """ Opens serial port and performs handshake"""
        print ">>> Connecting board {0} {1}".format(self.serialName, self.baudRate)
        self.isConnected = False
        try:
            self.serialPort = serial.Serial(self.serialName, self.baudRate, timeout=2)
            if self.serialPort.isOpen():
                #-- Force Reset and flush
                self._reset()
                # The firmware banner read back after reset is the handshake.
                version = self.serialPort.readline()
                if version == "Horus 0.1 ['$' for help]\r\n":
                    self.setSpeedMotor(1)
                    self.setAbsolutePosition(0)
                    self.serialPort.timeout = 0.05
                    print ">>> Done"
                    self.isConnected = True
                else:
                    raise WrongFirmware()
            else:
                raise BoardNotConnected()
        except:
            # NOTE(review): this bare except also catches the WrongFirmware
            # raised just above and re-raises it as BoardNotConnected --
            # confirm that losing the distinction is intended.
            print "Error opening the port {0}\n".format(self.serialName)
            self.serialPort = None
            raise BoardNotConnected()

    def disconnect(self):
        """ Closes serial port """
        if self.isConnected:
            print ">>> Disconnecting board {0}".format(self.serialName)
            try:
                if self.serialPort is not None:
                    # Leave the hardware in a safe state before closing.
                    self.setLeftLaserOff()
                    self.setRightLaserOff()
                    self.disableMotor()
                    self.serialPort.close()
                    del self.serialPort
            except serial.SerialException:
                print "Error closing the port {0}\n".format(self.serialName)
                print ">>> Error"
            self.isConnected = False
            print ">>> Done"

    def enableMotor(self):
        return self._sendCommand("M17")

    def disableMotor(self):
        return self._sendCommand("M18")

    def setSpeedMotor(self, feedRate):
        self.feedRate = feedRate
        return self._sendCommand("G1F{0}".format(self.feedRate))

    def setAccelerationMotor(self, acceleration):
        self.acceleration = acceleration
        return self._sendCommand("$120={0}".format(self.acceleration))

    def setRelativePosition(self, pos):
        # Step applied on every moveMotor() call.
        self._posIncrement = pos

    def setAbsolutePosition(self, pos):
        self._posIncrement = 0
        self._position = pos

    def moveMotor(self, nonblocking=False, callback=None):
        # Advance by the configured increment, honouring the invert flag.
        self._position += self._posIncrement * self._direction
        return self._sendCommand("G1X{0}".format(self._position), nonblocking, callback)

    def setRightLaserOn(self):
        return self._sendCommand("M71T2")

    def setLeftLaserOn(self):
        return self._sendCommand("M71T1")

    def setRightLaserOff(self):
        return self._sendCommand("M70T2")

    def setLeftLaserOff(self):
        return self._sendCommand("M70T1")

    def getLDRSensor(self, pin):
        # First line of the M50 response carries the reading; 0 on parse error.
        value = self.sendRequest("M50T"+pin, readLines=True).split("\n")[0]
        try:
            return int(value)
        except ValueError:
            return 0

    def sendRequest(self, req, nonblocking=False, callback=None, readLines=False):
        # Non-blocking requests run in a daemonless worker thread and report
        # through *callback*; blocking requests return the response directly.
        if nonblocking:
            threading.Thread(target=self._sendRequest, args=(req, callback, readLines)).start()
        else:
            return self._sendRequest(req, callback, readLines)

    def _sendRequest(self, req, callback=None, readLines=False):
        """Sends the request and returns the response"""
        ret = ''
        if self.isConnected and req != '':
            if self.serialPort is not None and self.serialPort.isOpen():
                try:
                    self.serialPort.flushInput()
                    self.serialPort.flushOutput()
                    self.serialPort.write(req+"\r\n")
                    while ret == '': # TODO: add timeout
                        if readLines:
                            ret = ''.join(self.serialPort.readlines())
                        else:
                            ret = ''.join(self.serialPort.readline())
                        time.sleep(0.01)
                    self._success()
                except:
                    if callback is not None:
                        callback(ret)
                    self._fail()
            else:
                self._fail()
        if callback is not None:
            # NOTE(review): on the exception path above the callback has
            # already fired once -- confirm the double invocation is intended.
            callback(ret)
        return ret

    def _success(self):
        self._n = 0

    def _fail(self):
        # A single failure (threshold >= 1) triggers the unplug notification.
        self._n += 1
        if self._n >= 1:
            self._n = 0
            if self.unplugCallback is not None and \
               self.parent is not None and not self.parent.unplugged:
                self.parent.unplugged = True
                self.unplugCallback()

    def _checkAcknowledge(self, ack):
        # Grbl-style firmware acknowledges commands with a trailing "ok".
        if ack is not None:
            return ack.endswith("ok\r\n")
        else:
            return False

    def _sendCommand(self, cmd, nonblocking=False, callback=None):
        if nonblocking:
            self.sendRequest(cmd, nonblocking, callback)
        else:
            return self._checkAcknowledge(self._sendRequest(cmd))

    def _reset(self):
        self.serialPort.flushInput()
        self.serialPort.flushOutput()
        self.serialPort.write("\x18\r\n") # Ctrl-x
        self.serialPort.readline()  # discard the line echoed after reset
# -- coding: utf-8 --
# ===========================================================================
# eXe
# Copyright 2010-2011, Pedro Peña Pérez
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# ===========================================================================
import os
import mimetypes
from chardet import universaldetector
from chardet import latin1prober
import re
import sys
from bs4 import BeautifulSoup, UnicodeDammit
from urllib import quote, unquote
from exe.engine.freetextidevice import FreeTextIdevice
from exe.engine.resource import Resource
from exe.engine.path import Path
import logging
log = logging.getLogger(__name__)
class FixedLatin1Prober(latin1prober.Latin1Prober):
    """Latin1Prober halves its confidence to favour the other probers;
    double it back so the confidence keeps its real value.
    (Original Spanish docstring translated.)"""
    def get_confidence(self):
        return latin1prober.Latin1Prober.get_confidence(self)*2
class FixedUniversalDetector(universaldetector.UniversalDetector):
    """UniversalDetector variant that plugs in FixedLatin1Prober instead of
    the stock Latin1Prober."""
    def __init__(self):
        from chardet.mbcsgroupprober import MBCSGroupProber # multi-byte character sets
        from chardet.sbcsgroupprober import SBCSGroupProber # single-byte character sets
        universaldetector.UniversalDetector.__init__(self)
        # Same prober list as upstream, but with the fixed Latin-1 prober.
        self._mCharSetProbers = [MBCSGroupProber(), SBCSGroupProber(), FixedLatin1Prober()]
def detect(aBuf):
    """Autodetect the character encoding of a byte string using
    FixedUniversalDetector."""
    detector = FixedUniversalDetector()
    detector.reset()
    detector.feed(aBuf)
    detector.close()
    return detector.result
def relpath(path, start):
    """System-independent os.path.relpath that also works around a failure
    when *start* is a drive unit on Windows.
    (Original Spanish docstring translated.)"""
    if sys.platform[:3] == 'win':
        # Python 2.5 has no os.path.relpath on Windows -> use the local port.
        if not hasattr(os.path,'relpath'):
            r = nt_relpath(path,start).replace(os.sep,os.altsep)
        else:
            r = os.path.relpath(path, start).replace(os.sep,os.altsep)
        # When start is a bare drive root, strip the spurious '../' prefixes.
        if os.path.splitdrive(start)[1] in ['',os.curdir,os.sep,os.sep + os.curdir]:
            r = r.replace(os.pardir + os.altsep,'')
        return r
    else:
        return os.path.relpath(path, start)
curdir = '.'
def nt_relpath(path, start=curdir):
    """os.path.relpath for Windows, needed because Python 2.5 does not
    provide it. (Original Spanish docstring translated.)"""
    from ntpath import abspath, splitunc, sep, pardir, join
    if not path:
        raise ValueError("no path specified")
    start_list = abspath(start).split(sep)
    path_list = abspath(path).split(sep)
    if start_list[0].lower() != path_list[0].lower():
        unc_path, rest = splitunc(path)
        unc_start, rest = splitunc(start)
        # A UNC share and a drive letter can never be made relative to
        # each other; neither can two different drives.
        if bool(unc_path) ^ bool(unc_start):
            raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)" % (path, start))
        else:
            raise ValueError("path is on drive %s, start on drive %s" % (path_list[0], start_list[0]))
    # Work out how much of the filepath is shared by start and path.
    for i in range(min(len(start_list), len(path_list))):
        if start_list[i].lower() != path_list[i].lower():
            break
    else:
        i += 1
    rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
    if not rel_list:
        return curdir
    return join(*rel_list)
class Link:
    """A reference from *referrer* to *url*, optionally tied to the HTML tag
    and attribute (*key*) where it was found, plus the matched text."""

    def __init__(self, url, relative, referrer, tag=None, key=None, match=None):
        self.url = url
        self.relative = relative
        self.referrer = referrer
        self.tag = tag
        self.key = key
        self.match = match

    def __repr__(self):
        target = str(self.url)
        if not (self.tag and self.tag.name):
            return "<%s>" % (target)
        return "<%s %s=%s>" % (self.tag.name, self.key, target)
class Url:
    """One file or directory of the imported tree.

    Wraps the filesystem path together with its path components relative to
    the import root, the guessed MIME type, the links found in its content,
    and the eXe node/idevice created for it.
    """
    def __init__(self, path, start='.'):
        self.path = path
        self.start = start
        self.relpath = relpath(self.path,self.start)
        parent = os.path.split(self.relpath)[0]
        self.parentpath = u"." if parent == u"" else parent
        self.basename = os.path.basename(self.relpath)
        if os.path.isdir(self.path):
            self.type = 'dir'
        elif os.path.isfile(self.path):
            self.type = 'file'
        # NOTE(review): self.type stays unset for paths that are neither a
        # directory nor a regular file -- confirm callers never pass those.
        self.mime, self.dataencoding = mimetypes.guess_type(self.path)
        self.links = []    # Link objects found in this document
        self.plinks = []   # partial links
        self.rlinks = set()  # relpaths of urls this document references
        self.soup = None   # BeautifulSoup tree, HTML files only
        self.content = None  # decoded file content, set via setContent()
        self.contentUpdated = []
        self.l = unquote(self.relpath)
        self.absl = self.start + os.path.sep + self.l

    def setSoup(self,soup):
        # Only HTML documents keep their parse tree around.
        if self.mime == 'text/html':
            self.soup = soup

    def getSoup(self):
        return self.soup

    def setContent(self,content,encoding):
        self.content = content
        self.contentEncoding = encoding

    def getContent(self):
        return self.content

    def createNode(self,parent, name = None):
        """Create the eXe tree node for this url under *parent*."""
        self.node = parent.createChild()
        self.node.setTitle(name if name else self.basename)

    def createIdevice(self):
        """Attach a non-editable FreeText idevice to this url's node."""
        self.idevice = FreeTextIdevice()
        self.idevice.edit = False
        self.node.addIdevice(self.idevice)
        return self.idevice

    def __str__(self):
        return self.relpath

    def __repr__(self):
        return self.relpath

    def addLink(self,link):
        self.links.append(link)

    def addPLink(self,link):
        self.plinks.append(link)

    def addRLink(self,link):
        self.rlinks.add(link)
class Resources:
cancel = False
@classmethod
def cancelImport(cls):
cls.cancel = True
#TODO Deshacer todo lo que se lleve hecho
def __init__(self, baseurl, node, client=None):
self.baseurl = baseurl.decode(sys.getfilesystemencoding())
self.node = node
self.client = client
self.numdirs = 0
resources = {}
resources['mimes'] = {}
resources['urls'] = {}
url = Url(self.baseurl, self.baseurl)
url.createNode(node, _('Contents of directory'))
resources['urls'][url.relpath] = url
try:
for root, dirs, files in self._safewalk(self.baseurl):
if self.cancel:
return
self.numdirs += 1
except UnicodeDecodeError:
raise
i = 1
for root, dirs, files in self._safewalk(self.baseurl):
html = u""
idevice = None
if self.client:
self.client.call('eXe.app.getController("Toolbar").updateImportProgressWindow',_(u'Analizing directory %d of %d: %s') % (i, self.numdirs,root.encode(sys.getfilesystemencoding())))
for dir in dirs:
if self.cancel:
return
path = root + os.path.sep + dir
url = Url(path, self.baseurl)
url.createNode(resources['urls'][url.parentpath].node)
resources['urls'][url.relpath] = url
for file in files:
if self.cancel:
return
path = root + os.path.sep + file
url = Url(path, self.baseurl)
parent = resources['urls'][url.parentpath]
if not idevice:
idevice = parent.createIdevice()
try:
p = Path(path)
p.setSalt(str(url))
r = Resource(idevice,p)
except:
continue
url.href = 'resources/%s' % (quote(r.storageName))
html += u"<p><a href=%s>%s</p>\n" % (url.href,url.basename)
resources['urls'][url.relpath] = url
if url.mime in resources['mimes'].keys():
resources['mimes'][url.mime].append(url)
else:
resources['mimes'][url.mime] = [ url ]
if idevice:
idevice.setContent(html)
i += 1
self.resources = resources
def _safewalk(self, top):
try:
names = os.listdir(top)
except error, err:
return
dirs, nondirs = [], []
for name in names:
try:
name.encode(sys.getfilesystemencoding())
except:
return
if os.path.isdir(os.path.join(top, name)):
dirs.append(name)
else:
nondirs.append(name)
yield top, dirs, nondirs
for name in dirs:
path = os.path.join(top, name)
if not os.path.islink(path):
for x in self._safewalk(path):
yield x
def _computeRelpaths(self):
i = 1
for url in self.resources['urls'].values():
if url.type == 'dir':
if self.client:
self.client.call('eXe.app.getController("Toolbar").updateImportProgressWindow',_(u'Calculating relative paths to directory %d of %d: %s') % (i, self.numdirs, url.relpath.encode(sys.getfilesystemencoding())))
url.relpaths = []
absd = ''.join([self.baseurl, os.path.sep, url.relpath])
for link in self.resources['urls'].values():
if self.cancel:
return
if link.relpath.encode(sys.getfilesystemencoding()) == '.':
continue
rl = relpath(link.absl,absd)
url.relpaths.append((link.l,rl))
i += 1
    def _computeLinks(self):
        """Discover cross-references between the scanned files.

        Two passes: first every HTML file's tag attributes are matched against
        the precomputed relative paths; then HTML and CSS file *contents* are
        scanned exhaustively for the same paths.  Matches are registered on
        each URL via ``addLink``/``addRLink``.
        """
        self._computeRelpaths()
        htmls = self.resources['mimes']['text/html']
        total = len(htmls)
        i = 1
        for url in htmls:
            if self.cancel:
                return
            if self.client:
                self.client.call('eXe.app.getController("Toolbar").updateImportProgressWindow',_(u'Analyzing HTML file labels %d of %d: %s') % (i, total, str(url)))
            content = open(url.path).read()
            encoding = detect(content)['encoding']
            #ucontent = unicode(content,encoding)
            soup = BeautifulSoup(content,from_encoding=encoding)
            # prefer the encoding declared inside the document over the
            # statistically detected one
            declaredHTMLEncoding = getattr(soup, 'declared_html_encoding')
            if declaredHTMLEncoding:
                content = UnicodeDammit(content,[declaredHTMLEncoding]).unicode_markup
                encoding = declaredHTMLEncoding
            else:
                pass
            url.setContent(content,encoding)
            url.setSoup(soup)
            for tag in soup.find_all():
                if self.cancel:
                    return
                if not tag.attrs:
                    continue
                matches = []
                for key, value in tag.attrs.iteritems():
                    if value == "":
                        continue
                    # NOTE(review): bs4 attribute values may be either a list
                    # (multi-valued attrs) or a plain string; iterating a
                    # string yields single characters here -- confirm this is
                    # the intended handling for string-valued attributes.
                    for val in value:
                        unq_value = unquote(val)
                        unq_low_value = unquote(val.lower())
                        for l, rl in self.resources['urls'][url.parentpath].relpaths:
                            low_rl = rl.lower()
                            if rl in unq_value:
                                L = Link(self.resources['urls'][l],rl,url,tag,key,rl)
                                matches.append(L)
                            elif low_rl in unq_value:
                                L = Link(self.resources['urls'][l],rl,url,tag,key,low_rl)
                                matches.append(L)
                            elif l in unq_value:
                                L = Link(self.resources['urls'][l],rl,url,tag,key,l)
                                matches.append(L)
                # keep only the most specific matches: drop any whose relative
                # path is a substring of another match's relative path
                matches_final = []
                for l1 in matches:
                    matches_ = [ m for m in matches if m != l1 ]
                    found = False
                    for l2 in matches_:
                        if re.search(re.escape(l1.relative),l2.relative):
                            found = True
                    if not found:
                        matches_final.append(l1)
                if matches_final:
                    for match in matches_final:
                        url.addLink( match )
                        url.addRLink( str(match.url) )
            i += 1
        # second pass: scan raw file contents (CSS has no tags to walk)
        csss = self.resources['mimes']['text/css'] if 'text/css' in self.resources['mimes'].keys() else None
        csss_and_htmls = csss + htmls if csss else htmls
        total = len(csss_and_htmls)
        i = 1
        for url in csss_and_htmls:
            if self.cancel:
                return
            if url.mime == 'text/css':
                tipo = 'CSS'
            else:
                tipo = 'HTML'
            content = url.getContent()
            if not content:
                # CSS files were not read in the first pass; load them now
                content = open(url.path).read()
                encoding = detect(content)['encoding']
                content = unicode(content,encoding)
                url.setContent(content,encoding)
            if self.client:
                self.client.call('eXe.app.getController("Toolbar").updateImportProgressWindow',_(u'Exhaustively analyzed file %s %d of %d: %s') % (tipo, i, total, str(url)))
            matches = []
            for l, rl in self.resources['urls'][url.parentpath].relpaths:
                low_rl = rl.lower()
                if rl in content:
                    L = Link(self.resources['urls'][l],rl,url,match=rl)
                    matches.append(L)
                elif low_rl in content:
                    L = Link(self.resources['urls'][l],rl,url,match=low_rl)
                    matches.append(L)
            # same most-specific filtering as in the first pass
            matches_final = []
            for l1 in matches:
                matches_ = [ m for m in matches if m != l1 ]
                found = False
                for l2 in matches_:
                    if re.search(re.escape(l1.relative),l2.relative):
                        found = True
                if not found:
                    matches_final.append(l1)
            if matches_final:
                for match in matches_final:
                    # avoid re-registering links already found via tag scan
                    if not [ link for link in url.links if link.relative == match.relative ]:
                        url.addLink( match )
                        url.addRLink( str(match.url) )
            i += 1
def _computeDepths(self,url):
from collections import deque
q = deque()
q.append(([url],0))
while q:
if self.cancel:
return
links, depth = q.pop()
for link in links:
if link in self.depths.keys():
self.depths[link] = depth if self.depths[link] > depth else self.depths[link]
else:
self.depths[link] = depth
if self.depths[link] < depth:
continue
q.appendleft((self.resources['urls'][link].rlinks,depth + 1))
def insertNode(self,urls=['index.html','index.htm']):
if self.cancel:
return
for url in urls:
if url not in self.resources['urls'].keys():
continue
else:
self.depths = {}
self._computeLinks()
if self.cancel:
return
if self.client:
self.client.call('eXe.app.getController("Toolbar").updateImportProgressWindow',_(u'Calculating depth of links'))
self._computeDepths(url)
if self.cancel:
return
self._insertNode(None,url)
    def _guessName(self,url):
        """Return a display name for *url*.

        NOTE(review): the immediate ``return str(url)`` below makes the whole
        title/anchor-text heuristic underneath unreachable dead code.  It looks
        like a deliberate override (or a leftover debugging stub) -- confirm
        before removing either the return or the heuristic.
        """
        return str(url)
        # --- unreachable heuristic below: prefer the page <title>, else the
        # --- most common anchor text among links pointing at this URL
        soup = url.getSoup()
        if soup.title:
            return str(soup.title.string)
        names = {}
        for link in url.plinks:
            if link.tag.contents and isinstance(link.tag.contents[0],unicode) and link.tag.contents[0].lstrip() != u"":
                if link.tag.contents[0] in names.keys():
                    names[link.tag.contents[0]] += 1
                else:
                    names[link.tag.contents[0]] = 1
        max = 0
        max_name_ocurr = str(url)
        for name in names.keys():
            if names[name] > max:
                max_name_ocurr = name
                max = names[name]
        return unquote(max_name_ocurr)
    def _insertNode(self, node, url, depth=0, idevice=None):
        """Recursively insert *url* (a key string or a Link) into the node
        tree, creating idevices/resources and rewriting link targets.
        """
        if self.cancel:
            return
        if isinstance(url,str):
            link = None
            url = self.resources['urls'][url]
        elif isinstance(url,Link):
            link = url
            url = link.url
        # only create a node/idevice when visiting this page at (one of) its
        # shallowest depth(s)
        if url.mime == 'text/html' and self.depths[str(url)] >= depth:
            if self.client:
                self.client.call('eXe.app.getController("Toolbar").updateImportProgressWindow',_(u'Inserting %s') % (str(url)))
            type = link.tag.name if link and link.tag else 'a'
            if type not in ['frame','iframe'] and node:
                node = node.createChild()
                node.setTitle(self._guessName(url))
                if depth == 1:
                    node.up()
            if not node:
                node = self.node
            # frames/iframes attach to the parent page's idevice
            parent = idevice if type in ['frame','iframe'] else None
            idevice = FreeTextIdevice(type=type,parent=parent)
            idevice.edit = False
            node.addIdevice(idevice)
        if url.type == "file":
            p = Path(self.baseurl + os.path.sep + str(url))
            p.setSalt(str(url))
            r = Resource(idevice,p)
            url.storageName = quote(r.storageName)
            # rewrite the referring document's link target to a placeholder
            # that is resolved to 'resources/...' (or '') further below
            if link and link.relative not in link.referrer.contentUpdated:
                if link.match:
                    link.referrer.content = link.referrer.content.replace(link.match,'###resources###/%s' % (url.storageName))
                else:
                    link.referrer.content = link.referrer.content.replace(link.relative,'###resources###/%s' % (url.storageName))
                link.referrer.contentUpdated.append(link.relative)
        if self.depths[str(url)] < depth:
            return
        for l in url.links:
            if self.cancel:
                return
            self._insertNode(node, l, depth+1, idevice)
        content = url.getContent()
        if content:
            content_w_resourcePaths = re.sub('###resources###/','resources/',content)
            content_wo_resourcePaths = re.sub('###resources###/','',content)
            if url.mime == "text/html" and idevice:
                soup = url.getSoup()
                if soup and soup.declaredHTMLEncoding:
                    # NOTE(review): re.sub's 4th positional argument is
                    # *count*, not *flags* -- re.IGNORECASE (== 2) limits the
                    # substitution to 2 occurrences and is NOT case-insensitive.
                    # Probably should be flags=re.IGNORECASE (or a '(?i)'
                    # pattern prefix); confirm and fix.
                    content_w_resourcePaths = re.sub(soup.declaredHTMLEncoding,'utf-8',content_w_resourcePaths,re.IGNORECASE)
                    content_wo_resourcePaths = re.sub(soup.declaredHTMLEncoding,'utf-8',content_wo_resourcePaths,re.IGNORECASE)
                if soup and soup.find('frameset'):
                    idevice.type = 'frameset'
                idevice.setContent(content_w_resourcePaths,content_wo_resourcePaths)
            # NOTE(review): `r` is only bound when url.type == "file" above;
            # if content exists for a non-file URL this raises NameError (or
            # reuses a stale `r`) -- verify that path cannot be reached.
            f = open(r.path,"w")
            f.write(content_wo_resourcePaths.encode('utf-8'))
            f.close()
| exelearning/iteexe | exe/importers/scanresources.py | Python | gpl-2.0 | 19,859 |
"""Testing code for the tupa.features package, unit-testing only."""
import os
from collections import OrderedDict
import pytest
from ucca import textutil
from tupa.action import Actions
from tupa.features.dense_features import DenseFeatureExtractor
from tupa.features.sparse_features import SparseFeatureExtractor
from tupa.model import Model
from tupa.oracle import Oracle
from tupa.states.state import State
from .conftest import passage_files, load_passage, basename
SPARSE = "sparse"
DENSE = "dense"
VOCAB = os.path.join("test_files", "vocab", "en_core_web_lg.csv")
WORD_VECTORS = os.path.join("test_files", "vocab", "wiki.en.vec")
OMITTED = "d"
class FeatureExtractorCreator:
    """Callable factory that builds a configured feature extractor.

    ``name`` selects sparse vs. dense; the remaining flags are remembered and
    applied to the config when the instance is called.  ``vocab == "-"`` marks
    the identity vocabulary (``self.id``).
    """
    def __init__(self, name, indexed=False, annotated=False, vocab=None, wordvectors=None, omit=None):
        self.name = name
        self.indexed = indexed
        self.annotated = annotated
        self.vocab = vocab
        self.id = vocab == "-"
        self.wordvectors = wordvectors
        self.omit = omit

    def __str__(self):
        # e.g. "dense-indexed-vocab-id": the name plus every truthy flag
        parts = [self.name]
        for attr in ("indexed", "annotated", "vocab", "id", "wordvectors", "omit"):
            if getattr(self, attr):
                parts.append(attr)
        return "-".join(parts)

    def __call__(self, config):
        config.args.vocab = self.vocab
        config.args.word_vectors = self.wordvectors
        config.args.omit_features = self.omit
        if self.name == SPARSE:
            return SparseFeatureExtractor(omit_features=self.omit)
        params = OrderedDict((p.name, p.create_from_config())
                             for p in Model(None, config=config).param_defs())
        return DenseFeatureExtractor(params, indexed=self.indexed, node_dropout=0,
                                     omit_features=self.omit)
def feature_extractors(*args, **kwargs):
    """Return the three standard creators: sparse, dense, and indexed dense."""
    creators = [FeatureExtractorCreator(SPARSE, *args, **kwargs)]
    creators.append(FeatureExtractorCreator(DENSE, *args, **kwargs))
    creators.append(FeatureExtractorCreator(DENSE, *args, indexed=True, **kwargs))
    return creators
def extract_features(feature_extractor, state, features):
    """Extract features for *state*, sanity-check their sizes against the
    extractor's params, and append them to the *features* list."""
    extracted = feature_extractor.extract_features(state)
    if feature_extractor.params:
        for name, feature_values in extracted.items():
            assert len(feature_values) == feature_extractor.params[name].num, name
    features.append(extracted)
def _test_features(config, feature_extractor_creator, filename, write_features):
    """Run the oracle over one passage, extracting features at every step, and
    compare the result with the stored expected-output file (regenerating that
    file first when *write_features* is set)."""
    feature_extractor = feature_extractor_creator(config)
    passage = load_passage(filename, annotate=feature_extractor_creator.annotated)
    textutil.annotate(passage, as_array=True, as_extra=False, vocab=config.vocab())
    config.set_format(passage.extra.get("format") or "ucca")
    oracle = Oracle(passage)
    state = State(passage)
    actions = Actions()
    for key, param in feature_extractor.params.items():
        if not param.numeric:
            # disable dropout so extraction is deterministic and comparable
            param.dropout = 0
            feature_extractor.init_param(key)
    features = [feature_extractor.init_features(state)]
    while True:
        extract_features(feature_extractor, state, features)
        # min(..., key=str) makes the chosen oracle action deterministic
        action = min(oracle.get_actions(state, actions).values(), key=str)
        state.transition(action)
        if state.need_label:
            extract_features(feature_extractor, state, features)
            label, _ = oracle.get_label(state, action)
            state.label_node(label)
        if state.finished:
            break
    # flatten to "key value" lines, with a blank line after each feature dict
    features = ["%s %s\n" % i for f in features if f for i in (sorted(f.items()) + [("", "")])]
    compare_file = os.path.join("test_files", "features", "-".join((basename(filename), str(feature_extractor_creator)))
                                + ".txt")
    if write_features:
        with open(compare_file, "w", encoding="utf-8") as f:
            f.writelines(features)
    with open(compare_file, encoding="utf-8") as f:
        assert f.readlines() == features, compare_file
@pytest.mark.parametrize("feature_extractor_creator",
                         [f for v in (None, "-", VOCAB) for w in (None, WORD_VECTORS) for o in (None, OMITTED)
                          for f in feature_extractors(vocab=v, wordvectors=w, omit=o)], ids=str)
@pytest.mark.parametrize("filename", passage_files(), ids=basename)
def test_features(config, feature_extractor_creator, filename, write_features):
    """Extracted features for every passage match the stored expected files."""
    _test_features(config, feature_extractor_creator, filename, write_features)
@pytest.mark.parametrize("feature_extractor_creator",
                         [f for v in ("-", VOCAB) for w in (None, WORD_VECTORS) for o in (None, OMITTED)
                          for f in feature_extractors(annotated=True, vocab=v, wordvectors=w, omit=o)], ids=str)
@pytest.mark.parametrize("filename", passage_files("conllu"), ids=basename)
def test_features_conllu(config, feature_extractor_creator, filename, write_features):
    """Same comparison as test_features, for annotated CoNLL-U passages."""
    _test_features(config, feature_extractor_creator, filename, write_features)
@pytest.mark.parametrize("feature_extractor_creator", [f for o in (None, OMITTED)
                                                       for f in feature_extractors(omit=o)[:-1]], ids=str)
def test_feature_templates(config, feature_extractor_creator, write_features):
    """All feature templates of an extractor match the stored template file.

    Only the non-indexed extractors are checked (hence ``[:-1]`` above).
    """
    config.set_format("amr")
    feature_extractor = feature_extractor_creator(config)
    features = ["%s\n" % i for i in feature_extractor.all_features()]
    compare_file = os.path.join("test_files", "features", "templates-%s.txt" % str(feature_extractor_creator))
    # use an explicit encoding (consistent with _test_features) so the
    # comparison does not depend on the platform's default locale
    if write_features:
        with open(compare_file, "w", encoding="utf-8") as f:
            f.writelines(features)
    with open(compare_file, encoding="utf-8") as f:
        assert f.readlines() == features, compare_file
| danielhers/tupa | tests/test_features.py | Python | gpl-3.0 | 5,494 |
"""This application provides a framework for disqualifying users for various
reasons, as well as simple modeling of any custom disqualification.
"""
| papedaniel/oioioi | oioioi/disqualification/__init__.py | Python | gpl-3.0 | 149 |
# coding=utf-8
# Author: Nic Wolfe <nic@wolfeden.ca>
#
# URL: https://sickrage.github.io
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import datetime
import sys
import urllib
import sickbeard
from sickbeard.common import Quality, USER_AGENT
from sickrage.helper.common import dateTimeFormat
class SickBeardURLopener(urllib.FancyURLopener, object):
    """URL opener that identifies itself with SickRage's User-Agent string."""
    # FancyURLopener sends this as the User-Agent header
    version = USER_AGENT
class SearchResult(object):  # pylint: disable=too-few-public-methods, too-many-instance-attributes
    """
    Represents a search result from an indexer.
    """

    def __init__(self, episodes):
        # provider instance that produced this result
        self.provider = None
        # release show object
        self.show = None
        # URL to the NZB/torrent file
        self.url = u''
        # used by some providers to store extra info associated with the result
        self.extraInfo = []
        # list of TVEpisode objects that this result is associated with
        self.episodes = episodes
        # quality of the release
        self.quality = Quality.UNKNOWN
        # release name
        self.name = u''
        # size of the release (-1 = n/a)
        self.size = -1
        # release group
        self.release_group = u''
        # version
        self.version = -1
        # hash
        self.hash = None
        # content
        self.content = None
        # subclasses set this to 'nzb'/'nzbdata'/'torrent'
        self.resultType = u''

    def __str__(self):
        if self.provider is None:
            return u'Invalid provider, unable to print self'
        parts = [u'{0} @ {1}\n'.format(self.provider.name, self.url)]
        parts.append(u'Extra Info:\n')
        parts.extend(u' {0}\n'.format(extra) for extra in self.extraInfo)
        parts.append(u'Episodes:\n')
        parts.extend(u' {0}\n'.format(ep) for ep in self.episodes)
        parts.append(u'Quality: {0}\n'.format(Quality.qualityStrings[self.quality]))
        parts.append(u'Name: {0}\n'.format(self.name))
        parts.append(u'Size: {0}\n'.format(self.size))
        parts.append(u'Release Group: {0}\n'.format(self.release_group))
        return u''.join(parts)

    def fileName(self):
        # e.g. "Show.Name.S01E01.nzb"
        return u'{0}.{1}'.format(self.episodes[0].prettyName(), self.resultType)
class NZBSearchResult(SearchResult):  # pylint: disable=too-few-public-methods
    """
    Regular NZB result with an URL to the NZB
    """

    def __init__(self, episodes):
        super(NZBSearchResult, self).__init__(episodes)
        # also used as the file extension by SearchResult.fileName()
        self.resultType = u'nzb'
class NZBDataSearchResult(SearchResult):  # pylint: disable=too-few-public-methods
    """
    NZB result where the actual NZB XML data is stored in the extraInfo
    """

    def __init__(self, episodes):
        super(NZBDataSearchResult, self).__init__(episodes)
        # also used as the file extension by SearchResult.fileName()
        self.resultType = u'nzbdata'
class TorrentSearchResult(SearchResult):  # pylint: disable=too-few-public-methods
    """
    Torrent result with an URL to the torrent
    """

    def __init__(self, episodes):
        super(TorrentSearchResult, self).__init__(episodes)
        # also used as the file extension by SearchResult.fileName()
        self.resultType = u'torrent'
class AllShowsListUI(object):  # pylint: disable=too-few-public-methods
    """
    This class is for indexer api.

    Instead of prompting with a UI to pick the desired result out of a
    list of shows it tries to be smart about it based on what shows
    are in SickRage.
    """

    def __init__(self, config, log=None):
        self.config = config
        self.log = log

    def selectSeries(self, allSeries):
        """Return the subset of *allSeries* whose name or aliases contain the
        configured 'searchterm' (case-insensitive)."""
        search_results = []

        # get all available shows
        if allSeries and 'searchterm' in self.config:
            search_term = self.config['searchterm']

            # try to pick a show that's in my show list
            for curShow in allSeries:
                if curShow in search_results:
                    continue

                # BUGFIX: reset the candidate names for every show.  The list
                # previously accumulated across iterations, so any show
                # following a matching one was also (wrongly) selected.
                series_names = []
                if 'seriesname' in curShow:
                    series_names.append(curShow['seriesname'])
                if 'aliasnames' in curShow:
                    series_names.extend(curShow['aliasnames'].split('|'))

                for name in series_names:
                    if search_term.lower() in name.lower():
                        if 'firstaired' not in curShow:
                            curShow['firstaired'] = 'Unknown'
                        if curShow not in search_results:
                            search_results += [curShow]

        return search_results
class ShowListUI(object):  # pylint: disable=too-few-public-methods
    """
    This class is for tvdb-api.

    Instead of prompting with a UI to pick the desired result out of a
    list of shows it tries to be smart about it based on what shows
    are in SickRage.
    """

    def __init__(self, config, log=None):
        self.config = config
        self.log = log

    @staticmethod
    def selectSeries(allSeries):
        """Return the first series already in SickRage's show list, or the
        first search result as a fallback."""
        try:
            # try to pick a show that's in my show list
            show_id_list = {int(x.indexerid) for x in sickbeard.showList if x}
            for curShow in allSeries:
                if int(curShow['id']) in show_id_list:
                    return curShow
        except Exception:
            # Maybe curShow doesnt have id? Ignore it
            pass

        # if nothing matches then return first result
        # NOTE(review): raises IndexError when allSeries is empty -- confirm
        # callers always pass a non-empty list.
        return allSeries[0]
class Proper(object):  # pylint: disable=too-few-public-methods, too-many-instance-attributes
    """Container describing a candidate proper/repack release for an episode."""

    def __init__(self, name, url, date, show):
        # release name as published by the provider
        self.name = name
        # download URL of the release
        self.url = url
        # publication date of the release
        self.date = date
        self.provider = None
        self.quality = Quality.UNKNOWN
        self.release_group = None
        self.version = -1

        self.show = show
        # indexer identification, filled in later by the proper finder
        self.indexer = None
        self.indexerid = -1
        self.season = -1
        self.episode = -1
        self.scene_season = -1
        self.scene_episode = -1

    def __str__(self):
        return u'{date} {name} {season}x{episode} of {series_id} from {indexer}'.format(
            date=self.date, name=self.name, season=self.season, episode=self.episode,
            series_id=self.indexerid, indexer=sickbeard.indexerApi(self.indexer).name)
class ErrorViewer(object):
    """
    Keeps a static list of UIErrors to be displayed on the UI and allows
    the list to be cleared.
    """
    errors = []

    def __init__(self):
        ErrorViewer.errors = []

    @staticmethod
    def add(error):
        # keep at most one entry per distinct message: drop any previously
        # recorded error carrying the same message, then append the new one
        kept = [known for known in ErrorViewer.errors
                if known.message != error.message]
        kept.append(error)
        ErrorViewer.errors = kept

    @staticmethod
    def clear():
        ErrorViewer.errors = []

    @staticmethod
    def get():
        return ErrorViewer.errors
class WarningViewer(object):
    """
    Keeps a static list of (warning) UIErrors to be displayed on the UI and allows
    the list to be cleared.
    """
    errors = []

    def __init__(self):
        WarningViewer.errors = []

    @staticmethod
    def add(error):
        # keep at most one entry per distinct message: drop any previously
        # recorded warning carrying the same message, then append the new one
        kept = [known for known in WarningViewer.errors
                if known.message != error.message]
        kept.append(error)
        WarningViewer.errors = kept

    @staticmethod
    def clear():
        WarningViewer.errors = []

    @staticmethod
    def get():
        return WarningViewer.errors
class UIError(object):  # pylint: disable=too-few-public-methods
    """
    Represents an error to be displayed in the web UI.
    """

    def __init__(self, message):
        # sys.exc_info()[-2] is the currently handled exception instance (the
        # 'value' slot); when no exception is active it is None and the plain
        # message is used as the title instead.
        self.title = sys.exc_info()[-2] or message
        self.message = message
        # human-readable creation timestamp shown next to the error
        self.time = datetime.datetime.now().strftime(dateTimeFormat)
| Jusedawg/SickRage | sickbeard/classes.py | Python | gpl-3.0 | 8,125 |
from namespace_class import *

# Constructing classes wrapped from private namespaces must fail.
# (raise statements modernized from the Python-2-only `raise X, "msg"`
# form to `raise X("msg")`, which works on both Python 2.6+ and 3.)
try:
    p = Private1()
    error = 1
except:
    error = 0
if error:
    raise RuntimeError("Private1 is private")

try:
    p = Private2()
    error = 1
except:
    error = 0
if error:
    raise RuntimeError("Private2 is private")

# Smoke-test the public namespace wrappers and templates.
EulerT3D.toFrame(1, 1, 1)

b = BooT_i()
b = BooT_H()

f = FooT_i()
f.quack(1)

f = FooT_d()
f.moo(1)

f = FooT_H()
f.foo(Hi)
| jrversteegh/softsailor | deps/swig-2.0.4/Examples/test-suite/python/namespace_class_runme.py | Python | gpl-3.0 | 379 |
# -*- encoding: utf-8 -*-
"""Test class for Users UI
:Requirement: User
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: UI
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
import random
from fauxfactory import gen_string
from nailgun import entities
from robottelo import ssh
from robottelo.config import settings
from robottelo.constants import (
DEFAULT_ORG,
LANGUAGES,
LDAP_ATTR,
LDAP_SERVER_TYPE,
ROLES,
TIMEZONES,
)
from robottelo.datafactory import (
filtered_datapoint,
invalid_emails_list,
invalid_names_list,
invalid_values_list,
valid_data_list,
valid_emails_list,
)
from robottelo.decorators import (
skip_if_not_set,
stubbed,
tier1,
tier2,
tier3,
)
from robottelo.test import UITestCase
from robottelo.ui.factory import make_user, make_usergroup, set_context
from robottelo.ui.locators import common_locators, locators, tab_locators
from robottelo.ui.session import Session
from robozilla.decorators import skip_if_bug_open
@filtered_datapoint
def valid_strings(len1=10):
    """Generates a list of all the input strings, (excluding html)"""
    # NOTE(review): the two '{0}-{1}' alpha entries below are identical data
    # points -- one of them was probably meant to use a different separator or
    # character set; confirm and differentiate.
    return [
        gen_string('alpha', 5),
        gen_string('alpha', len1),
        u'{0}-{1}'.format(gen_string('alpha', 4),
                          gen_string('alpha', 4)),
        u'{0}-{1}'.format(gen_string('alpha', 4),
                          gen_string('alpha', 4)),
        u'նորօգտվող-{0}'.format(gen_string('alpha', 2)),
        u'新用戶-{0}'.format(gen_string('alpha', 2)),
        u'новогопользоват-{0}'.format(gen_string('alpha', 2)),
        u'uusikäyttäjä-{0}'.format(gen_string('alpha', 2)),
        u'νέοςχρήστης-{0}'.format(gen_string('alpha', 2)),
    ]
class UserTestCase(UITestCase):
"""Implements Users tests in UI"""
    @classmethod
    def setUpClass(cls):
        """Trim remote-execution roles when the plug-in is absent."""
        super(UserTestCase, cls).setUpClass()
        # Check whether necessary plug-ins are installed for server instance
        result = ssh.command(
            'rpm -qa | grep rubygem-foreman_remote_execution'
        )
        if result.return_code != 0:
            # NOTE(review): mutates the imported ROLES constant in place,
            # which also affects every other module importing it.
            ROLES.remove('Remote Execution Manager')
            ROLES.remove('Remote Execution User')
    @tier1
    def test_positive_create_with_username(self):
        """Create User for all variations of Username

        :id: 2acc8c7d-cb14-4eda-98f9-fb379950f2f5

        :expectedresults: User is created successfully

        :CaseImportance: Critical
        """
        with Session(self) as session:
            # each valid username variant runs as its own subtest
            for user_name in valid_strings():
                with self.subTest(user_name):
                    make_user(session, username=user_name)
                    self.assertIsNotNone(self.user.search(user_name))
    @tier1
    def test_positive_create_with_first_name(self):
        """Create User for all variations of First Name

        :id: dd398cd6-821e-4b0e-a111-22d5a6eeafd8

        :expectedresults: User is created successfully

        :CaseImportance: Critical
        """
        with Session(self) as session:
            for first_name in valid_strings():
                with self.subTest(first_name):
                    # random login; only the first name varies per subtest
                    name = gen_string('alpha')
                    make_user(session, username=name, first_name=first_name)
                    self.user.validate_user(name, 'firstname', first_name)
    @tier1
    def test_positive_create_with_surname(self):
        """Create User for all variations of Surname

        :id: 0a2dc093-0cd1-41eb-99cd-79935c74563f

        :expectedresults: User is created successfully

        :CaseImportance: Critical
        """
        with Session(self) as session:
            # surnames may be up to 50 chars, hence valid_strings(50)
            for last_name in valid_strings(50):
                with self.subTest(last_name):
                    name = gen_string('alpha')
                    make_user(session, username=name, last_name=last_name)
                    self.user.validate_user(name, 'lastname', last_name)
    @tier1
    def test_positive_create_with_email(self):
        """Create User for all variations of Email Address

        :id: 1c6c0f50-401c-4b7d-9795-97a1be3806f8

        :expectedresults: User is created successfully

        :CaseImportance: Critical
        """
        with Session(self) as session:
            for email in valid_emails_list():
                with self.subTest(email):
                    name = gen_string('alpha')
                    make_user(session, username=name, email=email)
                    self.user.validate_user(name, 'email', email)
    @tier1
    def test_positive_create_with_description(self):
        """Create User for all variations of Description

        :id: eebeb6d3-c99f-4dc2-991c-0e8268187110

        :expectedresults: User is created successfully

        :CaseImportance: Critical
        """
        with Session(self) as session:
            for description in valid_data_list():
                with self.subTest(description):
                    name = gen_string('alpha')
                    make_user(session, username=name, description=description)
                    # last arg False: value is validated without exact match
                    self.user.validate_user(
                        name, 'description', description, False
                    )
    @tier1
    def test_positive_create_with_language(self):
        """Create User for all variations of Language

        :id: 1c5581a8-79ae-40a6-8052-f47be2d4c5eb

        :expectedresults: User is created successfully

        :CaseImportance: Critical
        """
        with Session(self) as session:
            # LANGUAGES holds every locale selectable in the UI
            for language in LANGUAGES:
                with self.subTest(language):
                    name = gen_string('alpha')
                    make_user(session, username=name, locale=language)
                    self.user.validate_user(name, 'language', language, False)
    @tier1
    def test_positive_create_with_password(self):
        """Create User for all variations of Password

        :id: 83d6efe0-7526-465c-9c97-5673c7736fc4

        :expectedresults: User is created successfully

        :CaseImportance: Critical
        """
        test_data = valid_strings()
        # add passwords containing shell/HTML-sensitive punctuation
        extra_passwords = (
            u'foo@!#$^&*( ) {0}'.format(gen_string('alpha', 2)),
            u'bar+{{}}|\"?hi {0}'.format(gen_string('alpha', 2)),
        )
        test_data.extend(extra_passwords)
        with Session(self) as session:
            for password in test_data:
                with self.subTest(password):
                    name = gen_string('alpha')
                    make_user(
                        session,
                        username=name,
                        password1=password,
                        password2=password,
                    )
                    self.assertIsNotNone(self.user.search(name))
    @tier1
    def test_positive_create_admin(self):
        """Create an Admin user

        :id: 9bf56045-1026-435c-bf4c-623e160582d5

        :expectedresults: Admin User is created successfully

        :CaseImportance: Critical
        """
        user_name = gen_string('alpha')
        with Session(self) as session:
            # admin=True ticks the 'Administrator' checkbox on the form
            make_user(session, username=user_name, admin=True)
            self.assertIsNotNone(self.user.search(user_name))
    @tier1
    def test_positive_create_with_one_role(self):
        """Create User with one role

        :id: 6d6c795e-8b46-4f0f-84e1-f7e22add6173

        :expectedresults: User is created successfully

        :CaseImportance: Critical
        """
        name = gen_string('alpha')
        role = entities.Role().create()
        with Session(self) as session:
            make_user(session, username=name, roles=[role.name], edit=True)
            self.user.click(self.user.search(name))
            self.user.click(tab_locators['users.tab_roles'])
            # an assigned role shows up in the 'deselect' (selected) column
            element = self.user.wait_until_element(
                common_locators['entity_deselect'] % role.name)
            self.assertIsNotNone(element)
    @tier2
    def test_positive_create_with_multiple_roles(self):
        """Create User with multiple roles

        :id: d3cc4434-25ca-4465-8878-42495390c17b

        :expectedresults: User is created successfully

        :CaseLevel: Integration
        """
        name = gen_string('alpha')
        role1 = gen_string('alpha')
        role2 = gen_string('alpha')
        for role in [role1, role2]:
            entities.Role(name=role).create()
        with Session(self) as session:
            make_user(session, username=name, roles=[role1, role2], edit=True)
            self.user.click(self.user.search(name))
            self.user.click(tab_locators['users.tab_roles'])
            # both roles must appear in the selected column
            for role in [role1, role2]:
                self.assertIsNotNone(
                    self.user.wait_until_element(
                        common_locators['entity_deselect'] % role
                    ))
    @tier2
    def test_positive_create_with_all_roles(self):
        """Create User and assign all available roles to it

        :id: 814593ca-1566-45ea-9eff-e880183b1ee3

        :expectedresults: User is created successfully

        :CaseLevel: Integration
        """
        name = gen_string('alpha')
        with Session(self) as session:
            # ROLES may have been trimmed in setUpClass if the remote
            # execution plug-in is not installed
            make_user(session, username=name, roles=ROLES, edit=True)
            self.user.click(self.user.search(name))
            self.user.click(tab_locators['users.tab_roles'])
            for role in ROLES:
                self.assertIsNotNone(self.user.wait_until_element(
                    common_locators['entity_deselect'] % role))
    @tier1
    def test_positive_create_with_one_org(self):
        """Create User associated to one Org

        :id: 830bc5fc-e773-466c-9b38-4f33a2c1d05e

        :expectedresults: User is created successfully

        :CaseImportance: Critical
        """
        name = gen_string('alpha')
        org_name = gen_string('alpha')
        entities.Organization(name=org_name).create()
        with Session(self) as session:
            make_user(
                session, username=name, organizations=[org_name], edit=True)
            self.user.click(self.user.search(name))
            self.user.click(tab_locators['users.tab_organizations'])
            # the assigned org shows up in the selected column
            element = self.user.wait_until_element(
                common_locators['entity_deselect'] % org_name)
            self.assertIsNotNone(element)
    @tier2
    def test_positive_create_with_multiple_orgs(self):
        """Create User associated to multiple Orgs

        :id: d74c0284-3995-4a4a-8746-00858282bf5d

        :expectedresults: User is created successfully

        :CaseLevel: Integration
        """
        name = gen_string('alpha')
        org_name1 = gen_string('alpha')
        org_name2 = gen_string('alpha')
        for org_name in [org_name1, org_name2]:
            entities.Organization(name=org_name).create()
        with Session(self) as session:
            # operate from the default org context so it is auto-assigned too
            set_context(session, org=DEFAULT_ORG)
            make_user(
                session,
                username=name,
                organizations=[org_name1, org_name2],
                edit=True,
            )
            self.user.search_and_click(name)
            self.user.click(tab_locators['users.tab_organizations'])
            # the current context org is expected alongside the two new ones
            for org_name in [org_name1, org_name2, DEFAULT_ORG]:
                element = self.user.wait_until_element(
                    common_locators['entity_deselect'] % org_name)
                self.assertIsNotNone(element)
    @tier1
    def test_positive_create_with_default_org(self):
        """Create User and has default organization associated with it

        :id: 3d51dead-9053-427d-8292-c42e87ed6289

        :expectedresults: User is created with default Org selected.

        :CaseImportance: Critical
        """
        name = gen_string('alpha')
        org_name = gen_string('alpha')
        entities.Organization(name=org_name).create()
        with Session(self) as session:
            make_user(session, username=name, organizations=[org_name],
                      edit=True, default_org=org_name)
            self.user.search_and_click(name)
            self.user.click(tab_locators['users.tab_organizations'])
            element = session.nav.wait_until_element(
                common_locators['entity_deselect'] % org_name)
            self.assertIsNotNone(element)
            # Check that default organization value was really chosen
            self.assertEqual(org_name, session.nav.find_element(
                locators['users.default_org_value']).text)
    @tier1
    def test_positive_create_with_default_location(self):
        """Create User and associate a default Location.

        :id: 952a0be5-d393-49a2-8fd9-f6dfcc31f762

        :expectedresults: User is created with default Location selected.

        :CaseImportance: Critical
        """
        name = gen_string('alpha')
        loc_name = gen_string('alpha')
        entities.Location(name=loc_name).create()
        with Session(self) as session:
            make_user(session, username=name, locations=[loc_name],
                      edit=True, default_loc=loc_name)
            self.user.search_and_click(name)
            self.user.click(tab_locators['users.tab_locations'])
            element = session.nav.wait_until_element(
                common_locators['entity_deselect'] % loc_name)
            self.assertIsNotNone(element)
            # Check that default location value was really chosen
            self.assertEqual(loc_name, session.nav.find_element(
                locators['users.default_loc_value']).text)
    @tier1
    def test_negative_create(self):
        """Enter all User creation details and Cancel

        :id: 2774be2f-303e-498f-8072-80462f33c52e

        :expectedresults: User is not created

        :CaseImportance: Critical
        """
        user_name = gen_string('alpha')
        with Session(self) as session:
            # submit=False fills the form but presses Cancel instead of Submit
            make_user(
                session,
                username=user_name,
                first_name=gen_string('alpha'),
                last_name=gen_string('alpha'),
                email=u'{0}@example.com'.format(gen_string('numeric')),
                submit=False,
            )
            self.assertIsNone(self.user.search(user_name))
    @tier1
    def test_negative_create_with_invalid_name(self):
        """Create User with invalid User Name

        :id: 31bbe350-0275-4aaf-99ec-3f77bfd4ba00

        :expectedresults: User is not created. Appropriate error shown.

        :CaseImportance: Critical
        """
        with Session(self) as session:
            for user_name in invalid_values_list(interface='ui'):
                with self.subTest(user_name):
                    make_user(session, username=user_name)
                    # a validation error banner must be displayed
                    self.assertIsNotNone(self.user.wait_until_element(
                        common_locators['haserror']))
    @tier1
    def test_negative_create_with_invalid_firstname(self):
        """Create User with invalid FirstName

        :id: 21525bf2-4de9-43f0-8c92-b2fad1fdc944

        :expectedresults: User is not created. Appropriate error shown.

        :CaseImportance: Critical
        """
        with Session(self) as session:
            # invalid_values_list is not used here because first name is an
            # optional field
            for first_name in invalid_names_list():
                with self.subTest(first_name):
                    make_user(
                        session,
                        username=gen_string('alpha'),
                        first_name=first_name,
                    )
                    self.assertIsNotNone(self.user.wait_until_element(
                        common_locators['haserror']))
    @tier1
    def test_negative_create_with_invalid_surname(self):
        """Create User with invalid Surname

        :id: 47d9e8be-3b29-4a56-85d7-898145b5b034

        :expectedresults: User is not created. Appropriate error shown.

        :CaseImportance: Critical
        """
        with Session(self) as session:
            # invalid_values_list is not used here because sur name is an
            # optional field
            for last_name in invalid_names_list():
                with self.subTest(last_name):
                    make_user(
                        session,
                        username=gen_string('alpha'),
                        last_name=last_name,
                    )
                    self.assertIsNotNone(self.user.wait_until_element(
                        common_locators['haserror']))
    @tier1
    def test_negative_create_with_invalid_emails(self):
        """Create User with invalid Email Address

        :id: 36511b82-e070-41ea-81fa-6e29faa9da1c

        :expectedresults: User is not created. Appropriate error shown.

        :CaseImportance: Critical
        """
        with Session(self) as session:
            for email in invalid_emails_list():
                with self.subTest(email):
                    name = gen_string('alpha')
                    make_user(session, username=name, email=email)
                    self.assertIsNotNone(self.user.wait_until_element(
                        common_locators['haserror']))
    @tier1
    def test_negative_create_with_blank_auth(self):
        """Create User with blank value for 'Authorized by' field

        :id: 68f670ed-ac6e-4052-889c-6671d659e510

        :expectedresults: User is not created. Appropriate error shown.

        :CaseImportance: Critical
        """
        with Session(self) as session:
            # empty string deselects any 'Authorized by' source
            make_user(session, username=gen_string('alpha'), authorized_by='')
            self.assertIsNotNone(
                self.user.wait_until_element(common_locators['haserror']))
    @tier1
    def test_negative_create_with_wrong_pass_confirmation(self):
        """Create User with non-matching values in Password and verify

        :id: f818e5fc-b378-4bc7-afa8-18b23ee05053

        :expectedresults: User is not created. Appropriate error shown.

        :CaseImportance: Critical
        """
        with Session(self) as session:
            # two independently generated passwords will not match
            make_user(
                session,
                username=gen_string('alpha'),
                password1=gen_string('alpha'),
                password2=gen_string('alpha'),
            )
            self.assertIsNotNone(
                self.user.wait_until_element(common_locators['haserror']))
    @tier1
    def test_positive_search_by_usergroup(self):
        """Create few users and assign them to usergroup. Perform search for
        users by usergroup they are assigned to

        :id: dceebf68-8d82-4214-9829-350830a78cdd

        :expectedresults: Necessary users can be found and no error raised

        :BZ: 1395667

        :CaseImportance: Critical
        """
        group_name = gen_string('alpha')
        org = entities.Organization().create()
        # Create new users
        user_names = [
            entities.User(organization=[org]).create().login
            for _ in range(2)
        ]
        with Session(self) as session:
            make_usergroup(
                session,
                name=group_name,
                users=user_names,
                org=org.name,
            )
            for user_name in user_names:
                # scoped search: restrict results to the created usergroup
                self.assertIsNotNone(
                    self.user.search(
                        user_name,
                        _raw_query='usergroup = {}'.format(group_name)
                    )
                )
                # regression check for BZ 1395667: the scoped search must not
                # raise an error banner
                self.assertIsNone(
                    self.user.wait_until_element(
                        common_locators['haserror'], timeout=3)
                )
    @tier1
    def test_positive_update_username(self):
        """Update Username in User

        :id: 4ecb2816-9bef-4089-86a0-02d7d065cdb1

        :expectedresults: User is updated successfully

        :CaseImportance: Critical
        """
        name = gen_string('alpha')
        password = gen_string('alpha')
        with Session(self) as session:
            # Role Site meaning 'Site Manager' here
            make_user(
                session,
                username=name,
                password1=password,
                password2=password,
                edit=True,
                roles=['Site'],
            )
            for new_username in valid_strings():
                with self.subTest(new_username):
                    # a fresh browser session is opened per iteration so the
                    # rename can be verified by logging in as the new user
                    with Session(self):
                        self.user.update(name, new_username)
                        self.assertIsNotNone(
                            self.user.search(new_username))
                        self.login.logout()
                        self.login.login(new_username, password)
                        self.assertTrue(self.login.is_logged())
                    name = new_username # for next iteration
@tier1
def test_positive_update_firstname(self):
"""Update first name in User
:id: 03ef8a7f-2bf1-4314-b0cd-a7a6acfc17ea
:expectedresults: User is updated successful
:CaseImportance: Critical
"""
first_name = gen_string('alpha')
new_first_name = gen_string('alpha')
username = gen_string('alpha')
with Session(self) as session:
make_user(session, username=username, first_name=first_name)
self.user.update(username, first_name=new_first_name)
self.user.validate_user(username, 'firstname', new_first_name)
@tier1
def test_positive_update_surname(self):
"""Update surname in User
:id: 0326d221-28b0-4a6b-934e-b67ee6c9f696
:expectedresults: User is updated successful
:CaseImportance: Critical
"""
last_name = gen_string('alpha')
new_last_name = gen_string('alpha')
username = gen_string('alpha')
with Session(self) as session:
make_user(session, username=username, last_name=last_name)
self.user.update(username, last_name=new_last_name)
self.user.validate_user(username, 'lastname', new_last_name)
@tier1
def test_positive_update_email(self):
"""Update Email Address in User
:id: e48314b7-2a49-48ec-896d-af7bf427b1c4
:expectedresults: User is updated successfully
:CaseImportance: Critical
"""
email = u'{0}@example.com'.format(gen_string('alpha'))
new_email = u'{0}@myexample.com'.format(gen_string('alpha'))
username = gen_string('alpha')
with Session(self) as session:
make_user(session, username=username, email=email)
self.user.update(username, email=new_email)
self.user.validate_user(username, 'email', new_email)
    @tier1
    def test_positive_update_description(self):
        """Update Description in User

        :id: f08ee305-0e0b-4df0-82d9-d10edcfa66c0

        :expectedresults: User is updated successfully

        :CaseImportance: Critical
        """
        username = gen_string('alpha')
        description = gen_string('alpha')
        with Session(self) as session:
            make_user(session, username=username, description=description)
            for new_description in valid_data_list():
                with self.subTest(new_description):
                    self.user.update(username, description=new_description)
                    # NOTE(review): the trailing False flag presumably
                    # controls navigation during validation -- confirm
                    # against the validate_user helper signature
                    self.user.validate_user(
                        username, 'description', new_description, False
                    )
    @tier1
    def test_positive_update_language(self):
        """Update Language in User

        :id: 64b6a90e-0d4c-4a55-a4bd-7347010e39f2

        :expectedresults: User is updated successfully

        :CaseImportance: Critical
        """
        # pick any one of the supported UI languages at random
        locale = random.choice(list(LANGUAGES.keys()))
        username = gen_string('alpha')
        with Session(self) as session:
            make_user(session, username=username)
            self.user.update(username, locale=locale)
            self.user.validate_user(username, 'language', locale, False)
    @tier1
    def test_positive_update_password(self):
        """Update password for a user

        :id: db57c3bc-4fae-4ee7-bf6d-8e0bcc7fd55c

        :expectedresults: User password is updated successfully

        :CaseImportance: Critical
        """
        user_name = gen_string('alpha')
        new_password = gen_string('alpha')
        with Session(self) as session:
            # Role 'Site' meaning 'Site Manager' here
            make_user(session, username=user_name, edit=True, roles=['Site'])
            self.user.update(
                user_name,
                new_password=new_password,
                password_confirmation=new_password,
            )
            # verify the change took effect by re-authenticating
            self.login.logout()
            self.login.login(user_name, new_password)
            self.assertTrue(self.login.is_logged())
    @tier1
    def test_positive_update_to_non_admin(self):
        """Convert an user from an admin user to non-admin user

        :id: b41cbcf8-d819-4daa-b217-a4812541dca3

        :expectedresults: User is updated and has proper admin role value

        :CaseImportance: Critical
        """
        user_name = gen_string('alpha')
        with Session(self) as session:
            make_user(session, username=user_name, admin=True)
            self.assertIsNotNone(self.user.search(user_name))
            # NOTE(review): the toggle helper presumably returns the
            # resulting admin state -- expected False after revoking admin
            self.assertFalse(
                self.user.user_admin_role_toggle(user_name, False))
    @tier1
    def test_positive_update_to_admin(self):
        """Convert a user to an admin user

        :id: d3cdda62-1384-4b49-97a3-0c66764583bb

        :expectedresults: User is updated and has proper admin role value

        :CaseImportance: Critical
        """
        user_name = gen_string('alpha')
        with Session(self) as session:
            make_user(session, username=user_name, admin=False)
            self.assertIsNotNone(self.user.search(user_name))
            # NOTE(review): the toggle helper presumably returns the
            # resulting admin state -- expected True after granting admin
            self.assertTrue(self.user.user_admin_role_toggle(user_name, True))
    @tier1
    def test_positive_update_role(self):
        """Update role for a user

        :id: 2a13529c-3863-403b-a319-9569ca1287cb

        :expectedresults: User role is updated

        :CaseImportance: Critical
        """
        strategy, value = common_locators['entity_deselect']
        name = gen_string('alpha')
        role_name = entities.Role().create().name
        with Session(self) as session:
            make_user(session, username=name)
            self.user.click(self.user.search(name))
            self.user.click(tab_locators['users.tab_roles'])
            # the role must not be assigned before the update...
            self.assertIsNone(
                self.user.wait_until_element((strategy, value % role_name)))
            self.user.update(name, new_roles=[role_name])
            self.user.click(self.user.search(name))
            self.user.click(tab_locators['users.tab_roles'])
            # ...and must appear among assigned entities afterwards
            self.assertIsNotNone(
                self.user.wait_until_element((strategy, value % role_name)))
    @tier2
    def test_positive_update_with_multiple_roles(self):
        """Update User with multiple roles

        :id: 127fb368-09fd-4f10-8319-566a1bcb5cd2

        :expectedresults: User is updated successfully

        :CaseLevel: Integration
        """
        name = gen_string('alpha')
        role_names = [
            entities.Role().create().name
            for _ in range(3)
        ]
        with Session(self) as session:
            make_user(session, username=name)
            self.user.update(name, new_roles=role_names)
            self.user.search_and_click(name)
            self.user.click(tab_locators['users.tab_roles'])
            for role in role_names:
                # each assigned role shows up as a deselectable entity
                self.assertIsNotNone(
                    self.user.wait_until_element(
                        common_locators['entity_deselect'] % role)
                )
    @tier2
    def test_positive_update_with_all_roles(self):
        """Update User with all roles

        :id: cd7a9cfb-a700-45f2-a11d-bba6be3c810d

        :expectedresults: User is updated successfully

        :CaseLevel: Integration
        """
        name = gen_string('alpha')
        with Session(self) as session:
            make_user(session, username=name)
            # ROLES is the module-level list of all built-in roles
            self.user.update(name, new_roles=ROLES)
            self.user.search_and_click(name)
            self.user.click(tab_locators['users.tab_roles'])
            for role in ROLES:
                self.assertIsNotNone(
                    self.user.wait_until_element(
                        common_locators['entity_deselect'] % role)
                )
    @tier1
    def test_positive_update_org(self):
        """Assign a User to an Org

        :id: d891e54b-76bf-4537-8eb9-c3f8832e4c2c

        :expectedresults: User is updated successfully

        :CaseImportance: Critical
        """
        name = gen_string('alpha')
        org_name = gen_string('alpha')
        # create the organization via API; only the name is needed later
        entities.Organization(name=org_name).create()
        with Session(self) as session:
            make_user(session, username=name)
            self.user.update(name, new_organizations=[org_name])
            self.user.search_and_click(name)
            self.user.click(tab_locators['users.tab_organizations'])
            element = self.user.wait_until_element(
                common_locators['entity_deselect'] % org_name)
            self.assertIsNotNone(element)
    @tier2
    def test_positive_update_orgs(self):
        """Assign a User to multiple Orgs

        :id: a207188d-1ad1-4ff1-9906-bae1d91104fd

        :expectedresults: User is updated

        :CaseLevel: Integration
        """
        name = gen_string('alpha')
        org_names = [
            entities.Organization().create().name
            for _ in range(3)
        ]
        with Session(self) as session:
            make_user(session, username=name)
            self.user.update(name, new_organizations=org_names)
            self.user.click(self.user.search(name))
            self.user.click(tab_locators['users.tab_organizations'])
            for org in org_names:
                # each assigned org shows up as a deselectable entity
                self.assertIsNotNone(
                    self.user.wait_until_element(
                        common_locators['entity_deselect'] % org)
                )
    @tier1
    def test_negative_update_username(self):
        """Update invalid Username in an User

        :id: 7019461e-13c6-4761-b3e9-4df81abcd0f9

        :expectedresults: User is not updated. Appropriate error shown.

        :CaseImportance: Critical
        """
        name = gen_string('alpha')
        with Session(self) as session:
            make_user(session, username=name)
            for new_user_name in invalid_names_list():
                with self.subTest(new_user_name):
                    self.user.update(name, new_username=new_user_name)
                    self.assertIsNotNone(self.user.wait_until_element(
                        common_locators['haserror']))
    @tier1
    def test_negative_update_firstname(self):
        """Update invalid Firstname in an User

        :id: 1e3945d1-5b47-45ca-aff9-3ddd44688e6b

        :expectedresults: User is not updated. Appropriate error shown.

        :CaseImportance: Critical
        """
        name = gen_string('alpha')
        with Session(self) as session:
            make_user(session, username=name)
            for new_first_name in invalid_names_list():
                with self.subTest(new_first_name):
                    self.user.update(name, first_name=new_first_name)
                    self.assertIsNotNone(self.user.wait_until_element(
                        common_locators['haserror']))
    @tier1
    def test_negative_update_surname(self):
        """Update invalid Surname in an User

        :id: 14033c1f-4c7e-4ee5-8ffc-76c4dd672cc1

        :expectedresults: User is not updated. Appropriate error shown.

        :CaseImportance: Critical
        """
        name = gen_string('alpha')
        with Session(self) as session:
            make_user(session, username=name)
            for new_surname in invalid_names_list():
                with self.subTest(new_surname):
                    self.user.update(name, last_name=new_surname)
                    self.assertIsNotNone(self.user.wait_until_element(
                        common_locators['haserror']))
    @tier1
    def test_negative_update_email(self):
        """Update invalid Email Address in an User

        :id: 6aec3816-16ca-487a-b0f1-a5c1fbc3e0a3

        :expectedresults: User is not updated. Appropriate error shown.

        :CaseImportance: Critical
        """
        name = gen_string('alpha')
        with Session(self) as session:
            make_user(session, username=name)
            for new_email in invalid_emails_list():
                with self.subTest(new_email):
                    self.user.update(name, email=new_email)
                    self.assertIsNotNone(self.user.wait_until_element(
                        common_locators['haserror']))
@tier1
def test_negative_update_password(self):
"""Update different values in Password and verify fields
:id: ab4a5dbf-70c2-4adc-b948-bc350329e166
:Steps:
1. Create User
2. Update the password by entering different values in Password and
verify fields
:expectedresults: User is not updated. Appropriate error shown.
:CaseImportance: Critical
"""
name = gen_string('alpha')
with Session(self) as session:
make_user(session, username=name)
self.user.update(
name,
new_password=gen_string('alphanumeric'),
password_confirmation=gen_string('alphanumeric'),
)
self.assertIsNotNone(self.user.wait_until_element(
common_locators['haserror']))
    @tier1
    def test_negative_update_password_empty_confirmation(self):
        """Update user password without providing confirmation value

        :id: c2b569c9-8120-4125-8bfe-61324a881395

        :Steps:
            1. Create User
            2. Update the password by entering value only in Password field

        :expectedresults: User is not updated. Appropriate error shown.

        :CaseImportance: Critical
        """
        name = gen_string('alpha')
        with Session(self) as session:
            make_user(session, username=name)
            self.user.update(
                name,
                new_password=gen_string('alphanumeric'),
                password_confirmation='',
            )
            self.assertIsNotNone(self.user.wait_until_element(
                common_locators['haserror']))
    @tier1
    def test_negative_update(self):
        """[UI ONLY] Attempt to update User info and Cancel

        :id: 56c8ea13-4add-4a51-8428-9d9f9ddde33e

        :expectedresults: User is not updated.

        :CaseImportance: Critical
        """
        new_first_name = gen_string('alpha')
        new_last_name = gen_string('alpha')
        username = gen_string('alpha')
        new_username = gen_string('alpha')
        with Session(self) as session:
            make_user(session, username=username)
            # submit=False cancels the edit form instead of saving it
            self.user.update(
                username,
                new_username=new_username,
                first_name=new_first_name,
                last_name=new_last_name,
                submit=False,
            )
            # the original account must survive; the new name must not exist
            self.assertIsNotNone(self.user.search(username))
            self.assertIsNone(self.user.search(new_username))
@tier1
def test_positive_delete_user(self):
"""Delete an existing User
:id: 49534eda-f8ea-404e-9714-a8d0d2210979
:expectedresults: User is deleted successfully
:CaseImportance: Critical
"""
user_name = gen_string('alphanumeric')
with Session(self) as session:
make_user(session, username=user_name)
self.user.delete(user_name)
@tier1
def test_positive_delete_admin(self):
"""Delete an admin user
:id: afda171a-b464-461f-93ce-96d770935200
:expectedresults: User is deleted
:CaseImportance: Critical
"""
user_name = gen_string('alpha')
with Session(self) as session:
make_user(session, username=user_name, admin=True)
self.assertIsNotNone(self.user.search(user_name))
self.user.delete(user_name)
@tier1
def test_negative_delete_user(self):
"""[UI ONLY] Attempt to delete an User and cancel
:id: 43aed0c0-a3c3-4044-addc-910dc29e4f37
:expectedresults: User is not deleted
:CaseImportance: Critical
"""
user_name = gen_string('alpha')
with Session(self) as session:
make_user(session, username=user_name)
self.assertIsNotNone(self.user.search(user_name))
self.user.delete(user_name, really=False)
    @stubbed()
    @tier3
    def test_positive_end_to_end(self):
        """Create User and perform different operations

        :id: 57f7054e-2865-4ab8-bc2b-e300a8dacee5

        :Steps:
            1. Create User
            2. Login with the new user
            3. Upload Subscriptions
            4. Provision Systems
            5. Add/Remove Users
            6. Add/Remove Orgs
            7. Delete the User

        :expectedresults: All actions passed

        :caseautomation: notautomated

        :CaseLevel: System
        """
    @stubbed()
    @tier3
    def test_positive_end_to_end_without_org(self):
        """Create User with no Org assigned and attempt different
        operations

        :id: 36b6d667-59cc-4442-aa40-c029bdb2b534

        :Steps:
            1. Create User. Do not assign any Org
            2. Login with the new user
            3. Attempt to Upload Subscriptions
            4. Attempt to Provision Systems
            5. Attempt to Add/Remove Users
            6. Attempt to Add/Remove Orgs

        :expectedresults: All actions failed since the User is not assigned to
            any Org

        :caseautomation: notautomated

        :CaseLevel: System
        """
    @tier1
    def test_positive_set_timezone(self):
        """Set a new timezone for the user

        :id: 3219c245-2914-4412-8df1-72e041a58a9f

        :Steps:
            1. Navigate to Administer -> Users
            2. Click on the User
            3. Select the Timezone Dropdown list
            4. Try to apply some timezone

        :expectedresults: User should be able to change timezone

        :CaseImportance: Critical
        """
        with Session(self) as session:
            # one fresh user per timezone so the checks are independent
            for timezone in TIMEZONES:
                with self.subTest(timezone):
                    name = gen_string('alpha')
                    make_user(session, username=name, timezone=timezone)
                    self.user.validate_user(name, 'timezone', timezone, False)
    @stubbed()
    @tier1
    def test_positive_dashboard_shows_new_time(self):
        """Check if the Dashboard shows the time according to the new
        timezone set

        :id: c2d80855-631c-46f6-8950-c296df8c0cbe

        :Steps:
            1. Change the timezone for a user in Administer -> Users tab
            2. Navigate to Monitor -> Dashboard
            3. The left corner displays time according to the new timezone set

        :expectedresults: Dashboard UI displays new time based on the new
            timezone

        :caseautomation: notautomated

        :CaseImportance: Critical
        """
    @stubbed()
    @tier2
    def test_positive_logfiles_shows_new_time(self):
        """Check if the logfiles reflect the new timezone set by
        the user

        :id: b687182b-9d4f-4ff4-9f19-1b6ae3c126ad

        :Steps:
            1. Change the timezones for user in Administer -> Users Tab
            2. Try to modify content view or environment so that the changes
                are reflected in log file
            3. Check if log file shows the new timezone set

        :expectedresults: Logfiles display time according to changed timezone

        :caseautomation: notautomated

        :CaseLevel: Integration
        """
    @stubbed()
    @tier2
    def test_positive_mails_for_new_timezone(self):
        """Check if the mails are received according to new
        timezone set by the user

        :id: ab34dd9d-4fc1-43f1-b40a-b0ebf0802887

        :Steps:
            1. Change the timezones for user in Administer -> Users tab
            2. Navigate to Administer -> Users tab
            3. Make sure under Email Preferences -> Mail Enabled
            4. Send daily/weekly/monthly mails

        :expectedresults: Emails are sent according to new timezone set

        :caseautomation: notautomated

        :CaseLevel: Integration
        """
    @stubbed()
    @tier1
    def test_positive_parameters_tab_access_with_edit_params(self):
        """Check if non admin users with edit_params permission can access
        parameters tab on organization details screen

        :id: 086ea8bf-2219-425e-acf4-d2ba59a77ee9

        :BZ: 1354572

        :Steps:
            1. Create a Role in Administer -> Roles
            2. On Role creation set Resource type to Parameters
            3. On Role creation add permission edit_params
            4. On Role creation set Resource type to Organization
            5. On Role creation add permissions edit_organizations and
                view_organizations
            6. Create a non admin user in Administer -> Users
            7. Add previous role to this user
            8. Login with previous user credentials
            9. Go to Organization -> Manage Organizations
            10. Choose Default Organization
            11. Assert "Parameters" tab is present

        :expectedresults: Parameters tab visible to users with edit_params
            permission

        :caseautomation: notautomated

        :CaseImportance: Critical
        """
    @stubbed()
    @tier1
    def test_negative_parameters_tab_access_without_edit_params(self):
        """Check if non admin users without edit_params permission can not
        access parameters tab on organization details screen

        :id: eac65b64-16d4-4df5-8402-e58ddb31050d

        :BZ: 1354572

        :Steps:
            1. Create a Role in Administer -> Roles
            2. On Role creation set Resource type to Organization
            3. On Role creation add permissions edit_organizations and
                view_organizations
            4. Create a non admin user in Administer -> Users
            5. Add previous role to this user
            6. Login with previous user credentials
            7. Go to Organization -> Manage Organizations
            8. Choose Default Organization
            9. Assert "Parameters" tab is not present

        :expectedresults: Parameters tab not visible to users with no
            edit_params permission

        :caseautomation: notautomated

        :CaseImportance: Critical
        """
class ActiveDirectoryUserTestCase(UITestCase):
    """Implements Active Directory feature tests for user in UI."""
    @classmethod
    @skip_if_not_set('ldap')
    def setUpClass(cls): # noqa
        """Create one AD auth source for the whole test case, using the
        connection details from the 'ldap' section of robottelo settings.
        """
        super(ActiveDirectoryUserTestCase, cls).setUpClass()
        cls.ldap_user_name = settings.ldap.username
        cls.ldap_user_passwd = settings.ldap.password
        cls.base_dn = settings.ldap.basedn
        cls.group_base_dn = settings.ldap.grpbasedn
        cls.ldap_hostname = settings.ldap.hostname
        cls.usergroup_name = gen_string('alpha')
        # NOTE(review): onthefly_register presumably lets accounts be
        # created at first LDAP login -- confirm against AuthSourceLDAP docs
        authsource_attrs = entities.AuthSourceLDAP(
            onthefly_register=True,
            account=cls.ldap_user_name,
            account_password=cls.ldap_user_passwd,
            base_dn=cls.base_dn,
            groups_base=cls.group_base_dn,
            attr_firstname=LDAP_ATTR['firstname'],
            attr_lastname=LDAP_ATTR['surname'],
            attr_login=LDAP_ATTR['login_ad'],
            server_type=LDAP_SERVER_TYPE['API']['ad'],
            attr_mail=LDAP_ATTR['mail'],
            name=gen_string('alpha'),
            host=cls.ldap_hostname,
            tls=False,
            port='389',
        ).create()
        cls.ldap_server_name = authsource_attrs.name
    @tier2
    def test_positive_create_in_ldap_mode(self):
        """Create User in ldap mode

        :id: 0668b2ca-831e-4568-94fb-80e45dd7d001

        :expectedresults: User is created without specifying the password

        :CaseLevel: Integration
        """
        user_name = gen_string('alpha')
        with Session(self) as session:
            # empty passwords are acceptable here: authentication is
            # delegated to the LDAP auth source chosen in 'Authorized by'
            make_user(
                session,
                username=user_name,
                authorized_by='LDAP-' + self.ldap_server_name,
                password1='',
                password2='',
            )
            self.assertIsNotNone(self.user.search(user_name))
class SshKeyInUserTestCase(UITestCase):
"""Implements the SSH Key in User Tests"""
@stubbed
@tier2
def test_postitive_ssh_key_tab_presence(self):
"""SSH keys tab presence in User details page
:id: a0c77cc1-0484-4290-b4b3-87ab3d0bde56
:steps:
1. Go to Administer -> Users
2. Attempt to create new user form Super admin
3. Verify SSH Keys tab in user details page
:expectedresults: New user details page should have a tab of SSH Keys
"""
@stubbed
@tier2
def test_postitive_ssh_key_tab_presence_Super_Admin(self):
"""SSH keys tab presence in Super Admin details page
:id: 72dc8c6e-3627-436a-adf3-f32d09b2f1c7
:steps:
1. Go to Administer -> Users
2. Edit Super Admin user details page
3. Verify SSH Keys tab in Super Admin user details page
:expectedresults: Super Admin user details page should have a tab of
SSH Keys
"""
@stubbed
@tier1
@skip_if_bug_open('bugzilla', 1465389)
def test_positive_create_ssh_key(self):
"""SSH Key can be added while creating a new user
:id: e608f1b2-2ca4-4c32-8a70-47bed63e8b09
:steps:
1. Go to Administer -> Users
2. Attempt to create new user with all the details
3. Add SSH Key in SSH Keys tab before saving the user
4. Save the new User
:expectedresults: New user should be added with SSH key
"""
@stubbed
@tier1
def test_positive_create_ssh_key_super_admin(self):
"""SSH Key can be added to Super Admin user details page
:id: 31388483-35f5-4828-82e9-9305a76e712d
:steps:
1. Go to Administer -> Users
2. Edit Super Admin user details page
3. Add SSH Key in SSH Keys tab
4. Save the changes of Super Admin user
:expectedresults: Super Admin should be saved with SSH key
"""
@stubbed
@tier1
@skip_if_bug_open('bugzilla', 1465389)
def test_positive_create_multiple_ssh_keys(self):
"""Multiple SSH Keys can be added while creating a new user
:id: 6552194f-63ff-4a6e-9784-5b3dc1772fd5
:steps:
1. Go to Administer -> Users
2. Attempt to create new user with all the details
3. Add multiple SSH Keys in SSH Keys tab before saving the user
4. Save the new User
:expectedresults: New user should be added with multiple SSH keys
"""
@stubbed
@tier1
def test_positive_create_multiple_ssh_keys_super_admin(self):
"""Multiple SSH Keys can be added to Super admin user details page
:id: 267cea76-0b75-4b37-a04f-dc3659cab409
:steps:
1. Go to Administer -> Users
2. Edit Super Admin user details page
3. Add multiple SSH Keys in SSH Keys tab
4. Save the changes of Super Admin user
:expectedresults: Super Admin should be saved with multiple SSH keys
"""
@stubbed
@tier1
def test_negative_create_ssh_key(self):
"""Invalid ssh key can not be added in User details page
:id: a815cd8b-142e-4743-b95a-c922def193f6
:steps:
1. Go to Administer -> Users
2. Attempt to create new user with all the details
3. Attempt to add invalid string as SSH Key in SSH Keys tab
before saving the user
:expectedresults: Invalid SSH key should not be added in user details
page
"""
@stubbed
@tier1
def test_negative_create_invalid_ssh_key(self):
""""Invalid SSH key can not be added to user and corresponding error
notification displays
:id: ea613925-75a0-421c-b02b-e61ce2fe0d84
:steps:
1. Go to Administer -> Users
2. Attempt to create new user with all the details
3. Attempt to add invalid string as SSH Key in SSH Keys tab
before saving the user. e.g blabla
:expectedresults:
1. SSH Key should not be added to user
2. Satellite should show 'Fingerprint could not be generated'
error notification
"""
@stubbed
@tier1
def test_negative_create_too_long_length_ssh_key(self):
"""SSH key with too long length can not be added to user and
corresponding error notification displays
:id: 2a3bb547-a073-4de6-85a7-20ace85992a2
:steps:
1. Go to Administer -> Users
2. Attempt to create new user with all the details
3. Attempt to add invalid length of SSH Key in SSH Keys tab
before saving the user
:expectedresults:
1. SSH Key should not be added to user
2. Satellite should show 'Length could not be calculated'
error notification
"""
@stubbed
@tier3
def test_positive_ssh_key_to_pxe_discovered_host(self):
"""Satellite automatically adds SSH key of user to the provisioned host
that is discovered with PXE
:id: 86598125-6ca1-4147-920f-b5e2e9ad8ccd
:steps:
1. Create User with valid ssh-key
2. Configure Satellite with DHCP, DNS and TFTP
3. Enable foreman plugin discovery
4. Install and enable discovery service plugin.
5. Update PXELinux global default template with satellite
capsule url and ONTIMEOUT to discovery
6. Build the PXE default template from Hosts -> Provisioning
templates
7. Update Satellite Kickstart Default provisioning template,
inherit 'create_users' snippet
8. Create Host Group to provision the host
9. Boot a blank bare metal host in a network
10. Wait for successful Discovery Status on bare metal host
11. In Hosts -> Discovered Hosts, find the above discovered
host
12. Choose to provision and choose name, taxonomies and
Hostgroup
13. Check IP, MAC fields and Primary, Managed, Provision
options assigned automatically in Host -> Interface tab
14. Check Host -> Operating System details populated
automatically
15. Resolve PXELinux details page- 'kickstart default PXELinux'
and provision Template - 'Satellite Kickstart Default'
16. Submit these changes for provisioning and wait for
provisioning to complete.
17. Attempt to ssh access the provisioned host from satellite
server
:expectedresults:
1. User should be able to password-less access to provisioned
host
2. Satellite should automatically add SSH key to provisioned
host
"""
@stubbed
@tier3
def test_positive_ssh_key_to_pxeless_provisioned_host(self):
"""Satellite automatically adds SSH key of user to the PXELess
provisioned host
:id: edbbafbd-5a82-4f27-ab93-2aa88d3a3353
:steps:
1. Create User with valid ssh-key
2. Configure Satellite with DHCP and DNS
3. Update Satellite Kickstart Default provisioning template,
inherit 'create_users' snippet
4. Create Host Group to provision the host
5. Attempt to create a new host from Hosts -> New Host
6. Choose name, taxonomies and Hostgroup
7. Check IP, Primary, Managed, Provision
options assigned automatically in Host -> Interface tab
8. Enter the bare metal host mac in interface tab
9. Check Host -> Operating System details populated
automatically
10. Resolve bootdisk template-'Boot disk iPXE - host',
kexec template='Discovery Red Hat kexec' and
provision Template - 'Satellite Kickstart Default'.
11. Submit these changes
12. After creating this host entry, Download the Generic boot disk
from this host -> boot disk -> Generic Image
13. Flash the Generic boot disk in some bootable device
14. Boot the Provisionable host from above media device
15. Wait for host to connect to Satellite, start installation,
finish installation, post installation configurations
16. Attempt to ssh access the provisioned host from satellite
server
:expectedresults:
1. User should be able to password-less access to provisioned
host
2. Satellite should automatically add SSH key to provisioned host
"""
@stubbed
@tier3
def test_positive_ssh_key_to_pxeless_discovered_host(self):
"""Satellite automatically adds SSH key of user to the provisioned
host that is discovered with PXELess
:id: a3a7fcd8-9efd-4863-ac81-48c1a2cdb61b
:steps:
1. Create User with valid ssh-key
2. Configure Satellite with DHCP, DNS and TFTP
3. Update Satellite Kickstart Default provisioning template,
inherit 'create_users' snippet
4. Create Host Group to provision the host
5. Enable foreman plugin discovery through satellite installer
6. Install and enable discovery service plugin
7. Flash the discovery ISO in some bootable device
8. Boot the provisionable host from bootable device
9. In host, Choose Discovery with DHCP
10. Choose primary network interface that connects to the satelite
11. Provide integrated capsule/external capsule server url
12. Set connection type to Proxy
13. Dont provide any custom facts
14. Wait for satellite to discover the host in Hosts -> Discovered
Hosts
15. Choose to provision and choose name, taxonomies and Hostgroup
16. Check IP, MAC fields and Primary, Managed, Provision
options assigned automatically in Host -> Interface tab
17. Check Host -> Operating System details populated
automatically
18. Resolve kexec Template- 'Discovery Red Hat kexec' and
provision Template - 'Satellite Kickstart Default'.
19. Submit these changes for provisioning and wait for
provisioning to complete.
20. Attempt to ssh access the provisioned host from satellite
server
:expectedresults:
1. User should be able to password-less access to provisioned
host
2. Satellite should automatically add SSH key to provisioned host
"""
@stubbed
@tier3
def test_positive_ssh_key_in_network_based_provisioned_host(self):
"""Satellite automatically adds SSH key of user onto the host
provisioned using network based method
:id: ff2efc2a-02d0-4e2e-90d4-be2562fe384e
:steps:
1. Create User with valid ssh-key
2. Configure Satellite with DHCP, DNS
3. Create libvirt/RHEVM/VMWare Compute Resource on satellite
4. Create suitable compute profile for choosed CR
5. Update Satellite Kickstart Default provisioning template,
inherit 'create_users' snippet
6. Create Host Group for provisioning the host
7. Attempt to create a new host from Hosts -> New Host
8. Choose name, taxonomies and Hostgroup
9. Select choosed(in step 2) CR in 'deploy on' option
10. Check IP value amd Primary, Managed, Provision
options assigned automatically in Host -> Interface tab
11. Leave MAC Address blank to be assigned by CR
12. Check Host -> Operating System details populated
automatically, also choose Network Based provisioning
13. Choose appropriate Virtual Machine details
14. Submit these changes for provisioning and wait for provisioning
to complete
15. Attempt to ssh access the provisioned host from satellite
server
:expectedresults:
1. User should be able to password-less access to provisioned
host
2. Satellite should automatically add SSH key to provisioned host
"""
@stubbed
@tier3
def test_positive_ssh_key_in_image_based_provisioned_host(self):
"""Satellite automatically adds SSH key of user onto the host
provisioned using image based method
:id: 470f7142-c805-43c3-b0cc-02bd380f098b
:steps:
1. Create User with valid ssh-key
2. Configure Satellite with DHCP, DNS
3. Create EC2/Openstack/VMware/libvirt/RHEV Compute Resource on
satellite
4. Create suitable compute profile for choosed CR
5. Update Satellite Kickstart Default Finish provisioning template,
inherit 'create_users' snippet
6. Create Host Group for provisioning the host
7. Attempt to create a new host from Hosts -> New Host
8. Choose name, taxonomies and Hostgroup
9. Select choosed(in step 2) CR in 'deploy on' option
10. Check IP value amd Primary, Managed, Provision
options assigned automatically in Host -> Interface tab
11. Leave MAC Address blank to be assigned by CR
12. Check Host -> Operating System details populated
automatically, also choose Image Based provisioning
13. Choose appropriate Virtual Machine details
14. Submit these changes for provisioning and wait for provisioning
to complete
15. Attempt to ssh access the provisioned host from satellite
server
:expectedresults:
1. User should be able to password-less access to provisioned
host
2. Satellite should automatically add SSH key to provisioned host
"""
@stubbed
@tier3
def test_negative_invalid_ssh_key_access_to_provisioned_host(self):
""" Satellite user cannot password-less access with invalid ssh key
:id: 13f2d109-d15e-4fee-ae49-7ce3b27efd17
:steps:
1. Create user with ssh public key which doesnt matches the private
key of user(i.e Wrong public key)
2. Update Satellite Kickstart Default template, inherit
'create_users' snippet
3. Provision a host on libvirt CR with above user
4. Attempt to ssh access the provisioned host from satellite sever
:expectedresults: User should not be able to password-less access to
provisioned host having wrong non matching publc key
"""
    @stubbed
    @tier3
    def test_positive_multiple_key_types_access_to_provisioned_host(self):
        """Satellite automatically adds supported multiple types of SSH key of
        user onto the host provisioned
        :id: 1532df12-e0a5-4da6-9e28-5d2eba98f0af
        :steps:
            1. Create user with any type of ssh key; types include
                rsa, dsa, ed25519, ecdsa
            2. Update Satellite Kickstart Default template, inherit
                'create_users' snippet
            3. Provision a host on libvirt CR with the above user
            4. Attempt to ssh access the provisioned host from satellite server
        :expectedresults:
            1. User should be able to password-less access the provisioned
                host using any supported type of ssh key
            2. Satellite should automatically add any supported type of SSH key
                to the provisioned host
        """
    @stubbed
    @tier1
    def test_positive_delete_ssh_key(self):
        """Satellite Admin can delete an ssh key from a user
        :id: e4df559d-3f01-4dfb-a847-ae5f7d91ef90
        :steps:
            1. Go to Administer -> Users
            2. Attempt to create a new user with all the details
            3. Add SSH Key in SSH Keys tab before saving the user
            4. Save the new User
            5. Edit the user created above and delete the ssh-key from the user
        :expectedresults: The SSH key should be deleted from the user
        """
| ares/robottelo | tests/foreman/ui/test_user.py | Python | gpl-3.0 | 61,539 |
# System imports
import itertools
import concurrent.futures
# Third party imports
import requests
class Client:
    """
    Used for sending commands to one or more IDA containers over HTTP.

    Requests are distributed across the configured containers round-robin.
    """
    def __init__(self, urls):
        """
        >>> client = Client(['http://host-1:4001', 'http://host-2:4001'])
        :param urls: List of addresses of IDA containers including the published port
        :raises ValueError: if urls is None, empty, or contains only falsy entries
        """
        if urls is None or not any(urls):
            raise ValueError('Invalid "urls" value')
        # Round-robin iterator over the container addresses.
        self._urls = itertools.cycle(urls)

    def send_command(self, command, timeout=None):
        """
        Send a command to an IDA container via HTTP
        :param command: The command to send, should start with idal or idal64
        :param timeout: A timeout given for the command (optional)
        :returns: True if the command ran successfully, else False
        """
        data_to_send = dict(command=command)
        if timeout is not None:
            data_to_send['timeout'] = timeout
        # Each call targets the next container in the round-robin cycle.
        response = requests.post('%s/ida/command' % next(self._urls), data=data_to_send)
        return response.status_code == 200

    def send_multiple_commands(self, commands, timeout=None, num_of_threads=4):
        """
        Send a batch of commands asynchronously to an IDA container via HTTP
        :param commands: An iterable of commands to send to the container
        :param timeout: A timeout given for each command (optional)
        :param num_of_threads: Number of worker threads used to send the commands
        :returns: A dictionary where the key is the command and the value is True if succeeded, else False
        """
        results = {}
        with concurrent.futures.ThreadPoolExecutor(max_workers=num_of_threads) as executor:
            # Map each submitted future back to the command it carries.
            future_responses = {executor.submit(self.send_command, command, timeout): command
                                for command in commands}
            for response in concurrent.futures.as_completed(future_responses):
                command = future_responses[response]
                try:
                    results[command] = response.result()
                except Exception as ex:
                    # A failed command is reported but does not abort the batch;
                    # it is simply absent from the results dictionary.
                    print('An exception occurred in command %s, The exception was %s' % (command, str(ex)))
        return results
| intezer/docker-ida | ida_client/ida_client.py | Python | gpl-3.0 | 2,196 |
import os
import sys
import math
import errno
import subprocess
import tkMessageBox
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from PIL import Image
from matplotlib import style
from datetime import datetime
from matplotlib.widgets import RadioButtons, Button
# Update the version of the program here:
version = "2.2a"
# 'state' is used to keep track of whether the graph has been paused or not
# (even value = playing, odd value = paused; toggled by the pause/play button)
state = 0
# Global arrays that keep the data for plotting the graphs.
# Prefixes: l* = all pings, w* = 'warning' pings (200-499 ms), b* = 'bad' pings (>= 500 ms)
ltimes = []
wtimes = []
btimes = []
lpings = []
wpings = []
bpings = []
# Running average series plus the avg +/- standard-deviation envelope drawn on the plot
avg_lis = []
top = []
bot = []
# Global variables
sd = 0   # running standard deviation of the selected server's pings
avg = 0  # running mean of the selected server's pings
num_to = 0 # number of timeout errors
num_un = 0 # number of unreachable errors
sum_ping = 0  # NOTE(review): never updated anywhere visible - appears to be dead
min_ping = float('+inf')  # displayed min; mirrored from the *_na/*_lan values in animate()
max_ping = float('-inf')  # displayed max; mirrored from the *_na/*_lan values in animate()
# Per-server running statistics for the "NA" server
count_na = 0
sum_ping_na = 0
sum_sq_dif_na = 0
min_ping_na = float('+inf')
max_ping_na = float('-inf')
# Per-server running statistics for the "LAN" server
count_lan = 0
sum_ping_lan = 0
sum_sq_dif_lan = 0
min_ping_lan = float('+inf')
max_ping_lan = float('-inf')
start = datetime.now()  # reference instant; elapsed seconds feed the x-axis
sq_dif_ar = []  # NOTE(review): never used after initialization - appears to be dead
# Server addresses selectable via the radio buttons
# (presumably League of Legends NA / LAN game servers - TODO confirm)
servers = {"NA": "104.160.131.3", "LAN": "104.160.136.3"}
# matplotlib related variable initialization
style.use('seaborn-darkgrid')
fig = plt.figure(figsize=(16, 9))
ax1 = fig.add_subplot(1, 1, 1)
# Status-icon bitmaps.  Naming scheme: st/unst/unstl/vunst = stable / unstable /
# unstable-low / very-unstable; gd/wr/bd = good / warning / bad ping range.
pp_img = Image.open(os.path.dirname(__file__) + '/static/buttons/pp_button.png')
dec_img = Image.open(os.path.dirname(__file__) + '/static/buttons/dec.png')
inc_img = Image.open(os.path.dirname(__file__) + '/static/buttons/inc.png')
null_img = Image.open(os.path.dirname(__file__) + '/static/buttons/null.png')
stgd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/stgd.png')
stwr_img = Image.open(os.path.dirname(__file__) + '/static/buttons/stwr.png')
stbd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/stbd.png')
unstgd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/unstgd.png')
unstwr_img = Image.open(os.path.dirname(__file__) + '/static/buttons/unstwr.png')
unstbd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/unstbd.png')
unstlgd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/unstlgd.png')
unstlwr_img = Image.open(os.path.dirname(__file__) + '/static/buttons/unstlwr.png')
unstlbd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/unstlbd.png')
vunstgd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/vunstgd.png')
vunstwr_img = Image.open(os.path.dirname(__file__) + '/static/buttons/vunstwr.png')
vunstbd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/vunstbd.png')
# Downscale the bitmaps in place to icon size (the pause/play button is larger)
pp_img.thumbnail((64, 64), Image.ANTIALIAS)
dec_img.thumbnail((16, 16), Image.ANTIALIAS)
inc_img.thumbnail((16, 16), Image.ANTIALIAS)
stgd_img.thumbnail((16, 16), Image.ANTIALIAS)
stwr_img.thumbnail((16, 16), Image.ANTIALIAS)
stbd_img.thumbnail((16, 16), Image.ANTIALIAS)
unstgd_img.thumbnail((16, 16), Image.ANTIALIAS)
unstwr_img.thumbnail((16, 16), Image.ANTIALIAS)
unstbd_img.thumbnail((16, 16), Image.ANTIALIAS)
unstlgd_img.thumbnail((16, 16), Image.ANTIALIAS)
unstlwr_img.thumbnail((16, 16), Image.ANTIALIAS)
unstlbd_img.thumbnail((16, 16), Image.ANTIALIAS)
vunstgd_img.thumbnail((16, 16), Image.ANTIALIAS)
vunstwr_img.thumbnail((16, 16), Image.ANTIALIAS)
vunstbd_img.thumbnail((16, 16), Image.ANTIALIAS)
# Set the window icon (Windows-only .ico path)
icon_manager = mpl.pyplot.get_current_fig_manager()
icon_manager.window.wm_iconbitmap(os.path.dirname(__file__) + '/static/icons/icon.ico')
# Radio buttons used to pick which server to ping
rax = plt.axes([0.881, 0.535, 0.089, 0.089], aspect='equal', frameon=True, axisbg='white')
radio = RadioButtons(rax, servers.keys())
radio_value = radio.value_selected
class ButtonHandler(object):
    """
    Callback container for the matplotlib Buttons (wired up via .on_clicked()).
    'ind' keeps a net click count: quit clicks increment it, pause clicks
    decrement it.
    """
    ind = 0

    def quit(self, event):
        """Quit-button callback: run the shutdown handler and redraw."""
        self.ind = self.ind + 1
        close_handler(event)
        plt.draw()

    def pause(self, event):
        """Pause/play-button callback: toggle the global animation state."""
        global state
        state += 1
        self.ind = self.ind - 1
        plt.draw()
def make_databox(vpos, hpos, alpha, fc, ec):
    """
    Draws a uniformly-sized text box used to frame the data readouts beside
    the graph - every databox gets the same dimensions for a tidy column.
    vpos: first axes-fraction coordinate (passed as x to ax1.text)
    hpos: second axes-fraction coordinate (passed as y to ax1.text)
    alpha: strength (opacity) of the box fill colour
    fc: face (fill) colour of the box
    ec: edge (border) colour of the box
    Returns the created matplotlib Text artist.
    """
    frame_style = {'alpha': alpha,
                   'pad': 5,
                   'fc': fc,
                   'ec': ec,
                   'lw': 2}
    # The placeholder string itself is invisible (alpha=0); only the bbox shows.
    return ax1.text(vpos, hpos, '______________.', transform=ax1.transAxes,
                    alpha=0, bbox=frame_style)
def close_handler(event):
    """
    Safely shutdown all processes of this program whenever the window is closed
    by the user.
    :param event: matplotlib close/button event (unused)
    """
    sys.exit()
def spperr_handler(err):
    """
    Sub-process ping error handler.
    Handles common 'errors' we can expect from Windows' ping.exe, which is
    accessed through a subprocess ('errors' refer to unsuccessful pings).
    Known errors offer a Retry/Cancel dialog: Retry re-enters upd_data()
    (NOTE: this recurses, so a long unbroken streak of failures deepens the
    call stack), Cancel exits the program.
    :param err: the error phrase extracted from ping.exe's output
    """
    err_dict = {'Destination host unreachable': 'The destination was unreachable!\nPlease check your internet '
                                                'connection and press Retry.',
                'Request timed out': 'The destination took too long to respond!\nPlease check your internet connection '
                                     'and press Retry.'
                }
    try:
        if tkMessageBox.askretrycancel(err, err_dict[err]):
            upd_data()
        else:
            sys.exit()
    # This should never occur - this handles errors not in err_dict (the expected errors).
    # Could be useful if a very powerful err_handler was coded, where every line is passed through here.
    # NOTE(review): because upd_data() runs inside the try, a KeyError raised from
    # within it would also land here - confirm that is acceptable before refactoring.
    except KeyError:
        if tkMessageBox.showerror('Unknown Error', 'The condition under which this error occurred was unexpected!'):
            sys.exit()
def set_savdir(sav_dir='Screenshots'):
    """
    Configures the default mpl save directory for screenshots.
    Ensures a folder named `sav_dir` (default 'Screenshots') exists next to this
    script, creating it if necessary, then sets it as matplotlib's default
    save-figure directory.
    :param sav_dir: name of the screenshot folder relative to this script
    """
    # Compute the target path once (the original rebuilt this expression on every use).
    target_dir = os.path.join(os.path.dirname(__file__), sav_dir).replace('\\', '/')
    if not os.path.isdir(target_dir):
        try:
            os.makedirs(target_dir)
        except OSError as exc:
            # Tolerate the race where another process created the directory between
            # the isdir() check and makedirs(); re-raise anything else.
            if not (exc.errno == errno.EEXIST and os.path.isdir(target_dir)):
                raise
    # Now that the directory for screenshots surely exists, set it as default directory.
    mpl.rcParams["savefig.directory"] = target_dir
def draw_ping(vpos, hpos, ping, up_bound, lo_bound, stdv, vpos_tb, hpos_tb, a_yellow, a_green, a_red):
    """
    A powerful function that performs:
    1- The specification of the databox which holds the ping data:
        a. Inner (face) colour represents the ping range
           (green < 200 ms, yellow 200-499 ms, red >= 500 ms)
        b. Outer (edge) colour represents the ping state (spiked, below lo_bound etc.)
    2- Drawing the icon button that summarizes the state of the ping
    vpos: the vertical position of the button it draws the ping circle in
    hpos: the horizontal position of the button it draws the ping circle in
    ping: the value of the current ping
    up_bound: the average ping + standard deviation (upper bound)
    lo_bound: the average ping - standard deviation (lower bound)
    stdv: the standard deviation calculated in upd_data(), passed from animate(i)
    vpos_tb: the vertical position of the databox which holds the ping data
    hpos_tb: the horizontal position of the databox which holds the ping data
    a_yellow/a_green/a_red: the strength of the databox colour per ping range
    Returns the created Button for the icon.
    Fix note: the original range checks (0 <= ping <= 199 / 200 <= ping <= 499 /
    ping > 500) left gaps - fractional pings such as 199.5, the exact value 500
    and negative values fell through every branch, drew nothing and returned
    None.  The tiers below are contiguous, so every ping value now yields a
    databox and an icon.
    """
    global avg
    # 1) The ping range decides the face colour, the box alpha and which icon
    #    family is used.  Icon order within a family:
    #    [very unstable, unstable, stable, below lower bound, above upper bound]
    if ping < 200:
        fc, alpha = "green", a_green
        icons = (vunstgd_img, unstgd_img, stgd_img, unstlgd_img, unstgd_img)
    elif ping < 500:
        fc, alpha = "yellow", a_yellow
        icons = (vunstwr_img, unstgd_img, stwr_img, unstlwr_img, unstwr_img)
    else:
        fc, alpha = "red", a_red
        icons = (vunstbd_img, unstgd_img, stbd_img, unstlbd_img, unstbd_img)
    # 2) The ping stability decides the edge colour and which icon of the
    #    family is shown.  Stability is checked before the bounds, as before.
    if stdv * 2 >= 0.3 * avg:
        # Very unstable - has very large and frequent spikes
        # (the 'bad' tier marks this with a black edge, the others with red)
        state_ix, ec = 0, "black" if ping >= 500 else "red"
    elif stdv * 2 >= 0.15 * avg:
        # Unstable - spread exceeds 15% of the current average ping.
        # NOTE(review): the original used the green 'unstgd' icon here for all
        # three ranges (yellow and red included); preserved as-is - confirm
        # whether unstwr/unstbd were intended.
        state_ix, ec = 1, "gold"
    elif lo_bound <= ping <= up_bound:
        # Stable - within the avg +/- sd envelope
        state_ix, ec = 2, "green"
    elif ping <= lo_bound:
        # Below the lower bound - tending toward a better ping, coloured blue
        state_ix, ec = 3, "blue"
    else:
        # Above the upper bound - simply unstable
        state_ix, ec = 4, "gold"
    make_databox(vpos=vpos_tb, hpos=hpos_tb, alpha=alpha, fc=fc, ec=ec)
    return Button(plt.axes([hpos, vpos, 0.02, 0.02]), '', image=icons[state_ix], color='None')
def upd_data():
    """
    This function performs one Windows ping (via ping.exe) and updates:
    1- lping : which is stored in global data array lpings each instance
             : if 200 <= lping <= 499, it is also stored in global data array wpings
             : if lping >= 500, it is also stored in global data array bpings
    2- ltime : which is stored in global data array ltimes each instance
             : is stored in global data array wtimes each instance wpings has a new value
             : is stored in global data array btimes each instance bpings has a new value
    3- avg : hence also count and sum_ping (based on radio_value's "NA" or "LAN")
           : which is stored in global array avg_lis each instance
    4- max_ping: (based on radio_value's "NA" or "LAN")
    5- min_ping: (based on radio_value's "NA" or "LAN")
    6- sd : the standard deviation sqrt(sum((lping-avg)^2)/count)
          : used to calculate top (upper bound = avg + sd) and bot (lower bound = avg - sd)
          : top and bot are global data arrays
    Notes:
    1- creationflags=0x08000000 (CREATE_NO_WINDOW) forces Windows cmd to not
       generate a console window for the subprocess.
    2- NOTE(review): the while/poll loop reads one line per poll and may skip
       output still buffered after the process exits - appears to work for the
       single-echo ping used here; confirm before changing.
    3- NOTE(review): written for Python 2 - sp.stdout.readline() must return str
       (under Python 3 it returns bytes, which would break the 'in' tests below).
    """
    global lpings, ltimes, sum_ping, servers, avg, avg_lis, radio_value, num_un, num_to, top, bot, sd, wtimes, wpings, \
        bpings, btimes
    global sum_ping_na, count_na, max_ping_na, min_ping_na, sum_sq_dif_na
    global sum_ping_lan, count_lan, max_ping_lan, min_ping_lan, sum_sq_dif_lan
    # Recheck the radio button value so as to ping to the selected server
    radio_value = radio.value_selected
    # One echo request (-n 1) with a 500-byte payload (-l 500) to the selected server
    sp = subprocess.Popen(["ping.exe", servers[radio_value], "-n", "1", "-l", "500"],
                          stdout=subprocess.PIPE,
                          stdin=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          shell=False,
                          creationflags=0x08000000)
    # For instantaneous interpretation of output from subprocess
    while sp.poll() is None:
        line = sp.stdout.readline()
        # Data is updated in here from the newest subprocess ping
        if "time=" in line:
            # Extract the millisecond value between 'time=' and 'ms'
            lping = float(line[line.find("time=")+5:line.find("ms")])
            if radio_value == "NA":
                sum_ping_na += lping
                count_na += 1
                avg = sum_ping_na / count_na
                sq_dif = (lping - avg)*(lping - avg)
                sum_sq_dif_na += sq_dif
                sd = math.sqrt(sum_sq_dif_na / count_na)
                if lping > max_ping_na:
                    max_ping_na = lping
                if min_ping_na > lping:
                    min_ping_na = lping
            if radio_value == "LAN":
                sum_ping_lan += lping
                count_lan += 1
                avg = sum_ping_lan / count_lan
                sq_dif = (lping - avg) * (lping - avg)
                sum_sq_dif_lan += sq_dif
                sd = math.sqrt(sum_sq_dif_lan / count_lan)
                if lping > max_ping_lan:
                    max_ping_lan = lping
                if min_ping_lan > lping:
                    min_ping_lan = lping
            # Extend the envelope/average series and the raw data series
            top += [avg + sd]
            bot += [avg - sd]
            avg_lis += [avg]
            interval = datetime.now() - start
            ltime = interval.total_seconds()
            ltimes += [ltime]
            lpings += [lping]
            # Mirror 'warning' (yellow) and 'bad' (red) pings into their own series
            if 200 <= lping <= 499:
                wpings += [lping]
                wtimes += [ltime]
            elif lping >= 500:
                bpings += [lping]
                btimes += [ltime]
        elif "Destination host unreachable" in line:
            num_un += 1
            spperr_handler("Destination host unreachable")
        elif "Request timed out" in line:
            num_to += 1
            spperr_handler("Request timed out")
def animate(i):
    """
    Performs the 'graphical updating' based on the newly updated data from
    upd_data(): clears the axes and redraws the graph, the databox column and
    the status icons, then (if not paused) fetches the next ping sample.
    :param i: frame counter supplied by matplotlib.animation.FuncAnimation (unused)
    """
    global max_ping, min_ping, ltimes, lpings, radio_value, servers, avg, avg_lis, num_to, num_un, top, bot, wtimes,\
        wpings, btimes
    global sum_ping_na, count_na, max_ping_na, min_ping_na
    global sum_ping_lan, count_lan, max_ping_lan, min_ping_lan
    # Mirror the per-server extrema into the displayed min_ping/max_ping
    if radio_value == "NA":
        max_ping = max_ping_na
        min_ping = min_ping_na
    if radio_value == "LAN":
        max_ping = max_ping_lan
        min_ping = min_ping_lan
    # Snapshot the global lists as numpy arrays for plotting
    pingar = np.array(lpings)
    timear = np.array(ltimes)
    w_pingar = np.array(wpings)
    w_timear = np.array(wtimes)
    b_pingar = np.array(bpings)
    b_timear = np.array(btimes)
    avgar = np.array(avg_lis)
    topar = np.array(top)
    botar = np.array(bot)
    ax1.clear()
    ax1.text(0.999, 1.02, 'by Ryan Chin Sang', ha='right', va='top', color='0.85', size='small',
             transform=ax1.transAxes)
    # Positions of the first textbox to display data
    vpos_tb = 1.01
    hpos_tb = 0.973
    hpos_img = 0.88
    vpos_img = 0.8325
    # Alpha (colour strength) constants for the databoxes
    a_red = 0.3
    a_grey = 0.2
    a_blue = 0.14
    a_green = 0.23
    a_yellow = 0.17
    # Ping data
    ax1.text(vpos_tb, hpos_tb, "Ping: " + str(lpings[-1]) + " ms", transform=ax1.transAxes)
    draw_ping(vpos=vpos_img + 0.0385, hpos=hpos_img, ping=lpings[-1], up_bound=top[-1], lo_bound=bot[-1], stdv=sd,
              vpos_tb=vpos_tb, hpos_tb=hpos_tb, a_green=a_green, a_red=a_red, a_yellow=a_yellow)
    # Average ping (icon shows whether the latest ping pulls the average down or up)
    if lpings[-1] < avg:
        make_databox(vpos=vpos_tb, hpos=hpos_tb - 0.05, alpha=a_green, fc="green", ec="green")
        ax1.text(vpos_tb, hpos_tb-0.05, "Avg: " + format(avg, '.3f') + " ms", transform=ax1.transAxes)
        Button(plt.axes([hpos_img, vpos_img, 0.02, 0.02]), '', image=dec_img, color='None')
    elif lpings[-1] > avg:
        make_databox(vpos=vpos_tb, hpos=hpos_tb - 0.05, alpha=a_red, fc="red", ec="black")
        ax1.text(vpos_tb, hpos_tb - 0.05, "Avg: " + format(avg, '.3f') + " ms", transform=ax1.transAxes)
        Button(plt.axes([hpos_img, vpos_img, 0.02, 0.02]), '', image=inc_img, color='None')
    else:
        make_databox(vpos=vpos_tb, hpos=hpos_tb - 0.05, alpha=a_blue, fc="blue", ec="blue")
        ax1.text(vpos_tb, hpos_tb - 0.05, "Avg: " + format(avg, '.3f') + " ms", transform=ax1.transAxes)
        Button(plt.axes([hpos_img, vpos_img, 0.02, 0.02]), '', image=null_img, color='None')
    # Time data
    make_databox(vpos=vpos_tb, hpos=hpos_tb - 0.1, alpha=a_blue, fc="blue", ec="blue")
    ax1.text(vpos_tb, hpos_tb-0.1, "Time: " + str(ltimes[-1]) + " s", transform=ax1.transAxes)
    # Maximum Ping data
    make_databox(vpos=vpos_tb, hpos=hpos_tb - 0.15, alpha=a_blue, fc="blue", ec="blue")
    ax1.text(vpos_tb, hpos_tb-0.15, "Max: " + str(max_ping) + " ms", transform=ax1.transAxes)
    # Minimum Ping data
    make_databox(vpos=vpos_tb, hpos=hpos_tb - 0.2, alpha=a_blue, fc="blue", ec="blue")
    ax1.text(vpos_tb, hpos_tb-0.2, "Min: " + str(min_ping) + " ms", transform=ax1.transAxes)
    # No. of timeouts
    make_databox(vpos=vpos_tb, hpos=hpos_tb - 0.25, alpha=a_grey, fc="grey", ec="black")
    ax1.text(vpos_tb, hpos_tb-0.25, "# Timeout: " + str(num_to), transform=ax1.transAxes)
    # No. of unreachables
    make_databox(vpos=vpos_tb, hpos=hpos_tb - 0.3, alpha=a_grey, fc="grey", ec="black")
    ax1.text(vpos_tb, hpos_tb-0.3, "# Unreachable: " + str(num_un), transform=ax1.transAxes)
    # Shows state of the animated graph (the invisible 'box' text only supplies the bbox)
    ax1.text(0.92, -0.0925, 'box', transform=ax1.transAxes, fontsize=22, zorder=0, alpha=0,
             bbox={'alpha': a_grey, 'pad': 5, "fc": "white", "ec": "black", "lw": 2})
    ax1.text(0.92, -0.087, ' Play' if state % 2 else 'Pause', transform=ax1.transAxes, zorder=1)
    ax1.set_ylabel('Ping /ms', size='large')
    ax1.set_xlabel('Time /s', size='large')
    ax1.set_title('Ping to League of Legends [' + radio_value + '] Server (' + servers[radio_value] + ')', fontsize=16,
                  fontweight='bold')
    ax1.plot(timear, pingar, linewidth=1.0, label="Ping")
    ax1.plot(timear, avgar, linewidth=0.6, label="Average Ping")
    # Draws a yellow graph when ping goes over 200 ms and is less than 499 ms
    ax1.plot(w_timear, w_pingar, linewidth=1.5, color='yellow', zorder=1)
    # Draws a red graph when ping goes over 500 ms
    ax1.plot(b_timear, b_pingar, linewidth=1.5, color='red', zorder=1)
    # Upper/lower standard-deviation envelope plus a light green fill between them
    ax1.plot(timear, topar, linewidth=0.3)
    ax1.plot(timear, botar, linewidth=0.3)
    ax1.fill_between(timear, botar, topar, facecolor='green', interpolate=True, alpha=0.0375)
    ax1.legend(loc='upper left')
    # Only update the data if state indicates 'play' (opposite of button logic)
    if state % 2 == 0:
        upd_data()
def set_frame():
    """
    Sets the initial frame of the Window which will be animated through
    animate(i), wires up the window callbacks and buttons, then blocks in
    plt.show().  The local references (ani, quit_b, pp_b) must stay alive for
    the duration of plt.show(), otherwise the animation/callbacks could be
    garbage-collected.
    """
    global state  # NOTE(review): 'state' is neither read nor assigned here - looks unnecessary
    fig.canvas.mpl_connect('close_event', close_handler)
    fig.canvas.set_window_title('League Latency v' + version)
    ani = animation.FuncAnimation(fig, animate, frames=120)
    # [(-=left, +=right), (-=up, +=down), (-=thin, +=wide), (-=thin, +=thick)]
    quit_b = Button(plt.axes([0.905, 0.01, 0.089, 0.05]), 'Quit')
    quit_b.on_clicked(ButtonHandler().quit)
    pp_b = Button(plt.axes([0.835, 0.01, 0.1, 0.05]), '', image=pp_img)
    pp_b.on_clicked(ButtonHandler().pause)
    plt.show()
# Program entry: configure the screenshot directory, take the first ping sample,
# then build the window and start the animation loop (blocks until closed).
# NOTE(review): these run on import as well - consider guarding with
# `if __name__ == '__main__':` (would change import-time behaviour, so confirm).
set_savdir()
upd_data()
set_frame()
| RyanChinSang/LeagueLatency | History/Raw/v2.2a Stable/LL.py | Python | gpl-3.0 | 21,253 |
#!/usr/bin/env python3
# -*- mode:python; tab-width:4; c-basic-offset:4; intent-tabs-mode:nil; -*-
# ex: filetype=python tabstop=4 softtabstop=4 shiftwidth=4 expandtab autoindent smartindent
#
# Universal Password Changer (UPwdChg)
# Copyright (C) 2014-2018 Cedric Dufour <http://cedric.dufour.name>
# Author: Cedric Dufour <http://cedric.dufour.name>
#
# The Universal Password Changer (UPwdChg) is free software:
# you can redistribute it and/or modify it under the terms of the GNU General
# Public License as published by the Free Software Foundation, Version 3.
#
# The Universal Password Changer (UPwdChg) is distributed in the hope
# that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU General Public License for more details.
#
# SPDX-License-Identifier: GPL-3.0
# License-Filename: LICENSE/GPL-3.0.txt
#
#------------------------------------------------------------------------------
# DEPENDENCIES
#------------------------------------------------------------------------------
# UPwdChg
from UPwdChg import \
TokenReader
# Standard
import unittest as UT
import sys
#------------------------------------------------------------------------------
# CLASSES
#------------------------------------------------------------------------------
class testTokenReader_ReadToken(UT.TestCase):
    """Verify that every token type can be read (decrypted) with the proper key pair."""

    def setUp(self):
        self.oToken = TokenReader()

    def _readToken(self, sKeyPrivate, sKeyPublic, sFileToken):
        # Configure the reader with the given key pair and return readToken's status code
        self.oToken.config(sKeyPrivate, sKeyPublic)
        return self.oToken.readToken(sFileToken)

    def testPasswordNonceRequest(self):
        self.assertEqual(self._readToken('./resources/backend-private.pem', './resources/frontend-public.pem',
                                         './tmp/password-nonce-request.token'), 0)

    def testPasswordChange(self):
        self.assertEqual(self._readToken('./resources/backend-private.pem', './resources/frontend-public.pem',
                                         './tmp/password-change.token'), 0)

    def testPasswordReset(self):
        self.assertEqual(self._readToken('./resources/backend-private.pem', './resources/frontend-public.pem',
                                         './tmp/password-reset.token'), 0)

    def testPasswordNonce(self):
        # The nonce token travels the other way: read with the frontend's private key
        self.assertEqual(self._readToken('./resources/frontend-private.pem', './resources/backend-public.pem',
                                         './tmp/password-nonce.token'), 0)
class testTokenReader_PasswordNonceRequest(UT.TestCase):
    """Check the payload fields of a decrypted 'password-nonce-request' token."""

    # ISO-8601 UTC timestamp, e.g. 2018-12-31T23:59:59Z
    RE_TIMESTAMP = '^20[0-9]{2}-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01])T([01][0-9]|2[0-3]):[0-5][0-9]:[0-5][0-9]Z$'

    def setUp(self):
        self.oToken = TokenReader()
        self.oToken.config('./resources/backend-private.pem', './resources/frontend-public.pem')
        if self.oToken.readToken('./tmp/password-nonce-request.token'):
            self.skipTest('Failed to read token')

    def testType(self):
        self.assertIn('type', self.oToken.keys())
        self.assertEqual(self.oToken['type'], 'password-nonce-request')

    def testTimestamp(self):
        self.assertIn('timestamp', self.oToken.keys())
        self.assertRegex(self.oToken['timestamp'], self.RE_TIMESTAMP)

    def testUsername(self):
        self.assertIn('username', self.oToken.keys())
        self.assertEqual(self.oToken['username'], 'test-Benützername')
class testTokenReader_PasswordChange(UT.TestCase):
    """Check the payload fields of a decrypted 'password-change' token."""

    # ISO-8601 UTC timestamp, e.g. 2018-12-31T23:59:59Z
    RE_TIMESTAMP = '^20[0-9]{2}-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01])T([01][0-9]|2[0-3]):[0-5][0-9]:[0-5][0-9]Z$'

    def setUp(self):
        self.oToken = TokenReader()
        self.oToken.config('./resources/backend-private.pem', './resources/frontend-public.pem')
        if self.oToken.readToken('./tmp/password-change.token'):
            self.skipTest('Failed to read token')

    def testType(self):
        self.assertIn('type', self.oToken.keys())
        self.assertEqual(self.oToken['type'], 'password-change')

    def testTimestamp(self):
        self.assertIn('timestamp', self.oToken.keys())
        self.assertRegex(self.oToken['timestamp'], self.RE_TIMESTAMP)

    def testUsername(self):
        self.assertIn('username', self.oToken.keys())
        self.assertEqual(self.oToken['username'], 'test-Benützername')

    def testPasswordNew(self):
        self.assertIn('password-new', self.oToken.keys())
        self.assertEqual(self.oToken['password-new'], 'test-Paßw0rt_new')

    def testPasswordOld(self):
        self.assertIn('password-old', self.oToken.keys())
        self.assertEqual(self.oToken['password-old'], 'test-Paßw0rt_old')

    def testPasswordNonce(self):
        self.assertIn('password-nonce', self.oToken.keys())
        self.assertEqual(self.oToken['password-nonce'], 'test-Paßw0rt_nonce')
class testTokenReader_PasswordReset(UT.TestCase):
    """Check the payload fields of a decrypted 'password-reset' token."""

    # ISO-8601 UTC timestamp, e.g. 2018-12-31T23:59:59Z
    RE_TIMESTAMP = '^20[0-9]{2}-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01])T([01][0-9]|2[0-3]):[0-5][0-9]:[0-5][0-9]Z$'

    def setUp(self):
        self.oToken = TokenReader()
        self.oToken.config('./resources/backend-private.pem', './resources/frontend-public.pem')
        if self.oToken.readToken('./tmp/password-reset.token'):
            self.skipTest('Failed to read token')

    def testType(self):
        self.assertIn('type', self.oToken.keys())
        self.assertEqual(self.oToken['type'], 'password-reset')

    def testTimestamp(self):
        self.assertIn('timestamp', self.oToken.keys())
        self.assertRegex(self.oToken['timestamp'], self.RE_TIMESTAMP)

    def testUsername(self):
        self.assertIn('username', self.oToken.keys())
        self.assertEqual(self.oToken['username'], 'test-Benützername')

    def testPasswordNew(self):
        self.assertIn('password-new', self.oToken.keys())
        self.assertEqual(self.oToken['password-new'], 'test-Paßw0rt_new')

    def testPasswordNonce(self):
        self.assertIn('password-nonce', self.oToken.keys())
        self.assertEqual(self.oToken['password-nonce'], 'test-Paßw0rt_nonce')
class testTokenReader_PasswordNonce(UT.TestCase):
    """Check the payload fields of a decrypted 'password-nonce' token (frontend side)."""

    # ISO-8601 UTC timestamp, e.g. 2018-12-31T23:59:59Z (also used for 'expiration')
    RE_TIMESTAMP = '^20[0-9]{2}-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01])T([01][0-9]|2[0-3]):[0-5][0-9]:[0-5][0-9]Z$'

    def setUp(self):
        self.oToken = TokenReader()
        self.oToken.config('./resources/frontend-private.pem', './resources/backend-public.pem')
        if self.oToken.readToken('./tmp/password-nonce.token'):
            self.skipTest('Failed to read token')

    def testType(self):
        self.assertIn('type', self.oToken.keys())
        self.assertEqual(self.oToken['type'], 'password-nonce')

    def testTimestamp(self):
        self.assertIn('timestamp', self.oToken.keys())
        self.assertRegex(self.oToken['timestamp'], self.RE_TIMESTAMP)

    def testExpiration(self):
        self.assertIn('expiration', self.oToken.keys())
        self.assertRegex(self.oToken['expiration'], self.RE_TIMESTAMP)

    def testUsername(self):
        self.assertIn('username', self.oToken.keys())
        self.assertEqual(self.oToken['username'], 'test-Benützername')

    def testPasswordNonceId(self):
        self.assertIn('password-nonce-id', self.oToken.keys())
        self.assertEqual(self.oToken['password-nonce-id'], 'test')

    def testPasswordNonceSecret(self):
        # Only presence is asserted - the secret value is random per token
        self.assertIn('password-nonce-secret', self.oToken.keys())
#------------------------------------------------------------------------------
# MAIN
#------------------------------------------------------------------------------
if __name__ == '__main__':
    #UT.main()
    # UT.makeSuite() is deprecated since Python 3.11 and removed in 3.13;
    # TestLoader.loadTestsFromTestCase() is the long-standing supported equivalent.
    oTestLoader = UT.TestLoader()
    oTestSuite = UT.TestSuite()
    for oTestCase in (testTokenReader_ReadToken,
                      testTokenReader_PasswordNonceRequest,
                      testTokenReader_PasswordChange,
                      testTokenReader_PasswordReset,
                      testTokenReader_PasswordNonce):
        oTestSuite.addTest(oTestLoader.loadTestsFromTestCase(oTestCase))
    oTestResult = UT.TextTestRunner(verbosity=2).run(oTestSuite)
    # Exit 0 on full success, 1 otherwise (for CI/scripted use)
    sys.exit(0 if oTestResult.wasSuccessful() else 1)
| alex-dot/upwdchg | tests/python-tokenreader-test.py | Python | gpl-3.0 | 7,822 |
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have purchased from
# Numenta, Inc. a separate commercial license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Template file used by the OPF Experiment Generator to generate the actual
description.py file by replacing $XXXXXXXX tokens with desired values.
"""
from nupic.frameworks.opf.expdescriptionapi import ExperimentDescriptionAPI
from nupic.frameworks.opf.expdescriptionhelpers import (
updateConfigFromSubConfig,
applyValueGettersToContainer,
DeferredDictLookup)
from nupic.frameworks.opf.clamodelcallbacks import *
from nupic.frameworks.opf.metrics import MetricSpec
from nupic.frameworks.opf.opfutils import (InferenceType,
InferenceElement)
from nupic.support import aggregationDivide
from nupic.frameworks.opf.opftaskdriver import (
IterationPhaseSpecLearnOnly,
IterationPhaseSpecInferOnly,
IterationPhaseSpecLearnAndInfer)
# Model Configuration Dictionary:
#
# Define the model parameters and adjust for any modifications if imported
# from a sub-experiment.
#
# These fields might be modified by a sub-experiment; this dict is passed
# between the sub-experiment and base experiment
#
#
# NOTE: Use of DEFERRED VALUE-GETTERs: dictionary fields and list elements
# within the config dictionary may be assigned futures derived from the
# ValueGetterBase class, such as DeferredDictLookup.
# This facility is particularly handy for enabling substitution of values in
# the config dictionary from other values in the config dictionary, which is
# needed by permutation.py-based experiments. These values will be resolved
# during the call to applyValueGettersToContainer(),
# which we call after the base experiment's config dictionary is updated from
# the sub-experiment. See ValueGetterBase and
# DeferredDictLookup for more details about value-getters.
#
# For each custom encoder parameter to be exposed to the sub-experiment/
# permutation overrides, define a variable in this section, using key names
# beginning with a single underscore character to avoid collisions with
# pre-defined keys (e.g., _dsEncoderFieldName2_N).
#
# Example:
# config = dict(
# _dsEncoderFieldName2_N = 70,
# _dsEncoderFieldName2_W = 5,
# dsEncoderSchema = [
# base=dict(
# fieldname='Name2', type='ScalarEncoder',
# name='Name2', minval=0, maxval=270, clipInput=True,
# n=DeferredDictLookup('_dsEncoderFieldName2_N'),
# w=DeferredDictLookup('_dsEncoderFieldName2_W')),
# ],
# )
# updateConfigFromSubConfig(config)
# applyValueGettersToContainer(config)
# Model configuration for a nontemporal (spatial) classification experiment.
# NOTE: this is a flat, order-dependent configuration script: build `config`,
# let a sub-experiment override parts of it, resolve deferred value-getters,
# then build the `control` section and the experiment description object.
config = {
    # Type of model that the rest of these parameters apply to.
    'model': "CLA",

    # Version that specifies the format of the config.
    'version': 1,

    # Intermediate variables used to compute fields in modelParams and also
    # referenced from the control section.
    'aggregationInfo': {'days': 0,
                        'fields': [],
                        'hours': 0,
                        'microseconds': 0,
                        'milliseconds': 0,
                        'minutes': 0,
                        'months': 0,
                        'seconds': 0,
                        'weeks': 0,
                        'years': 0},

    'predictAheadTime': None,

    # Model parameter dictionary.
    'modelParams': {
        # The type of inference that this model will perform
        'inferenceType': 'NontemporalClassification',

        'sensorParams': {
            # Sensor diagnostic output verbosity control;
            # if > 0: sensor region will print out on screen what it's
            # sensing at each step 0: silent; >=1: some info; >=2: more info;
            # >=3: even more info (see compute() in
            # py/regions/RecordSensor.py)
            'verbosity': 0,

            # Example:
            #   dsEncoderSchema = [
            #       DeferredDictLookup('__field_name_encoder'),
            #   ],
            #
            # (value generated from DS_ENCODER_SCHEMA)
            'encoders': {
                u'field1': {
                    'fieldname': u'field1',
                    'n': 121,
                    'name': u'field1',
                    'type': 'SDRCategoryEncoder',
                    'w': 21},
                u'classification': {
                    # classifierOnly: fed to the classifier, not the network
                    'classifierOnly': True,
                    'fieldname': u'classification',
                    'n': 121,
                    'name': u'classification',
                    'type': 'SDRCategoryEncoder',
                    'w': 21},
            },

            # A dictionary specifying the period for automatically-generated
            # resets from a RecordSensor;
            #
            # None = disable automatically-generated resets (also disabled if
            # all of the specified values evaluate to 0).
            # Valid keys is the desired combination of the following:
            # days, hours, minutes, seconds, milliseconds, microseconds,
            # weeks
            #
            # Example for 1.5 days: sensorAutoReset = dict(days=1,hours=12),
            #
            # (value generated from SENSOR_AUTO_RESET)
            'sensorAutoReset': None,
        },

        'spEnable': False,

        'spParams': {
            # SP diagnostic output verbosity control;
            # 0: silent; >=1: some info; >=2: more info;
            'spVerbosity': 0,

            'globalInhibition': 1,

            # Number of cell columns in the cortical region (same number for
            # SP and TP)
            # (see also tpNCellsPerCol)
            'columnCount': 2048,

            'inputWidth': 0,

            # SP inhibition control (absolute value);
            # Maximum number of active columns in the SP region's output
            # (when there are more, the weaker ones are suppressed)
            'numActivePerInhArea': 40,

            'seed': 1956,

            # coincInputPoolPct
            # What percent of the columns's receptive field is available
            # for potential synapses. At initialization time, we will
            # choose coincInputPoolPct * (2*coincInputRadius+1)^2
            'coincInputPoolPct': 0.5,

            # The default connected threshold. Any synapse whose
            # permanence value is above the connected threshold is
            # a "connected synapse", meaning it can contribute to the
            # cell's firing. Typical value is 0.10. Cells whose activity
            # level before inhibition falls below minDutyCycleBeforeInh
            # will have their own internal synPermConnectedCell
            # threshold set below this default value.
            # (This concept applies to both SP and TP and so 'cells'
            # is correct here as opposed to 'columns')
            'synPermConnected': 0.1,

            'synPermActiveInc': 0.1,

            'synPermInactiveDec': 0.01,

            'randomSP': 0,
        },

        # Controls whether TP is enabled or disabled;
        # TP is necessary for making temporal predictions, such as
        # predicting the next inputs. Without TP, the model is only capable
        # of reconstructing missing sensor inputs (via SP).
        'tpEnable': False,

        'tpParams': {
            # TP diagnostic output verbosity control;
            # 0: silent; [1..6]: increasing levels of verbosity
            # (see verbosity in nta/trunk/py/nupic/research/TP.py and
            # TP10X*.py)
            'verbosity': 0,

            # Number of cell columns in the cortical region (same number for
            # SP and TP)
            # (see also tpNCellsPerCol)
            'columnCount': 2048,

            # The number of cells (i.e., states), allocated per column.
            'cellsPerColumn': 32,

            'inputWidth': 2048,

            'seed': 1960,

            # Temporal Pooler implementation selector (see _getTPClass in
            # CLARegion.py).
            'temporalImp': 'cpp',

            # New Synapse formation count
            # NOTE: If None, use spNumActivePerInhArea
            #
            # TODO: need better explanation
            'newSynapseCount': 20,

            # Maximum number of synapses per segment
            #  > 0 for fixed-size CLA
            # -1 for non-fixed-size CLA
            #
            # TODO: for Ron: once the appropriate value is placed in TP
            # constructor, see if we should eliminate this parameter from
            # description.py.
            'maxSynapsesPerSegment': 32,

            # Maximum number of segments per cell
            #  > 0 for fixed-size CLA
            # -1 for non-fixed-size CLA
            #
            # TODO: for Ron: once the appropriate value is placed in TP
            # constructor, see if we should eliminate this parameter from
            # description.py.
            'maxSegmentsPerCell': 128,

            # Initial Permanence
            # TODO: need better explanation
            'initialPerm': 0.21,

            # Permanence Increment
            'permanenceInc': 0.1,

            # Permanence Decrement
            # If set to None, will automatically default to tpPermanenceInc
            # value.
            'permanenceDec': 0.1,

            'globalDecay': 0.0,

            'maxAge': 0,

            # Minimum number of active synapses for a segment to be
            # considered during search for the best-matching segments.
            # None=use default
            # Replaces: tpMinThreshold
            'minThreshold': 12,

            # Segment activation threshold.
            # A segment is active if it has >= tpSegmentActivationThreshold
            # connected synapses that are active due to infActiveState
            # None=use default
            # Replaces: tpActivationThreshold
            'activationThreshold': 16,

            'outputType': 'normal',

            # "Pay Attention Mode" length. This tells the TP how many new
            # elements to append to the end of a learned sequence at a time.
            # Smaller values are better for datasets with short sequences,
            # higher values are better for datasets with long sequences.
            'pamLength': 1,
        },

        'clParams': {
            'regionName': 'CLAClassifierRegion',

            # Classifier diagnostic output verbosity control;
            # 0: silent; [1..6]: increasing levels of verbosity
            'clVerbosity': 0,

            # This controls how fast the classifier learns/forgets. Higher
            # values make it adapt faster and forget older patterns faster.
            'alpha': 0.001,

            # This is set after the call to updateConfigFromSubConfig and is
            # computed from the aggregationInfo and predictAheadTime.
            'steps': '0',
        },

        'anomalyParams': {
            u'anomalyCacheRecords': None,
            u'autoDetectThreshold': None,
            u'autoDetectWaitRecords': None
        },

        'trainSPNetOnlyIfRequested': False,
    },

    # Filled in by the sub-experiment that imports this base description.
    'dataSource': 'fillInBySubExperiment',
    'errorMetric': 'fillInBySubExperiment'
}
# end of config dictionary

# Adjust base config dictionary for any modifications if imported from a
# sub-experiment
updateConfigFromSubConfig(config)

# Compute predictionSteps based on the predictAheadTime and the aggregation
# period, which may be permuted over.
if config['predictAheadTime'] is not None:
    predictionSteps = int(round(aggregationDivide(
        config['predictAheadTime'], config['aggregationInfo'])))
    assert (predictionSteps >= 1)
    config['modelParams']['clParams']['steps'] = str(predictionSteps)

# Adjust config by applying ValueGetterBase-derived futures.
# NOTE: this MUST be called after updateConfigFromSubConfig() in order
# to support value-getter-based substitutions from the sub-experiment
# (if any)
applyValueGettersToContainer(config)

control = {
    # The environment that the current model is being run in
    "environment": 'grok',

    # Input stream specification per
    # py/nupic/frameworks/opf/jsonschema/stream_def.json.
    'dataset': {u'info': u'testSpatialClassification',
                u'streams': [{u'columns': [u'*'],
                              u'info': u'spatialClassification',
                              u'source': config['dataSource']}],
                u'version': 1},

    # Iteration count: maximum number of iterations. Each iteration
    # corresponds to one record from the (possibly aggregated) dataset. The
    # task is terminated when either number of iterations reaches
    # iterationCount or all records in the (possibly aggregated) database
    # have been processed, whichever occurs first.
    #
    # iterationCount of -1 = iterate over the entire dataset
    'iterationCount': -1,

    # A dictionary containing all the supplementary parameters for inference
    "inferenceArgs": {u'predictedField': u'classification',
                      u'predictionSteps': [0]},

    # Metrics: A list of MetricSpecs that instantiate the metrics that are
    # computed for this experiment
    'metrics': [
        MetricSpec(field='classification', metric='multiStep',
                   inferenceElement='multiStepBestPredictions',
                   params={'errorMetric': config['errorMetric'],
                           'window': 100,
                           'steps': 0}),
    ],

    # Logged Metrics: A sequence of regular expressions that specify which
    # of the metrics from the Inference Specifications section MUST be
    # logged for every prediction. The regex's correspond to the
    # automatically generated metric labels. This is similar to the way the
    # optimization metric is specified in permutations.py.
    'loggedMetrics': ['.*'],
}

################################################################################
################################################################################

# Hand the assembled model configuration and control sections to the OPF.
descriptionInterface = ExperimentDescriptionAPI(modelConfig=config,
                                                control=control)
| tkaitchuck/nupic | examples/opf/experiments/spatial_classification/base/description.py | Python | gpl-3.0 | 14,847 |
../../../../../share/pyshared/twisted/test/test_usage.py | Alberto-Beralix/Beralix | i386-squashfs-root/usr/lib/python2.7/dist-packages/twisted/test/test_usage.py | Python | gpl-3.0 | 56 |
from .solvers import pack_vectors
| marklee77/vpack | vectorpack/openopt/__init__.py | Python | gpl-3.0 | 34 |
# -*- coding: utf-8 -*-
# diceware_list -- generate wordlists for diceware
# Copyright (C) 2016-2019. Uli Fouquet
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Tests for libwordlist module
from __future__ import unicode_literals
try:
from urllib.request import urlopen, URLError # python 3.x
except ImportError: # pragma: no cover
from urllib2 import urlopen, URLError # python 2.x
from io import StringIO
import codecs
import decimal
import gzip
import random
import pytest
import sys
from diceware_list import DEFAULT_CHARS
from diceware_list.libwordlist import (
alpha_dist, base10_to_n, filter_chars, base_terms_iterator,
idx_to_dicenums, min_width_iter, normalize, shuffle_max_width_items,
term_iterator, paths_iterator, is_prefix_code, get_matching_prefixes,
get_prefixes, strip_matching_prefixes, flatten_prefix_tree,
AndroidWordList, entropy_per_char_bruteforce, min_word_length,
min_length_iter
)
# A gzip byte stream with an empty payload (the header carries the original
# filename "sample_emtpy" [sic]); used by tests that exercise handling of
# empty wordlist files.
EMPTY_GZ_FILE = (
    b'\x1f\x8b\x08\x08\xea\xc1\xecY\x02\xffsample_emtpy'
    b'\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00')
def ggsource_unreachable():
    """Return ``True`` if https://android.googlesource.com is unreachable.

    Tests that need network access to that host may be skipped when this
    returns ``True``.
    """
    # Local import keeps the module-level import block unchanged.
    from contextlib import closing
    try:
        # FIX: close the HTTP response explicitly. The previous code left
        # the urlopen() result (and its socket) open. urlopen objects are
        # not context managers on Python 2, so use contextlib.closing.
        with closing(urlopen('https://android.googlesource.com/')) as conn:
            conn.read()
    except URLError:
        return True
    return False
def test_base10_to_n():
    # integers are converted into digit lists of the requested base
    cases = [
        (0, 2, [0]),
        (1, 2, [1]),
        (2, 2, [1, 0]),
        (3, 2, [1, 1]),
        (7775, 6, [5, 5, 5, 5, 5]),
        (0, 6, [0]),
        (1, 6, [1]),
        (6, 6, [1, 0]),
        (34, 6, [5, 4]),
        (35, 6, [5, 5]),
        (37, 6, [1, 0, 1]),
        (38, 6, [1, 0, 2]),
        (255, 16, [15, 15]),
        (256, 16, [1, 0, 0]),
    ]
    for number, base, digits in cases:
        assert base10_to_n(number, base) == digits
def test_filter_chars():
    # terms containing chars outside the allowed set are dropped entirely
    cases = [
        ([], []),
        (["a", "b"], ["a", "b"]),
        (["ä"], []),
        (["a", "ä"], ["a"]),
        (["ä", "a"], ["a"]),
        (["a", "ä", "b"], ["a", "b"]),
        (["a", "aä", "bö"], ["a"]),
        ([u"a", u"ä"], [u"a"]),
    ]
    for terms, survivors in cases:
        assert list(filter_chars(terms, DEFAULT_CHARS)) == survivors
def test_filter_chars_all_allowed():
    # passing None as allowed charset disables filtering completely
    result = list(filter_chars(['ä'], None))
    assert result == ['ä']
def test_idx_to_dicenums():
    # list indexes are translated into sequences of dice throws
    for idx, dice, expected in [
            (0, 5, "1-1-1-1-1"),
            (1, 5, "1-1-1-1-2"),
            (7774, 5, "6-6-6-6-5"),
            (7775, 5, "6-6-6-6-6")]:
        assert idx_to_dicenums(idx, dice) == expected
    # the number of dice sides influences the result
    assert idx_to_dicenums(0, 4, 4) == "1-1-1-1"
    assert idx_to_dicenums(255, 4, 4) == "4-4-4-4"
    assert idx_to_dicenums(255, 4) == "2-2-1-4"
    # the separator string is configurable (default is a dash)
    assert idx_to_dicenums(0, 3) == "1-1-1"
    assert idx_to_dicenums(0, 3, separator="sep") == "1sep1sep1"
    assert idx_to_dicenums(0, 3, separator="") == "111"
def test_idx_to_dicenums_gives_text():
    # the returned value is a text string (i.e. unicode under Python 2)
    assert isinstance(idx_to_dicenums(0, 5), type('text'))
def test_min_width_iter(monkeypatch):
    # the desired number of shortest terms is picked from the input
    monkeypatch.setattr(random, "shuffle", lambda x: x)  # stable order
    cases = [
        (["bb", "a", "ccc", "dd"], 3, ["a", "bb", "dd"]),
        (["c", "a", "b"], 2, ["a", "b"]),
        (["c", "a", "b"], 3, ["a", "b", "c"]),
        (["a", "c", "bb"], 2, ["a", "c"]),
        (["a", "cc", "b"], 2, ["a", "b"]),
        (["aa", "c", "bb"], 2, ["c", "aa"]),
    ]
    for terms, num, expected in cases:
        assert list(min_width_iter(terms, num)) == expected
def test_min_length_iter():
    # terms shorter than the given minimum length are filtered out
    assert list(min_length_iter(iter([]))) == []
    assert list(min_length_iter(iter([]), 1)) == []
    survivors = list(min_length_iter(iter(["a", "bb", "ccc"]), 2))
    assert survivors == ["bb", "ccc"]
def test_min_width_iter_shuffle_max_widths_values(monkeypatch):
    # terms of maximum width are shuffled before being picked
    monkeypatch.setattr(random, "shuffle", lambda x: x.reverse())
    for terms, expected in [
            (["a", "aa", "bb"], ["a", "bb"]),
            (["bbb", "aa", "a"], ["a", "aa"]),
            (["aa", "a"], ["a", "aa"])]:
        result = list(min_width_iter(terms, 2, shuffle_max_width=True))
        assert result == expected
def test_min_width_iter_discards_min_len_values(monkeypatch):
    # terms shorter than `min_len` never make it into the result
    monkeypatch.setattr(random, "shuffle", lambda x: x.reverse())

    def pick(**kw):
        # fresh input list per call: the patched shuffle mutates its arg
        return sorted(min_width_iter(
            ['a', 'aa', 'b', 'ddd', 'ccc'], 2, **kw))

    assert pick(shuffle_max_width=False, min_len=1) == ['a', 'b']
    assert pick(shuffle_max_width=False, min_len=2) == ['aa', 'ccc']
    assert pick(shuffle_max_width=True, min_len=1) == ['a', 'b']
    # with shuffling enabled either max-width term may be picked
    assert pick(shuffle_max_width=True, min_len=2) in (
        ['aa', 'ccc'], ['aa', 'ddd'])
def test_normalize():
    # non-ASCII chars are transliterated to their ASCII representation
    pairs = [
        ("ªºÀÁÂÃÄÅÆ", "aoAAAAAEAAE"),
        ("ÇÈÉÊËÌÍÎÏ", "CEEEEIIII"),
        ("ÒÓÔÕÖØÙÚÛÜ", "OOOOOEOEUUUUE"),
        ("ĐđÐÑÝßàáâãäåæçèéêë", "DdDNYssaaaaaeaaeceeee"),
        ("ìíîïñòóôõöøùúûüý", "iiiinoooooeoeuuuuey"),
        ("ÿĀāĂ㥹ĆćĈĉĊċČčĎď", "yAaAaAaCcCcCcCcDd"),
        ("ĒēĔĕĖėĘęĚěĜĝĞğĠġĢģ", "EeEeEeEeEeGgGgGgGg"),
        ("ĤĥĨĩĪīĬĭĮįİĒēĔĕĖė", "HhIiIiIiIiIEeEeEe"),
        ("ĘęĚěĜĝĞğĠġĢģĤĥ", "EeEeGgGgGgGgHh"),
        ("ĨĩĪīĬĭĮįİIJijĴĵĶķ", "IiIiIiIiIIJijJjKk"),
        ("ĹĺĻļĽľĿŀŃńŅņŇňŌō", "LlLlLlL·l·NnNnNnOo"),
        ("ŎŏŐőŔŕŖŗŘřŚśŜŝŞşŠš", "OoOoRrRrRrSsSsSsSs"),
        ("ŢţŤťŨũŪūŬŭŮůŰűŲų", "TtTtUuUuUuUuUuUu"),
        ("ŴŵŶŷŸŹźŻżŽžſ", "WwYyYZzZzZzs"),
    ]
    # not covered: "þĦħĦħıĸŁłŊŋʼnŒœŦŧƀƁƂƃƄƅƆƇƈƉƊƋƌƍ"
    for given, expected in pairs:
        assert normalize(given) == expected
    # German sharp s / umlauts get the usual two-letter expansion
    assert normalize("mäßig") == "maessig"
def test_normalize_gives_text():
    # normalize always returns text (unicode) strings
    for given in ("far", "fär", str("far")):
        assert isinstance(normalize(given), type("text"))
def test_shuffle_max_width_items(monkeypatch):
    # only the items of maximum width are shuffled; others keep order.
    # Install a predictable pseudo-shuffler that reverses its argument,
    # so the max-width tail comes back in reverse order.
    monkeypatch.setattr(random, "shuffle", lambda x: x.reverse())
    # an ordered input list
    assert list(shuffle_max_width_items(["a", "aa", "bb", "cc"])) == [
        "a", "cc", "bb", "aa"]
    # an unordered input list
    assert list(shuffle_max_width_items(["aa", "d", "bb", "a", "cc"])) == [
        "d", "a", "cc", "bb", "aa"]
    # items wider than `max_width` are excluded from the output
    shuffled = list(shuffle_max_width_items(
        ["eeee", "bb", "ccc", "aa", "ddd"], max_width=3))
    assert "eeee" not in shuffled
    # a list where all items share one width is reversed entirely
    assert list(shuffle_max_width_items(["aa", "bb", "cc"])) == [
        "cc", "bb", "aa"]
def test_shuffle_max_width_items_copes_with_files(monkeypatch, tmpdir):
    # open file objects are accepted as input for max-width shuffling
    monkeypatch.setattr(random, "shuffle", lambda x: x.reverse())
    wordfile = tmpdir.join("wlist.txt")
    wordfile.write(b"a\nbb\ncc")
    with open(str(wordfile), "rb") as fd:
        shuffled = list(shuffle_max_width_items(fd))
    assert shuffled == [b"a", b"cc", b"bb"]
def test_base_terms_iterator():
    # the iterator over base terms yields entries of the built-in lists
    collected = list(base_terms_iterator())
    for term in ("a2", "9z", "0", "zzzz"):
        assert term in collected
def test_base_terms_iterator_option_use_kit():
    # `use_kit` controls whether dicewarekit terms join the diceware416 ones
    without_kit = list(base_terms_iterator(use_kit=False))
    with_kit = list(base_terms_iterator(use_kit=True))
    assert "yyyy" not in without_kit
    assert "a2" in without_kit
    assert "yyyy" in with_kit
    assert "a2" in with_kit
class TestTermIterator(object):
    """Tests for `term_iterator`, which yields terms from open files."""

    def test_term_iterator(self, tmpdir):
        # terms of a single file are yielded one by one
        wordfile = tmpdir.join("wlist.txt")
        wordfile.write(b"a\nb\nc")
        with open(str(wordfile), "rb") as fd:
            terms = list(term_iterator([fd, ]))
        assert terms == [b"a", b"b", b"c"]

    def test_term_iterator_multiple_files(self, tmpdir):
        # several input files are processed one after the other
        first = tmpdir.join("wlist1.txt")
        second = tmpdir.join("wlist2.txt")
        first.write(b"a1\nb1\nc1")
        second.write(b"a2\nb2\nc2")
        with open(str(first), "rb") as fd1:
            with open(str(second), "rb") as fd2:
                terms = list(term_iterator([fd1, fd2]))
        assert terms == [b"a1", b"b1", b"c1", b"a2", b"b2", b"c2"]

    def test_term_iterator_handles_umlauts(self, tmpdir):
        # non-ASCII terms are passed through undamaged
        wordfile = tmpdir.join("wlist.txt")
        wordfile.write_text(u"ä\nö\n", "utf-8")
        with codecs.open(str(wordfile), "r", "utf-8") as fd:
            terms = list(term_iterator([fd, ]))
        assert terms == ["ä", "ö"]

    def test_term_iterator_ignores_empty_lines(self, tmpdir):
        # blank lines in the input never show up as terms
        wordfile = tmpdir.join("wlist.txt")
        wordfile.write("foo\n\nbar\n\n")
        with open(str(wordfile), "r") as fd:
            terms = list(term_iterator([fd, ]))
        assert terms == ["foo", "bar"]
class TestPathsIterator(object):
    """Tests for `paths_iterator`, which yields terms from path names."""

    def test_paths_iterator(self, tmpdir):
        # terms are read from the file a path points to
        wordfile = tmpdir.join("wlist.txt")
        wordfile.write(b"a\nb\nc")
        assert list(paths_iterator([str(wordfile), ])) == ["a", "b", "c"]

    def test_multiple_paths(self, tmpdir):
        # several paths are processed in the given order
        first = tmpdir.join("wlist1.txt")
        second = tmpdir.join("wlits2.txt")
        first.write(b"a\nb")
        second.write(b"c\nd")
        terms = list(paths_iterator([str(first), str(second)]))
        assert terms == ["a", "b", "c", "d"]

    def test_read_stdin(self, tmpdir, argv_handler):
        # a dash as filename makes the iterator read from stdin
        sys.stdin = StringIO('term1\nterm2\näöü\n')
        assert list(paths_iterator('-')) == ['term1', 'term2', 'äöü']
class TestIsPrefixCode(object):
    """Tests for `is_prefix_code`, the prefix-code detector."""

    def test_is_prefix_code(self):
        # genuine prefix codes are accepted, clashes rejected
        assert is_prefix_code(["aa", "ab", "ac"]) is True
        assert is_prefix_code([]) is True
        for clashing in (
                ["a", "ab", "c"],
                ["a", "c", "ab"],
                ["aa", "b", "a"],  # input order does not matter
                ["a", "a"]):       # duplicates clash with themselves
            assert is_prefix_code(clashing) is False

    def test_is_prefix_code_sorted_input(self):
        # presorted input may be declared as such to skip sorting
        assert is_prefix_code(["a", "aa", "b"], is_sorted=True) is False
        assert is_prefix_code(["b", "c", "d"], is_sorted=True) is True
        assert is_prefix_code(["b", "a"], is_sorted=False) is True
        # behavior for unsorted input with is_sorted=True is undefined

    def test_is_prefix_code_accepts_iter(self):
        # plain iterators (not only iterables) work as input
        assert is_prefix_code(iter(["a", "b", "c"])) is True
        assert is_prefix_code(iter(["aa", "a"])) is False

    def test_is_prefix_code_non_destructive(self):
        # input lists are left untouched, presorted or not
        unsorted_terms = ["d", "b", "c"]
        is_prefix_code(unsorted_terms, is_sorted=False)
        assert unsorted_terms == ["d", "b", "c"]
        sorted_terms = ["a", "b", "c"]
        is_prefix_code(sorted_terms, is_sorted=True)
        assert sorted_terms == ["a", "b", "c"]

    def test_is_prefix_code_non_ascii(self):
        # umlauts and other non-ASCII chars are handled like any char
        assert is_prefix_code(["z", "ä", "y", "äh"]) is False
        assert is_prefix_code(["a", "äh"]) is True
class TestGetMatchingPrefixes(object):
    """Tests for `get_matching_prefixes`, which yields (prefix, term) pairs."""

    def test_get_matching_prefixes(self):
        # all pairs where one term is a prefix of another are found
        assert list(get_matching_prefixes([])) == []
        assert list(get_matching_prefixes(["a", "aa", "ab", "b", "x"])) == [
            ("a", "aa"), ("a", "ab")]
        assert list(get_matching_prefixes(["a", "aa"])) == [("a", "aa")]
        assert list(get_matching_prefixes(["b", "aa", "a"])) == [("a", "aa")]

    def test_get_matching_prefixes_sorted_input(self):
        # we can tell the function that input is already sorted
        assert list(
            get_matching_prefixes(["a", "aa", "ab"], is_sorted=True)) == [
            ("a", "aa"), ("a", "ab")]
        assert list(get_matching_prefixes(["aa", "a"], is_sorted=False)) == [
            ("a", "aa")]
        assert list(
            get_matching_prefixes(["a", "aa", "aaa"], is_sorted=True)) == [
            ("a", "aa"), ("a", "aaa"), ("aa", "aaa")]
        assert list(
            get_matching_prefixes(["a", "aa", "aaa", "aaaa"], is_sorted=True)
            ) == [
            ("a", "aa"), ("a", "aaa"), ("a", "aaaa"), ("aa", "aaa"),
            ("aa", "aaaa"), ("aaa", "aaaa")]

    def test_get_matching_prefixes_non_destructive(self):
        # the given input will not be changed.
        iterable = ["a", "aa", "c"]
        list(get_matching_prefixes(iterable, is_sorted=False))
        assert iterable == ["a", "aa", "c"]
        list(get_matching_prefixes(iterable, is_sorted=True))
        assert iterable == ["a", "aa", "c"]

    def test_get_matching_prefixes_non_ascii(self):
        # get_matching_prefixes copes with umlauts etc.
        # BUG FIX: these two comparisons lacked the `assert` keyword (and a
        # surrounding list()), so they were evaluated and discarded without
        # ever checking anything.
        assert list(
            get_matching_prefixes(["a", "ä", "ö"], is_sorted=False)) == []
        assert list(
            get_matching_prefixes(["a", "ä", "äh"], is_sorted=False)) == [
            ("ä", "äh")]
class TestStrinMatchingPrefixes(object):
    """Tests for `strip_matching_prefixes`.

    NOTE(review): the class name looks like a typo for
    ``TestStripMatchingPrefixes``; kept as-is to preserve the interface.
    """

    def test_strip_matching_prefixes(self):
        # stripping turns arbitrary input into a prefix code
        for terms, expected in [
                (["a", "aa", "b"], ["a", "b"]),
                (["aa", "a", "b"], ["a", "b"]),
                (["a", "aa"], ["a"]),
                (["aa", "a"], ["a"])]:
            assert list(strip_matching_prefixes(
                terms, is_sorted=False, prefer_short=True)) == expected

    def test_strip_matching_prefixes_empty(self):
        # empty input iterables are no problem
        assert list(strip_matching_prefixes([], is_sorted=True)) == []

    def test_strip_matching_prefixes_non_destructive(self):
        # the input list stays unmodified
        terms = ["b", "a", "aa"]
        stripped = list(strip_matching_prefixes(terms, is_sorted=False))
        assert terms == ["b", "a", "aa"]  # unchanged
        assert stripped == ["a", "b"]

    def test_strip_matching_prefixes_prefer_short(self):
        # `prefer_short` decides which of two clashing terms survives
        terms = ["a", "aa", "b"]
        assert list(strip_matching_prefixes(
            terms, is_sorted=False, prefer_short=True)) == ["a", "b"]
        assert list(strip_matching_prefixes(
            terms, is_sorted=False, prefer_short=False)) == ["aa", "b"]
        assert list(strip_matching_prefixes(
            ["a", "aa", "ab", "c"], is_sorted=True, prefer_short=True)
            ) == ["a", "c"]

    def test_strip_matching_prefixes_third_nesting_level(self):
        # deeply nested prefixes are resolved as well
        assert list(strip_matching_prefixes(
            ["a", "aa", "aaa"], prefer_short=False)) == ["aaa"]
        assert list(strip_matching_prefixes(
            ["a", "aa", "aaa"], prefer_short=True)) == ["a"]
def test_get_prefixes():
    # terms are arranged into tree-like nested lists of prefixed terms
    assert get_prefixes([]) == []
    assert get_prefixes(["a"]) == [["a"]]
    assert get_prefixes(["a", "b"]) == [["a"], ["b"]]
    assert get_prefixes(["a", "ab"]) == [["a", ["ab"]]]
    assert get_prefixes(["a", "aa", "b"]) == [["a", ["aa"]], ["b"]]
    assert get_prefixes(["a", "b", "ba"]) == [["a"], ["b", ["ba"]]]
    deep = get_prefixes(["a", "aa", "aaa", "ab"])
    assert deep == [['a', ['aa', ['aaa']], ['ab']]]
    wide = get_prefixes(["a", "aa", "aaa", "ab", "ac"])
    assert wide == [['a', ['aa', ['aaa']], ['ab'], ['ac']]]
def test_flatten_prefix_tree():
    # prefix trees can be flattened back into plain term lists
    assert flatten_prefix_tree([["a"], ["b"]]) == ["a", "b"]
    # the shorter term of a clashing pair wins by default ...
    assert flatten_prefix_tree([["a", ["ab"]]]) == ["a"]
    # ... the longer one when `prefer_short` is disabled
    assert flatten_prefix_tree([["a", ["ab"]]], prefer_short=False) == ["ab"]
    deep_tree = [['a', ['aa', ['aaa']], ['ab'], ['ac']]]
    assert flatten_prefix_tree(deep_tree, prefer_short=False) == [
        'aaa', 'ab', 'ac']
def test_alpha_dist():
    # character frequencies are computed over whole wordlists
    assert alpha_dist([]) == {}
    assert alpha_dist(['a', 'b']) == {'a': 1, 'b': 1}
    assert alpha_dist(['ab', 'b']) == {'a': 1, 'b': 2}
def test_entropy_per_char_bruteforce():
    # per-char entropy for a plain brute-force attack is computed
    decimal.getcontext().prec = 3
    expectations = [
        (['ab', ], decimal.Decimal(1.0)),
        (['a', 'b'], decimal.Decimal(1.0)),
        (['aaa', 'b'], decimal.Decimal('0.811')),
        (['ab', 'bc', 'cd', 'da'], decimal.Decimal('2.0')),
        (['art', 'air'], decimal.Decimal('1.92')),
    ]
    for wordlist, expected in expectations:
        assert entropy_per_char_bruteforce(wordlist) == expected
def test_min_word_length():
    # the minimum word length required for a wordlist is computed
    for wordlist, expected in [
            ([], 1),
            (['a', 'aa', 'aaa'], 1),
            (['a', 'b'], 1),
            (['abcd'] * 8192, 7),
            (['abab'] * 16, 4)]:
        assert min_word_length(wordlist) == expected
    # iterators are accepted as input, too
    assert min_word_length(iter(['a', 'b'])) == 1
def test_min_word_length_desired_len():
    # the desired list length may differ from the input list's length
    # 32 = 2**5 entries at char entropy 2.0 -> 3 chars needed
    assert min_word_length(['abcd'] * 8192, 32) == 3
    # 16 = 2**4 entries at char entropy 2.0 -> 2 chars needed
    assert min_word_length(['abcd'] * 1024, 16) == 2
class TestAndroidWordlist(object):
def test_attributes(self):
# android wordlists objects provide some attributes we expect
wl = AndroidWordList()
assert hasattr(wl, "base_url")
assert hasattr(wl, "path")
assert hasattr(wl, "gz_data")
assert hasattr(wl, "lang")
def test_init_path(self, local_android_dir):
# we can pass in a path to an unencoded file (no base64).
path = local_android_dir / "de_wordlist.combined.gz"
wl = AndroidWordList('file:////%s' % path)
assert wl.path == 'file:////%s' % path
def test_download(self, local_android_download_b64):
# we can download wordfiles that are base64 encoded.
wl = AndroidWordList(lang="de")
dl_data = wl.download()
assert wl.decompress(dl_data) == (
b'dictionary=main:de,locale=de,description=Deutsch,'
b'date=1414726263,version=54,REQUIRES_GERMAN_UMLAUT_PROCESSING=1'
b'\n word=der,f=216,flags=,originalFreq=216\n word=und,f=213,'
b'flags=,originalFreq=213\n')
def test_download_de(self, local_android_download_b64):
# we can download a german wordlist.
wl = AndroidWordList(lang="de")
wl.download()
assert list(wl.get_words()) == ['der', 'und']
def test_download_en(self, local_android_download_b64):
# we can download an english wordlist.
wl = AndroidWordList(lang="en")
wl.download()
assert list(wl.get_words()) == [
'the', 'to', 'of', 'and', 'hardcore', 'import']
def test_decompress(self, local_android_dir):
# we can decompress downloaded stuff.
wl = AndroidWordList()
path = local_android_dir / "de_wordlist.combined.gz"
data = path.read_binary()
assert wl.decompress(data).startswith(b"dictionary=main:de,locale=de")
def test_save(self, local_android_download_b64, tmpdir):
# we can save downloaded wordlists.
wl = AndroidWordList(lang="en")
wl.download()
path = tmpdir / 'mywordlist.gz'
wl.save(str(path))
assert path.isfile()
assert path.size() == 235
def test_save_no_data(self, local_android_download_b64, tmpdir):
# we do not complain when no data was downloaded already
wl = AndroidWordList()
path = tmpdir / 'mywordlist.gz'
wl.save(str(path))
assert not path.isfile()
def test_get_basename(self):
# we can get the basename of the file to download
wl = AndroidWordList()
assert wl.get_basename() == "en_wordlist.combined.gz"
def test_get_basename_lang(self, local_android_download_b64):
# when getting basename, we can select the language
wl = AndroidWordList()
assert wl.get_basename(lang="de") == "de_wordlist.combined.gz"
def test_get_basename_path(self, local_android_dir):
# we get a correct basename also if path is set manually
wl = AndroidWordList()
path1 = local_android_dir / "de_wordlist.combined.gz"
path2 = local_android_dir / "my_wordlist.gzip"
path1.copy(path2)
wl = AndroidWordList('file:////%s' % path2)
assert wl.get_basename(lang="foo") == "my_wordlist.gzip"
def test_metadata(self, local_android_dir):
# we can extract metadata from android wordfiles
path = local_android_dir / "de_wordlist.combined.gz"
wl = AndroidWordList()
wl.gz_data = path.read_binary()
meta = wl.get_meta_data()
assert meta == {
'dictionary': 'main:de',
'locale': 'de',
'description': 'Deutsch',
'date': '1414726263',
'version': '54',
'REQUIRES_GERMAN_UMLAUT_PROCESSING': '1'
}
def test_metadata_none(self):
# we cope with situation, when no wordfile was set before.
wl = AndroidWordList()
assert wl.get_meta_data() == {}
def test_metadata_empty(self):
# we cope with situation, where the wordfile is empty
wl = AndroidWordList()
wl.gz_data = EMPTY_GZ_FILE
assert wl.get_meta_data() == {}
    def test_parse_lines(self, local_android_dir):
        # we can raw parse simple lists
        path = local_android_dir / "de_wordlist.combined.gz"
        wl = AndroidWordList('file:////%s' % path)
        lines = wl.parse_lines()
        # The first parsed entry is the header (metadata); the rest are
        # word entries with frequency info.
        assert [x for x in lines] == [
            {
                'dictionary': 'main:de',
                'locale': 'de',
                'description': 'Deutsch',
                'date': '1414726263',
                'version': '54',
                'REQUIRES_GERMAN_UMLAUT_PROCESSING': '1'},
            {
                'word': 'der', 'f': '216', 'flags': '',
                'originalFreq': '216'},
            {
                'word': 'und', 'f': '213', 'flags': '',
                'originalFreq': '213'},
        ]
def test_parse_lines_ignores_empty_lines(self, tmpdir):
# empty lines in wordlist files are ignored by the parser
path = tmpdir / 'sample_empty_lines.gz'
with gzip.open(str(path), 'wb') as f:
f.write(b'\n\n\n')
wl = AndroidWordList('file:////%s' % path)
lines = wl.parse_lines()
assert list(lines) == []
def test_get_words(self, dictfile_android_short_de):
# we can get plain wordlists from Android lists
wl = AndroidWordList("file:////%s" % str(dictfile_android_short_de))
assert [x for x in wl.get_words()] == ["der", "und"]
    def test_get_words_offensive(self, dictfile_android_short_en):
        # we can filter out offensive words
        wl = AndroidWordList("file:////%s" % str(dictfile_android_short_en))
        # offensive=False: offensive entries are excluded
        list1 = list(wl.get_words(offensive=False))
        assert "hardcore" not in list1
        assert "the" in list1
        # offensive=True: only offensive entries are returned
        list2 = list(wl.get_words(offensive=True))
        assert "hardcore" in list2
        assert "the" not in list2
        # offensive=None: no filtering at all
        list3 = list(wl.get_words(offensive=None))
        assert "hardcore" in list3
        assert "the" in list3
    @pytest.mark.skipif(ggsource_unreachable(), reason="no network available")
    def test_get_valid_lang_codes(self):
        # we can get a list of available language codes.
        # Talks to the real remote source, hence the skipif guard above.
        wl = AndroidWordList()
        result = wl.get_valid_lang_codes()
        assert result[0:3] == ['cs', 'da', 'de']
    def test_get_valid_lang_codes_local(self, local_index):
        # get valid lang codes from local copy of index list.
        # NOTE(review): presumably `local_index` redirects the lookup to
        # a canned index so the full list is stable -- confirm fixture.
        wl = AndroidWordList()
        result = wl.get_valid_lang_codes()
        assert result == [
            'cs', 'da', 'de', 'el', 'en', 'en_GB', 'en_US', 'es',
            'fi', 'fr', 'hr', 'it', 'iw', 'lt', 'lv', 'nb', 'nl', 'pl',
            'pt_BR', 'pt_PT', 'ro', 'ru', 'sl', 'sr', 'sv', 'tr']
| ulif/wordlist-gen | tests/test_libwordlist.py | Python | gpl-3.0 | 27,107 |
# Authors:
# Jan Cholasta <jcholast@redhat.com>
#
# Copyright (C) 2014 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
LDAP shared certificate store.
"""
from pyasn1.error import PyAsn1Error
from ipapython.dn import DN
from ipapython.certdb import get_ca_nickname
from ipalib import errors, x509
def _parse_cert(dercert):
    """Decode a DER certificate into store attribute values.

    Returns a (subject, issuer_serial, public_key_info) tuple and
    raises ValueError when the blob cannot be decoded.
    """
    try:
        certificate = x509.load_certificate(dercert, x509.DER)
        subject_dn = DN(certificate.subject)
        issuer_dn = DN(certificate.issuer)
        serial = certificate.serial_number
        spki = x509.get_der_public_key_info(dercert, x509.DER)
    except (ValueError, PyAsn1Error) as exc:
        raise ValueError("failed to decode certificate: %s" % exc)

    # Escape semicolons, which act as separators in the
    # ipaCertIssuerSerial attribute value.
    subject = str(subject_dn).replace('\\;', '\\3b')
    issuer = str(issuer_dn).replace('\\;', '\\3b')
    return subject, '%s;%s' % (issuer, serial), spki
def init_ca_entry(entry, dercert, nickname, trusted, ext_key_usage):
    """
    Initialize certificate store entry for a CA certificate.

    :param entry: LDAP entry to fill in (modified in place)
    :param dercert: certificate in DER format
    :param nickname: nickname used as the entry's cn
    :param trusted: True/False to mark trusted/distrusted, or None to
        record no trust information
    :param ext_key_usage: set of extended key usage OIDs, or None
    """
    subject, issuer_serial, public_key = _parse_cert(dercert)

    if ext_key_usage is not None:
        try:
            cert_eku = x509.get_ext_key_usage(dercert, x509.DER)
        except ValueError as e:
            raise ValueError("failed to decode certificate: %s" % e)
        if cert_eku is not None:
            # Strip the well-known usages which are controlled via key
            # policy; only extra EKUs from the cert itself are merged in.
            cert_eku -= {x509.EKU_SERVER_AUTH, x509.EKU_CLIENT_AUTH,
                         x509.EKU_EMAIL_PROTECTION, x509.EKU_CODE_SIGNING,
                         x509.EKU_ANY, x509.EKU_PLACEHOLDER}
            ext_key_usage = ext_key_usage | cert_eku

    entry['objectClass'] = ['ipaCertificate', 'pkiCA', 'ipaKeyPolicy']
    entry['cn'] = [nickname]
    entry['ipaCertSubject'] = [subject]
    entry['ipaCertIssuerSerial'] = [issuer_serial]
    entry['ipaPublicKey'] = [public_key]
    entry['cACertificate;binary'] = [dercert]

    if trusted is not None:
        entry['ipaKeyTrust'] = ['trusted' if trusted else 'distrusted']
    if ext_key_usage is not None:
        ext_key_usage = list(ext_key_usage)
        if not ext_key_usage:
            # The attribute must not be empty; store a placeholder OID
            # meaning "no usage".
            ext_key_usage.append(x509.EKU_PLACEHOLDER)
        entry['ipaKeyExtUsage'] = ext_key_usage
def update_compat_ca(ldap, base_dn, dercert):
    """
    Update the CA certificate in cn=CAcert,cn=ipa,cn=etc,SUFFIX.

    This is the legacy (pre-certificate-store) location; the entry is
    created when missing and left alone when already up to date.
    """
    dn = DN(('cn', 'CAcert'), ('cn', 'ipa'), ('cn', 'etc'), base_dn)
    try:
        entry = ldap.get_entry(dn, attrs_list=['cACertificate;binary'])
        entry.single_value['cACertificate;binary'] = dercert
        ldap.update_entry(entry)
    except errors.NotFound:
        # No legacy entry yet -- create it from scratch.
        entry = ldap.make_entry(dn)
        entry['objectClass'] = ['nsContainer', 'pkiCA']
        entry.single_value['cn'] = 'CAcert'
        entry.single_value['cACertificate;binary'] = dercert
        ldap.add_entry(entry)
    except errors.EmptyModlist:
        # Stored certificate is already current; nothing to write.
        pass
def clean_old_config(ldap, base_dn, dn, config_ipa, config_compat):
    """
    Remove ipaCA and compatCA flags from their previous carriers.

    :param dn: DN of the entry which now carries the flags; left alone
    :param config_ipa: if True, strip 'ipaCA' from other entries
    :param config_compat: if True, strip 'compatCA' from other entries
    """
    if not config_ipa and not config_compat:
        return

    try:
        result, _truncated = ldap.find_entries(
            base_dn=DN(('cn', 'certificates'), ('cn', 'ipa'), ('cn', 'etc'),
                       base_dn),
            filter='(|(ipaConfigString=ipaCA)(ipaConfigString=compatCA))',
            attrs_list=['ipaConfigString'])
    except errors.NotFound:
        return

    for entry in result:
        if entry.dn == dn:
            continue

        # Iterate over a copy -- matching flags are removed in place.
        # Comparison is case-insensitive, removal keeps original case.
        for config in list(entry['ipaConfigString']):
            if config.lower() == 'ipaca' and config_ipa:
                entry['ipaConfigString'].remove(config)
            elif config.lower() == 'compatca' and config_compat:
                entry['ipaConfigString'].remove(config)

        try:
            ldap.update_entry(entry)
        except errors.EmptyModlist:
            # Entry carried none of the flags we strip; nothing to do.
            pass
def add_ca_cert(ldap, base_dn, dercert, nickname, trusted=None,
                ext_key_usage=None, config_ipa=False, config_compat=False):
    """Add a new entry for a CA certificate to the certificate store.

    The entry is created below cn=certificates,cn=ipa,cn=etc with the
    given nickname; after adding it, any stale ipaCA/compatCA flags on
    other entries are cleaned up.
    """
    container_dn = DN(('cn', 'certificates'), ('cn', 'ipa'), ('cn', 'etc'),
                      base_dn)
    entry_dn = DN(('cn', nickname), container_dn)
    entry = ldap.make_entry(entry_dn)
    init_ca_entry(entry, dercert, nickname, trusted, ext_key_usage)

    if config_ipa:
        entry.setdefault('ipaConfigString', []).append('ipaCA')
    if config_compat:
        entry.setdefault('ipaConfigString', []).append('compatCA')
        # Keep the legacy cn=CAcert entry in sync as well.
        update_compat_ca(ldap, base_dn, dercert)

    ldap.add_entry(entry)
    clean_old_config(ldap, base_dn, entry_dn, config_ipa, config_compat)
def update_ca_cert(ldap, base_dn, dercert, trusted=None, ext_key_usage=None,
                   config_ipa=False, config_compat=False):
    """
    Update existing entry for a CA certificate in the certificate store.

    The entry is located by certificate subject.  Raises ValueError when
    the new certificate conflicts with the stored one; errors.NotFound
    propagates from find_entries when no entry matches.
    """
    subject, issuer_serial, public_key = _parse_cert(dercert)

    filter = ldap.make_filter({'ipaCertSubject': subject})
    result, _truncated = ldap.find_entries(
        base_dn=DN(('cn', 'certificates'), ('cn', 'ipa'), ('cn', 'etc'),
                   base_dn),
        filter=filter,
        attrs_list=['cn', 'ipaCertSubject', 'ipaCertIssuerSerial',
                    'ipaPublicKey', 'ipaKeyTrust', 'ipaKeyExtUsage',
                    'ipaConfigString', 'cACertificate;binary'])
    entry = result[0]
    dn = entry.dn

    for old_cert in entry['cACertificate;binary']:
        # Check if we are adding a new cert
        if old_cert == dercert:
            break
    else:
        # We are adding a new cert, validate it
        if entry.single_value['ipaCertSubject'].lower() != subject.lower():
            raise ValueError("subject name mismatch")
        if entry.single_value['ipaPublicKey'] != public_key:
            raise ValueError("subject public key info mismatch")
        entry['ipaCertIssuerSerial'].append(issuer_serial)
        entry['cACertificate;binary'].append(dercert)

    # Update key trust
    if trusted is not None:
        old_trust = entry.single_value.get('ipaKeyTrust')
        new_trust = 'trusted' if trusted else 'distrusted'
        if old_trust is not None and old_trust.lower() != new_trust:
            raise ValueError("inconsistent trust")
        entry.single_value['ipaKeyTrust'] = new_trust

    # Update extended key usage
    if trusted is not False:
        if ext_key_usage is not None:
            old_eku = set(entry.get('ipaKeyExtUsage', []))
            old_eku.discard(x509.EKU_PLACEHOLDER)
            new_eku = old_eku | ext_key_usage
            if not new_eku:
                # Attribute must not be empty; keep the placeholder OID.
                new_eku.add(x509.EKU_PLACEHOLDER)
            entry['ipaKeyExtUsage'] = list(new_eku)
    else:
        # Distrusted certificate: drop any recorded key usage.
        entry.pop('ipaKeyExtUsage', None)

    # Update configuration flags
    is_ipa = False
    is_compat = False
    for config in entry.get('ipaConfigString', []):
        if config.lower() == 'ipaca':
            is_ipa = True
        elif config.lower() == 'compatca':
            is_compat = True

    if config_ipa and not is_ipa:
        entry.setdefault('ipaConfigString', []).append('ipaCA')
    if config_compat and not is_compat:
        entry.setdefault('ipaConfigString', []).append('compatCA')

    if is_compat or config_compat:
        # Keep the legacy cn=CAcert entry in sync.
        update_compat_ca(ldap, base_dn, dercert)

    ldap.update_entry(entry)

    clean_old_config(ldap, base_dn, dn, config_ipa, config_compat)
def put_ca_cert(ldap, base_dn, dercert, nickname, trusted=None,
                ext_key_usage=None, config_ipa=False, config_compat=False):
    """
    Add or update entry for a CA certificate in the certificate store.

    Tries an update first; when no entry with a matching subject exists
    a new one is added.  An unchanged entry (EmptyModlist) is fine.
    """
    try:
        update_ca_cert(ldap, base_dn, dercert, trusted, ext_key_usage,
                       config_ipa=config_ipa, config_compat=config_compat)
    except errors.NotFound:
        add_ca_cert(ldap, base_dn, dercert, nickname, trusted, ext_key_usage,
                    config_ipa=config_ipa, config_compat=config_compat)
    except errors.EmptyModlist:
        pass
def make_compat_ca_certs(certs, realm, ipa_ca_subject):
    """
    Make CA certificates and associated key policy from DER certificates.

    Returns a list of (dercert, nickname, trusted, ext_key_usage)
    tuples; all certificates are marked trusted.
    """
    result = []
    for dercert in certs:
        subject, _issuer_serial, _public_key_info = _parse_cert(dercert)
        subject = DN(subject)

        if ipa_ca_subject is not None and subject == DN(ipa_ca_subject):
            # The IPA CA itself: trust it for all common purposes.
            nickname = get_ca_nickname(realm)
            eku = {x509.EKU_SERVER_AUTH,
                   x509.EKU_CLIENT_AUTH,
                   x509.EKU_EMAIL_PROTECTION,
                   x509.EKU_CODE_SIGNING}
        else:
            nickname = str(subject)
            eku = {x509.EKU_SERVER_AUTH}

        result.append((dercert, nickname, True, eku))
    return result
def get_ca_certs(ldap, base_dn, compat_realm, compat_ipa_ca,
                 filter_subject=None):
    """
    Get CA certificates and associated key policy from the certificate store.

    :param filter_subject: subject (or list of subjects) to restrict
        the result to, or None for all
    :return: list of (dercert, nickname, trusted, ext_key_usage) tuples
    :raises errors.NotFound: when no matching certificate exists
    """
    if filter_subject is not None:
        if not isinstance(filter_subject, list):
            filter_subject = [filter_subject]
        # Same escaping as _parse_cert applies to stored subjects.
        filter_subject = [str(subj).replace('\\;', '\\3b')
                          for subj in filter_subject]

    certs = []
    config_dn = DN(('cn', 'ipa'), ('cn', 'etc'), base_dn)
    container_dn = DN(('cn', 'certificates'), config_dn)
    try:
        # Search the certificate store for CA certificate entries
        filters = ['(objectClass=ipaCertificate)', '(objectClass=pkiCA)']
        if filter_subject:
            filter = ldap.make_filter({'ipaCertSubject': filter_subject})
            filters.append(filter)
        result, _truncated = ldap.find_entries(
            base_dn=container_dn,
            filter=ldap.combine_filters(filters, ldap.MATCH_ALL),
            attrs_list=['cn', 'ipaCertSubject', 'ipaCertIssuerSerial',
                        'ipaPublicKey', 'ipaKeyTrust', 'ipaKeyExtUsage',
                        'cACertificate;binary'])

        for entry in result:
            nickname = entry.single_value['cn']
            # Map the stored trust string onto True/False/None.
            trusted = entry.single_value.get('ipaKeyTrust', 'unknown').lower()
            if trusted == 'trusted':
                trusted = True
            elif trusted == 'distrusted':
                trusted = False
            else:
                trusted = None
            ext_key_usage = entry.get('ipaKeyExtUsage')
            if ext_key_usage is not None:
                ext_key_usage = set(str(p) for p in ext_key_usage)
                # The placeholder OID means "no usage", not a real EKU.
                ext_key_usage.discard(x509.EKU_PLACEHOLDER)

            for cert in entry.get('cACertificate;binary', []):
                try:
                    _parse_cert(cert)
                except ValueError:
                    # Undecodable certificate invalidates the whole
                    # result collected so far.
                    certs = []
                    break
                certs.append((cert, nickname, trusted, ext_key_usage))
    except errors.NotFound:
        try:
            ldap.get_entry(container_dn, [''])
        except errors.NotFound:
            # Fallback to cn=CAcert,cn=ipa,cn=etc,SUFFIX
            dn = DN(('cn', 'CAcert'), config_dn)
            entry = ldap.get_entry(dn, ['cACertificate;binary'])
            cert = entry.single_value['cACertificate;binary']
            try:
                subject, _issuer_serial, _public_key_info = _parse_cert(cert)
            except ValueError:
                pass
            else:
                if filter_subject is not None and subject not in filter_subject:
                    raise errors.NotFound(reason="no matching entry found")

                if compat_ipa_ca:
                    ca_subject = subject
                else:
                    ca_subject = None
                certs = make_compat_ca_certs([cert], compat_realm, ca_subject)

    if certs:
        return certs
    else:
        raise errors.NotFound(reason="no such entry")
def trust_flags_to_key_policy(trust_flags):
    """
    Convert certutil trust flags to certificate store key policy.

    Returns a (trusted, ca, ext_key_usage) tuple.  Raises ValueError
    on contradictory flag combinations.
    """
    # 'p' (prohibited) conflicts with every positive trust marker.
    if 'p' in trust_flags:
        if 'C' in trust_flags or 'P' in trust_flags or 'T' in trust_flags:
            raise ValueError("cannot be both trusted and not trusted")
        return False, None, None

    if 'C' in trust_flags or 'T' in trust_flags:
        if 'P' in trust_flags:
            raise ValueError("cannot be both CA and not CA")
        ca = True
    elif 'P' in trust_flags:
        ca = False
    else:
        # Neither trusted nor distrusted -- no key policy at all.
        return None, None, set()

    # Flags are "ssl,email,objsign"; map each field to its EKU.
    fields = trust_flags.split(',')
    ext_key_usage = set()
    usages = (x509.EKU_SERVER_AUTH,
              x509.EKU_EMAIL_PROTECTION,
              x509.EKU_CODE_SIGNING)
    for index, usage in enumerate(usages):
        if 'C' in fields[index] or 'P' in fields[index]:
            ext_key_usage.add(usage)
    if 'T' in fields[0]:
        ext_key_usage.add(x509.EKU_CLIENT_AUTH)

    return True, ca, ext_key_usage
def key_policy_to_trust_flags(trusted, ca, ext_key_usage):
    """
    Convert certificate store key policy to certutil trust flags.

    The three comma-separated fields stand for ssl, email and
    object-signing trust respectively.
    """
    if trusted is False:
        return 'p,p,p'
    if trusted is None or ca is None:
        return ',,'
    if ext_key_usage is None:
        # No EKU restriction recorded: trust for every purpose.
        return 'CT,C,C' if ca else 'P,P,P'

    marker = 'C' if ca else 'P'
    flags = [marker if usage in ext_key_usage else ''
             for usage in (x509.EKU_SERVER_AUTH,
                           x509.EKU_EMAIL_PROTECTION,
                           x509.EKU_CODE_SIGNING)]
    if ca and x509.EKU_CLIENT_AUTH in ext_key_usage:
        # Client-auth usage of a CA maps to the 'T' (trusted peer) flag.
        flags[0] += 'T'
    return ','.join(flags)
def put_ca_cert_nss(ldap, base_dn, dercert, nickname, trust_flags,
                    config_ipa=False, config_compat=False):
    """
    Add or update entry for a CA certificate in the certificate store.

    Like put_ca_cert, but accepts certutil-style trust flags instead of
    a key policy.  Raises ValueError when the flags describe a non-CA
    certificate.
    """
    trusted, ca, ext_key_usage = trust_flags_to_key_policy(trust_flags)
    if ca is False:
        raise ValueError("must be CA certificate")

    put_ca_cert(ldap, base_dn, dercert, nickname, trusted, ext_key_usage,
                config_ipa, config_compat)
def get_ca_certs_nss(ldap, base_dn, compat_realm, compat_ipa_ca,
                     filter_subject=None):
    """
    Get CA certificates and associated trust flags from the certificate store.

    Returns a list of (dercert, nickname, trust_flags) tuples.
    """
    certs = get_ca_certs(ldap, base_dn, compat_realm, compat_ipa_ca,
                         filter_subject=filter_subject)
    # Everything in the store is a CA certificate, hence ca=True.
    return [(cert, nickname,
             key_policy_to_trust_flags(trusted, True, ext_key_usage))
            for cert, nickname, trusted, ext_key_usage in certs]
| realsobek/freeipa | ipalib/install/certstore.py | Python | gpl-3.0 | 15,409 |
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2018-2021 Jay Kamat <jaygkamat@gmail.com>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <https://www.gnu.org/licenses/>.
"""Implementation of a basic config cache."""
from typing import Any, Dict
from qutebrowser.config import config
class ConfigCache:

    """A 'high-performance' cache in front of the config system.

    Intended for code paths that read settings extremely often.  Do NOT
    mutate returned values, do not use it for per-URL (pattern)
    settings, and do not use partially expanded option paths -- those
    requirements are not checked and violating them yields wrong or
    slow behavior.
    """

    def __init__(self) -> None:
        self._cache: Dict[str, Any] = {}
        config.instance.changed.connect(self._on_config_changed)

    def _on_config_changed(self, attr: str) -> None:
        # Drop the stale value; it is re-fetched lazily on next access.
        self._cache.pop(attr, None)

    def __getitem__(self, attr: str) -> Any:
        if attr not in self._cache:
            # Options with per-URL patterns must not be cached globally.
            assert not config.instance.get_opt(attr).supports_pattern
            self._cache[attr] = config.instance.get(attr)
        return self._cache[attr]
| fiete201/qutebrowser | qutebrowser/config/configcache.py | Python | gpl-3.0 | 1,820 |
import unittest
from golem.network.p2p.node import Node
def is_ip_address(address):
    """
    Check if @address is correct IP address
    :param address: Address to be checked
    :return: True if is correct, false otherwise
    """
    from ipaddress import ip_address, AddressValueError
    try:
        # ip_address() requires a unicode string.  Formatting with a
        # u-prefixed template yields unicode on both Python 2 and 3;
        # the previous call to the py2-only unicode() builtin raised
        # NameError (not ValueError) on Python 3, crashing the check.
        # will raise error in case of incorrect address
        ip_address(u"%s" % address)
        return True
    except (ValueError, AddressValueError):
        return False
class TestNode(unittest.TestCase):
    def test_str(self):
        # str()/format() of a Node must include the node name and key,
        # but not the default object repr ("<... at 0x...>").
        n = Node(node_name="Blabla", key="ABC")
        self.assertNotIn("at", str(n))
        self.assertNotIn("at", "{}".format(n))
        self.assertIn("Blabla", str(n))
        self.assertIn("Blabla", "{}".format(n))
        self.assertIn("ABC", str(n))
        self.assertIn("ABC", "{}".format(n))

    def test_collect_network_info(self):
        """ Test configuring Node object """
        # NOTE(review): inspects the real network configuration of the
        # test machine, so it needs at least one configured interface.
        node = Node()
        node.collect_network_info()
        assert is_ip_address(node.pub_addr)
        assert is_ip_address(node.prv_addr)
        for address in node.prv_addresses:
            assert is_ip_address(address)
| Radagast-red/golem | tests/golem/network/p2p/test_node.py | Python | gpl-3.0 | 1,163 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-05-02 08:34
from __future__ import unicode_literals
import dirtyfields.dirtyfields
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.db.models.manager
class Migration(migrations.Migration):
    """Auto-generated migration (Django 1.11).

    Introduces the WhereaboutDate model carrying the timesheet link and
    the start/end timestamps, detaches Whereabout from timesheet/day,
    and attaches whereabouts directly to users instead.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('contenttypes', '0002_remove_content_type_name'),
        ('ninetofiver', '0071_apikey_name'),
    ]

    operations = [
        migrations.CreateModel(
            name='WhereaboutDate',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('starts_at', models.DateTimeField()),
                ('ends_at', models.DateTimeField()),
                ('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_ninetofiver.whereaboutdate_set+', to='contenttypes.ContentType')),
                ('timesheet', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='ninetofiver.Timesheet')),
            ],
            options={
                'ordering': ['id'],
                'abstract': False,
                'base_manager_name': 'base_objects',
            },
            bases=(dirtyfields.dirtyfields.DirtyFieldsMixin, models.Model),
            managers=[
                ('objects', django.db.models.manager.Manager()),
                ('base_objects', django.db.models.manager.Manager()),
            ],
        ),
        # Whereabout no longer references a specific day/timesheet ...
        migrations.RemoveField(
            model_name='whereabout',
            name='day',
        ),
        migrations.RemoveField(
            model_name='whereabout',
            name='timesheet',
        ),
        migrations.AddField(
            model_name='whereabout',
            name='description',
            field=models.TextField(blank=True, max_length=255, null=True),
        ),
        # ... it belongs to a user directly instead.  default=1 is only
        # used to backfill existing rows during the schema change.
        migrations.AddField(
            model_name='whereabout',
            name='user',
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='whereabout',
            name='location',
            field=models.CharField(choices=[('home', 'Home'), ('office', 'Office'), ('out_of_office', 'Out of office'), ('other', 'Other')], max_length=32),
        ),
        migrations.AddField(
            model_name='whereaboutdate',
            name='whereabout',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ninetofiver.Whereabout'),
        ),
    ]
| BartDeCaluwe/925r | ninetofiver/migrations/0072_auto_20180502_0834.py | Python | gpl-3.0 | 2,929 |
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from urlparse import urlparse
import httplib2
import urllib
import logging
from datetime import datetime
from lxml import etree
from django.conf import settings
from django.db import models
from django.db.models import signals
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import ugettext, ugettext_lazy as _
from django.core.urlresolvers import reverse
from geonode import GeoNodeException
from geonode.base.models import ResourceBase, ResourceBaseManager, Link, \
resourcebase_post_save, resourcebase_post_delete
from geonode.utils import _user, _password, get_wms
from geonode.utils import http_client
from geonode.geoserver.helpers import cascading_delete
from geonode.people.models import Profile
from geonode.security.enumerations import AUTHENTICATED_USERS, ANONYMOUS_USERS
from geonode.layers.ows import wcs_links, wfs_links, wms_links, \
wps_execute_layer_attribute_statistics
from geonode.layers.enumerations import LAYER_ATTRIBUTE_NUMERIC_DATA_TYPES
from geonode.utils import ogc_server_settings
from geoserver.catalog import Catalog, FailedRequestError
from agon_ratings.models import OverallRating
logger = logging.getLogger("geonode.layers.models")
class Style(models.Model):
    """Model for storing styles.

    Mirrors an SLD style known to GeoServer; either the SLD body is
    stored inline (sld_body) or referenced by URL (sld_url).
    """

    name = models.CharField(_('style name'), max_length=255, unique=True)
    sld_title = models.CharField(max_length=255, null=True, blank=True)
    sld_body = models.TextField(_('sld text'), null=True, blank=True)
    sld_version = models.CharField(_('sld version'), max_length=12, null=True, blank=True)
    sld_url = models.CharField(_('sld url'), null = True, max_length=1000)
    workspace = models.CharField(max_length=255, null=True, blank=True)

    def __str__(self):
        return "%s" % self.name.encode('utf-8')
class LayerManager(ResourceBaseManager):
    # Manager which additionally holds a GeoServer catalog handle,
    # configured from the OGC server settings at import time.

    def __init__(self):
        models.Manager.__init__(self)
        url = ogc_server_settings.rest
        self.gs_catalog = Catalog(url, _user, _password)
def add_bbox_query(q, bbox):
    '''modify the queryset q to limit to the provided bbox

    bbox - 4 tuple of floats representing x0,x1,y0,y1
    returns the modified query
    '''
    # Compare as strings (2.6 float-to-decimal compat).  A list
    # comprehension is used instead of map() so the result stays
    # indexable on Python 3, where map() returns an iterator.
    bbox = [str(coord) for coord in bbox]
    q = q.filter(bbox_x0__gte=bbox[0])
    q = q.filter(bbox_x1__lte=bbox[1])
    q = q.filter(bbox_y0__gte=bbox[2])
    return q.filter(bbox_y1__lte=bbox[3])
class Layer(ResourceBase):
    """
    Layer (inherits ResourceBase fields)

    Represents a GeoServer layer (vector or raster) published through
    GeoNode.  NOTE: this module is Python 2 code (`except X, e` syntax).
    """

    # internal fields
    objects = LayerManager()
    workspace = models.CharField(max_length=128)
    store = models.CharField(max_length=128)
    storeType = models.CharField(max_length=128)
    name = models.CharField(max_length=128)
    typename = models.CharField(max_length=128, unique=True)

    popular_count = models.IntegerField(default=0)
    share_count = models.IntegerField(default=0)

    default_style = models.ForeignKey(Style, related_name='layer_default_style', null=True, blank=True)
    styles = models.ManyToManyField(Style, related_name='layer_styles')

    def update_thumbnail(self, save=True):
        # Render a 200x150 thumbnail via the WMS reflector and store it.
        # Failures are logged, not raised.
        try:
            self.save_thumbnail(self._thumbnail_url(width=200, height=150), save)
        except RuntimeError, e:
            logger.warn('Could not create thumbnail for %s' % self, e)

    def _render_thumbnail(self, spec):
        # Fetch the rendered image; GeoServer may report errors with an
        # HTTP 200 body, hence the 'ServiceException' content check.
        resp, content = http_client.request(spec)
        if 'ServiceException' in content or resp.status < 200 or resp.status > 299:
            msg = 'Unable to obtain thumbnail: %s' % content
            raise RuntimeError(msg)
        return content

    def _thumbnail_url(self, width=20, height=None):
        """ Generate a URL representing thumbnail of the layer """

        params = {
            'layers': self.typename.encode('utf-8'),
            'format': 'image/png8',
            'width': width,
        }
        if height is not None:
            params['height'] = height

        # Avoid using urllib.urlencode here because it breaks the url.
        # commas and slashes in values get encoded and then cause trouble
        # with the WMS parser.
        p = "&".join("%s=%s"%item for item in params.items())

        return ogc_server_settings.LOCATION + "wms/reflect?" + p

    def verify(self):
        """Makes sure the state of the layer is consistent in GeoServer and Catalogue.
        """

        # Check the layer is in the wms get capabilities record
        # FIXME: Implement caching of capabilities record site wide

        _local_wms = get_wms()
        record = _local_wms.contents.get(self.typename)
        if record is None:
            msg = "WMS Record missing for layer [%s]" % self.typename.encode('utf-8')
            raise GeoNodeException(msg)

    @property
    def display_type(self):
        # Human-readable label for the GeoServer store type.
        return ({
            "dataStore" : "Vector Data",
            "coverageStore": "Raster Data",
        }).get(self.storeType, "Data")

    @property
    def store_type(self):
        # Queries GeoServer for the underlying store's type.
        cat = Layer.objects.gs_catalog
        res = cat.get_resource(self.name)
        res.store.fetch()
        return res.store.dom.find('type').text

    @property
    def service_type(self):
        # OGC service matching the store type (raster->WCS, vector->WFS).
        if self.storeType == 'coverageStore':
            return "WCS"
        if self.storeType == 'dataStore':
            return "WFS"

    def get_absolute_url(self):
        return reverse('layer_detail', args=(self.typename,))

    def attribute_config(self):
        #Get custom attribute sort order and labels if any
        cfg = {}
        visible_attributes =  self.attribute_set.visible()
        if (visible_attributes.count() > 0):
            cfg["getFeatureInfo"] = {
                "fields":  [l.attribute for l in visible_attributes],
                "propertyNames":   dict([(l.attribute,l.attribute_label) for l in visible_attributes])
            }
        return cfg

    def __str__(self):
        return "%s Layer" % self.typename.encode('utf-8')

    class Meta:
        # custom permissions,
        # change and delete are standard in django
        permissions = (('view_layer', 'Can view'),
                       ('change_layer_permissions', "Can change permissions"), )

    # Permission Level Constants
    # LEVEL_NONE inherited
    LEVEL_READ  = 'layer_readonly'
    LEVEL_WRITE = 'layer_readwrite'
    LEVEL_ADMIN = 'layer_admin'

    def set_default_permissions(self):
        # Everyone (anonymous + authenticated) may read ...
        self.set_gen_level(ANONYMOUS_USERS, self.LEVEL_READ)
        self.set_gen_level(AUTHENTICATED_USERS, self.LEVEL_READ)

        # remove specific user permissions
        current_perms =  self.get_all_level_info()
        for username in current_perms['users'].keys():
            user = User.objects.get(username=username)
            self.set_user_level(user, self.LEVEL_NONE)

        # assign owner admin privileges
        if self.owner:
            self.set_user_level(self.owner, self.LEVEL_ADMIN)

    def tiles_url(self):
        return self.link_set.get(name='Tiles').url

    def maps(self):
        # Maps referencing this layer (by typename).
        from geonode.maps.models import MapLayer
        return  MapLayer.objects.filter(name=self.typename)

    @property
    def class_name(self):
        return self.__class__.__name__
class Layer_Styles(models.Model):
    # Explicit layer<->style join table.  NOTE(review): presumably kept
    # for legacy schema compatibility alongside the Layer.styles M2M
    # field -- confirm before removing.
    layer = models.ForeignKey(Layer)
    style = models.ForeignKey(Style)
class AttributeManager(models.Manager):
    """Helper class to access filtered attributes
    """

    def visible(self):
        # Only attributes flagged as visible, in configured order.
        queryset = self.get_query_set()
        return queryset.filter(visible=True).order_by('display_order')
class Attribute(models.Model):
    """
    Auxiliary model for storing layer attributes.

       This helps reduce the need for runtime lookups
    to GeoServer, and lets users customize attribute titles,
    sort order, and visibility.
    """

    layer = models.ForeignKey(Layer, blank=False, null=False, unique=False, related_name='attribute_set')
    attribute = models.CharField(_('attribute name'), help_text=_('name of attribute as stored in shapefile/spatial database'), max_length=255, blank=False, null=True, unique=False)
    description = models.CharField(_('attribute description'), help_text=_('description of attribute to be used in metadata'), max_length=255, blank=True, null=True)
    attribute_label = models.CharField(_('attribute label'), help_text=_('title of attribute as displayed in GeoNode'), max_length=255, blank=False, null=True, unique=False)
    attribute_type = models.CharField(_('attribute type'), help_text=_('the data type of the attribute (integer, string, geometry, etc)'), max_length=50, blank=False, null=False, default='xsd:string', unique=False)
    visible = models.BooleanField(_('visible?'), help_text=_('specifies if the attribute should be displayed in identify results'), default=True)
    display_order = models.IntegerField(_('display order'), help_text=_('specifies the order in which attribute should be displayed in identify results'), default=1)

    # statistical derivations
    # (stored as strings; 'NA' marks "not computed/not applicable")
    count = models.IntegerField(_('count'), help_text=_('count value for this field'), default=1)
    min = models.CharField(_('min'), help_text=_('minimum value for this field'), max_length=255, blank=False, null=True, unique=False, default='NA')
    max = models.CharField(_('max'), help_text=_('maximum value for this field'), max_length=255, blank=False, null=True, unique=False, default='NA')
    average = models.CharField(_('average'), help_text=_('average value for this field'), max_length=255, blank=False, null=True, unique=False, default='NA')
    median = models.CharField(_('median'), help_text=_('median value for this field'), max_length=255, blank=False, null=True, unique=False, default='NA')
    stddev = models.CharField(_('standard deviation'), help_text=_('standard deviation for this field'), max_length=255, blank=False, null=True, unique=False, default='NA')
    sum = models.CharField(_('sum'), help_text=_('sum value for this field'), max_length=255, blank=False, null=True, unique=False, default='NA')
    unique_values = models.TextField(_('unique values for this field'), null=True, blank=True, default='NA')
    last_stats_updated = models.DateTimeField(_('last modified'), default=datetime.now, help_text=_('date when attribute statistics were last updated')) # passing the method itself, not

    objects = AttributeManager()

    def __str__(self):
        return "%s" % self.attribute_label.encode("utf-8") if self.attribute_label else self.attribute.encode("utf-8")

    def unique_values_as_list(self):
        # `unique_values` is stored as a comma-separated string.
        return self.unique_values.split(',')
def geoserver_pre_delete(instance, sender, **kwargs):
    """Removes the layer from GeoServer
    """
    # Drop all ratings attached to the layer before it disappears.
    content_type = ContentType.objects.get_for_model(instance)
    ratings = OverallRating.objects.filter(content_type=content_type,
                                           object_id=instance.id)
    ratings.delete()
    #cascading_delete should only be called if ogc_server_settings.BACKEND_WRITE_ENABLED == True
    if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED", True):
        cascading_delete(Layer.objects.gs_catalog, instance.typename)
def pre_save_layer(instance, sender, **kwargs):
    # During fixture ("raw") loading, mirror the fields from the parent
    # ResourceBase row onto the layer instance.
    if kwargs.get('raw', False):
        parent = instance.resourcebase_ptr
        instance.owner = parent.owner
        instance.uuid = parent.uuid
        instance.bbox_x0 = parent.bbox_x0
        instance.bbox_x1 = parent.bbox_x1
        instance.bbox_y0 = parent.bbox_y0
        instance.bbox_y1 = parent.bbox_y1

    # Guarantee non-empty metadata defaults before saving.
    if instance.abstract == '' or instance.abstract is None:
        instance.abstract = 'No abstract provided'
    if instance.title == '' or instance.title is None:
        instance.title = instance.name
def pre_delete_layer(instance, sender, **kwargs):
    """
    Remove any associated style to the layer, if it is not used by other layers.
    Default style will be deleted in post_delete_layer
    """
    logger.debug("Going to delete the styles associated for [%s]", instance.typename.encode('utf-8'))
    default_style = instance.default_style
    for linked_style in instance.styles.all():
        # A style referenced only by this layer becomes orphaned once
        # the layer is gone -- except the default style, which is
        # handled in post_delete_layer.
        if linked_style.layer_styles.all().count() == 1 \
                and linked_style != default_style:
            linked_style.delete()
def post_delete_layer(instance, sender, **kwargs):
    """
    Removed the layer from any associated map, if any.
    Remove the layer default style.
    """
    # Imported here to avoid a circular import at module load time.
    from geonode.maps.models import MapLayer
    encoded_name = instance.typename.encode('utf-8')
    logger.debug("Going to delete associated maplayers for [%s]", encoded_name)
    MapLayer.objects.filter(name=instance.typename).delete()
    logger.debug("Going to delete the default style for [%s]", encoded_name)

    # Only drop the default style when no other layer still uses it.
    style = instance.default_style
    if style and Layer.objects.filter(default_style__id=style.id).count() == 0:
        style.delete()
def geoserver_pre_save(instance, sender, **kwargs):
    """Send information to geoserver.

    The attributes sent include:
      * Title
      * Abstract
      * Name
      * Keywords
      * Metadata Links
      * Point of Contact name and url

    On the way back it also refreshes the instance's bounding box and
    thumbnail from the catalog. Returns silently (layer save proceeds)
    when GeoServer is unreachable or does not know the resource.
    """
    url = ogc_server_settings.internal_rest
    try:
        gs_catalog = Catalog(url, _user, _password)
        gs_resource = gs_catalog.get_resource(instance.name)
    except (EnvironmentError, FailedRequestError) as e:
        gs_resource = None
        msg = ('Could not connect to geoserver at "%s"'
               'to save information for layer "%s"' % (
                ogc_server_settings.LOCATION, instance.name.encode('utf-8'))
              )
        logger.warn(msg, e)
        # If geoserver is not online, there is no need to continue
        return
    # If there is no resource returned it could mean one of two things:
    # a) There is a synchronization problem in geoserver
    # b) The unit tests are running and another geoserver is running in the
    #    background.
    # For both cases it is sensible to stop processing the layer
    if gs_resource is None:
        logger.warn('Could not get geoserver resource for %s' % instance)
        return
    gs_resource.title = instance.title
    gs_resource.abstract = instance.abstract
    gs_resource.name= instance.name
    # Mirror the GeoNode metadata links onto the GeoServer resource.
    metadata_links = []
    for link in instance.link_set.metadata():
        metadata_links.append((link.name, link.mime, link.url))
    gs_resource.metadata_links = metadata_links
    # gs_catalog.save should only be called if BACKEND_WRITE_ENABLED == True
    if getattr(ogc_server_settings,"BACKEND_WRITE_ENABLED", True):
        gs_catalog.save(gs_resource)
    gs_layer = gs_catalog.get_layer(instance.name)
    # Attribute the layer to its point of contact, when one is set.
    if instance.poc and instance.poc.user:
        gs_layer.attribution = str(instance.poc.user)
        profile = Profile.objects.get(user=instance.poc.user)
        gs_layer.attribution_link = settings.SITEURL[:-1] + profile.get_absolute_url()
        # gs_catalog.save should only be called if BACKEND_WRITE_ENABLED == True
        if getattr(ogc_server_settings,"BACKEND_WRITE_ENABLED", True):
            gs_catalog.save(gs_layer)
    """Get information from geoserver.
    The attributes retrieved include:
    * Bounding Box
    * SRID
    * Download links (WMS, WCS or WFS and KML)
    * Styles (SLD)
    """
    # Re-fetch the resource so the values below reflect what was just saved.
    gs_resource = gs_catalog.get_resource(instance.name)
    bbox = gs_resource.latlon_bbox
    #FIXME(Ariel): Correct srid setting below
    #self.srid = gs_resource.src
    # Set bounding box values
    instance.bbox_x0 = bbox[0]
    instance.bbox_x1 = bbox[1]
    instance.bbox_y0 = bbox[2]
    instance.bbox_y1 = bbox[3]
    instance.update_thumbnail(save=False)
def geoserver_post_save(instance, sender, **kwargs):
    """Save keywords to GeoServer and rebuild the layer's Link set.

    The way keywords are implemented requires the layer
    to be saved to the database before accessing them.

    Also (re)creates the WMS/WFS/WCS/KML/tile/HTML Link objects for the
    layer, prunes links pointing at an old host, and finally syncs the
    layer's attributes and styles from the catalog.
    """
    url = ogc_server_settings.internal_rest
    try:
        gs_catalog = Catalog(url, _user, _password)
        gs_resource = gs_catalog.get_resource(instance.name)
    except (FailedRequestError, EnvironmentError) as e:
        msg = ('Could not connect to geoserver at "%s"'
               'to save information for layer "%s"' % (
                ogc_server_settings.LOCATION, instance.name.encode('utf-8'))
              )
        logger.warn(msg, e)
        # If geoserver is not online, there is no need to continue
        return
    # If there is no resource returned it could mean one of two things:
    # a) There is a synchronization problem in geoserver
    # b) The unit tests are running and another geoserver is running in the
    #    background.
    # For both cases it is sensible to stop processing the layer
    if gs_resource is None:
        logger.warn('Could not get geoserver resource for %s' % instance)
        return
    gs_resource.keywords = instance.keyword_list()
    # gs_catalog.save should only be called if BACKEND_WRITE_ENABLED == True
    if getattr(ogc_server_settings,"BACKEND_WRITE_ENABLED", True):
        gs_catalog.save(gs_resource)
    # Derive an image size that preserves the data's aspect ratio.
    bbox = gs_resource.latlon_bbox
    dx = float(bbox[1]) - float(bbox[0])
    dy = float(bbox[3]) - float(bbox[2])
    dataAspect = 1 if dy == 0 else dx / dy
    height = 550
    width = int(height * dataAspect)
    # Set download links for WMS, WCS or WFS and KML
    links = wms_links(ogc_server_settings.public_url + 'wms?',
                instance.typename.encode('utf-8'), instance.bbox_string,
                instance.srid, height, width)
    for ext, name, mime, wms_url in links:
        Link.objects.get_or_create(resource= instance.resourcebase_ptr,
                            name=ugettext(name),
                            defaults=dict(
                                extension=ext,
                                url=wms_url,
                                mime=mime,
                                link_type='image',
                            )
                        )
    if instance.storeType == "dataStore":
        # Vector data: expose WFS download formats.
        links = wfs_links(ogc_server_settings.public_url + 'wfs?', instance.typename.encode('utf-8'))
        for ext, name, mime, wfs_url in links:
            Link.objects.get_or_create(resource= instance.resourcebase_ptr,
                             url=wfs_url,
                             defaults=dict(
                                extension=ext,
                                name=name,
                                mime=mime,
                                url=wfs_url,
                                link_type='data',
                            )
                        )
    elif instance.storeType == 'coverageStore':
        #FIXME(Ariel): This works for public layers, does it work for restricted too?
        # would those end up with no geotiff links, like, forever?
        # Temporarily grant read access so the WCS links can be generated,
        # then restore the saved permission levels afterwards.
        permissions = {}
        permissions['anonymous'] = instance.get_gen_level(ANONYMOUS_USERS)
        permissions['authenticated'] = instance.get_gen_level(AUTHENTICATED_USERS)
        instance.set_gen_level(ANONYMOUS_USERS,'layer_readonly')
        links = wcs_links(ogc_server_settings.public_url + 'wcs?', instance.typename.encode('utf-8'),
            bbox=instance.bbox[:-1], crs=instance.bbox[-1], height=height, width=width)
        for ext, name, mime, wcs_url in links:
            Link.objects.get_or_create(resource= instance.resourcebase_ptr,
                                url=wcs_url,
                                defaults=dict(
                                    extension=ext,
                                    name=name,
                                    mime=mime,
                                    link_type='data',
                                )
                            )
        instance.set_gen_level(ANONYMOUS_USERS,permissions['anonymous'])
        instance.set_gen_level(AUTHENTICATED_USERS,permissions['authenticated'])
    # KML download link via the WMS KML reflector.
    kml_reflector_link_download = ogc_server_settings.public_url + "wms/kml?" + urllib.urlencode({
        'layers': instance.typename.encode('utf-8'),
        'mode': "download"
    })
    Link.objects.get_or_create(resource= instance.resourcebase_ptr,
                        url=kml_reflector_link_download,
                        defaults=dict(
                            extension='kml',
                            name=_("KML"),
                            mime='text/xml',
                            link_type='data',
                        )
                    )
    # Live network link for viewing the layer in Google Earth.
    kml_reflector_link_view = ogc_server_settings.public_url + "wms/kml?" + urllib.urlencode({
        'layers': instance.typename.encode('utf-8'),
        'mode': "refresh"
    })
    Link.objects.get_or_create(resource= instance.resourcebase_ptr,
                        url=kml_reflector_link_view,
                        defaults=dict(
                            extension='kml',
                            name=_("View in Google Earth"),
                            mime='text/xml',
                            link_type='data',
                        )
                    )
    # GeoWebCache tile URL template in Google Maps convention.
    tile_url = ('%sgwc/service/gmaps?' % ogc_server_settings.public_url +
                'layers=%s' % instance.typename.encode('utf-8') +
                '&zoom={z}&x={x}&y={y}' +
                '&format=image/png8'
                )
    Link.objects.get_or_create(resource= instance.resourcebase_ptr,
                        url=tile_url,
                        defaults=dict(
                            extension='tiles',
                            name=_("Tiles"),
                            mime='image/png',
                            link_type='image',
                        )
                    )
    # Permalink to the layer's detail page on this GeoNode site.
    html_link_url = '%s%s' % (settings.SITEURL[:-1], instance.get_absolute_url())
    Link.objects.get_or_create(resource= instance.resourcebase_ptr,
                        url=html_link_url,
                        defaults=dict(
                            extension='html',
                            name=instance.typename,
                            mime='text/html',
                            link_type='html',
                        )
                    )
    # Remove links that belong to an old address
    for link in instance.link_set.all():
        if not urlparse(settings.SITEURL).hostname == urlparse(link.url).hostname and not \
                urlparse(ogc_server_settings.public_url).hostname == urlparse(link.url).hostname:
            link.delete()
    # Save layer attributes
    set_attributes(instance)
    # Save layer styles
    set_styles(instance, gs_catalog)
def set_styles(layer, gs_catalog):
    """Sync the layer's default and alternate styles from GeoServer.

    Returns the (unsaved) layer with ``default_style`` and ``styles``
    refreshed to mirror the catalog.
    """
    gs_layer = gs_catalog.get_layer(layer.name)
    collected = []
    # The default style always comes first.
    layer.default_style = save_style(gs_layer.default_style)
    collected.append(layer.default_style)
    for extra in gs_layer.styles:
        collected.append(save_style(extra))
    layer.styles = collected
    return layer
def save_style(gs_style):
    """Create or refresh the local Style record mirroring a GeoServer style."""
    style, _ = Style.objects.get_or_create(name=gs_style.sld_name)
    style.sld_title = gs_style.sld_title
    style.sld_body = gs_style.sld_body
    style.sld_url = gs_style.body_href()
    style.save()
    return style
def is_layer_attribute_aggregable(store_type, field_name, field_type):
    """
    Decide whether a layer attribute qualifies for statistical derivation.

    Only numeric, non-identifier fields of vector (dataStore) layers do.
    """
    # Rasters and other store types carry no per-feature values.
    if store_type != 'dataStore':
        return False
    # Statistics only make sense for numeric columns.
    if field_type not in LAYER_ATTRIBUTE_NUMERIC_DATA_TYPES:
        return False
    # Identifier-style fields are excluded even when numeric.
    return field_name.lower() not in ['id', 'identifier']
def get_attribute_statistics(layer_name, field):
    """
    Generate statistics (range, mean, median, standard deviation, unique values)
    for a layer attribute via the WPS service.

    Returns None when WPS is disabled or the WPS request fails.
    """
    logger.debug('Deriving aggregate statistics for attribute %s', field)
    if not ogc_server_settings.WPS_ENABLED:
        return None
    try:
        return wps_execute_layer_attribute_statistics(layer_name, field)
    except Exception:
        # Best effort: log and fall through to an empty result.
        logger.exception('Error generating layer aggregate statistics')
        return None
def set_attributes(layer, overwrite=False):
    """
    Retrieve layer attribute names & types from GeoServer,
    then store them in the GeoNode database using the Attribute model.

    Attributes that no longer exist on the remote layer (or all of them,
    when ``overwrite`` is True) are deleted; newly discovered ones are
    created and, where applicable, enriched with aggregate statistics.
    """
    # Appending authorizations seems necessary to avoid 'layer not found'
    # errors from GeoServer on secured layers.
    http = httplib2.Http()
    http.add_credentials(_user, _password)
    _netloc = urlparse(ogc_server_settings.LOCATION).netloc
    http.authorizations.append(
        httplib2.BasicAuthentication(
            (_user, _password),
            _netloc,
            ogc_server_settings.LOCATION,
            {},
            None,
            None,
            http
        )
    )
    attribute_map = []
    if layer.storeType == "dataStore":
        # Vector layer: field names/types come from WFS DescribeFeatureType.
        dft_url = ogc_server_settings.LOCATION + "wfs?" + urllib.urlencode({
            "service": "wfs",
            "version": "1.0.0",
            "request": "DescribeFeatureType",
            "typename": layer.typename.encode('utf-8'),
        })
        try:
            body = http.request(dft_url)[1]
            doc = etree.fromstring(body)
            path = ".//{xsd}extension/{xsd}sequence/{xsd}element".format(xsd="{http://www.w3.org/2001/XMLSchema}")
            attribute_map = [[n.attrib["name"], n.attrib["type"]] for n in doc.findall(path)]
        except Exception:
            # Unreachable server or malformed response: treat as no attributes.
            attribute_map = []
    elif layer.storeType == "coverageStore":
        # Raster layer: band/key names come from WCS DescribeCoverage.
        dc_url = ogc_server_settings.LOCATION + "wcs?" + urllib.urlencode({
            "service": "wcs",
            "version": "1.1.0",
            "request": "DescribeCoverage",
            "identifiers": layer.typename.encode('utf-8')
        })
        try:
            response, body = http.request(dc_url)
            doc = etree.fromstring(body)
            path = ".//{wcs}Axis/{wcs}AvailableKeys/{wcs}Key".format(wcs="{http://www.opengis.net/wcs/1.1.1}")
            attribute_map = [[n.text, "raster"] for n in doc.findall(path)]
        except Exception:
            attribute_map = []
    # Delete existing attributes if they no longer exist in the updated layer.
    for la in layer.attribute_set.all():
        still_present = any(field == la.attribute for field, ftype in attribute_map)
        if overwrite or not still_present:
            logger.debug("Going to delete [%s] for [%s]", la.attribute, layer.name.encode('utf-8'))
            la.delete()
    # Add new layer attributes if they don't already exist.
    if attribute_map:
        # BUGFIX: use count() instead of len(queryset) to avoid materializing
        # the whole queryset; 'display_order' replaces the old local name
        # 'iter', which shadowed the builtin.
        display_order = Attribute.objects.filter(layer=layer).count() + 1
        for field, ftype in attribute_map:
            if field is None:
                continue
            la, created = Attribute.objects.get_or_create(layer=layer, attribute=field, attribute_type=ftype)
            if created:
                if is_layer_attribute_aggregable(layer.storeType, field, ftype):
                    logger.debug("Generating layer attribute statistics")
                    result = get_attribute_statistics(layer.name, field)
                    if result is not None:
                        la.count = result['Count']
                        la.min = result['Min']
                        la.max = result['Max']
                        la.average = result['Average']
                        la.median = result['Median']
                        la.stddev = result['StandardDeviation']
                        la.sum = result['Sum']
                        la.unique_values = result['unique_values']
                        la.last_stats_updated = datetime.now()
                la.attribute_label = field.title()
                # Hide internal gml:-typed fields (e.g. the geometry) from the UI.
                la.visible = ftype.find("gml:") != 0
                la.display_order = display_order
                la.save()
                display_order += 1
                logger.debug("Created [%s] attribute for [%s]", field, layer.name.encode('utf-8'))
    else:
        # BUGFIX: the original guard 'attribute_map is not None' was always
        # true, so this branch could never fire for an empty map.
        logger.debug("No attributes found")
# Wire up the lifecycle handlers: GeoNode bookkeeping plus GeoServer
# synchronisation on every Layer save/delete.
signals.pre_save.connect(pre_save_layer, sender=Layer)
signals.pre_save.connect(geoserver_pre_save, sender=Layer)
signals.pre_delete.connect(geoserver_pre_delete, sender=Layer)
signals.post_save.connect(geoserver_post_save, sender=Layer)
signals.pre_delete.connect(pre_delete_layer, sender=Layer)
signals.post_delete.connect(post_delete_layer, sender=Layer)
signals.post_save.connect(resourcebase_post_save, sender=Layer)
signals.post_delete.connect(resourcebase_post_delete, sender=Layer)
| AnnalisaS/migration_geonode | geonode/layers/models.py | Python | gpl-3.0 | 29,589 |
# Django Imports
from django.db.models.signals import post_save
from django.dispatch import receiver
# Python Imports
from logging import getLogger
# Local Imports
from .models import Person, PersonAttribute, PersonIdentifier, PersonIdentifierAttribute, GroupIdentifier, \
GroupIdentifierAttribute
# Logging
logger = getLogger(__name__)
# Implementation
@receiver(post_save, sender=Person)
def signal_ava_organize_person_post_save(sender, created, instance, **kwargs):
    """Create the companion PersonAttribute when a Person is first saved.

    Runs on every Person save, but only acts on initial creation.
    """
    logger.debug('Signal Called'
                 '- organize::signal_ava_organize_person_post_save')
    if created:
        logger.debug('Signal Fired'
                     ' - organize::signal_ava_organize_person_post_save')
        # objects.create() already INSERTs the row; the follow-up save()
        # issued a redundant UPDATE, so it has been removed.
        PersonAttribute.objects.create(person=instance)
@receiver(post_save, sender=PersonIdentifier)
def signal_ava_organize_person_identifier_post_save(sender, created, instance, **kwargs):
    """Create the companion PersonIdentifierAttribute for a new PersonIdentifier."""
    logger.debug('Signal Called'
                 '- organize::signal_ava_organize_person_identifier_post_save')
    if created:
        logger.debug('Signal Fired'
                     ' - organize::signal_ava_organize_person_identifier_post_save')
        # objects.create() already persists the row; no extra save() needed.
        PersonIdentifierAttribute.objects.create(identifier=instance)
@receiver(post_save, sender=GroupIdentifier)
def signal_ava_organize_group_identifier_post_save(sender, created, instance, **kwargs):
    """Create the companion GroupIdentifierAttribute for a new GroupIdentifier."""
    logger.debug('Signal Called'
                 '- organize::signal_ava_organize_group_identifier_post_save')
    if created:
        logger.debug('Signal Fired'
                     ' - organize::signal_ava_organize_group_identifier_post_save')
        # objects.create() already persists the row; no extra save() needed.
        GroupIdentifierAttribute.objects.create(identifier=instance)
| alzeih/ava | ava_core/organize/signals.py | Python | gpl-3.0 | 1,780 |
# -*- coding: utf-8 -*-
# from django.contrib import admin
# from tribus.web.cloud.models import *
# class PaqueteAlin(admin.TabularInline):
# model = Paquete
# extra = 0
#
# class AdminMantenedor(admin.ModelAdmin):
# fieldsets = (
# ('Nombre del mantenedor', {
# 'classes': ('wide', 'extrapretty',),
# 'fields': ('nombre',)
# }),
#
# ('Correo electronico', {
# 'classes': ('wide', 'extrapretty',),
# 'fields': ('correo',)
# }),
# )
#
# inlines = [PaqueteAlin] # Descomentar para mostrar los paquetes que corresponden a cada mantenedor
#
# list_display = ('nombre', 'correo')
# list_filter = ['nombre']
# search_fields = ['nombre']
#
# class AdminPaquete(admin.ModelAdmin):
# fieldsets = (
# ('Nombre del paquete', {
# 'classes': ('wide', 'extrapretty'),
# 'fields': ('Package',)
# }),
#
# ('Mantenedor del paquete', {
# 'classes': ('wide', 'extrapretty'),
# 'fields': ('Maintainer',)
# }),
#
# ('Version', {
# 'classes': ('wide', 'extrapretty'),
# 'fields': ('Version',)
# }),
#
# ('Multi Arquitectura', {
# 'classes': ('wide', 'extrapretty'),
# 'fields': ('MultiArch',)
# }),
#
# ('Suma MD5', {
# 'classes': ('wide', 'extrapretty'),
# 'fields': ('MD5sum',)
# }),
#
# ('Etiquetas del paquete', {
# 'classes': ('wide', 'extrapretty'),
# 'fields': ('Tags',)
# }),
# )
#
# list_display = ('Package', 'Version', 'MD5sum')
# list_filter = ['Architecture', 'Priority']
# search_fields = ['Package']
#
# admin.site.register(Etiqueta)
# admin.site.register(ValorTag)
# admin.site.register(Paquete, AdminPaquete)
# admin.site.register(Mantenedor, AdminMantenedor)
| LuisAlejandro/tribus | tribus/web/cloud/admin.py | Python | gpl-3.0 | 1,893 |
import IMP
import IMP.test
import IMP.core
class Tests(IMP.test.TestCase):

    """Tests copying derivatives to and from refined"""

    def _create_particle(self, model, keys):
        # Build a particle carrying every key in *keys*, initialised to 0.
        particle = IMP.Particle(model)
        for key in keys:
            particle.add_attribute(key, 0.0)
        return particle

    def _setup(self):
        # Two float keys shared by all particles in the fixture.
        keys = IMP.FloatKeys()
        keys.append(IMP.FloatKey("Key0"))
        keys.append(IMP.FloatKey("Key1"))
        model = IMP.Model()
        p0 = self._create_particle(model, keys)
        p1 = self._create_particle(model, keys)
        p2 = self._create_particle(model, keys)
        # Hierarchy: p0 is the parent of the two leaves p1 and p2.
        root = IMP.core.Hierarchy.setup_particle(p0)
        leaf_a = IMP.core.Hierarchy.setup_particle(p1)
        leaf_b = IMP.core.Hierarchy.setup_particle(p2)
        root.add_child(leaf_a)
        root.add_child(leaf_b)
        refiner = IMP.core.ChildrenRefiner(
            IMP.core.Hierarchy.get_default_traits())
        return root, leaf_a, leaf_b, p0, p1, p2, refiner, keys, model

    def test_to(self):
        """Test copying derivatives to refined"""
        root, leaf_a, leaf_b, p0, p1, p2, refiner, keys, model = self._setup()
        accum = IMP.DerivativeAccumulator()
        # Seed the parent with derivatives (1, 2) on the two keys.
        p0.add_to_derivative(keys[0], 1, accum)
        p0.add_to_derivative(keys[1], 2, accum)
        modifier = IMP.core.DerivativesToRefined(refiner, keys)
        modifier.apply_index(model, p0)
        # Both children must have received the parent's derivatives.
        for child in (p1, p2):
            self.assertEqual(child.get_derivative(keys[0]), 1)
            self.assertEqual(child.get_derivative(keys[1]), 2)

    def test_from(self):
        """Test copying derivatives from refined"""
        root, leaf_a, leaf_b, p0, p1, p2, refiner, keys, model = self._setup()
        accum = IMP.DerivativeAccumulator()
        # Give each child distinct derivatives so the sums are unambiguous.
        p1.add_to_derivative(keys[0], 1, accum)
        p1.add_to_derivative(keys[1], 2, accum)
        p2.add_to_derivative(keys[0], 4, accum)
        p2.add_to_derivative(keys[1], 8, accum)
        modifier = IMP.core.DerivativesFromRefined(refiner, keys)
        modifier.apply_index(model, p0)
        # The parent should end up with the per-key sums over the children.
        self.assertEqual(p0.get_derivative(keys[0]), 5)
        self.assertEqual(p0.get_derivative(keys[1]), 10)
# Allow running this test file directly from the command line.
if __name__ == '__main__':
    IMP.test.main()
| shanot/imp | modules/core/test/test_to_from_refined.py | Python | gpl-3.0 | 2,150 |
#! /usr/bin/env python
'''Tests rendering using shader objects from core GL or extensions
Uses the:
Lighthouse 3D Tutorial toon shader
http://www.lighthouse3d.com/opengl/glsl/index.php?toon2
By way of:
http://www.pygame.org/wiki/GLSLExample
'''
import OpenGL
OpenGL.ERROR_ON_COPY = True
from OpenGL.GL import *
from OpenGL.GLU import *
from OpenGL.GLUT import *
# PyOpenGL 3.0.1 introduces this convenience module...
from OpenGL.GL.shaders import *
import time, sys
program = None
# A general OpenGL initialization function. Sets all of the initial parameters.
def InitGL(Width, Height): # We call this right after our OpenGL window is created.
glClearColor(0.0, 0.0, 0.0, 0.0) # This Will Clear The Background Color To Black
glClearDepth(1.0) # Enables Clearing Of The Depth Buffer
glDepthFunc(GL_LESS) # The Type Of Depth Test To Do
glEnable(GL_DEPTH_TEST) # Enables Depth Testing
glShadeModel(GL_SMOOTH) # Enables Smooth Color Shading
glMatrixMode(GL_PROJECTION)
glLoadIdentity() # Reset The Projection Matrix
# Calculate The Aspect Ratio Of The Window
gluPerspective(45.0, float(Width)/float(Height), 0.1, 100.0)
glMatrixMode(GL_MODELVIEW)
if not glUseProgram:
print 'Missing Shader Objects!'
sys.exit(1)
global program
program = compileProgram(
compileShader('''
varying vec3 normal;
void main() {
normal = gl_NormalMatrix * gl_Normal;
gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;
}
''',GL_VERTEX_SHADER),
compileShader('''
varying vec3 normal;
void main() {
float intensity;
vec4 color;
vec3 n = normalize(normal);
vec3 l = normalize(gl_LightSource[0].position).xyz;
// quantize to 5 steps (0, .25, .5, .75 and 1)
intensity = (floor(dot(l, n) * 4.0) + 1.0)/4.0;
color = vec4(intensity*1.0, intensity*0.5, intensity*0.5,
intensity*1.0);
gl_FragColor = color;
}
''',GL_FRAGMENT_SHADER),)
# The function called when our window is resized (which shouldn't happen if you enable fullscreen, below)
def ReSizeGLScene(Width, Height):
    """GLUT reshape callback: refit the viewport and projection to the window."""
    if Height == 0:
        # Guard against a divide-by-zero when the window collapses to 0 height.
        Height = 1
    glViewport(0, 0, Width, Height)
    glMatrixMode(GL_PROJECTION)
    glLoadIdentity()
    gluPerspective(45.0, float(Width)/float(Height), 0.1, 100.0)
    glMatrixMode(GL_MODELVIEW)
# The main drawing function.
def DrawGLScene():
    """GLUT display/idle callback: render the toon-shaded sphere and cube."""
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
    glLoadIdentity()
    # Move left 1.5 units and 6 units into the screen.
    glTranslatef(-1.5, 0.0, -6.0)
    # Activate the toon shader when it compiled successfully.
    if program:
        glUseProgram(program)
    glutSolidSphere(1.0, 32, 32)
    glTranslate(1, 0, 2)
    glutSolidCube(1.0)
    # Double-buffered: present what was just drawn.
    glutSwapBuffers()
# The function called whenever a key is pressed. Note the use of Python tuples to pass in: (key, x, y)
def keyPressed(*args):
    """GLUT keyboard callback: exit on ESC, ignore every other key."""
    ESCAPE = '\x1b'
    if args[0] == ESCAPE:
        sys.exit()
def main():
    """Create the GLUT window, register callbacks and enter the main loop."""
    global window
    glutInit(sys.argv)
    # Double-buffered RGBA visual with alpha support and a depth buffer.
    glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE | GLUT_DEPTH)
    # 640x480 window at the upper-left corner of the screen.
    glutInitWindowSize(640, 480)
    glutInitWindowPosition(0, 0)
    # Keep the window id around so it can be referenced at shutdown.
    window = glutCreateWindow("Jeff Molofee's GL Code Tutorial ... NeHe '99")
    # Same callback for display and idle: redraw continuously.
    glutDisplayFunc(DrawGLScene)
    # glutFullScreen()  # uncomment for full screen
    glutIdleFunc(DrawGLScene)
    glutReshapeFunc(ReSizeGLScene)
    glutKeyboardFunc(keyPressed)
    InitGL(640, 480)
    # Hand control to GLUT; never returns.
    glutMainLoop()
# Print message to console, and kick off the main to get it rolling.
if __name__ == "__main__":
print "Hit ESC key to quit."
main()
| mgood7123/UPM | Sources/PyOpenGL-Demo-3.0.1b1/PyOpenGL-Demo/GLUT/shader_test.py | Python | gpl-3.0 | 5,525 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Bruno Calogero <brunocalogero@hotmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Standard Ansible module metadata: community-supported, preview status.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: aci_interface_policy_leaf_policy_group
short_description: Add Fabric Interface Policy Leaf Policy Groups on Cisco ACI fabrics.
description:
- Add Fabric Interface Policy Leaf Policy Groups on Cisco ACI fabrics.
- More information from the internal APIC class I(infra:AccBndlGrp), I(infra:AccPortGrp) at
U(https://developer.cisco.com/site/aci/docs/apis/apic-mim-ref/).
author:
- Bruno Calogero (@brunocalogero)
version_added: '2.5'
notes:
- When using the module please select the appropriate link_aggregation_type (lag_type).
C(link) for Port Channel(PC), C(node) for Virtual Port Channel(VPC) and C(leaf) for Leaf Access Port Policy Group.
options:
policy_group:
description:
- Name of the leaf policy group to be added/deleted.
aliases: [ name, policy_group_name ]
description:
description:
- Description for the leaf policy group to be created.
aliases: [ descr ]
lag_type:
description:
- Selector for the type of leaf policy group we want to create.
aliases: [ lag_type_name ]
link_level_policy:
description:
- Choice of link_level_policy to be used as part of the leaf policy group to be created.
aliases: [ link_level_policy_name ]
cdp_policy:
description:
- Choice of cdp_policy to be used as part of the leaf policy group to be created.
aliases: [ cdp_policy_name ]
mcp_policy:
description:
- Choice of mcp_policy to be used as part of the leaf policy group to be created.
aliases: [ mcp_policy_name ]
lldp_policy:
description:
- Choice of lldp_policy to be used as part of the leaf policy group to be created.
aliases: [ lldp_policy_name ]
stp_interface_policy:
description:
- Choice of stp_interface_policy to be used as part of the leaf policy group to be created.
aliases: [ stp_interface_policy_name ]
egress_data_plane_policing_policy:
description:
- Choice of egress_data_plane_policing_policy to be used as part of the leaf policy group to be created.
aliases: [ egress_data_plane_policing_policy_name ]
ingress_data_plane_policing_policy:
description:
- Choice of ingress_data_plane_policing_policy to be used as part of the leaf policy group to be created.
aliases: [ ingress_data_plane_policing_policy_name ]
priority_flow_control_policy:
description:
- Choice of priority_flow_control_policy to be used as part of the leaf policy group to be created.
aliases: [ priority_flow_control_policy_name ]
fibre_channel_interface_policy:
description:
- Choice of fibre_channel_interface_policy to be used as part of the leaf policy group to be created.
aliases: [ fibre_channel_interface_policy_name ]
slow_drain_policy:
description:
- Choice of slow_drain_policy to be used as part of the leaf policy group to be created.
aliases: [ slow_drain_policy_name ]
port_channel_policy:
description:
- Choice of port_channel_policy to be used as part of the leaf policy group to be created.
aliases: [ port_channel_policy_name ]
monitoring_policy:
description:
- Choice of monitoring_policy to be used as part of the leaf policy group to be created.
aliases: [ monitoring_policy_name ]
storm_control_interface_policy:
description:
- Choice of storm_control_interface_policy to be used as part of the leaf policy group to be created.
aliases: [ storm_control_interface_policy_name ]
l2_interface_policy:
description:
- Choice of l2_interface_policy to be used as part of the leaf policy group to be created.
aliases: [ l2_interface_policy_name ]
port_security_policy:
description:
- Choice of port_security_policy to be used as part of the leaf policy group to be created.
aliases: [ port_security_policy_name ]
aep:
description:
- Choice of attached_entity_profile (AEP) to be used as part of the leaf policy group to be created.
aliases: [ aep_name ]
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
choices: [ absent, present, query ]
default: present
extends_documentation_fragment: aci
'''
EXAMPLES = r'''
- name: creating a Port Channel (PC) Interface Policy Group
aci_interface_policy_leaf_policy_group:
hostname: apic
username: yourusername
password: yourpassword
policy_group: policygroupname
description: policygroupname description
lag_type: link
link_level_policy: whateverlinklevelpolicy
fibre_channel_interface_policy: whateverfcpolicy
state: present
- name: creating a Virtual Port Channel (VPC) Interface Policy Group (no description)
aci_interface_policy_leaf_policy_group:
hostname: apic
username: yourusername
password: yourpassword
policy_group: policygroupname
lag_type: node
link_level_policy: whateverlinklevelpolicy
fibre_channel_interface_policy: whateverfcpolicy
state: present
- name: creating a Leaf Access Port Policy Group (no description)
aci_interface_policy_leaf_policy_group:
hostname: apic
username: yourusername
password: yourpassword
policy_group: policygroupname
lag_type: leaf
link_level_policy: whateverlinklevelpolicy
fibre_channel_interface_policy: whateverfcpolicy
state: present
- name: deleting an Interface policy Leaf Policy Group
aci_interface_policy_leaf_policy_group:
hostname: apic
username: yourusername
password: yourpassword
policy_group: policygroupname
lag_type: type_name
state: absent
'''
# This module does not document explicit return values.
RETURN = ''' # '''
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
def main():
    """Manage leaf interface policy groups (PC, VPC and leaf access port)
    on Cisco ACI fabrics via the APIC REST API."""
    # Assemble the argument spec from the shared ACI base spec plus the
    # options specific to interface policy groups.
    argument_spec = aci_argument_spec()
    argument_spec.update(
        policy_group=dict(type='str', aliases=['name', 'policy_group_name']),
        description=dict(type='str', aliases=['descr']),
        # NOTE: Since this module needs to cover both infra:AccBndlGrp (for PC
        # and VPC) and infra:AccPortGrp (for leaf access port policy groups),
        # the user chooses via lag_type: link (PC), node (VPC) or leaf
        # (leaf access port policy group).
        lag_type=dict(type='str', aliases=['lag_type_name']),
        link_level_policy=dict(type='str', aliases=['link_level_policy_name']),
        cdp_policy=dict(type='str', aliases=['cdp_policy_name']),
        mcp_policy=dict(type='str', aliases=['mcp_policy_name']),
        lldp_policy=dict(type='str', aliases=['lldp_policy_name']),
        stp_interface_policy=dict(type='str', aliases=['stp_interface_policy_name']),
        egress_data_plane_policing_policy=dict(type='str', aliases=['egress_data_plane_policing_policy_name']),
        ingress_data_plane_policing_policy=dict(type='str', aliases=['ingress_data_plane_policing_policy_name']),
        priority_flow_control_policy=dict(type='str', aliases=['priority_flow_control_policy_name']),
        fibre_channel_interface_policy=dict(type='str', aliases=['fibre_channel_interface_policy_name']),
        slow_drain_policy=dict(type='str', aliases=['slow_drain_policy_name']),
        port_channel_policy=dict(type='str', aliases=['port_channel_policy_name']),
        monitoring_policy=dict(type='str', aliases=['monitoring_policy_name']),
        storm_control_interface_policy=dict(type='str', aliases=['storm_control_interface_policy_name']),
        l2_interface_policy=dict(type='str', aliases=['l2_interface_policy_name']),
        port_security_policy=dict(type='str', aliases=['port_security_policy_name']),
        aep=dict(type='str', aliases=['aep_name']),
        state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        # Creating or deleting a policy group requires both its name and kind.
        required_if=[
            ['state', 'absent', ['policy_group', 'lag_type']],
            ['state', 'present', ['policy_group', 'lag_type']],
        ],
    )

    params = module.params
    policy_group = params['policy_group']
    description = params['description']
    lag_type = params['lag_type']
    aep = params['aep']
    state = params['state']

    # (child relation class, attribute name, module parameter) triples for
    # every per-policy child object.  Order matches the payload this module
    # has always produced.
    policy_children = [
        ('infraRsMonIfInfraPol', 'tnMonInfraPolName', 'monitoring_policy'),
        ('infraRsLldpIfPol', 'tnLldpIfPolName', 'lldp_policy'),
        ('infraRsFcIfPol', 'tnFcIfPolName', 'fibre_channel_interface_policy'),
        ('infraRsLacpPol', 'tnLacpLagPolName', 'port_channel_policy'),
        ('infraRsL2PortSecurityPol', 'tnL2PortSecurityPolName', 'port_security_policy'),
        ('infraRsHIfPol', 'tnFabricHIfPolName', 'link_level_policy'),
        ('infraRsQosPfcIfPol', 'tnQosPfcIfPolName', 'priority_flow_control_policy'),
        ('infraRsStpIfPol', 'tnStpIfPolName', 'stp_interface_policy'),
        ('infraRsQosIngressDppIfPol', 'tnQosDppPolName', 'ingress_data_plane_policing_policy'),
        ('infraRsStormctrlIfPol', 'tnStormctrlIfPolName', 'storm_control_interface_policy'),
        ('infraRsQosEgressDppIfPol', 'tnQosDppPolName', 'egress_data_plane_policing_policy'),
        ('infraRsQosSdIfPol', 'tnQosSdIfPolName', 'slow_drain_policy'),
        ('infraRsMcpIfPol', 'tnMcpIfPolName', 'mcp_policy'),
        ('infraRsCdpIfPol', 'tnCdpIfPolName', 'cdp_policy'),
        ('infraRsL2IfPol', 'tnL2IfPolName', 'l2_interface_policy'),
    ]

    # Leaf access port groups and PC/VPC bundle groups live in different ACI
    # object classes with different RN prefixes.
    aci_class_name = ''
    dn_name = ''
    class_config_dict = {}
    if lag_type == 'leaf':
        aci_class_name = 'infraAccPortGrp'
        dn_name = 'accportgrp'
        class_config_dict = dict(
            name=policy_group,
            descr=description,
            dn='uni/infra/funcprof/{0}-{1}'.format(dn_name, policy_group)
        )
    elif lag_type in ('link', 'node'):
        aci_class_name = 'infraAccBndlGrp'
        dn_name = 'accbundle'
        class_config_dict = dict(
            name=policy_group,
            descr=description,
            lagT=lag_type,
            dn='uni/infra/funcprof/{0}-{1}'.format(dn_name, policy_group)
        )

    aci = ACIModule(module)
    aci.construct_url(
        root_class=dict(
            aci_class=aci_class_name,
            aci_rn='infra/funcprof/{0}-{1}'.format(dn_name, policy_group),
            filter_target='eq({0}.name, "{1}")'.format(aci_class_name, policy_group),
            module_object=policy_group
        ),
        child_classes=[
            'infraRsMonIfInfraPol', 'infraRsLldpIfPol', 'infraRsFcIfPol',
            'infraRsLacpPol', 'infraRsL2PortSecurityPol', 'infraRsHIfPol',
            'infraRsQosPfcIfPol', 'infraRsStpIfPol', 'infraRsQosIngressDppIfPol',
            'infraRsStormctrlIfPol', 'infraRsQosEgressDppIfPol', 'infraRsQosSdIfPol',
            'infraRsAttEntP', 'infraRsMcpIfPol', 'infraRsCdpIfPol', 'infraRsL2IfPol'
        ]
    )

    aci.get_existing()

    if state == 'present':
        # One child entry per policy relation; aci.payload() filters out
        # module params with null values.
        child_configs = [
            {rel_class: dict(attributes={attr_name: params[param_name]})}
            for rel_class, attr_name, param_name in policy_children
        ]
        child_configs.append(dict(
            infraRsAttEntP=dict(
                attributes=dict(
                    tDn='uni/infra/attentp-{0}'.format(aep)
                )
            )
        ))
        aci.payload(
            aci_class=aci_class_name,
            class_config=class_config_dict,
            child_configs=child_configs,
        )

        # Generate config diff which will be used as POST request body.
        aci.get_diff(aci_class=aci_class_name)

        # Submit changes if module not in check_mode and the proposed config
        # differs from the existing one.
        aci.post_config()

    elif state == 'absent':
        aci.delete_config()

    module.exit_json(**aci.result)
# Standard Ansible module entry point.
if __name__ == "__main__":
    main()
| tima/ansible | lib/ansible/modules/network/aci/aci_interface_policy_leaf_policy_group.py | Python | gpl-3.0 | 15,589 |
from typing import List
class NamedList:
    """A container class for storing a list of named integers.

    Lookup and membership are resolved against the parallel list of names;
    a missing name raises ValueError (from list.index), matching list
    semantics rather than dict semantics.
    """

    def __init__(self, names: List[str], values: List[int]) -> None:
        self._names = names
        self._values = values

    def __getitem__(self, name: str) -> int:
        position = self._names.index(name)
        return self._values[position]

    def __contains__(self, name: str) -> bool:
        return any(existing == name for existing in self._names)
# Demonstration for pylint E1138 (unsupported-delete-operation): NamedList
# defines no __delitem__, so the ``del`` statement below is the error this
# example file exists to show.
named_list = NamedList(['a', 'b', 'c'], [1, 2, 3])
print('c' in named_list) # Prints True
del named_list['c'] # Error on this line
print('c' in named_list)
| pyta-uoft/pyta | examples/pylint/e1138_unsupported_delete_operation.py | Python | gpl-3.0 | 595 |
#!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
__license__ = 'GPL v3'
__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
import os
from PyQt5.Qt import Qt, QVBoxLayout, QFormLayout
from calibre.gui2.preferences import ConfigWidgetBase, test_widget, \
CommaSeparatedList, AbortCommit
from calibre.gui2.preferences.adding_ui import Ui_Form
from calibre.utils.config import prefs
from calibre.gui2.widgets import FilenamePattern
from calibre.gui2.auto_add import AUTO_ADDED
from calibre.gui2 import gprefs, choose_dir, error_dialog, question_dialog
class ConfigWidget(ConfigWidgetBase, Ui_Form):
    # Preferences pane for "Adding books": registers the add/import options,
    # the filename->metadata pattern editor, and the auto-add watched folder.

    def genesis(self, gui):
        # Bind every opt_* widget to its preference key via self.register.
        self.gui = gui
        r = self.register

        r('read_file_metadata', prefs)
        r('swap_author_names', prefs)
        r('add_formats_to_existing', prefs)
        r('check_for_dupes_on_ctl', prefs)
        r('preserve_date_on_ctl', gprefs)
        r('manual_add_auto_convert', gprefs)
        choices = [
                (_('Ignore duplicate incoming formats'), 'ignore'),
                (_('Overwrite existing duplicate formats'), 'overwrite'),
                (_('Create new record for each duplicate format'), 'new record')]
        r('automerge', gprefs, choices=choices)
        r('new_book_tags', prefs, setting=CommaSeparatedList)
        r('mark_new_books', prefs)
        # Changing the auto-add folder settings needs a restart because the
        # folder watcher is created at startup.
        r('auto_add_path', gprefs, restart_required=True)
        r('auto_add_everything', gprefs, restart_required=True)
        r('auto_add_check_for_duplicates', gprefs)
        r('auto_add_auto_convert', gprefs)
        r('auto_convert_same_fmt', gprefs)

        # The filename-pattern editor is built in code and inserted at the
        # top of the metadata box.
        self.filename_pattern = FilenamePattern(self)
        self.metadata_box.l = QVBoxLayout(self.metadata_box)
        self.metadata_box.layout().insertWidget(0, self.filename_pattern)
        self.filename_pattern.changed_signal.connect(self.changed_signal.emit)

        self.auto_add_browse_button.clicked.connect(self.choose_aa_path)
        # Any interaction with the blocked-formats list marks the pane dirty.
        for signal in ('Activated', 'Changed', 'DoubleClicked', 'Clicked'):
            signal = getattr(self.opt_blocked_auto_formats, 'item'+signal)
            signal.connect(self.blocked_auto_formats_changed)
        # None means "unchanged"; commit() only writes these when set.
        self.tag_map_rules = self.add_filter_rules = None
        self.tag_map_rules_button.clicked.connect(self.change_tag_map_rules)
        self.add_filter_rules_button.clicked.connect(self.change_add_filter_rules)
        self.tabWidget.setCurrentIndex(0)
        self.actions_tab.layout().setFieldGrowthPolicy(QFormLayout.AllNonFixedFieldsGrow)

    def change_tag_map_rules(self):
        # Edit the tag-mapping rules applied to newly added books.
        from calibre.gui2.tag_mapper import RulesDialog
        d = RulesDialog(self)
        if gprefs.get('tag_map_on_add_rules'):
            d.rules = gprefs['tag_map_on_add_rules']
        if d.exec_() == d.Accepted:
            self.tag_map_rules = d.rules
            self.changed_signal.emit()

    def change_add_filter_rules(self):
        # Edit the filename-filter rules applied when adding books.
        from calibre.gui2.add_filters import RulesDialog
        d = RulesDialog(self)
        if gprefs.get('add_filter_rules'):
            d.rules = gprefs['add_filter_rules']
        if d.exec_() == d.Accepted:
            self.add_filter_rules = d.rules
            self.changed_signal.emit()

    def choose_aa_path(self):
        # Let the user pick the auto-add watched folder.
        path = choose_dir(self, 'auto add path choose',
                _('Choose a folder'))
        if path:
            self.opt_auto_add_path.setText(path)

    def initialize(self):
        ConfigWidgetBase.initialize(self)
        # Block signals while loading so initialization does not register as
        # a user change.
        self.filename_pattern.blockSignals(True)
        self.filename_pattern.initialize()
        self.filename_pattern.blockSignals(False)
        self.init_blocked_auto_formats()
        self.opt_automerge.setEnabled(self.opt_add_formats_to_existing.isChecked())
        self.tag_map_rules = self.add_filter_rules = None

    # Blocked auto formats {{{
    def blocked_auto_formats_changed(self, *args):
        # Emit changed only when the checked set actually differs from the
        # stored preference.
        fmts = self.current_blocked_auto_formats
        old = gprefs['blocked_auto_formats']
        if set(fmts) != set(old):
            self.changed_signal.emit()

    def init_blocked_auto_formats(self, defaults=False):
        # Populate the checkable list of formats excluded from auto-adding.
        if defaults:
            fmts = gprefs.defaults['blocked_auto_formats']
        else:
            fmts = gprefs['blocked_auto_formats']
        viewer = self.opt_blocked_auto_formats
        viewer.blockSignals(True)
        exts = set(AUTO_ADDED)
        viewer.clear()
        for ext in sorted(exts):
            viewer.addItem(ext)
            item = viewer.item(viewer.count()-1)
            item.setFlags(Qt.ItemIsEnabled|Qt.ItemIsUserCheckable)
            item.setCheckState(Qt.Checked if
                    ext in fmts else Qt.Unchecked)
        viewer.blockSignals(False)

    @property
    def current_blocked_auto_formats(self):
        # The formats currently checked in the UI list.
        fmts = []
        viewer = self.opt_blocked_auto_formats
        for i in range(viewer.count()):
            if viewer.item(i).checkState() == Qt.Checked:
                fmts.append(unicode(viewer.item(i).text()))
        return fmts
    # }}}

    def restore_defaults(self):
        ConfigWidgetBase.restore_defaults(self)
        self.filename_pattern.initialize(defaults=True)
        self.init_blocked_auto_formats(defaults=True)
        # Empty list (not None) forces commit() to clear the stored rules.
        self.tag_map_rules = []
        self.add_filter_rules = []

    def commit(self):
        # Validate the auto-add folder before saving; AbortCommit keeps the
        # preferences dialog open on error.
        path = unicode(self.opt_auto_add_path.text()).strip()
        if path != gprefs['auto_add_path']:
            if path:
                path = os.path.abspath(path)
                self.opt_auto_add_path.setText(path)
                if not os.path.isdir(path):
                    error_dialog(self, _('Invalid folder'),
                            _('You must specify an existing folder as your '
                                'auto-add folder. %s does not exist.')%path,
                            show=True)
                    raise AbortCommit('invalid auto-add folder')
                if not os.access(path, os.R_OK|os.W_OK):
                    error_dialog(self, _('Invalid folder'),
                            _('You do not have read/write permissions for '
                                'the folder: %s')%path, show=True)
                    raise AbortCommit('invalid auto-add folder')
                if os.path.basename(path)[0] in '._':
                    error_dialog(self, _('Invalid folder'),
                            _('Cannot use folders whose names start with a '
                                'period or underscore: %s')%os.path.basename(path), show=True)
                    raise AbortCommit('invalid auto-add folder')
                if not question_dialog(self, _('Are you sure?'),
                        _('<b>WARNING:</b> Any files you place in %s will be '
                            'automatically deleted after being added to '
                            'calibre. Are you sure?')%path):
                    return
        pattern = self.filename_pattern.commit()
        prefs['filename_pattern'] = pattern
        fmts = self.current_blocked_auto_formats
        old = gprefs['blocked_auto_formats']
        changed = set(fmts) != set(old)
        if changed:
            gprefs['blocked_auto_formats'] = self.current_blocked_auto_formats
        if self.tag_map_rules is not None:
            if self.tag_map_rules:
                gprefs['tag_map_on_add_rules'] = self.tag_map_rules
            else:
                gprefs.pop('tag_map_on_add_rules', None)
        if self.add_filter_rules is not None:
            if self.add_filter_rules:
                gprefs['add_filter_rules'] = self.add_filter_rules
            else:
                gprefs.pop('add_filter_rules', None)
        ret = ConfigWidgetBase.commit(self)
        return changed or ret

    def refresh_gui(self, gui):
        # Ensure worker process reads updated settings
        gui.spare_pool().shutdown()
        # Update rules used in the auto adder
        gui.auto_adder.read_rules()
# Manual test harness: show this preferences pane standalone.
if __name__ == '__main__':
    from calibre.gui2 import Application
    app = Application([])
    test_widget('Import/Export', 'Adding')
| jelly/calibre | src/calibre/gui2/preferences/adding.py | Python | gpl-3.0 | 8,073 |
"""Unit tests for stats.cache module."""
# -----------------------------------------------------------------------------
# Imports
# -----------------------------------------------------------------------------
from nose.tools import raises
import numpy as np
from klustaviewa.stats.indexed_matrix import IndexedMatrix, CacheMatrix
# -----------------------------------------------------------------------------
# Indexed matrix tests
# -----------------------------------------------------------------------------
def test_indexed_matrix_0():
    # Relative<->absolute index translation round-trips.
    indices = [2, 3, 5, 7]
    matrix = IndexedMatrix(indices=indices)
    assert np.array_equal(matrix.to_absolute([0, 3, 1]), [2, 7, 3])
    assert np.array_equal(matrix.to_absolute(2), 5)
    assert np.array_equal(matrix.to_relative([2, 7, 3]), [0, 3, 1])
    assert np.array_equal(matrix.to_relative(5), 2)

@raises(IndexError)
def test_indexed_matrix_1():
    # Accessing an index that is not registered must fail.
    indices = [2, 3, 5, 7]
    matrix = IndexedMatrix(indices=indices)
    # This should raise an IndexError.
    matrix[0, 0]

def test_indexed_matrix_2():
    # Fresh matrices are zero-filled for all indexing shapes.
    indices = [2, 3, 5, 7]
    matrix = IndexedMatrix(indices=indices)
    assert matrix[2, 2] == 0.
    assert np.array_equal(matrix[:, 2], np.zeros(4))
    assert np.array_equal(matrix[7, :], np.zeros(4))
    assert np.array_equal(matrix[[2, 5], :], np.zeros((2, 4)))
    assert np.array_equal(matrix[[2, 5, 3], [2]], np.zeros((3, 1)))
    assert np.array_equal(matrix[[2, 5, 3], 2], np.zeros(3))
    assert np.array_equal(matrix[[2], [2, 5, 3]], np.zeros((1, 3)))
    assert np.array_equal(matrix[2, [2, 5, 3]], np.zeros(3))
    assert np.array_equal(matrix[[5, 7], [3, 2]], np.zeros((2, 2)))

def test_indexed_matrix_3():
    # Adding a new index grows the matrix and keeps indices sorted.
    indices = [2, 3, 5, 7]
    matrix = IndexedMatrix(indices=indices)
    matrix.add_indices(4)
    assert matrix.shape == (5, 5)
    assert np.array_equal(matrix.indices, [2, 3, 4, 5, 7])

def test_indexed_matrix_4():
    # Adding an already-present index is a no-op.
    indices = [2, 3, 5, 7]
    matrix = IndexedMatrix(indices=indices)
    matrix.add_indices(7)
    assert np.array_equal(matrix.indices, indices)

def test_indexed_matrix_5():
    # Bulk add then remove adjusts both shape and index list.
    indices = [2, 3, 5, 7]
    matrix = IndexedMatrix(indices=indices)
    matrix.add_indices([6, 10])
    assert matrix.shape == (6, 6)
    assert np.array_equal(matrix.indices, [2, 3, 5, 6, 7, 10])
    matrix.remove_indices(7)
    assert matrix.shape == (5, 5)
    assert np.array_equal(matrix.indices, [2, 3, 5, 6, 10])

@raises(IndexError)
def test_indexed_matrix_6():
    indices = [2, 3, 5, 7]
    matrix = IndexedMatrix(indices=indices)
    matrix.add_indices([6, 10])
    # One of the indices does not exist, so this raises an Exception.
    matrix.remove_indices([5, 6, 9])

def test_indexed_matrix_7():
    # Assignment works with scalar, slice, and fancy index forms.
    indices = [2, 3, 5, 7]
    matrix = IndexedMatrix(indices=indices)
    matrix[2, 3] = 10
    assert np.all(matrix[2, 3] == 10)
    matrix[5, :] = 20
    assert np.all(matrix[5, :] == 20)
    matrix[:, 7] = 30
    assert np.all(matrix[:, 7] == 30)
    matrix[[2, 3], 5] = 40
    assert np.all(matrix[[2, 3], 5] == 40)
    matrix[[2, 3], [5, 7]] = 50
    assert np.all(matrix[[2, 3], [5, 7]] == 50)
def test_indexed_matrix_8():
    # A 3D matrix (extra trailing axis of length 10) supports the same
    # indexing forms; removal preserves stored values at other indices.
    indices = [2, 3, 5, 7]
    matrix = IndexedMatrix(indices=indices, shape=(4, 4, 10))
    x = np.random.rand(10)
    matrix[7, 7] = x
    assert np.array_equal(matrix[7, 7], x)
    assert np.array_equal(matrix[7, :][-1, :], x)
    assert np.array_equal(matrix[[2, 7], 7][-1, :], x)
    assert np.array_equal(matrix[[2, 5, 3], [2]], np.zeros((3, 1, 10)))
    assert np.array_equal(matrix[[2, 5, 3], 2], np.zeros((3, 10)))
    assert np.array_equal(matrix[[2], [2, 5, 3]], np.zeros((1, 3, 10)))
    assert np.array_equal(matrix[2, [2, 5, 3]], np.zeros((3, 10)))
    assert np.array_equal(matrix[[5, 7], [3, 2]], np.zeros((2, 2, 10)))
    matrix.remove_indices(5)
    assert matrix.to_array().shape == (3, 3, 10)
    assert np.array_equal(matrix[7, 7], x)

def test_indexed_matrix_9():
    # not_in_indices reports which of the requested indices are absent.
    matrix = IndexedMatrix()
    indices = [10, 20]
    matrix.add_indices(10)
    assert np.array_equal(matrix.not_in_indices(indices), [20])
    matrix[10, 10] = 1
    assert np.array_equal(matrix.not_in_indices(indices), [20])
    matrix.add_indices(20)
    assert np.array_equal(matrix.not_in_indices(indices), [])
    matrix[20, :] = 0
    matrix[:, 20] = 0
    assert np.array_equal(matrix.not_in_indices(indices), [])

def test_indexed_matrix_10():
    # submatrix extracts the rows/columns for a subset of indices,
    # carrying stored values along.
    indices = [2, 3, 5, 7]
    matrix = IndexedMatrix(indices=indices, shape=(4, 4, 10))
    matrix[3, 7] = np.ones(10)
    matrix[2, 5] = 2 * np.ones(10)
    submatrix = matrix.submatrix([3,7])
    assert submatrix.shape == (2, 2, 10)
    assert np.array_equal(submatrix.to_array()[0, 1, ...], np.ones(10))
    submatrix = matrix.submatrix([2,5])
    assert submatrix.shape == (2, 2, 10)
    assert np.array_equal(submatrix.to_array()[0, 1, ...], 2 * np.ones(10))
# -----------------------------------------------------------------------------
# Cache matrix tests
# -----------------------------------------------------------------------------
def test_cache_matrix_1():
    # Bulk update fills the cache; not_in_indices is then empty.
    indices = [2, 3, 5, 7]
    matrix = CacheMatrix(shape=(0, 0, 10))
    assert np.array_equal(matrix.not_in_indices(indices), indices)
    d = {(i, j): i + j for i in indices for j in indices}
    matrix.update(indices, d)
    matrix_actual = (np.array(indices).reshape((-1, 1)) +
        np.array(indices).reshape((1, -1)))
    assert np.array_equal(matrix.to_array()[:, :, 0], matrix_actual)
    assert np.array_equal(matrix.not_in_indices(indices), [])

def test_cache_matrix_2():
    # invalidate() marks the given indices as stale again.
    indices = [2, 3, 5, 7]
    matrix = CacheMatrix(shape=(0, 0, 10))
    d = {(i, j): i + j for i in indices for j in indices}
    matrix.update(indices, d)
    assert np.array_equal(matrix.not_in_indices(indices), [])
    matrix.invalidate([2, 5])
    assert np.array_equal(matrix.not_in_indices(indices), [2, 5])
def test_cache_matrix_3():
    # BUG FIX: this test was also named test_cache_matrix_2, so its
    # definition silently shadowed the test above and the earlier test
    # never ran.  Renamed so both tests execute.
    # Verifies not_in_key_indices tracking across partial updates and
    # invalidation.
    indices = [2, 3, 5, 7]
    matrix = CacheMatrix()
    assert np.array_equal(matrix.not_in_key_indices(indices), indices)
    matrix.update(2, {(2, 2): 0, (2, 3): 0, (3, 2): 0})
    assert np.array_equal(matrix.not_in_key_indices(indices), [3, 5, 7])
    matrix.update([2, 3], {(2, 2): 0, (2, 3): 0, (3, 2): 0, (3, 3): 0})
    assert np.array_equal(matrix.not_in_key_indices(indices), [5, 7])
    matrix.invalidate([2, 5])
    assert np.array_equal(matrix.not_in_key_indices(indices), [2, 5, 7])
    d = {(i, j): i + j for i in indices for j in indices}
    matrix.update(indices, d)
    assert np.array_equal(matrix.not_in_key_indices(indices), [])
| DavidTingley/ephys-processing-pipeline | installation/klustaviewa-0.3.0/klustaviewa/stats/tests/test_indexed_matrix.py | Python | gpl-3.0 | 7,015 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# filename: tops_sql.py
# Copyright 2008-2010 Stefano Costa <steko@iosa.it>
#
# This file is part of Total Open Station.
#
# Total Open Station is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Total Open Station is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Total Open Station. If not, see
# <http://www.gnu.org/licenses/>.
def to_sql(point, tablename):
    '''Generate SQL line corresponding to the input point.

    ``point`` is a (pid, x, y, z, text) sequence; ``tablename`` is the
    target table.  At this moment the column names are fixed, but they
    could change in the future.  The default names are reasonable.

    BUG FIX: single quotes in the free-text field are now doubled (the
    standard SQL escape), so text such as "point's base" no longer
    produces a broken statement.
    NOTE(review): values are still interpolated into the SQL text rather
    than bound as parameters, so this output is only appropriate for
    trusted survey data.'''
    params = {
        'wkt': to_wkt(point),
        'tablename': tablename,
        'pid': point[0],
        # Escape embedded single quotes by doubling them.
        'text': ('%s' % point[4]).replace("'", "''")}
    sql_string = "INSERT INTO %(tablename)s" % params
    sql_string += "(point_id, point_geom, point_text) VALUES"
    sql_string += "(%(pid)s,GeomFromText('%(wkt)s'),'%(text)s');\n" % params
    return sql_string
def to_wkt(point):
    '''Return the WKT (Well-Known Text) representation of a point.

    Only the x and y coordinates are used; the point id, elevation and
    text fields of the 5-tuple are ignored.'''
    _, x, y, _, _ = point
    return 'POINT(%s %s)' % (x, y)
class OutputFormat:
    """
    Exports points data in SQL format suitable for use with PostGIS & friends.

    http://postgis.refractions.net/documentation/manual-1.3/ch04.html#id2986280
    has an example of loading an SQL file into a PostgreSQL database.

    ``data`` should be an iterable (e.g. list) containing one iterable (e.g.
    tuple) for each point. The default order is PID, x, y, z, TEXT.

    This is consistent with our current standard.
    """

    def __init__(self, data, tablename='topsdata'):
        self.data = data
        self.tablename = tablename

    def process(self):
        # Wrap all INSERTs in a single transaction.
        statements = ['BEGIN;\n']
        statements.extend(to_sql(point, self.tablename) for point in self.data)
        statements.append('COMMIT;\n')
        return ''.join(statements)
if __name__ == "__main__":
    # BUG FIX: this demo referenced an undefined name ``TotalOpenSQL``,
    # so running the module directly raised NameError.  Use the class
    # actually defined in this module and print the generated SQL.
    demo = OutputFormat(
        [(1, 2, 3, 4, 'qwerty'),
         ("2.3", 42, 45, 12, 'asdfg')],
        'prova')
    print(demo.process())
| BackupTheBerlios/tops | totalopenstation/output/tops_sql.py | Python | gpl-3.0 | 2,442 |
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have purchased from
# Numenta, Inc. a separate commercial license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""Module containing the two gram OPF model implementation. """
import collections
import itertools
import logging
from nupic import encoders
from nupic.data import fieldmeta
from nupic.frameworks.opf import model
from nupic.frameworks.opf import opfutils
from opfutils import InferenceType
class TwoGramModel(model.Model):
  """Two-gram benchmark model."""

  def __init__(self, inferenceType=InferenceType.TemporalNextStep,
               encoderParams=()):
    """ Two-gram model constructor.

    inferenceType: An opfutils.InferenceType value that specifies what type of
        inference (i.e. TemporalNextStep, Classification, etc.)
    encoderParams: Sequence of encoder params dictionaries.
    """
    super(TwoGramModel, self).__init__(inferenceType)

    self._logger = opfutils.initLogger(self)
    self._reset = False
    # Maps bucket index -> last raw value seen for that bucket, so
    # predictions can be returned in the original value format.
    self._hashToValueDict = dict()
    self._learningEnabled = True
    self._encoder = encoders.MultiEncoder(encoderParams)
    self._fieldNames = self._encoder.getScalarNames()
    self._prevValues = [None] * len(self._fieldNames)
    # One two-gram transition table per field.
    self._twoGramDicts = [dict() for _ in xrange(len(self._fieldNames))]

  def run(self, inputRecord):
    """Run one iteration of this model.

    Args:
      inputRecord: A record object formatted according to
          nupic.data.FileSource.getNext() result format.

    Returns:
      A ModelResult named tuple (see opfutils.py). The contents of
      ModelResult.inferences depends on the specific inference type of this
      model, which can be queried by getInferenceType().
      TODO: Implement getInferenceType()?
    """
    results = super(TwoGramModel, self).run(inputRecord)

    # Set up the lists of values, defaults, and encoded values.
    values = [inputRecord[k] for k in self._fieldNames]
    defaults = ['' if type(v) == str else 0 for v in values]
    inputFieldEncodings = self._encoder.encodeEachField(inputRecord)
    inputBuckets = self._encoder.getBucketIndices(inputRecord)

    results.sensorInput = opfutils.SensorInput(
        dataRow=values, dataEncodings=inputFieldEncodings,
        sequenceReset=int(self._reset))

    # Keep track of the last value associated with each encoded value so that
    # predictions can be returned in the original value format.
    for value, bucket in itertools.izip(values, inputBuckets):
      self._hashToValueDict[bucket] = value

    # Update the two-gram dict if learning is enabled.
    for bucket, prevValue, twoGramDict in itertools.izip(
        inputBuckets, self._prevValues, self._twoGramDicts):
      if self._learningEnabled and not self._reset:
        if prevValue not in twoGramDict:
          twoGramDict[prevValue] = collections.defaultdict(int)
        twoGramDict[prevValue][bucket] += 1

    # Populate the results.inferences dict with the predictions and encoded
    # predictions.
    predictions = []
    encodedPredictions = []
    for bucket, twoGramDict, default, fieldName in (
        itertools.izip(inputBuckets, self._twoGramDicts, defaults,
                       self._fieldNames)):
      if bucket in twoGramDict:
        # Predict the most frequent successor of the current bucket.
        probabilities = twoGramDict[bucket].items()
        prediction = self._hashToValueDict[
            max(probabilities, key=lambda x: x[1])[0]]
        predictions.append(prediction)
        encodedPredictions.append(self._encoder.encodeField(fieldName,
                                                            prediction))
      else:
        predictions.append(default)
        encodedPredictions.append(self._encoder.encodeField(fieldName,
                                                            default))
    results.inferences = dict()
    results.inferences[opfutils.InferenceElement.prediction] = predictions
    results.inferences[opfutils.InferenceElement.encodings] = encodedPredictions

    self._prevValues = inputBuckets
    self._reset = False
    return results

  def finishLearning(self):
    """Places the model in a permanent "finished learning" mode.

    Once called, the model will not be able to learn from subsequent input
    records. Learning may not be resumed on a given instance of the model once
    this is called as the implementation may optimize itself by pruning data
    structures that are necessary for learning.
    """
    self._learningEnabled = False

  def setFieldStatistics(self, fieldStats):
    """
    This method is used for the data source to communicate to the
    model any statistics that it knows about the fields

    Since the two-gram has no use for this information, this is a no-op
    """
    pass

  def getFieldInfo(self):
    """Returns the metadata specifying the format of the model's output.

    The result may be different than the list of
    nupic.data.fieldmeta.FieldMetaInfo objects supplied at initialization due
    to the transcoding of some input fields into meta- fields, such as
    datetime -> dayOfWeek, timeOfDay, etc.
    """
    fieldTypes = self._encoder.getDecoderOutputFieldTypes()
    assert len(self._fieldNames) == len(fieldTypes)

    return tuple(fieldmeta.FieldMetaInfo(*args) for args in
                 itertools.izip(
                     self._fieldNames, fieldTypes,
                     itertools.repeat(fieldmeta.FieldMetaSpecial.none)))

  def getRuntimeStats(self):
    """Get the runtime statistics specific to the model.

    I.E. activeCellOverlapAvg

    Returns:
      A dict mapping statistic names to values.
    """
    # TODO: Add debugging stats.
    return dict()

  def _getLogger(self):
    """Get the logger created by this subclass.

    Returns:
      A logging.Logger object. Should not be None.
    """
    return self._logger

  def resetSequenceStates(self):
    """Called to indicate the start of a new sequence.

    The next call to run should not perform learning.
    """
    self._reset = True

  def __getstate__(self):
    # Pickle support: exclude the logger, which holds unpicklable handles.
    # BUG FIX: operate on a *copy* of __dict__; the previous code deleted
    # self._logger from the live instance, leaving it broken after pickling.
    state = self.__dict__.copy()
    del state['_logger']
    return state

  def __setstate__(self, state):
    # BUG FIX: __setstate__ must accept the state dict produced by
    # __getstate__; the previous signature took no argument, so unpickling
    # raised TypeError and the state was never restored.
    self.__dict__.update(state)
    self._logger = opfutils.initLogger(self)
| tkaitchuck/nupic | py/nupic/frameworks/opf/two_gram_model.py | Python | gpl-3.0 | 6,948 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2017 Joshua Charles Campbell
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from __future__ import print_function, division
from six import string_types
from runpy import run_path
from inspect import isclass, getmembers, isroutine
import logging
# Module-level logger plus short aliases used throughout this module.
logger = logging.getLogger(__name__)
error = logger.error
warn = logger.warn  # NOTE(review): Logger.warn is deprecated in favour of .warning
info = logger.info
debug = logger.debug
class Config(object):
    """Wraps a Python config file executed via runpy.run_path, exposing its
    top-level names as attributes and offering a JSON/REST-friendly view."""

    def __init__(self, file_path):
        # Execute the config file and keep the resulting namespace dict.
        self._config = run_path(file_path)

    def __getattr__(self, attr):
        # Raises KeyError (not AttributeError) for unknown names.
        return self._config[attr]

    def restify_class(self, o):
        """Recursively convert a class used as a config namespace into a
        plain dict of its non-dunder members; pass leaf values through."""
        if isclass(o):
            d = {}
            for k, v in getmembers(o):
                if '__' not in k:
                    d[k] = self.restify_class(v)
            return d
        else:
            # BUG FIX: the previous code asserted a tuple of isinstance()
            # results -- a non-empty tuple is always truthy, so the check
            # could never fail.  Assert the real condition instead: the
            # leaf must be a JSON-representable type.
            assert isinstance(
                o, (dict, float, list, int) + string_types
            ), o
            return o

    def restify(self):
        """Return the whole config as nested dicts, skipping dunder names."""
        d = {}
        for k, v in self._config.items():
            if '__' not in k:
                x = self.restify_class(v)
                d[k] = x
        return d
| orezpraw/partycrasher | partycrasher/config_loader.py | Python | gpl-3.0 | 1,971 |
#!/usr/bin/env python
# ***** BEGIN LICENSE BLOCK *****
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
# ***** END LICENSE BLOCK *****
import os
import re
from mozharness.mozilla.testing.errors import TinderBoxPrintRe
from mozharness.base.log import OutputParser, WARNING, INFO, CRITICAL
from mozharness.mozilla.buildbot import TBPL_WARNING, TBPL_FAILURE, TBPL_RETRY
from mozharness.mozilla.buildbot import TBPL_SUCCESS, TBPL_WORST_LEVEL_TUPLE
SUITE_CATEGORIES = ['mochitest', 'reftest', 'xpcshell']
def tbox_print_summary(pass_count, fail_count, known_fail_count=None,
                       crashed=False, leaked=False):
    """Render a TinderboxPrint pass/fail[/known-fail] summary string.

    Negative counts (harness never reported a number) or an all-zero run
    produce an emphasized T-FAIL.  ``leaked`` is tri-state: False means no
    leak check, a truthy value means a leak was found (LEAK), and any other
    falsy value (e.g. None) means the leak log was missing (L-FAIL).
    """
    emphasize = '<em class="testfail">%s</em>'

    counts = [pass_count, fail_count]
    if known_fail_count is not None:
        counts.append(known_fail_count)

    if any(c < 0 for c in counts):
        # A negative count means that number was never reported.
        summary = emphasize % 'T-FAIL'
    elif not any(counts):
        # Nothing ran at all.
        summary = emphasize % 'T-FAIL'
    else:
        fail_text = str(fail_count)
        if fail_count > 0:
            fail_text = emphasize % fail_text
        summary = '%d/%s' % (pass_count, fail_text)
        if known_fail_count is not None:
            summary += '/%d' % known_fail_count

    # Append the crash and leak statuses.
    if crashed:
        summary += ' ' + emphasize % 'CRASH'
    if leaked is not False:
        summary += ' ' + emphasize % ('LEAK' if leaked else 'L-FAIL')
    return summary
class TestSummaryOutputParserHelper(OutputParser):
    # Scans test output for "passed: N" / "failed: N" / "todo: N" lines and
    # renders a TinderboxPrint summary from the last values seen.

    def __init__(self, regex=re.compile(r'(passed|failed|todo): (\d+)'), **kwargs):
        # NOTE(review): the compiled-regex default is shared across
        # instances; safe here since it is only used read-only.
        self.regex = regex
        self.failed = 0
        self.passed = 0
        self.todo = 0
        self.last_line = None
        super(TestSummaryOutputParserHelper, self).__init__(**kwargs)

    def parse_single_line(self, line):
        super(TestSummaryOutputParserHelper, self).parse_single_line(line)
        self.last_line = line
        m = self.regex.search(line)
        if m:
            try:
                # Group 1 names the counter (passed/failed/todo); group 2
                # is its integer value.  setattr updates self.<counter>.
                setattr(self, m.group(1), int(m.group(2)))
            except ValueError:
                # ignore bad values
                pass

    def evaluate_parser(self):
        # generate the TinderboxPrint line for TBPL
        emphasize_fail_text = '<em class="testfail">%s</em>'

        failed = "0"
        if self.passed == 0 and self.failed == 0:
            # No counts parsed at all: treat the run as a harness failure.
            self.tsummary = emphasize_fail_text % "T-FAIL"
        else:
            if self.failed > 0:
                failed = emphasize_fail_text % str(self.failed)
            self.tsummary = "%d/%s/%d" % (self.passed, failed, self.todo)

    def print_summary(self, suite_name):
        # Emit the TinderboxPrint line for this suite.
        self.evaluate_parser()
        self.info("TinderboxPrint: %s: %s\n" % (suite_name, self.tsummary))
class DesktopUnittestOutputParser(OutputParser):
"""
A class that extends OutputParser such that it can parse the number of
passed/failed/todo tests from the output.
"""
    def __init__(self, suite_category, **kwargs):
        # worst_log_level defined already in DesktopUnittestOutputParser
        # but is here to make pylint happy
        self.worst_log_level = INFO
        super(DesktopUnittestOutputParser, self).__init__(**kwargs)
        # Per-suite summary regex plus the generic harness-error patterns.
        self.summary_suite_re = TinderBoxPrintRe.get('%s_summary' % suite_category, {})
        self.harness_error_re = TinderBoxPrintRe['harness_error']['minimum_regex']
        self.full_harness_error_re = TinderBoxPrintRe['harness_error']['full_regex']
        self.harness_retry_re = TinderBoxPrintRe['harness_error']['retry_regex']
        # -1 means "not reported yet" (distinct from a real count of 0).
        self.fail_count = -1
        self.pass_count = -1
        # known_fail_count does not exist for some suites
        self.known_fail_count = self.summary_suite_re.get('known_fail_group') and -1
        self.crashed, self.leaked = False, False
        self.tbpl_status = TBPL_SUCCESS
def parse_single_line(self, line):
if self.summary_suite_re:
summary_m = self.summary_suite_re['regex'].match(line) # pass/fail/todo
if summary_m:
message = ' %s' % line
log_level = INFO
# remove all the none values in groups() so this will work
# with all suites including mochitest browser-chrome
summary_match_list = [group for group in summary_m.groups()
if group is not None]
r = summary_match_list[0]
if self.summary_suite_re['pass_group'] in r:
if len(summary_match_list) > 1:
self.pass_count = int(summary_match_list[-1])
else:
# This handles suites that either pass or report
# number of failures. We need to set both
# pass and fail count in the pass case.
self.pass_count = 1
self.fail_count = 0
elif self.summary_suite_re['fail_group'] in r:
self.fail_count = int(summary_match_list[-1])
if self.fail_count > 0:
message += '\n One or more unittests failed.'
log_level = WARNING
# If self.summary_suite_re['known_fail_group'] == None,
# then r should not match it, # so this test is fine as is.
elif self.summary_suite_re['known_fail_group'] in r:
self.known_fail_count = int(summary_match_list[-1])
self.log(message, log_level)
return # skip harness check and base parse_single_line
harness_match = self.harness_error_re.match(line)
if harness_match:
self.warning(' %s' % line)
self.worst_log_level = self.worst_level(WARNING, self.worst_log_level)
self.tbpl_status = self.worst_level(TBPL_WARNING, self.tbpl_status,
levels=TBPL_WORST_LEVEL_TUPLE)
full_harness_match = self.full_harness_error_re.match(line)
if full_harness_match:
r = full_harness_match.group(1)
if r == "application crashed":
self.crashed = True
elif r == "missing output line for total leaks!":
self.leaked = None
else:
self.leaked = True
return # skip base parse_single_line
if self.harness_retry_re.search(line):
self.critical(' %s' % line)
self.worst_log_level = self.worst_level(CRITICAL, self.worst_log_level)
self.tbpl_status = self.worst_level(TBPL_RETRY, self.tbpl_status,
levels=TBPL_WORST_LEVEL_TUPLE)
return # skip base parse_single_line
super(DesktopUnittestOutputParser, self).parse_single_line(line)
def evaluate_parser(self, return_code, success_codes=None):
success_codes = success_codes or [0]
if self.num_errors: # mozharness ran into a script error
self.tbpl_status = self.worst_level(TBPL_FAILURE, self.tbpl_status,
levels=TBPL_WORST_LEVEL_TUPLE)
# I have to put this outside of parse_single_line because this checks not
# only if fail_count was more then 0 but also if fail_count is still -1
# (no fail summary line was found)
if self.fail_count != 0:
self.worst_log_level = self.worst_level(WARNING, self.worst_log_level)
self.tbpl_status = self.worst_level(TBPL_WARNING, self.tbpl_status,
levels=TBPL_WORST_LEVEL_TUPLE)
# Account for the possibility that no test summary was output.
if self.pass_count <= 0 and self.fail_count <= 0 and \
(self.known_fail_count is None or self.known_fail_count <= 0):
self.error('No tests run or test summary not found')
self.worst_log_level = self.worst_level(WARNING,
self.worst_log_level)
self.tbpl_status = self.worst_level(TBPL_WARNING,
self.tbpl_status,
levels=TBPL_WORST_LEVEL_TUPLE)
if return_code not in success_codes:
self.tbpl_status = self.worst_level(TBPL_FAILURE, self.tbpl_status,
levels=TBPL_WORST_LEVEL_TUPLE)
# we can trust in parser.worst_log_level in either case
return (self.tbpl_status, self.worst_log_level)
def append_tinderboxprint_line(self, suite_name):
# We are duplicating a condition (fail_count) from evaluate_parser and
# parse parse_single_line but at little cost since we are not parsing
# the log more then once. I figured this method should stay isolated as
# it is only here for tbpl highlighted summaries and is not part of
# buildbot evaluation or result status IIUC.
summary = tbox_print_summary(self.pass_count,
self.fail_count,
self.known_fail_count,
self.crashed,
self.leaked)
self.info("TinderboxPrint: %s<br/>%s\n" % (suite_name, summary))
class EmulatorMixin(object):
    """ Currently dependent on both TooltoolMixin and TestingMixin)"""
    def install_emulator_from_tooltool(self, manifest_path):
        # Fetch emulator.zip via tooltool into the work dir, then unpack it
        # into the emulator dir.
        dirs = self.query_abs_dirs()
        work_dir = dirs['abs_work_dir']
        if self.tooltool_fetch(manifest_path, output_dir=work_dir):
            self.fatal("Unable to download emulator via tooltool!")
        unzip_cmd = [self.query_exe("unzip"), '-q',
                     os.path.join(work_dir, "emulator.zip")]
        self.run_command(unzip_cmd, cwd=dirs['abs_emulator_dir'],
                         halt_on_failure=True, fatal_exit_code=3)
    def install_emulator(self):
        # Install precedence: direct URL, explicit tooltool manifest,
        # then a manifest derived from buildbot properties.
        dirs = self.query_abs_dirs()
        self.mkdir_p(dirs['abs_emulator_dir'])
        config = self.config
        if config.get('emulator_url'):
            self._download_unzip(config['emulator_url'], dirs['abs_emulator_dir'])
        elif config.get('emulator_manifest'):
            manifest_path = self.create_tooltool_manifest(config['emulator_manifest'])
            self.install_emulator_from_tooltool(manifest_path)
        elif self.buildbot_config:
            props = self.buildbot_config.get('properties')
            url = ('https://hg.mozilla.org/%s/raw-file/%s/b2g/test/emulator.manifest'
                   % (props['repo_path'], props['revision']))
            manifest_path = self.download_file(url,
                                               file_name='tooltool.tt',
                                               parent_dir=dirs['abs_work_dir'])
            if not manifest_path:
                self.fatal("Can't download emulator manifest from %s" % url)
            self.install_emulator_from_tooltool(manifest_path)
        else:
            self.fatal("Can't get emulator; set emulator_url or emulator_manifest in the config!")
| simar7/build-mozharness | mozharness/mozilla/testing/unittest.py | Python | mpl-2.0 | 11,491 |
#!/usr/bin/env python
import os
import sys
# Make the shared security-features generator importable: it lives in
# common/security-features/tools relative to the web-platform-tests root,
# three directories above this script.
sys.path.insert(
    0,
    os.path.join(
        os.path.dirname(os.path.abspath(__file__)), '..', '..', '..', 'common',
        'security-features', 'tools'))

import generate
class ReferrerPolicyConfig(object):
    """Configuration object consumed by the shared security-features
    generator (``generate.main``); attribute names form that contract."""

    def __init__(self):
        # The selection pattern enumerates all test dimensions; it doubles
        # as the generated file path (under gen/).
        pattern_parts = ('%(source_context_list)s.%(delivery_type)s',
                         '%(delivery_value)s',
                         '%(subresource)s',
                         '%(origin)s.%(redirection)s.%(source_scheme)s')
        self.selection_pattern = '/'.join(pattern_parts)
        self.test_file_path_pattern = 'gen/' + self.selection_pattern + '.html'
        self.test_description_template = 'Referrer Policy: Expects %(expectation)s for %(subresource)s to %(origin)s origin and %(redirection)s redirection from %(source_scheme)s context.'
        self.test_page_title_template = 'Referrer-Policy: %s'
        self.helper_js = '/referrer-policy/generic/test-case.sub.js'
        # For debug target only.
        self.sanity_checker_js = '/referrer-policy/generic/sanity-checker.js'
        self.spec_json_js = '/referrer-policy/spec_json.js'
        self.test_case_name = 'TestCase'
        here = os.path.dirname(os.path.abspath(__file__))
        # The spec files live two directories above this script.
        self.spec_directory = os.path.abspath(os.path.join(here, '..', '..'))
if __name__ == '__main__':
    # Delegate to the shared generator, parameterised with this suite's config.
    generate.main(ReferrerPolicyConfig())
| notriddle/servo | tests/wpt/web-platform-tests/referrer-policy/generic/tools/generate.py | Python | mpl-2.0 | 1,383 |
#!/usr/bin/env python
#
#
# Copyright (C) 2014 Sascha Rommelfangen, Raphael Vinot
# Copyright (C) 2014 CIRCL Computer Incident Response Center Luxembourg (SMILE gie)
#
from datetime import date
import json
import redis
try:
from urllib.parse import quote
except ImportError:
from urllib import quote
from pyfaup.faup import Faup
import socket
import dns.resolver
import re
import sys
import logging
from pypdns import PyPDNS
import bgpranking_web
import urlquery
from pypssl import PyPSSL
from pyeupi import PyEUPI
import requests
from bs4 import BeautifulSoup
try:
import sphinxapi
sphinx = True
except:
sphinx = False
enable_cache = True
r_cache = None
def _cache_init(host='localhost', port=6334, db=1):
    """Lazily create the module-level Redis client (no-op when caching
    is disabled or the client already exists)."""
    global r_cache
    if r_cache is None and enable_cache:
        r_cache = redis.Redis(host, port, db=db)
def _cache_set(key, value, field=None):
    """Store *value* (JSON-encoded) under *key* with a 1h TTL.

    With *field* the value goes into a hash field and the whole hash
    gets the TTL; otherwise a plain key is set.
    """
    _cache_init()
    if enable_cache:
        if field is None:
            # NOTE(review): argument order (name, value, time) matches the
            # legacy redis-py Redis class; StrictRedis expects
            # (name, time, value) — confirm the installed redis-py version.
            r_cache.setex(key, json.dumps(value), 3600)
        else:
            r_cache.hset(key, field, json.dumps(value))
            r_cache.expire(key, 3600)
def _cache_get(key, field=None):
    """Fetch and JSON-decode a cached value; return None on a miss or
    when caching is disabled."""
    _cache_init()
    if not enable_cache:
        return None
    raw = r_cache.get(key) if field is None else r_cache.hget(key, field)
    if raw is None:
        return None
    return json.loads(raw)
def to_bool(s):
    """
    Converts the given string to a boolean: '1', 'true', 'yes' and 'on'
    (case-insensitive) are True, everything else is False.
    """
    truthy = ('1', 'true', 'yes', 'on')
    return s.lower() in truthy
def get_submissions(url, day=None):
    """Return the submission counter (zset score) for *url* on *day*.

    :param url: URL whose counter is looked up.
    :param day: a ``datetime.date``; defaults to today.
    :returns: the stored score, or None when caching is disabled or the
        URL was never submitted that day.
    """
    _cache_init()
    if enable_cache:
        if day is None:
            day = date.today().isoformat()
        else:
            day = day.isoformat()
        # Bug fix: key off the requested day — the original always used
        # today's date, silently ignoring the ``day`` argument.
        key = day + '_submissions'
        return r_cache.zscore(key, url)
def get_mail_sent(url, day=None):
    """Return True if an abuse mail was already recorded for *url* on *day*.

    :param day: a ``datetime.date``; defaults to today.
    :returns: set membership, or None when caching is disabled.
    """
    _cache_init()
    if enable_cache:
        if day is None:
            day = date.today().isoformat()
        else:
            day = day.isoformat()
        # Bug fix: key off the requested day — the original always used
        # today's date, silently ignoring the ``day`` argument.
        key = day + '_mails'
        return r_cache.sismember(key, url)
def set_mail_sent(url, day=None):
    """Record that an abuse mail was sent for *url* on *day*.

    :param day: a ``datetime.date``; defaults to today.
    :returns: redis SADD result, or None when caching is disabled.
    """
    _cache_init()
    if enable_cache:
        if day is None:
            day = date.today().isoformat()
        else:
            day = day.isoformat()
        # Bug fix: key off the requested day — the original always used
        # today's date, silently ignoring the ``day`` argument.
        key = day + '_mails'
        return r_cache.sadd(key, url)
def is_valid_url(url):
    """Validate *url*, normalising defanged/schemeless input.

    Returns either a cached result or a (valid, normalised_url, reason)
    tuple; *reason* is None on success.
    """
    cached = _cache_get(url, 'valid')
    # Count this submission in today's stats — note this increments even
    # when the validity answer comes from the cache.
    key = date.today().isoformat() + '_submissions'
    r_cache.zincrby(key, url)
    if cached is not None:
        return cached
    fex = Faup()
    # Undo the common 'hxxp' defanging, and default to http:// when no
    # scheme was given.
    if url.startswith('hxxp'):
        url = 'http' + url[4:]
    elif not url.startswith('http'):
        url = 'http://' + url
    logging.debug("Checking validity of URL: " + url)
    fex.decode(url)
    scheme = fex.get_scheme()
    host = fex.get_host()
    if scheme is None or host is None:
        # Invalid URLs are deliberately not cached.
        reason = "Not a valid http/https URL/URI"
        return False, url, reason
    _cache_set(url, (True, url, None), 'valid')
    return True, url, None
def is_ip(host):
    """Return True when *host* is a literal IPv4 or IPv6 address.

    A colon can only occur in an IPv6 literal, so it selects the parser.
    """
    if ':' in host:
        try:
            socket.inet_pton(socket.AF_INET6, host)
            return True
        except socket.error:
            # Not a valid IPv6 literal. (The original bare ``except:``
            # also swallowed unrelated errors such as KeyboardInterrupt.)
            pass
    else:
        try:
            socket.inet_aton(host)
            return True
        except socket.error:
            # Not a valid IPv4 literal.
            pass
    return False
def try_resolve(fex, url):
    """Check that the host of *url* resolves (A record) or is a literal IP.

    :param fex: a Faup instance (reused by the caller).
    :returns: (ok, reason) where reason is None on success.
    """
    fex.decode(url)
    host = fex.get_host().lower()
    if is_ip(host):
        # Literal addresses need no DNS lookup.
        return True, None
    try:
        ipaddr = dns.resolver.query(host, 'A')
    except Exception:
        # Covers NXDOMAIN, timeouts and resolver misconfiguration alike.
        reason = "DNS server problem. Check resolver settings."
        return False, reason
    if not ipaddr:
        reason = "Host " + host + " does not exist."
        return False, reason
    return True, None
def get_urls(url, depth=1):
    """Generator yielding the full redirect chain of *url* in order,
    following both HTTP redirects and <meta http-equiv=refresh> redirects
    (recursively, up to 5 levels deep)."""
    if depth > 5:
        print('Too many redirects.')
        return
    fex = Faup()
    def meta_redirect(content):
        # Extract the target of a <meta http-equiv="refresh"> tag,
        # e.g. content="5; url=http://..." — or None if absent.
        c = content.lower()
        soup = BeautifulSoup(c, "html.parser")
        for result in soup.find_all(attrs={'http-equiv': 'refresh'}):
            if result:
                out = result["content"].split(";")
                if len(out) == 2:
                    wait, text = out
                    a, url = text.split('=', 1)
                    return url.strip()
        return None
    resolve, reason = try_resolve(fex, url)
    if not resolve:
        # FIXME: inform that the domain does not resolve
        yield url
        return
    logging.debug("Making HTTP connection to " + url)
    headers = {'User-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:8.0) Gecko/20100101 Firefox/8.0'}
    try:
        # verify=False on purpose: abuse URLs often have broken TLS.
        response = requests.get(url, allow_redirects=True, headers=headers,
                                timeout=15, verify=False)
    except:
        # That one can fail (DNS for example)
        # FIXME: inform that the get failed
        yield url
        return
    if response.history is not None:
        for h in response.history:
            # Yeld the urls in the order we find them
            yield h.url
    yield response.url
    meta_redir_url = meta_redirect(response.content)
    if meta_redir_url is not None:
        depth += 1
        if not meta_redir_url.startswith('http'):
            # Relative or path-absolute target: rebuild an absolute URL
            # from the current scheme/host(/path).
            fex.decode(url)
            base = '{}://{}'.format(fex.get_scheme(), fex.get_host())
            port = fex.get_port()
            if port is not None:
                base += ':{}'.format(port)
            if not meta_redir_url.startswith('/'):
                # relative redirect. resource_path has the initial '/'
                if fex.get_resource_path() is not None:
                    base += fex.get_resource_path()
                if not base.endswith('/'):
                    base += '/'
            meta_redir_url = base + meta_redir_url
        for url in get_urls(meta_redir_url, depth):
            yield url
def url_list(url):
    """Return the de-duplicated redirect chain of *url* (cached 1h)."""
    cached = _cache_get(url, 'list')
    if cached is not None:
        return cached
    chain = []
    for redirect in get_urls(url):
        # Preserve first-seen order, dropping duplicates and Nones.
        if redirect is None or redirect in chain:
            continue
        chain.append(redirect)
    _cache_set(url, chain, 'list')
    return chain
def dns_resolve(url):
    """Resolve the host of *url* and return (ipv4_list, ipv6_list);
    either list may be None when no address of that family exists."""
    cached = _cache_get(url, 'dns')
    if cached is not None:
        return cached
    fex = Faup()
    fex.decode(url)
    host = fex.get_host().lower()
    ipv4 = None
    ipv6 = None
    if is_ip(host):
        # Literal address: classify by family without a DNS lookup.
        if ':' in host:
            try:
                socket.inet_pton(socket.AF_INET6, host)
                ipv6 = [host]
            except:
                pass
        else:
            try:
                socket.inet_aton(host)
                ipv4 = [host]
            except:
                pass
    else:
        try:
            ipv4 = [str(ip) for ip in dns.resolver.query(host, 'A')]
        except:
            logging.debug("No IPv4 address assigned to: " + host)
        try:
            ipv6 = [str(ip) for ip in dns.resolver.query(host, 'AAAA')]
        except:
            logging.debug("No IPv6 address assigned to: " + host)
    _cache_set(url, (ipv4, ipv6), 'dns')
    return ipv4, ipv6
def phish_query(url, key, query):
    """Ask Phishtank whether *query* is a known phish.

    Returns the phish detail-page URL when listed, otherwise None
    (including on API errors)."""
    cached = _cache_get(query, 'phishtank')
    if cached is not None:
        return cached
    payload = {'url': quote(query), 'format': 'json', 'app_key': key}
    res = requests.post(url, data=payload).json()
    status = res["meta"]["status"]
    if status == "success" and res["results"]["in_database"]:
        detail_page = res["results"]["phish_detail_page"]
        _cache_set(query, detail_page, 'phishtank')
        return detail_page
    # Either not in the database or an API error: nothing to report.
    return None
def sphinxsearch(server, port, url, query):
    """Search the (optional) Sphinx ticket index for *query*.

    :returns: a list of ticket URLs, or None when the sphinx client is
        unavailable.
    """
    if not sphinx:
        return None
    cached = _cache_get(query, 'sphinx')
    if cached is not None:
        return cached
    client = sphinxapi.SphinxClient()
    client.SetServer(server, port)
    client.SetMatchMode(2)
    client.SetConnectTimeout(5.0)
    result = []
    res = client.Query(query)
    # Bug fix: Query() returns a falsy value on connection failure; the
    # original then crashed with AttributeError on res.get().
    if res and res.get("matches") is not None:
        for ticket in res["matches"]:
            ticket_id = ticket["id"]
            ticket_link = url + str(ticket_id)
            result.append(ticket_link)
    _cache_set(query, result, 'sphinx')
    return result
def vt_query_url(url, url_up, key, query, upload=True):
    """Query VirusTotal for *query*, optionally submitting it for a scan.

    Returns (verbose_msg, permalink, positives, total) or None when the
    API gave an empty response. Only complete reports are cached.
    """
    cached = _cache_get(query, 'vt')
    if cached is not None:
        return cached
    payload = {"resource": query, "apikey": key}
    if upload:
        payload['scan'] = 1
    response = requests.post(url, data=payload)
    if not response.text:
        return None
    res = response.json()
    msg = res["verbose_msg"]
    link = res.get("permalink")
    positives = res.get("positives")
    total = res.get("total")
    if positives is not None:
        # A report exists (not just "queued for analysis"): cache it.
        _cache_set(query, (msg, link, positives, total), 'vt')
    return msg, link, positives, total
def gsb_query(url, query):
    """Look *query* up in Google Safe Browsing; return the verdict text
    on a 200 response, None otherwise (e.g. 204 means "no match")."""
    cached = _cache_get(query, 'gsb')
    if cached is not None:
        return cached
    body = '1\n' + query
    response = requests.post(url, data=body)
    if response.status_code == 200:
        _cache_set(query, response.text, 'gsb')
        return response.text
def urlquery_query(url, key, query):
    """Query the urlquery service for *query* and return the summed alert
    count across all reports, or None when unavailable/no data."""
    cached = _cache_get(query, 'urlquery')
    if cached is not None:
        return cached
    try:
        urlquery.url = url
        urlquery.key = key
        response = urlquery.search(query)
    except Exception:
        # Best-effort external service: any failure means "no data".
        # (Narrowed from the original bare ``except:``.)
        return None
    if response['_response_']['status'] == 'ok' and \
            response.get('reports') is not None:
        total_alert_count = 0
        for report in response['reports']:
            total_alert_count += report['urlquery_alert_count']
            total_alert_count += report['ids_alert_count']
            total_alert_count += report['blacklist_alert_count']
        _cache_set(query, total_alert_count, 'urlquery')
        return total_alert_count
    return None
def process_emails(emails, ignorelist, replacelist):
    """De-duplicate *emails*, drop those matching any *ignorelist* regex,
    and apply *replacelist* (regex key -> replacement address list).

    :returns: the filtered list (order is unspecified, as in the original).
    """
    to_return = list(set(emails))
    # Bug fix: iterate over a snapshot. The original looped over
    # reversed(to_return) while removing from to_return, which can skip
    # entries when the list shifts under the iterator.
    for mail in list(to_return):
        for ignorelist_entry in ignorelist:
            if re.search(ignorelist_entry, mail, re.I):
                if mail in to_return:
                    to_return.remove(mail)
        for k, v in list(replacelist.items()):
            if re.search(k, mail, re.I):
                # NOTE(review): this removes the *pattern* k, not the
                # matched mail — correct only when keys are literal
                # addresses; confirm against the config.
                if k in to_return:
                    to_return.remove(k)
                to_return += v
    return to_return
def whois(server, port, domain, ignorelist, replacelist):
    """Query a whois server for *domain* and return the filtered list of
    contact e-mail addresses (or None when none found). Results cached 1h.

    Exits the process when the whois server is unreachable.
    """
    cached = _cache_get(domain, 'whois')
    if cached is not None:
        return cached
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.settimeout(15)
    try:
        s.connect((server, port))
    except Exception:
        print("Connection problems - check WHOIS server")
        print(("WHOIS request while problem occurred: ", domain))
        print(("WHOIS server: {}:{}".format(server, port)))
        sys.exit(0)
    # Whois servers take a bare domain: strip URL decoration if present.
    if domain.startswith('http'):
        fex = Faup()
        fex.decode(domain)
        d = fex.get_domain().lower()
    else:
        d = domain
    # NOTE(review): send()/recv() are used with str and compared against
    # '' — this works on Python 2 only; Python 3 requires bytes and
    # encode/decode. Confirm the target interpreter.
    s.send(d + "\r\n")
    response = ''
    while True:
        d = s.recv(4096)
        response += d
        if d == '':
            break
    s.close()
    # Extract anything that looks like an e-mail address, then apply the
    # ignore/replace rules.
    match = re.findall(r'[\w\.-]+@[\w\.-]+', response)
    emails = process_emails(match, ignorelist, replacelist)
    if len(emails) == 0:
        return None
    list_mail = list(set(emails))
    _cache_set(domain, list_mail, 'whois')
    return list_mail
def pdnscircl(url, user, passwd, q):
    """Query CIRCL passive DNS for *q*.

    Returns (total_unique_rrnames, first_five_rrnames), oldest-first
    after reversing the service's ordering. Cached for 1h."""
    cached = _cache_get(q, 'pdns')
    if cached is not None:
        return cached
    client = PyPDNS(url, basic_auth=(user, passwd))
    seen = []
    for entry in reversed(client.query(q)):
        rrname = entry['rrname'].lower()
        if rrname not in seen:
            seen.append(rrname)
    result = (len(seen), seen[:5])
    _cache_set(q, result, 'pdns')
    return result
def psslcircl(url, user, passwd, q):
    """Query CIRCL passive SSL for *q*.

    Returns {sha1: [subject values]} for every certificate seen, or None
    when the service has no data. Cached for 1h."""
    cached = _cache_get(q, 'pssl')
    if cached is not None:
        return cached
    pssl = PyPSSL(url, basic_auth=(user, passwd))
    certinfo = pssl.query(q).get(q)
    if certinfo is None:
        return None
    entries = {}
    for sha1 in certinfo['certificates']:
        values = []
        subject = certinfo['subjects'].get(sha1)
        if subject:
            for value in subject['values']:
                values.append(value)
        entries[sha1] = values
    _cache_set(q, entries, 'pssl')
    return entries
def eupi(url, key, q):
    """Look *q* up in the EU Phishing Initiative database.

    Returns the tag label when known; otherwise submits the URL for
    analysis and returns None. Cached for 1h."""
    cached = _cache_get(q, 'eupi')
    if cached is not None:
        return cached
    client = PyEUPI(key, url)
    lookup = client.search_url(q)
    results = lookup.get('results')
    if results:
        tag = results[0]['tag_label']
        _cache_set(q, tag, 'eupi')
        return tag
    # Unknown URL: submit it so it gets analysed, report nothing yet.
    client.post_submission(q)
    return None
def bgpranking(ip):
    """Look *ip* up in BGP Ranking.

    Returns (ptr_record, asn_description, asn, position, total, rank);
    all but the PTR record are None when no history exists. Cached 1h."""
    cached = _cache_get(ip, 'bgp')
    if cached is not None:
        return cached
    details = bgpranking_web.ip_lookup(ip, 7)
    ptrr = details.get('ptrrecord')
    history = details.get('history')
    if not history:
        return ptrr, None, None, None, None, None
    asn = history[0].get('asn')
    rank_info = bgpranking_web.cached_daily_rank(asn)
    position, total = bgpranking_web.cached_position(asn)
    response = (ptrr, rank_info[1], asn, int(position), int(total),
                float(rank_info[-1]))
    _cache_set(ip, response, 'bgp')
    return response
def _deserialize_cached(entry):
    """Return the cached hash stored under *entry* with every field
    JSON-decoded."""
    return dict(
        (field, json.loads(raw))
        for field, raw in r_cache.hgetall(entry).items()
    )
def get_url_data(url):
    """Gather everything cached for *url*, plus the cached data of every
    IP it resolved to; returns {url: merged_data}."""
    data = _deserialize_cached(url)
    dns_entry = data.get('dns')
    if dns_entry is not None:
        ipv4, ipv6 = dns_entry
        ip_data = {}
        for addresses in (ipv4, ipv6):
            if addresses is None:
                continue
            for ip in addresses:
                ip_data[ip] = _deserialize_cached(ip)
        if ip_data:
            data.update(ip_data)
    return {url: data}
def cached(url):
    """Return the cached data for *url* and for every URL in its recorded
    redirect chain; falls back to [url] when caching is disabled."""
    _cache_init()
    if not enable_cache:
        return [url]
    url_data = get_url_data(url)
    to_return = [url_data]
    redirect_chain = url_data[url].get('list')
    if redirect_chain is not None:
        for redirect in redirect_chain:
            if redirect == url:
                continue
            to_return.append(get_url_data(redirect))
    return to_return
| xujun10110/url-abuse | url_abuse_async.py | Python | agpl-3.0 | 14,884 |
# -*- coding: utf-8 -*-
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
from django.db import models
from django.db.models import Q
from django.contrib.auth.models import (
Permission,
User
)
class GymManager(models.Manager):
    '''
    Custom query manager for Gyms
    '''

    @staticmethod
    def _admin_filter():
        '''
        Build the Q filter matching users holding any gym administration
        permission (gym manager, general manager or trainer).

        Shared by get_members/get_admins so the permission list is
        defined in exactly one place.
        '''
        perm_gym = Permission.objects.get(codename='manage_gym')
        perm_gyms = Permission.objects.get(codename='manage_gyms')
        perm_trainer = Permission.objects.get(codename='gym_trainer')
        return (Q(groups__permissions=perm_gym) |
                Q(groups__permissions=perm_gyms) |
                Q(groups__permissions=perm_trainer))

    def get_members(self, gym_pk):
        '''
        Returns all members for this gym (i.e non-admin ones)
        '''
        users = User.objects.filter(userprofile__gym_id=gym_pk)
        return users.exclude(self._admin_filter()).distinct()

    def get_admins(self, gym_pk):
        '''
        Returns all admins for this gym (i.e trainers, managers, etc.)
        '''
        users = User.objects.filter(userprofile__gym_id=gym_pk)
        return users.filter(self._admin_filter()).distinct()
| DeveloperMal/wger | wger/gym/managers.py | Python | agpl-3.0 | 2,069 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import division
import logging
import numpy
from pandas import merge, concat, DataFrame
from openfisca_france_data import default_config_files_directory as config_files_directory
from openfisca_france_data.input_data_builders.build_openfisca_survey_data.base import year_specific_by_generic_data_frame_name
from openfisca_france_data.model.common import mark_weighted_percentiles as mwp
from openfisca_survey_manager.survey_collections import SurveyCollection
from openfisca_france_data.utils import simulation_results_as_data_frame
from openfisca_france_data.erf import get_erf2of, get_of2erf
from openfisca_plugin_aggregates.aggregates import Aggregates
from openfisca_parsers import input_variables_extractors
log = logging.getLogger(__name__)
def clean(parameter):
    """Strip a trailing '_holder' suffix from an OpenFisca parameter name."""
    suffix = '_holder'
    if parameter.endswith(suffix):
        return parameter[:-len(suffix)]
    return parameter
class Debugger(object):
    def __init__(self):
        """Initialise an empty debugger; the attributes below are filled
        in later by the set_* and build_* methods."""
        super(Debugger, self).__init__()
        # ERF/EEC survey data frames (set by build_erf_data_frames).
        self.erf_menage = None
        self.erf_eec_indivi = None
        # OpenFisca simulation data frames (set by build_openfisca_data_frames).
        self.of_menages_data_frame = None
        self.of_individus_data_frame = None
        # Variable under scrutiny (set by set_variable).
        self.variable = None
        # Survey scenario providing the simulation (set by set_survey_scenario).
        self.survey_scenario = None
    def set_survey_scenario(self, survey_scenario = None):
        """Attach a survey scenario (with a ready simulation) and cache
        its column registry and simulation for later use."""
        assert survey_scenario is not None
        self.survey_scenario = survey_scenario
        # Shortcut to the tax-benefit system's column registry.
        self.column_by_name = self.survey_scenario.simulation.tax_benefit_system.column_by_name
        self.simulation = self.survey_scenario.simulation
        assert survey_scenario.simulation is not None, "The simulation attibute of survey_scenario is None"
def set_variable(self, variable):
if isinstance(variable, list):
self.variable = variable[0]
else:
self.variable = variable
    def show_aggregates(self):
        """Print the OpenFisca aggregate for the selected variable next to
        the ERF aggregate, for a quick side-by-side comparison."""
        from openfisca_france_data.erf.aggregates import build_erf_aggregates
        assert self.survey_scenario is not None, 'simulation attribute is None'
        assert self.variable is not None, 'variable attribute is None'
        variable = self.variable
        openfisca_aggregates = Aggregates()
        openfisca_aggregates.set_survey_scenario(self.survey_scenario)
        openfisca_aggregates.compute()
        column_by_name = self.column_by_name
        # ERF-side aggregate for the same variable and year.
        temp = (build_erf_aggregates(variables=[variable], year= self.survey_scenario.year))
        # Aggregates are matched by the variable's human-readable label.
        selection = openfisca_aggregates.aggr_frame["Mesure"] == column_by_name[variable].label
        print openfisca_aggregates.aggr_frame[selection]
        print temp
        # TODO: clean this
        return
def extract(self, data_frame, entities = "men"):
column_by_name = self.column_by_name
filtered_data_frame_columns = list(set(column_by_name.keys()).intersection(set(data_frame.columns)))
extracted_columns = [column_name for column_name in filtered_data_frame_columns
if column_by_name[column_name].entity in entities]
extracted_columns = list(set(extracted_columns).union(set(['idmen'])))
return data_frame[extracted_columns].copy()
def get_all_parameters(self, column_list):
global x
print [column.name for column in column_list]
x = x + 1
if x == 20:
boum
column_by_name = self.column_by_name
tax_benefit_system = self.survey_scenario.simulation.tax_benefit_system
extractor = input_variables_extractors.setup(tax_benefit_system)
if len(column_list) == 0:
return []
else:
column_name = column_list[0].name
print column_name
if extractor.get_input_variables(column_by_name[column_name]) is None:
return column_list
else:
first_column = [column_list[0]]
input_columns = self.get_all_parameters([
column_by_name[clean(parameter)]
for parameter in list(extractor.get_input_variables(column_by_name[column_name]))
])
other_columns = list(
set(self.get_all_parameters(column_list[1:])) - set(first_column + input_columns)
)
print 'input_variables: ', [column.name for column in input_columns]
print 'new_variables: ', [column.name for column in other_columns]
new_column_list = first_column + input_columns + other_columns
print 'final list: ', [column.name for column in new_column_list]
return new_column_list
    def build_columns_to_fetch(self):
        """Set self.variable_parameters and self.columns_to_fetch for the
        selected variable.

        The recursive parameter/consumer expansion is currently disabled
        (kept below as commented-out code); only the variable itself is
        fetched.
        """
        column_by_name = self.column_by_name
        # parameters_column = self.get_all_parameters([column_by_name.get(x) for x in [self.variable]])
        # parameters = [x.name for x in parameters_column]
        parameters = [self.variable]
        # We want to get all parameters and consumers that we're going to encounter
        # consumers = []
        # for variable in [self.variable]:
        #     column = column_by_name.get(variable)
        #     consumers = list(set(consumers).union(set(column.consumers)))
        # column_names = list(set(parameters).union(set(consumers)))
        # self.columns_to_fetch = column_names
        # self.variable_consumers = list(set(consumers))
        self.variable_parameters = list(set(parameters))
        self.columns_to_fetch = list(set(parameters))
    def build_openfisca_data_frames(self):
        """Build self.data_frame_by_entity_key_plural from the simulation,
        projecting individual-level data onto households and re-keying
        households by their original (survey) identifier."""
        column_names = self.columns_to_fetch
        for column in column_names:
            assert column in survey_scenario.tax_benefit_system.column_by_name.keys()
        # NOTE(review): uses the module-level ``survey_scenario`` above but
        # ``self.survey_scenario`` below — presumably the same object; confirm.
        data_frame_by_entity_key_plural = survey_scenario.create_data_frame_by_entity_key_plural(
            variables = column_names + ['idmen_original'],
            indices = True,
            roles = True,
            )
        self.data_frame_by_entity_key_plural = data_frame_by_entity_key_plural
        projected = self.project_on(data_frame_by_entity_key_plural = data_frame_by_entity_key_plural)
        # Mappings between OpenFisca household indices and the original
        # survey household identifiers (both directions).
        idmen_original_by_idmen = dict(
            zip(
                data_frame_by_entity_key_plural['menages'].index.values,
                data_frame_by_entity_key_plural['menages']["idmen_original"].values
                )
            )
        self.idmen_original_by_idmen = idmen_original_by_idmen
        idmen_by_idmen_original = dict(
            zip(
                data_frame_by_entity_key_plural['menages']["idmen_original"].values,
                data_frame_by_entity_key_plural['menages'].index.values,
                )
            )
        self.idmen_by_idmen_original = idmen_by_idmen_original
        # Re-key households (and the individuals' household pointers) by
        # the original survey identifier so they can be merged with ERF.
        data_frame_by_entity_key_plural['menages'] = projected.rename(
            columns = {"idmen_original": "idmen"})
        data_frame_by_entity_key_plural['individus'].replace(
            {'idmen': idmen_original_by_idmen}, inplace = True)
        self.data_frame_by_entity_key_plural = data_frame_by_entity_key_plural
    def project_on(self, receiving_entity_key_plural = 'menages', data_frame_by_entity_key_plural = None):
        """Project data from all other entities onto *receiving_entity_key_plural*.

        Values from intermediate entities (e.g. families, tax units) are
        first attributed to their head person, then summed per receiving
        entity and concatenated with the receiving entity's own columns.
        """
        tax_benefit_system = self.survey_scenario.tax_benefit_system
        assert data_frame_by_entity_key_plural is not None
        assert receiving_entity_key_plural is not tax_benefit_system.person_key_plural
        entity_data_frame = data_frame_by_entity_key_plural[receiving_entity_key_plural]
        person_data_frame = data_frame_by_entity_key_plural[tax_benefit_system.person_key_plural]
        # Every entity except persons and the receiving one.
        entity_keys_plural = list(
            set(tax_benefit_system.entity_class_by_key_plural.keys()).difference(set(
                [tax_benefit_system.person_key_plural, receiving_entity_key_plural]
                ))
            )
        for entity_key_plural in entity_keys_plural:
            entity = tax_benefit_system.entity_class_by_key_plural[entity_key_plural]
            # Getting only heads of other entities prenent in the projected on entity
            boolean_index = person_data_frame[entity.role_for_person_variable_name] == 0  # Heads
            index_entity = person_data_frame.loc[boolean_index, entity.index_for_person_variable_name].values  # Ent.
            # Copy each entity-level value onto its head person.
            for column_name, column_series in self.data_frame_by_entity_key_plural[entity_key_plural].iteritems():
                person_data_frame.loc[boolean_index, column_name] \
                    = column_series.iloc[index_entity].values
                person_data_frame[column_name].fillna(0)
        receiving_entity = tax_benefit_system.entity_class_by_key_plural[receiving_entity_key_plural]
        # Sum person-level values per receiving entity; the role column is
        # meaningless after aggregation and is dropped.
        grouped_data_frame = person_data_frame.groupby(by = receiving_entity.index_for_person_variable_name).agg(sum)
        grouped_data_frame.drop(receiving_entity.role_for_person_variable_name, axis = 1, inplace = True)
        data_frame = concat([entity_data_frame, grouped_data_frame], axis = 1)
        assert data_frame.notnull().all().all()
        return data_frame
    def build_erf_data_frames(self):
        """Load the ERF/EEC survey tables containing the variables to debug
        and store them in self.erf_data_frame_by_entity_key_plural
        (household and individual levels), with columns renamed to their
        OpenFisca names."""
        # TODO: remove this
        self.columns_to_fetch = ['af']
        variables = self.columns_to_fetch
        erf_survey_collection = SurveyCollection.load(
            collection = "erfs", config_files_directory = config_files_directory)
        # NOTE(review): ``year`` is not defined in this method — presumably a
        # module-level global set by the caller; confirm.
        erf_survey = erf_survey_collection.get_survey("erfs_{}".format(year))
        year_specific_by_generic = year_specific_by_generic_data_frame_name(year)
        generic_by_year_specific = dict(zip(year_specific_by_generic.values(), year_specific_by_generic.keys()))
        # Identifier/weight columns are always needed for merging.
        erf_variables = list(set(variables + ["ident", "wprm", "quelfic", "noi"]))
        of2erf = get_of2erf()
        # Translate OpenFisca variable names to their ERF counterparts.
        for index, variable in enumerate(erf_variables):
            if variable in of2erf:
                erf_variables[index] = of2erf[variable]
        data_frame_by_table = dict(eec_indivi = None, erf_indivi = None, erf_menage = None)
        erf_variables_by_generic_table = dict(eec_indivi = [], erf_indivi = [], erf_menage = [])
        # For each variable, the set of year-specific tables providing it.
        year_specific_tables_by_erf_variable = dict(
            [
                (
                    erf_variable,
                    set(
                        erf_survey.find_tables(variable = erf_variable)
                        ).intersection(
                        set([year_specific_by_generic[key] for key in erf_variables_by_generic_table.keys()])
                        )
                    ) for erf_variable in erf_variables
                ]
            )
        for variable, year_specific_tables in year_specific_tables_by_erf_variable.iteritems():
            if len(year_specific_tables) < 1:
                log.info("No tables are present for variable {}".format(variable))
                continue
            else:
                log.info("Variable {} is present in multiple tables : {}".format(variable, year_specific_tables))
                for table in year_specific_tables:
                    log.info("Variable {} is retrieved from table {}".format(variable, table))
                    erf_variables_by_generic_table[generic_by_year_specific[table]].append(variable)
        erf2of = get_erf2of()
        # Load each table's variables and rename columns back to OpenFisca
        # conventions ('ident' becomes 'idmen').
        for table, erf_variables in erf_variables_by_generic_table.iteritems():
            if erf_variables:
                data_frame_by_table[table] = erf_survey.get_values(
                    variables = erf_variables, table = year_specific_by_generic[table]
                    )
                data_frame_by_table[table].rename(columns = erf2of, inplace = True)
                data_frame_by_table[table].rename(columns = {'ident': 'idmen'}, inplace = True)
        assert not data_frame_by_table["erf_menage"].duplicated().any(), "Duplicated idmen in erf_menage"
        self.erf_data_frame_by_entity_key_plural = dict(
            menages = data_frame_by_table["erf_menage"],
            individus = data_frame_by_table["erf_indivi"].merge(data_frame_by_table["eec_indivi"])
            )
        # TODO: fichier foyer
def get_major_differences(self):
variable = self.variable
of_menages_data_frame = self.data_frame_by_entity_key_plural['menages']
erf_menages_data_frame = self.erf_data_frame_by_entity_key_plural['menages']
merged_menage_data_frame = merge(
erf_menages_data_frame[[variable, 'idmen']],
of_menages_data_frame[[variable, 'idmen']],
on = 'idmen',
how = 'inner',
suffixes = ('_erf', '_of')
)
log.info('Length of merged_menage_data_frame is {}'.format(len(merged_menage_data_frame)))
merged_menage_data_frame.set_index('idmen', drop = False, inplace = True)
table = merged_menage_data_frame[
numpy.logical_and(
merged_menage_data_frame[variable + '_erf'] != 0,
merged_menage_data_frame[variable + '_of'] != 0
)
]
table[variable + "_rel_diff"] = (table[variable + '_of'] - table[variable + '_erf']) \
/ table[variable + '_erf'] # Difference relative
log.info(
"Minimum difference between the two tables for {} is {}".format(
variable, str(table[variable + "_rel_diff"].min())
)
)
log.info(
"Maximum difference between the two tables for {} is {}".format(
variable, str(table[variable + "_rel_diff"].max())
)
)
table[variable + '_ratio'] = (
table[variable + '_of'] / table[variable + '_erf']
)
log.info(table[variable + "_rel_diff"].describe())
try:
assert len(table[variable + "_rel_diff"]) == len(table['wprm_of']), "PINAGS"
dec, values = mwp(
table[variable + "_rel_diff"],
numpy.arange(1, 11), table['wprm_of'],
2,
return_quantiles = True
)
log.info(sorted(values))
dec, values = mwp(
table[variable + "_rel_diff"],
numpy.arange(1, 101),
table['wprm_erf'],
2,
return_quantiles = True
)
log.info(sorted(values)[90:])
del dec, values
except:
log.info('Weighted percentile method did not work for {}'.format(variable + "_rel_diff"))
pass
table.sort(columns = variable + "_rel_diff", ascending = False, inplace = True)
print table[:10].to_string()
return table
def describe_discrepancies(self, fov = 10, consumers = False, parameters = True, descending = True, to_men = False):
    """Return the ``fov`` households with the largest (or smallest, when
    ``descending`` is False) relative difference for ``self.variable``.

    NOTE(review): the early ``return debug_data_frame`` below short-circuits
    the method, so everything after it is unreachable dead code -- apparently
    a debugging leftover.  If the tail were ever re-enabled it would also
    fail, because ``major_differences_data_frame`` is deleted above but its
    ``.columns`` are read again further down.
    """
    variable = self.variable
    major_differences_data_frame = self.get_major_differences()
    # Re-sort: ascending order surfaces the most negative discrepancies first.
    major_differences_data_frame.sort(
        columns = self.variable + "_rel_diff",
        ascending = not descending,
        inplace = True
        )
    debug_data_frame = major_differences_data_frame[0:fov].copy()
    del major_differences_data_frame
    of_menages_data_frame = self.data_frame_by_entity_key_plural['menages']
    of_individus_data_frame = self.data_frame_by_entity_key_plural['individus']
    erf_individus_data_frame = self.erf_data_frame_by_entity_key_plural['individus']
    erf_menages_data_frame = self.erf_data_frame_by_entity_key_plural['menages']
    return debug_data_frame
    # ------------------------------------------------------------------
    # DEAD CODE: never executed because of the return statement above.
    # ------------------------------------------------------------------
    kept_columns = set()
    if parameters:
        kept_columns.update(set(self.variable_parameters))
    if consumers:
        kept_columns.update(set(self.variable_consumers))
    kept_columns = list(kept_columns)
    kept_columns = list(set(kept_columns).union(
        set(['idmen', 'idfam', 'idfoy', 'quimen', 'quifam', 'quifoy'] + list(major_differences_data_frame.columns)))
        )
    if to_men:
        entities_ind = ['ind']
        entities_men = ['men', 'fam', 'foy']
    else:
        entities_ind = ['ind', 'fam', 'foy']
        entities_men = ['men']
    debug_data_frame = debug_data_frame.merge(
        self.extract(of_menages_data_frame, entities = entities_men),
        how = 'inner',
        on = 'idmen',
        )
    print debug_data_frame.to_string()
    debug_data_frame = debug_data_frame.merge(
        self.extract(of_individus_data_frame, entities = entities_ind),
        how = 'inner',
        on = 'idmen',
        )
    debug_data_frame = debug_data_frame.merge(
        erf_individus_data_frame,
        how = 'inner',
        on = 'idmen',
        )
    suffixes = ["_erf", "_of", "_rel_diff", "_ratio"]
    reordered_columns = [variable + suffixe for suffixe in suffixes] \
        + ["idmen", "quimen", "idfam", "quifam", "idfoy", "quifoy"]
    reordered_columns = reordered_columns + list(set(kept_columns) - set(reordered_columns))
    debug_data_frame = debug_data_frame[reordered_columns].copy()
    return debug_data_frame
def generate_test_case(self):
    """Extract from the survey input data frame the rows needed to rebuild,
    as a standalone test case, the household with the worst discrepancy for
    ``self.variable``.
    """
    entity_class_by_key_plural = self.survey_scenario.tax_benefit_system.entity_class_by_key_plural
    menages_entity = entity_class_by_key_plural['menages']
    idmen_by_idmen_original = self.idmen_by_idmen_original
    # Ascending sort puts the worst (most negative) household first.
    idmen_original = self.describe_discrepancies(descending = False)[menages_entity.index_for_person_variable_name].iloc[0]
    idmen = idmen_by_idmen_original[idmen_original]
    input_data_frame = self.survey_scenario.input_data_frame
    # All individuals belonging to the selected household.
    individus_index = input_data_frame.index[input_data_frame[menages_entity.index_for_person_variable_name] == idmen]
    index_by_entity = {
        entity_class_by_key_plural['individus']: individus_index,
        }
    for entity in entity_class_by_key_plural.values():
        if entity.key_plural != 'individus':
            # Ids of the famille/foyer/menage units those individuals belong to.
            index_by_entity[entity] = input_data_frame.loc[
                individus_index, entity.index_for_person_variable_name].unique()
    extracted_indices = individus_index
    for entity, entity_index in index_by_entity.iteritems():
        if entity.key_plural in ['menages', 'individus']:
            continue
        # NOTE(review): `+` on pandas Index objects changed semantics across
        # pandas versions (set union vs. element-wise addition); confirm the
        # intended behaviour against the pinned pandas version.
        extracted_indices = extracted_indices + \
            input_data_frame.index[input_data_frame[entity.index_for_person_variable_name].isin(entity_index)]
    extracted_input_data_frame = input_data_frame.loc[extracted_indices]
    return extracted_input_data_frame
if __name__ == '__main__':
    # Interactive debugging script: build the survey scenario once, then poke
    # at the Debugger instance (typically from a REPL / %run session).
    import sys
    from openfisca_plugin_aggregates.tests.test_aggregates import create_survey_scenario
    logging.basicConfig(level = logging.INFO, stream = sys.stdout)
    restart = True
    if restart:
        year = 2009
        survey_scenario = create_survey_scenario(year)
        survey_scenario.simulation = survey_scenario.new_simulation()
    debugger = Debugger()
    debugger.set_survey_scenario(survey_scenario = survey_scenario)
    debugger.set_variable('af')
    debugger.build_columns_to_fetch()
    debugger.build_openfisca_data_frames()
    debugger.build_erf_data_frames()
    # df_menage = debugger.data_frame_by_entity_key_plural['menages']
    # df_famille = debugger.data_frame_by_entity_key_plural['familles']
    # df_individus = debugger.data_frame_by_entity_key_plural['individus']
    #df = debugger.get_major_differences()
    # debugger.show_aggregates()
    df = debugger.describe_discrepancies(descending = False)
    df = debugger.generate_test_case()
    # NOTE(review): `boum` is an undefined name -- a deliberate NameError used
    # to halt the script here during interactive runs; the lines below never
    # execute when the file is run as a script.
    boum
    entity_class_by_key_plural = debugger.survey_scenario.tax_benefit_system.entity_class_by_key_plural
    menages_entity = entity_class_by_key_plural['menages']
    idmen = debugger.describe_discrepancies(descending = False)[menages_entity.index_for_person_variable_name].iloc[0]
    input_data_frame = debugger.survey_scenario.input_data_frame
| adrienpacifico/openfisca-france-data | openfisca_france_data/debugger.py | Python | agpl-3.0 | 20,788 |
# Copyright (c) 2001-2015, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# IRC #navitia on freenode
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from contextlib import contextmanager
import glob
import os
from navitiacommon import utils, launch_exec
from navitiacommon.launch_exec import launch_exec
import psycopg2
import zipfile
import logging
ALEMBIC_PATH_ED = os.environ.get('ALEMBIC_PATH', '../sql')
ALEMBIC_PATH_CITIES = os.environ.get('ALEMBIC_PATH_CITIES', '../cities')
@contextmanager
def cd(new_dir):
    """Temporarily switch the working directory to *new_dir* (``~`` is
    expanded), restoring the previous directory when the block exits,
    even if an exception is raised inside it.
    """
    saved_dir = os.getcwd()
    os.chdir(os.path.expanduser(new_dir))
    try:
        yield
    finally:
        # Always restore, whatever happened in the with-block.
        os.chdir(saved_dir)
def binarize(ed_db_params, output, ed_component_path, cities_db_params):
    """Run the ``ed2nav`` binary to build the *output* data.nav file from the
    ed and cities databases.  When *ed_component_path* is given, the binary
    is looked up there instead of on the PATH.
    """
    logger = logging.getLogger(__name__)
    logger.info('creating data.nav')
    binary = 'ed2nav'
    if ed_component_path:
        binary = os.path.join(ed_component_path, binary)
    args = [
        "-o",
        output,
        "--connection-string",
        ed_db_params.old_school_cnx_string(),
        "--cities-connection-string",
        cities_db_params.old_school_cnx_string(),
    ]
    launch_exec(binary, args, logger)
    logger.info("data.nav is created successfully: {}".format(output))
def import_data(data_dir, db_params, ed_component_path):
    """
    call the right component to import the data in the directory

    we loop through all files until we recognize one of them

    The importer binary is ``<data_type>2ed`` (e.g. ``fusio2ed``); when
    *ed_component_path* is given it is looked up there instead of the PATH.
    Raises an Exception when the importer exits with a non-zero status.
    """
    log = logging.getLogger(__name__)
    files = glob.glob(data_dir + "/*")
    data_type, file_to_load = utils.type_of_data(files)
    if not data_type:
        log.info('unknown data type for dir {}, skipping'.format(data_dir))
        return
    # Note, we consider that we only have to load one kind of data per directory
    import_component = data_type + '2ed'
    if ed_component_path:
        import_component = os.path.join(ed_component_path, import_component)
    if file_to_load.endswith('.zip') or file_to_load.endswith('.geopal'):
        # TODO: handle geopal as non zip
        # if it's a zip, we unzip it
        # Bug fix: use a context manager so the archive's file handle is
        # closed even if extraction fails (it was previously never closed).
        with zipfile.ZipFile(file_to_load) as zip_file:
            zip_file.extractall(path=data_dir)
        file_to_load = data_dir
    if launch_exec(
        import_component, ["-i", file_to_load, "--connection-string", db_params.old_school_cnx_string()], log
    ):
        raise Exception('Error: problem with running {}, stoping'.format(import_component))
def load_cities(cities_file, cities_db_params, cities_exec_path):
    """Import *cities_file* into the cities database using the ``cities``
    binary found under *cities_exec_path*.  Raises when the binary exits
    with a non-zero status.
    """
    logger = logging.getLogger(__name__)
    cities_exec = os.path.join(cities_exec_path, 'cities')
    args = ["-i", cities_file, "--connection-string", cities_db_params.old_school_cnx_string()]
    # launch_exec returns a truthy status on failure.
    if launch_exec(cities_exec, args, logger):
        raise Exception('Error: problem with running {}, stoping'.format(cities_exec))
def load_data(data_dirs, ed_db_params, ed_component_path):
    """Import every directory of *data_dirs* into the ed database."""
    logging.getLogger(__name__).info('loading {}'.format(data_dirs))
    for data_dir in data_dirs:
        import_data(data_dir, ed_db_params, ed_component_path)
def update_db(db_params, alembic_path):
    """
    enable postgis on the db and update its schema

    Connects with psycopg2 to run ``create extension postgis``, then runs the
    alembic migrations found under *alembic_path*.  Raises when alembic
    exits with a non-zero status.
    """
    cnx_string = db_params.cnx_string()
    # we need to enable postgis on the db
    cnx = psycopg2.connect(
        database=db_params.dbname, user=db_params.user, password=db_params.password, host=db_params.host
    )
    try:
        c = cnx.cursor()
        c.execute("create extension postgis;")
        c.close()
        cnx.commit()
        # statusmessage remains readable after the cursor is closed.
        logging.getLogger(__name__).info('message = {}'.format(c.statusmessage))
    finally:
        # Bug fix: the connection was previously never closed (leaked).
        cnx.close()
    with cd(alembic_path):
        res = os.system('PYTHONPATH=. alembic -x dbname="{cnx}" upgrade head'.format(cnx=cnx_string))
        if res:
            raise Exception('problem with db update')
def generate_nav(
    data_dir, docker_ed, docker_cities, output_file, ed_component_path, cities_exec_path, import_cities
):
    """Load all the data and binarize it into *output_file*.

    Data is taken either from the sub-directories of *data_dir* or, when
    there are none, from *data_dir* itself.  Both the cities and the ed
    databases are migrated first; *import_cities*, when given, must point to
    an existing cities file to import.
    """
    cities_db_params = docker_cities.get_db_params()
    update_db(cities_db_params, ALEMBIC_PATH_CITIES)
    ed_db_params = docker_ed.get_db_params()
    update_db(ed_db_params, ALEMBIC_PATH_ED)
    if import_cities:
        if not os.path.exists(import_cities):
            raise Exception('Error: impossible to find {}, exiting'.format(import_cities))
        load_cities(import_cities, cities_db_params, cities_exec_path)
    if not os.path.exists(data_dir):
        raise Exception('Error: impossible to find {}, exiting'.format(data_dir))
    children = (os.path.join(data_dir, name) for name in os.listdir(data_dir))
    data_dirs = [child for child in children if os.path.isdir(child)]
    if not data_dirs:
        # no sub dir: import only the files directly inside data_dir
        data_dirs = [data_dir]
    load_data(data_dirs, ed_db_params, ed_component_path)
    binarize(ed_db_params, output_file, ed_component_path, cities_db_params)
| pbougue/navitia | source/eitri/ed_handler.py | Python | agpl-3.0 | 6,071 |
from functools import wraps
from flask import current_app, request
from flask_sitemap import Sitemap, sitemap_page_needed
from udata.app import cache
sitemap = Sitemap()
CACHE_KEY = 'sitemap-page-{0}'
@sitemap_page_needed.connect
def create_page(app, page, urlset):
    """Signal handler: render the requested sitemap page and cache it."""
    rendered = sitemap.render_page(urlset=urlset)
    cache.set(CACHE_KEY.format(page), rendered)
def load_page(fn):
    """View decorator returning the cached sitemap page when available,
    falling back to the wrapped view otherwise."""
    @wraps(fn)
    def loader(*args, **kwargs):
        cached = cache.get(CACHE_KEY.format(kwargs.get('page')))
        if cached:
            return cached
        return fn(*args, **kwargs)
    return loader
def set_scheme(fn):
    """View decorator aligning SITEMAP_URL_SCHEME with the scheme of the
    current request before delegating to the wrapped view."""
    @wraps(fn)
    def set_scheme_on_call(*args, **kwargs):
        if request.is_secure:
            current_app.config['SITEMAP_URL_SCHEME'] = 'https'
        else:
            current_app.config['SITEMAP_URL_SCHEME'] = 'http'
        return fn(*args, **kwargs)
    return set_scheme_on_call
def init_app(app):
    """Register the flask-sitemap extension on *app* with udata's decorators."""
    # Clear flask-sitemap's default decorators: udata supplies its own
    # (cache lookup + URL scheme selection) through the config key below.
    sitemap.decorators = []
    app.config['SITEMAP_VIEW_DECORATORS'] = [load_page, set_scheme]
    sitemap.init_app(app)
| opendatateam/udata | udata/sitemap.py | Python | agpl-3.0 | 977 |
# Deprecation shim: warn once at import time, then re-export everything from
# the new location so legacy imports of this module keep working.
import logging
logging.warning('This module is deprecated. Use '
                '`coalib.testing.BearTestHelper` instead.')
from coalib.testing.BearTestHelper import *  # noqa: re-export for backwards compatibility
| sounak98/coala-bears | tests/BearTestHelper.py | Python | agpl-3.0 | 171 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
############################################################################
#
# MODULE: tr3.unregister
# AUTHOR(S): Soeren Gebbert
#
# PURPOSE: Unregister raster3d maps from space time raster3d datasets
# COPYRIGHT: (C) 2011 by the GRASS Development Team
#
# This program is free software under the GNU General Public
# License (version 2). Read the file COPYING that comes with GRASS
# for details.
#
#############################################################################
#%module
#% description: Unregister raster3d map(s) from a specific or from all space time raster3d dataset in which it is registered
#% keywords: spacetime raster3d dataset
#% keywords: raster3d
#%end
#%option
#% key: dataset
#% type: string
#% description: Name of an existing space time raster3d dataset. If no name is provided the raster3d map(s) are unregistered from all space time datasets in which they are registered.
#% required: no
#% multiple: no
#%end
#%option
#% key: maps
#% type: string
#% description: Name(s) of existing raster3d map(s) to unregister
#% required: yes
#% multiple: yes
#%end
import grass.script as grass
import grass.temporal as tgis
############################################################################
def main():
    """Unregister the given raster3d maps from the named space time dataset
    (or from all datasets when no name is given)."""
    # Get the options
    name = options["dataset"]
    maps = options["maps"]
    # Make sure the temporal database exists
    tgis.create_temporal_database()
    # Unregister maps
    tgis.unregister_maps_from_space_time_datasets("raster3d", name, maps)


if __name__ == "__main__":
    # grass.parser() parses the #%option blocks in the header above and
    # exposes the values as the module-level globals read by main().
    options, flags = grass.parser()
    main()
| AsherBond/MondocosmOS | grass_trunk/temporal/tr3.unregister/tr3.unregister.py | Python | agpl-3.0 | 1,642 |
import ipaddress
import docker.types
def init():
    # Intentionally empty: this module needs no initialisation.
    # NOTE(review): presumably kept so the module matches an init() convention
    # shared with sibling core modules -- confirm against the callers.
    pass
def get_next_cidr(client):
    """Return the next free ``10.x.y.0/24`` subnet on the docker host.

    Scans every existing docker network, keeps the highest 10.0.0.0/8 /24
    subnet found (10.0.0.0/24 when none exists) and returns the subnet 256
    addresses above it.  Raises when a 10.x network has a prefix other than
    /24, or when the 10.0.0.0/8 space is exhausted.
    """
    highest = ipaddress.ip_network("10.0.0.0/24")
    for network in client.networks.list():
        ipam = network.attrs["IPAM"]
        has_subnet = (ipam and ipam["Config"]
                      and len(ipam["Config"]) > 0
                      and ipam["Config"][0]["Subnet"])
        if not has_subnet:
            continue
        subnet = ipaddress.ip_network(ipam["Config"][0]["Subnet"])
        # Only subnets inside 10.0.0.0/8 are managed here.
        if subnet.network_address.packed[0] != 10:
            continue
        if subnet.prefixlen != 24:
            raise Exception(
                "Invalid network prefix length {0} for network {1}"
                .format(subnet.prefixlen, network.name))
        if subnet > highest:
            highest = subnet
    candidate = ipaddress.ip_network((highest.network_address + 256).exploded + "/24")
    if candidate.network_address.packed[0] > 10:
        raise Exception("No more networks available")
    return candidate
def create_network(client, name):
    """Create a fresh docker network *name* on the next free 10.x.y.0/24
    subnet, removing any existing network with the same name first."""
    subnet = get_next_cidr(client)
    print("Creating network {0} with subnet {1}".format(name, subnet.exploded))
    # Drop leftover networks with this name before recreating.
    for existing in client.networks.list(names=[name]):
        existing.remove()
    pool = docker.types.IPAMPool(subnet=subnet.exploded,
                                 gateway=(subnet.network_address + 1).exploded)
    client.networks.create(name, ipam=docker.types.IPAMConfig(pool_configs=[pool]))
| puffinrocks/puffin | puffin/core/network.py | Python | agpl-3.0 | 1,606 |
# Create your views here.
import random
import json
import hashlib
import smtplib
from django.shortcuts import render_to_response, get_object_or_404
from django.conf import settings
from django.template import RequestContext
from django.contrib.auth.models import User
from django.contrib import messages
from django.http import HttpResponse, HttpResponseRedirect #, Http404
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ObjectDoesNotExist
from datetime import date
from itertools import izip
from email.mime.text import MIMEText
from memopol.campaign.forms import ScoreForm, DebriefingForm
from memopol.campaign.models import MEPScore, ScoreRule
from memopol.meps.models import MEP
from .models import Campaign, Debriefing
def updateCampaignScores(form, pk, c):
    """Apply one score rule from *form* to campaign *c*.

    Builds a Django ORM filter from the selected group / delegation / staff /
    committee criteria, records the rule as a ScoreRule for audit, and adds
    the rule's weight to the MEPScore of every matching MEP.
    """
    query={}
    if form.cleaned_data['group']:
        query['groupmep__group__abbreviation__in'] = form.cleaned_data['group']
        # date(9999, 12, 31) encodes "membership still active".
        query['groupmep__end'] = date(9999, 12, 31)
    if form.cleaned_data['groupRole']:
        query['groupmep__role__in'] = form.cleaned_data['groupRole']
    if form.cleaned_data['delegation']:
        query['delegationrole__delegation__name__in'] = form.cleaned_data['delegation']
        query['delegationrole__end'] = date(9999, 12, 31)
    if form.cleaned_data['delegationRole']:
        query['delegationrole__role__in'] = form.cleaned_data['delegationRole']
    if form.cleaned_data['staff']:
        query['organizationmep__organization__name__in'] = form.cleaned_data['staff']
        query['organizationmep__end'] = date(9999, 12, 31)
    if form.cleaned_data['staffRole']:
        query['organizationmep__role__in'] = form.cleaned_data['staffRole']
    if form.cleaned_data['committee']:
        query['committeerole__committee__name__in'] = form.cleaned_data['committee']
        query['committeerole__end'] = date(9999, 12, 31)
    if form.cleaned_data['committeeRole']:
        query['committeerole__role__in'] = form.cleaned_data['committeeRole']
    #print ', '.join(["%s = %s" % (k,v) for k,v in query.items()])
    if query:
        # for the record: keep a human-readable trace of the applied rule
        ScoreRule(campaign=c,
                  rule=', '.join(["%s = %s" % (k,v) for k,v in query.items()]),
                  score=form.cleaned_data['weight']).save()
        for mep in MEP.objects.filter(**query).distinct():
            # get_or_create returns (object, created); keep the object only.
            ms = MEPScore.objects.get_or_create(mep=mep, campaign=c)[0]
            ms.score += form.cleaned_data['weight']
            ms.save()
def editCampaign(request, pk):
    """Display / process the score-rule form for campaign *pk*.

    Anonymous visitors are redirected back to the campaign view with an
    error message; a valid submission applies its rule via
    updateCampaignScores().
    """
    c = get_object_or_404(Campaign, pk=pk)
    if not request.user.is_authenticated():
        messages.add_message(request,
                             messages.ERROR,
                             "You should login to edit the campaign.")
        return HttpResponseRedirect("/campaign/view/%s/" % c.id)
    data = { 'campaign': c }
    form = ScoreForm(request.POST)
    if not form.is_valid():
        # GET request or invalid submission: present a blank form.
        form = ScoreForm()
    else:
        updateCampaignScores(form, pk, c)
    data['form'] = form
    return render_to_response('campaign/edit.html',
                              data,
                              context_instance = RequestContext(request))
def randomsubset(l, n):
    """Draw ``n`` distinct items from ``l``, a list of ``(weight, item)``
    pairs, with probability proportional to weight (weighted sampling
    without replacement).

    Raises IndexError if ``n`` exceeds the number of distinct items.
    NOTE(review): each draw rebuilds the expanded weighted pool (every item
    repeated ``weight`` times, minus items already chosen), which is
    O(total_weight) per draw -- acceptable for the small MEP lists used here.
    """
    res = []
    for _ in xrange(n):
        # Flatten remaining (weight, item) pairs into `weight` copies of each
        # item and pick one uniformly => a weighted draw.
        res.append(random.choice([x for li in [[x[1]] * x[0] for x in l if x[1] not in res] for x in li]))
    return res
def getCampaignMeps(request, pk):
    """Pick (and cache in the session) a weighted random subset of MEPs to
    contact for campaign *pk*, then render the contact page.

    GET parameters:
      - force: re-draw the subset even if one is cached for this campaign
      - limit: number of MEPs to draw (default 3)
      - format=json: return the chosen MEP ids as JSON instead of HTML
    """
    c=get_object_or_404(Campaign, pk=pk)
    if not 'chosen' in request.session:
        request.session['chosen']={}
    if not c in request.session['chosen'] or request.GET.get('force'):
        # Weight scored MEPs by their campaign score; every other active MEP
        # gets a default weight of 1.
        scoredmeps=[(x['score'],x['mep']) for x in MEPScore.objects.filter(campaign=c).values('mep','score')]
        smeps=set([x[1] for x in scoredmeps])
        allmeps=set([x['id'] for x in MEP.objects.filter(active=True).values('id')])
        zmeps=[(1, x) for x in allmeps-smeps]
        try:
            limit=int(request.GET.get('limit'))
        except:
            # missing or non-numeric limit parameter
            limit=3
        chosen=randomsubset(zmeps+scoredmeps, limit)
        # NOTE(review): the Campaign instance itself is used as a session
        # dictionary key -- confirm the session serializer supports that.
        request.session['chosen'][c]=chosen
        # Mutating a nested structure does not mark the session dirty by
        # itself, hence the explicit flag.
        request.session.modified = True
        return HttpResponseRedirect("/campaign/view/%s/" % c.id)
    else:
        chosen=request.session['chosen'][c]
    if request.GET.get('format')=='json':
        return HttpResponse(json.dumps(chosen),
                            mimetype="application/json")
    # Resolve the cached ids into model instances for the template.
    chosen=[MEP.objects.get(pk=x) for x in chosen]
    forms = [DebriefingForm(instance=Debriefing(mep=mep,campaign=c)) for mep in chosen]
    dbriefs = [Debriefing.objects.filter(mep=mep,campaign=c,valid="") for mep in chosen]
    return render_to_response('campaign/view.html',
                              { 'object_list': izip(chosen,forms, dbriefs ),
                                'campaign': c, },
                              context_instance = RequestContext(request))
def getCampaigns(request):
    """List all campaigns with, for each one, the number of distinct MEPs
    that received at least one debriefing."""
    campaigns = [
        (campaign, MEP.objects.filter(debriefing__campaign=campaign).distinct().count())
        for campaign in Campaign.objects.all()
        ]
    return render_to_response('campaign/list.html',
                              { 'object_list': campaigns, },
                              context_instance = RequestContext(request))
def feedback(request):
    """Record a posted debriefing and mail staff a moderation link.

    Exact duplicates (same mep/campaign/contact/type/response/text) are
    ignored.  The debriefing is saved, then re-saved with the generated
    validation key stored in its ``valid`` field; confirm() later clears
    that field to validate the entry.
    """
    feedback = DebriefingForm(request.POST)
    feedback.full_clean()
    if feedback.errors:
        return HttpResponse(str(feedback.errors))
    feedback = feedback.save(commit=False)
    # Silently ignore exact duplicates of an already stored debriefing.
    tmp=Debriefing.objects.filter(mep=feedback.mep,
                                  campaign=feedback.campaign,
                                  usercontact=feedback.usercontact,
                                  type=feedback.type,
                                  response=feedback.response,
                                  text=feedback.text).count()
    if tmp>0:
        return HttpResponse("known.")
    feedback.save()
    # Notify every staff member; the returned key must be presented on the
    # /campaign/feedback/<id>/<key> URL to validate the entry.
    to=[x.email for x in User.objects.filter(is_staff=True)]
    actid=sendverifymail(feedback,to)
    feedback.valid=actid
    feedback.save()
    return HttpResponse("Thank you.")
def sendverifymail(feedback,to):
    """Mail the staff addresses in *to* a verification link for *feedback*.

    Returns the random activation key that confirm() must receive to
    validate the debriefing.
    """
    # 12 random printable characters hashed into an unguessable token.
    # NOTE(review): this uses the `random` module, not a CSPRNG -- consider
    # os.urandom for anything security sensitive.
    actid = hashlib.sha1(''.join([chr(random.randint(32, 122))
                                  for x in range(12)])).hexdigest()
    msg = MIMEText(_("Someone sent feedback on a campaign\nYour verification key is %(root_url)s/campaign/feedback/%(feedback_id)s/%(actid)s\n\nfrom: %(from)s\nabout %(mep)s\ntype: %(type)s\nresult: %(result)s\ncomment: %(comment)s")
                   % {"root_url": settings.ROOT_URL or 'http://localhost:8001/',
                      "feedback_id": feedback.id,
                      "actid": actid,
                      "from": feedback.usercontact,
                      "mep": feedback.mep,
                      "type": feedback.type,
                      "result": feedback.response,
                      "comment": feedback.text})
    msg['Subject'] = _('Memopol2 feedback moderation')
    msg['From'] = 'memopol2@memopol2.lqdn.fr'
    msg['To'] = ', '.join(to)
    s = smtplib.SMTP('localhost')
    # Bug fix: `to` is already a list of recipient addresses; the previous
    # code passed [to] (a nested list), which smtplib does not accept as a
    # recipient list.
    s.sendmail('memopol2@memopol2.lqdn.fr', to, msg.as_string())
    s.quit()
    return actid
def confirm(request, id, key):
    """Validate a debriefing from the emailed moderation link.

    A debriefing is validated by clearing its ``valid`` field; a wrong key or
    an already validated entry falls back to the campaign list with an
    informational message.
    """
    feedback=None
    try:
        feedback=Debriefing.objects.get(pk=id, valid=key)
    except ObjectDoesNotExist:
        messages.add_message(request,
                             messages.INFO,
                             "Thank you! Either already confirmed, or object doesn't exist")
        return HttpResponseRedirect('/campaign/list/')
    # Clearing the key marks the entry as validated (see feedback()).
    feedback.valid=''
    feedback.save()
    messages.add_message(request,
                         messages.INFO,
                         'Thank you for your confirmation')
    return HttpResponseRedirect('/campaign/view/%s/' % feedback.campaign.id)
def report(request, pk):
    """Render the campaign report: every MEP with at least one validated
    debriefing, plus the per-MEP score table."""
    c=get_object_or_404(Campaign, pk=pk)
    # valid="" means the debriefing has been confirmed (see confirm()).
    chosen=MEP.objects.filter(debriefing__campaign=c,debriefing__valid="").distinct()
    forms = [DebriefingForm(instance=Debriefing(mep=mep,campaign=c,valid="")) for mep in chosen]
    dbriefs = [Debriefing.objects.filter(mep=mep,campaign=c,valid="") for mep in chosen]
    mepscores = MEPScore.objects.filter(campaign=c)
    mepsforms = [DebriefingForm(instance=Debriefing(mep=mep,campaign=c,valid=""))
                 for mep in MEP.objects.filter(mepscore__campaign=c)]
    return render_to_response('campaign/view.html',
                              { 'object_list': izip(chosen,forms, dbriefs ),
                                'mepscores': izip(mepscores,mepsforms),
                                'campaign': c, },
                              context_instance = RequestContext(request))
| yohanboniface/memopol-core | memopol/campaign/views.py | Python | agpl-3.0 | 8,612 |
#####################################################################################
#
# Copyright (C) Tavendo GmbH
#
# Unless a separate license agreement exists between you and Tavendo GmbH (e.g. you
# have purchased a commercial license), the license terms below apply.
#
# Should you enter into a separate license agreement after having received a copy of
# this software, then the terms of such license agreement replace the terms below at
# the time at which such license agreement becomes effective.
#
# In case a separate license agreement ends, and such agreement ends without being
# replaced by another separate license agreement, the license terms below apply
# from the time at which said agreement ends.
#
# LICENSE TERMS
#
# This program is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License, version 3, as published by the
# Free Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU Affero General Public License Version 3 for more details.
#
# You should have received a copy of the GNU Affero General Public license along
# with this program. If not, see <http://www.gnu.org/licenses/agpl-3.0.en.html>.
#
#####################################################################################
from __future__ import absolute_import, division, print_function
from six import StringIO as NativeStringIO
from twisted.internet.selectreactor import SelectReactor
from crossbar.test import TestCase
from crossbar.controller import cli
from crossbar import _logging
from weakref import WeakKeyDictionary
import os
import sys
import platform
import twisted
class CLITestBase(TestCase):
    """Base class for CLI tests: captures crossbar's log output so the
    tests can assert on message content."""

    # the tests here are mostly bogus, as they test for log message content,
    # not actual functionality
    skip = True

    def setUp(self):
        """Redirect crossbar's logging streams into in-memory buffers."""
        self._subprocess_timeout = 15
        if platform.python_implementation() == 'PyPy':
            # PyPy warm-up is slow; give subprocesses more headroom.
            self._subprocess_timeout = 30
        self.stderr = NativeStringIO()
        self.stdout = NativeStringIO()
        # Patch the module-level log targets; twisted's TestCase.patch
        # restores them automatically after the test.
        self.patch(_logging, "_stderr", self.stderr)
        self.patch(_logging, "_stdout", self.stdout)
        self.patch(_logging, "_loggers", WeakKeyDictionary())
        self.patch(_logging, "_loglevel", "info")
        return super(CLITestBase, self).setUp()

    def tearDown(self):
        # Restore the real process streams clobbered by the CLI under test.
        sys.stdout = sys.__stdout__
        sys.stderr = sys.__stderr__
class VersionTests(CLITestBase):
    """
    Tests for `crossbar version`.
    """

    def test_basic(self):
        """
        Just running `crossbar version` gets us the versions.
        """
        reactor = SelectReactor()
        cli.run("crossbar",
                ["version"],
                reactor=reactor)
        self.assertIn("Crossbar.io", self.stdout.getvalue())
        # The expected line embeds the ANSI colour escapes the logger emits.
        self.assertIn(
            ("Twisted : \x1b[33m\x1b[1m" + twisted.version.short() + "-SelectReactor"),
            self.stdout.getvalue())

    def test_debug(self):
        """
        Running `crossbar version` will give us the versions, plus the
        locations of some of them.
        """
        reactor = SelectReactor()
        cli.run("crossbar",
                ["version", "--loglevel=debug"],
                reactor=reactor)
        self.assertIn("Crossbar.io", self.stdout.getvalue())
        self.assertIn(
            ("Twisted : \x1b[33m\x1b[1m" + twisted.version.short() + "-SelectReactor"),
            self.stdout.getvalue())
        # Debug level additionally prints the fully-qualified reactor class.
        self.assertIn(
            ("[twisted.internet.selectreactor.SelectReactor]"),
            self.stdout.getvalue())
class StartTests(CLITestBase):
    """
    Tests for `crossbar start`.
    """

    def setUp(self):
        CLITestBase.setUp(self)
        # Set up the configuration directories
        self.cbdir = os.path.abspath(self.mktemp())
        os.mkdir(self.cbdir)
        self.config = os.path.abspath(os.path.join(self.cbdir, "config.json"))

    def test_start(self):
        """
        A basic start, that doesn't actually enter the reactor.
        """
        with open(self.config, "w") as f:
            f.write("""{"controller": {}}""")
        reactor = SelectReactor()
        # Stub out run() so the test returns instead of blocking in the loop.
        reactor.run = lambda: False
        cli.run("crossbar",
                ["start", "--cbdir={}".format(self.cbdir),
                 "--logformat=syslogd"],
                reactor=reactor)
        self.assertIn("Entering reactor event loop", self.stdout.getvalue())

    def test_configValidationFailure(self):
        """
        Running `crossbar start` with an invalid config will print a warning.
        """
        # An empty file is not valid JSON.
        with open(self.config, "w") as f:
            f.write("")
        reactor = SelectReactor()
        with self.assertRaises(SystemExit) as e:
            cli.run("crossbar",
                    ["start", "--cbdir={}".format(self.cbdir),
                     "--logformat=syslogd"],
                    reactor=reactor)
        # Exit with code 1
        self.assertEqual(e.exception.args[0], 1)
        # The proper warning should be emitted
        self.assertIn("*** Configuration validation failed ***",
                      self.stderr.getvalue())
        self.assertIn(("configuration file does not seem to be proper JSON "),
                      self.stderr.getvalue())

    def test_fileLogging(self):
        """
        Running `crossbar start --logtofile` will log to cbdir/node.log.
        """
        with open(self.config, "w") as f:
            f.write("""{"controller": {}}""")
        reactor = SelectReactor()
        reactor.run = lambda: None
        cli.run("crossbar",
                ["start", "--cbdir={}".format(self.cbdir), "--logtofile"],
                reactor=reactor)
        with open(os.path.join(self.cbdir, "node.log"), "r") as f:
            logFile = f.read()
        self.assertIn("Entering reactor event loop", logFile)
        # With --logtofile nothing must reach the captured std streams.
        self.assertEqual("", self.stderr.getvalue())
        self.assertEqual("", self.stdout.getvalue())

    def test_stalePID(self):
        with open(self.config, "w") as f:
            f.write("""{"controller": {}}""")
        # PID 9999999 is assumed not to exist on the test machine, so the
        # node must treat the pid file as stale and remove it.
        with open(os.path.join(self.cbdir, "node.pid"), "w") as f:
            f.write("""{"pid": 9999999}""")
        reactor = SelectReactor()
        reactor.run = lambda: None
        cli.run("crossbar",
                ["start", "--cbdir={}".format(self.cbdir),
                 "--logformat=syslogd"],
                reactor=reactor)
        self.assertIn(
            ("Stale Crossbar.io PID file (pointing to non-existing process "
             "with PID {pid}) {fp} removed").format(
                fp=os.path.abspath(os.path.join(self.cbdir, "node.pid")),
                pid=9999999),
            self.stdout.getvalue())
class ConvertTests(CLITestBase):
    """
    Tests for `crossbar convert` (JSON <-> YAML config conversion).

    NOTE(review): the indentation *inside* the multi-line string literals
    below appears to have been stripped in this copy of the file; as written
    the YAML fixtures are flat mappings, which contradicts the nested
    structures asserted against them.  Restore from upstream before relying
    on these fixtures.
    """

    def test_unknown_format(self):
        """
        Running `crossbar convert` with an unknown config file produces an
        error.
        """
        cbdir = self.mktemp()
        os.makedirs(cbdir)
        # Neither .json nor .yaml -> must be rejected.
        config_file = os.path.join(cbdir, "config.blah")
        open(config_file, 'wb').close()
        with self.assertRaises(SystemExit) as e:
            cli.run("crossbar",
                    ["convert", "--config={}".format(config_file)])
        self.assertEqual(e.exception.args[0], 1)
        self.assertIn(
            ("Error: configuration file needs to be '.json' or '.yaml'."),
            self.stdout.getvalue())

    def test_yaml_to_json(self):
        """
        Running `crossbar convert` with a YAML config file will convert it to
        JSON.
        """
        cbdir = self.mktemp()
        os.makedirs(cbdir)
        config_file = os.path.join(cbdir, "config.yaml")
        with open(config_file, 'w') as f:
            f.write("""
foo:
bar: spam
baz:
foo: cat
""")
        cli.run("crossbar",
                ["convert", "--config={}".format(config_file)])
        self.assertIn(
            ("JSON formatted configuration written"),
            self.stdout.getvalue())
        with open(os.path.join(cbdir, "config.json"), 'r') as f:
            self.assertEqual(f.read(), """{
"foo": {
"bar": "spam",
"baz": {
"foo": "cat"
}
}
}""")

    def test_invalid_yaml_to_json(self):
        """
        Running `crossbar convert` with an invalid YAML config file will error
        saying it is invalid.
        """
        cbdir = self.mktemp()
        os.makedirs(cbdir)
        config_file = os.path.join(cbdir, "config.yaml")
        # Unbalanced braces: not parseable as YAML.
        with open(config_file, 'w') as f:
            f.write("""{{{{{{{{""")
        with self.assertRaises(SystemExit) as e:
            cli.run("crossbar",
                    ["convert", "--config={}".format(config_file)])
        self.assertEqual(e.exception.args[0], 1)
        self.assertIn(
            ("not seem to be proper YAML"),
            self.stdout.getvalue())

    def test_json_to_yaml(self):
        """
        Running `crossbar convert` with a JSON config file will convert it to
        YAML.
        """
        cbdir = self.mktemp()
        os.makedirs(cbdir)
        config_file = os.path.join(cbdir, "config.json")
        with open(config_file, 'w') as f:
            f.write("""{
"foo": {
"bar": "spam",
"baz": {
"foo": "cat"
}
}
}""")
        cli.run("crossbar",
                ["convert", "--config={}".format(config_file)])
        self.assertIn(
            ("YAML formatted configuration written"),
            self.stdout.getvalue())
        with open(os.path.join(cbdir, "config.yaml"), 'r') as f:
            self.assertEqual(f.read(), """foo:
bar: spam
baz:
foo: cat
""")

    def test_invalid_json_to_yaml(self):
        """
        Running `crossbar convert` with an invalid JSON config file will error
        saying it is invalid.
        """
        cbdir = self.mktemp()
        os.makedirs(cbdir)
        config_file = os.path.join(cbdir, "config.json")
        # Unbalanced braces: not parseable as JSON.
        with open(config_file, 'w') as f:
            f.write("""{{{{{{{{""")
        with self.assertRaises(SystemExit) as e:
            cli.run("crossbar",
                    ["convert", "--config={}".format(config_file)])
        self.assertEqual(e.exception.args[0], 1)
        self.assertIn(
            ("not seem to be proper JSON"),
            self.stdout.getvalue())
| w1z2g3/crossbar | crossbar/controller/test/test_cli.py | Python | agpl-3.0 | 10,540 |
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2016 CoNWeT Lab., Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
import json
from django.core.cache import cache
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext as _
import six
from wirecloud.catalogue.models import CatalogueResource
from wirecloud.commons.baseviews import Resource
from wirecloud.commons.utils.cache import CacheableData
from wirecloud.commons.utils.http import authentication_required, build_error_response, get_absolute_reverse_url, get_current_domain, consumes, parse_json_request
from wirecloud.platform.models import Workspace
from wirecloud.platform.wiring.utils import generate_xhtml_operator_code, get_operator_cache_key
class WiringEntry(Resource):
    @authentication_required
    @consumes(('application/json',))
    def update(self, request, workspace_id):
        """Replace the wiring status of a workspace.

        Only a superuser or the workspace creator may update it, and the new
        status must neither remove/modify read only connections nor tamper
        with read only or hidden operator preferences. Returns 204 on
        success, 403 on any policy violation.
        """
        workspace = get_object_or_404(Workspace, id=workspace_id)
        if not request.user.is_superuser and workspace.creator != request.user:
            return build_error_response(request, 403, _('You are not allowed to update this workspace'))
        new_wiring_status = parse_json_request(request)
        old_wiring_status = workspace.wiringStatus
        # Check read only connections: every read only connection in the old
        # status must appear, unchanged, in the new status.
        old_read_only_connections = [connection for connection in old_wiring_status['connections'] if connection.get('readonly', False)]
        new_read_only_connections = [connection for connection in new_wiring_status['connections'] if connection.get('readonly', False)]
        if len(old_read_only_connections) > len(new_read_only_connections):
            return build_error_response(request, 403, _('You are not allowed to remove or update read only connections'))
        for connection in old_read_only_connections:
            if connection not in new_read_only_connections:
                return build_error_response(request, 403, _('You are not allowed to remove or update read only connections'))
        # Check operator preferences
        for operator_id, operator in six.iteritems(new_wiring_status['operators']):
            if operator_id in old_wiring_status['operators']:
                # Existing operator: classify its preference changes.
                old_operator = old_wiring_status['operators'][operator_id]
                added_preferences = set(operator['preferences'].keys()) - set(old_operator['preferences'].keys())
                removed_preferences = set(old_operator['preferences'].keys()) - set(operator['preferences'].keys())
                updated_preferences = set(operator['preferences'].keys()).intersection(old_operator['preferences'].keys())
            else:
                # New operator
                added_preferences = operator['preferences'].keys()
                removed_preferences = ()
                updated_preferences = ()
            # New preferences may not be created already flagged readonly/hidden.
            for preference_name in added_preferences:
                if operator['preferences'][preference_name].get('readonly', False) or operator['preferences'][preference_name].get('hidden', False):
                    return build_error_response(request, 403, _('Read only and hidden preferences cannot be created using this API'))
            # Readonly/hidden preferences may not be dropped.
            for preference_name in removed_preferences:
                if old_operator['preferences'][preference_name].get('readonly', False) or old_operator['preferences'][preference_name].get('hidden', False):
                    return build_error_response(request, 403, _('Read only and hidden preferences cannot be removed'))
            for preference_name in updated_preferences:
                old_preference = old_operator['preferences'][preference_name]
                new_preference = operator['preferences'][preference_name]
                # The readonly/hidden flags themselves are immutable here ...
                if old_preference.get('readonly', False) != new_preference.get('readonly', False) or old_preference.get('hidden', False) != new_preference.get('hidden', False):
                    return build_error_response(request, 403, _('Read only and hidden status cannot be changed using this API'))
                # ... and a read only preference cannot be given a new value.
                if new_preference.get('readonly', False) and new_preference.get('value') != old_preference.get('value'):
                    return build_error_response(request, 403, _('Read only preferences cannot be updated'))
        workspace.wiringStatus = new_wiring_status
        workspace.save()
        # 204: update applied, no response body.
        return HttpResponse(status=204)
def process_requirements(requirements):
    """Map every requirement name onto an empty options dict."""
    return {requirement['name']: {} for requirement in requirements}
class OperatorEntry(Resource):
    def read(self, request, vendor, name, version):
        """Serve the XHTML bootstrap code of an operator.

        The generated markup is cached (per operator, domain and mode) for a
        year, as it only depends on the immutable operator description.
        """
        operator = get_object_or_404(CatalogueResource, type=2, vendor=vendor, short_name=name, version=version)
        # For now, all operators are freely accessible/distributable
        #if not operator.is_available_for(request.user):
        #    return HttpResponseForbidden()
        mode = request.GET.get('mode', 'classic')
        key = get_operator_cache_key(operator, get_current_domain(request), mode)
        cached_response = cache.get(key)
        if cached_response is None:
            # Cache miss: build the operator XHTML from its description.
            options = json.loads(operator.json_description)
            js_files = options['js_files']
            base_url = get_absolute_reverse_url('wirecloud.showcase_media', kwargs={
                'vendor': operator.vendor,
                'name': operator.short_name,
                'version': operator.version,
                'file_path': operator.template_uri
            }, request=request)
            xhtml = generate_xhtml_operator_code(js_files, base_url, request, process_requirements(options['requirements']), mode)
            cache_timeout = 31536000  # 1 year
            cached_response = CacheableData(xhtml, timeout=cache_timeout, content_type='application/xhtml+xml; charset=UTF-8')
            cache.set(key, cached_response, cache_timeout)
        return cached_response.get_response()
| rockneurotiko/wirecloud | src/wirecloud/platform/wiring/views.py | Python | agpl-3.0 | 6,563 |
# © 2014-2015 Tecnativa S.L. - Jairo Llopis
# © 2016 Tecnativa S.L. - Vicent Cubells
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from odoo import fields, models
class ResPartner(models.Model):
    """Extend partners with the department they belong to."""
    _inherit = "res.partner"
    # Department of this contact (see res.partner.department below).
    department_id = fields.Many2one("res.partner.department", "Department")
class ResPartnerDepartment(models.Model):
    """Hierarchical department a partner can be assigned to."""
    _name = "res.partner.department"
    # Order by materialized tree path so children follow their parents.
    _order = "parent_path"
    _parent_order = "name"
    # Enable the parent_path-based hierarchy storage.
    _parent_store = True
    _description = "Department"
    name = fields.Char(required=True, translate=True)
    # restrict: a department with children cannot be deleted.
    parent_id = fields.Many2one(
        "res.partner.department", "Parent department", ondelete="restrict"
    )
    child_ids = fields.One2many(
        "res.partner.department", "parent_id", "Child departments"
    )
    # Maintained automatically by Odoo when _parent_store is True.
    parent_path = fields.Char(index=True)
| OCA/partner-contact | partner_contact_department/models/res_partner.py | Python | agpl-3.0 | 839 |
#! /usr/bin/python3
# This simulates the org.freedesktop.UDisks.Encrypted.Slots property
# et al for versions of UDisks that don't have them yet.
import sys
import json
import subprocess
import re
import base64
import signal
import atexit
import os
def b64_decode(data):
    """Decode URL-safe base64 *data*, restoring any missing '=' padding.

    The values we receive usually arrive without trailing padding, but the
    base64 module insists on it, so it is re-added before decoding.
    """
    pad_len = (4 - len(data) % 4) % 4
    padded = data + '=' * pad_len
    return base64.urlsafe_b64decode(padded.encode('ascii', 'ignore'))
def get_clevis_config_from_protected_header(protected_header):
    """Extract the clevis pin configuration from a JWE protected header.

    Returns the configuration dict, or None when the header carries no
    "clevis" member.  For "sss" pins the sub-pin configurations are
    collected recursively and grouped by pin name.
    """
    decoded = b64_decode(protected_header).decode("utf-8")
    clevis = json.loads(decoded).get("clevis", None)
    if not clevis:
        return None
    pin = clevis.get("pin", None)
    if pin == "tang":
        # Tang configs are returned verbatim.
        return clevis
    if pin == "sss":
        # Shamir secret sharing: gather the configs of all sub-pins.
        grouped = {}
        for jwe in clevis["sss"]["jwe"]:
            subconf = get_clevis_config_from_jwe(jwe)
            grouped.setdefault(subconf["pin"], []).append(subconf[subconf["pin"]])
        return {"pin": "sss", "sss": {"t": clevis["sss"]["t"], "pins": grouped}}
    # Unknown pin type: report the name with an empty config.
    return {"pin": pin, pin: {}}
def get_clevis_config_from_jwe(jwe):
    """Return the clevis configuration found in a compact-serialized JWE."""
    protected_header = jwe.split(".")[0]
    return get_clevis_config_from_protected_header(protected_header)
def info(dev):
    """Return keyslot information for the LUKS device *dev*.

    Result is a dict with "version" (LUKS header version), "slots" (entries
    shaped like UDisks' Encrypted.Slots property, including any clevis
    configuration found) and "max_slots".
    """
    slots = { }
    version = 1
    max_slots = 8
    try:
        result = subprocess.check_output([ "cryptsetup", "luksDump", dev ], stderr=subprocess.PIPE)
    except subprocess.CalledProcessError:
        # Dump failed (e.g. not a LUKS device): report no slots.
        return { "version": version, "slots": [ ], "max_slots": max_slots }
    in_luks2_slot_section = False
    in_luks2_token_section = False
    for line in result.splitlines():
        # Section headers start at column 0; section content is indented.
        if not (line.startswith(b" ") or line.startswith(b"\t")):
            in_luks2_slot_section = False
            in_luks2_token_section = False
        if line == b"Keyslots:":
            # Only LUKS2 dumps have a "Keyslots:" section.
            in_luks2_slot_section = True
            version = 2
            max_slots = 32
        elif line == b"Tokens:":
            in_luks2_token_section = True
        if in_luks2_slot_section:
            match = re.match(b" ([0-9]+): luks2$", line)
        else:
            # LUKS1 lists slots as "Key Slot N: ENABLED".
            match = re.match(b"Key Slot ([0-9]+): ENABLED$", line)
        if match:
            slot = int(match.group(1))
            entry = { "Index": { "v": slot } }
            if version == 1:
                # LUKS1: clevis data lives in luksmeta, keyed by its UUID.
                try:
                    luksmeta = subprocess.check_output([ "luksmeta", "load", "-d", dev, "-s", str(slot),
                                                         "-u", "cb6e8904-81ff-40da-a84a-07ab9ab5715e" ],
                                                       stderr=subprocess.PIPE)
                    entry["ClevisConfig"] = {
                        "v": json.dumps(get_clevis_config_from_jwe(luksmeta.decode("utf-8")))
                    }
                except subprocess.CalledProcessError:
                    pass
            if slot not in slots:
                slots[slot] = entry
        if in_luks2_token_section:
            # LUKS2: clevis data is stored as a token referencing keyslots.
            match = re.match(b" ([0-9]+): clevis$", line)
            if match:
                try:
                    token = subprocess.check_output([ "cryptsetup", "token", "export", dev, "--token-id", match.group(1) ],
                                                    stderr=subprocess.PIPE)
                    token_object = json.loads(token.decode("utf-8"))
                    if token_object.get("type") == "clevis":
                        config = json.dumps(get_clevis_config_from_protected_header(token_object["jwe"]["protected"]))
                        for slot_str in token_object.get("keyslots", [ ]):
                            slot = int(slot_str)
                            slots[slot] = { "Index": { "v": slot },
                                            "ClevisConfig": { "v": config } }
                except subprocess.CalledProcessError:
                    pass
    return { "version": version, "slots": list(slots.values()), "max_slots": max_slots }
def monitor(dev):
    """Watch *dev* via udev and print its slot info as JSON on every change.

    Emits one JSON line immediately, then one line per actual change.
    Runs forever until killed.
    """
    mon = None
    # We have to kill the udevadm process explicitly when Cockpit
    # kills us.  It will eventually exit on its own since its stdout
    # will be closed when we exit, but that will only happen when it
    # actually writes something.
    def killmon():
        if mon:
            mon.terminate()
    def sigexit(signo, stack):
        killmon()
        os._exit(0)
    atexit.register(killmon)
    signal.signal(signal.SIGTERM, sigexit)
    signal.signal(signal.SIGINT, sigexit)
    signal.signal(signal.SIGHUP, sigexit)
    # Resolve the device to its udev path so monitor events can be matched.
    path = subprocess.check_output([ "udevadm", "info", "-q", "path", dev ]).rstrip(b"\n")
    mon = subprocess.Popen([ "stdbuf", "-o", "L", "udevadm", "monitor", "-u", "-s", "block"],
                           stdout=subprocess.PIPE)
    # Emit the initial state, then only write when the info changes.
    old_infos = info(dev)
    sys.stdout.write(json.dumps(old_infos) + "\n")
    sys.stdout.flush()
    while True:
        line = mon.stdout.readline()
        if path in line:
            new_infos = info(dev)
            if new_infos != old_infos:
                sys.stdout.write(json.dumps(new_infos) + "\n")
                sys.stdout.flush()
                old_infos = new_infos
monitor(sys.argv[1])
| moraleslazaro/cockpit | pkg/storaged/luksmeta-monitor-hack.py | Python | lgpl-2.1 | 5,420 |
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import glob
import os.path
import re
class Picard(Package):
    """Picard is a set of command line tools for manipulating high-throughput
    sequencing (HTS) data and formats such as SAM/BAM/CRAM and VCF.
    """
    homepage = "http://broadinstitute.github.io/picard/"
    url = "https://github.com/broadinstitute/picard/releases/download/2.9.2/picard.jar"
    # URL templates for modern (single jar) and legacy (zip) releases.
    _urlfmt = "https://github.com/broadinstitute/picard/releases/download/{0}/picard.jar"
    _oldurlfmt = 'https://github.com/broadinstitute/picard/releases/download/{0}/picard-tools-{0}.zip'
    # They started distributing a single jar file at v2.6.0, prior to
    # that it was a .zip file with multiple .jar and .so files
    version('2.18.3', '181b1b0731fd35f0d8bd44677d8787e9', expand=False)
    version('2.18.0', '20045ff141e4a67512365f0b6bbd8229', expand=False)
    version('2.17.0', '72cc527f1e4ca6a799ae0117af60b54e', expand=False)
    version('2.16.0', 'fed8928b03bb36e355656f349e579083', expand=False)
    version('2.15.0', '3f5751630b1a3449edda47a0712a64e4', expand=False)
    version('2.13.2', '3d7b33fd1f43ad2129e6ec7883af56f5', expand=False)
    version('2.10.0', '96f3c11b1c9be9fc8088bc1b7b9f7538', expand=False)
    version('2.9.4', '5ce72af4d5efd02fba7084dcfbb3c7b3', expand=False)
    version('2.9.3', '3a33c231bcf3a61870c3d44b3b183924', expand=False)
    version('2.9.2', '0449279a6a89830917e8bcef3a976ef7', expand=False)
    version('2.9.0', 'b711d492f16dfe0084d33e684dca2202', expand=False)
    version('2.8.3', '4a181f55d378cd61d0b127a40dfd5016', expand=False)
    version('2.6.0', '91f35f22977d9692ce2718270077dc50', expand=False)
    version('1.140', '308f95516d94c1f3273a4e7e2b315ec2')
    depends_on('java@8:', type='run')
    def install(self, spec, prefix):
        """Copy the release files and install a 'picard' wrapper script."""
        mkdirp(prefix.bin)
        # The list of files to install varies with release...
        # ... but skip the spack-{build.env}.out files.
        files = [x for x in glob.glob("*") if not re.match("^spack-", x)]
        for f in files:
            install(f, prefix.bin)
        # Set up a helper script to call java on the jar file,
        # explicitly codes the path for java and the jar file.
        script_sh = join_path(os.path.dirname(__file__), "picard.sh")
        script = prefix.bin.picard
        install(script_sh, script)
        set_executable(script)
        # Munge the helper script to explicitly point to java and the
        # jar file.
        java = self.spec['java'].prefix.bin.java
        kwargs = {'ignore_absent': False, 'backup': False, 'string': False}
        filter_file('^java', java, script, **kwargs)
        filter_file('picard.jar', join_path(prefix.bin, 'picard.jar'),
                    script, **kwargs)
    def setup_environment(self, spack_env, run_env):
        """The Picard docs suggest setting this as a convenience."""
        run_env.prepend_path('PICARD',
                             join_path(self.prefix, 'bin', 'picard.jar'))
    def url_for_version(self, version):
        """Return the download URL, honoring the pre-2.6.0 zip layout."""
        if version < Version('2.6.0'):
            return self._oldurlfmt.format(version)
        else:
            return self._urlfmt.format(version)
| krafczyk/spack | var/spack/repos/builtin/packages/picard/package.py | Python | lgpl-2.1 | 4,400 |
import os
import sys
import time
from django.db import models
'''
@author: lbergesio,omoya,CarolinaFernandez
@organization: i2CAT, OFELIA FP7
Django RuleTable Model class
'''
#Django is required to run this model
class PolicyRuleTableModel(models.Model):
    """Django persistence model for a pypelib rule table."""
    class Meta:
        """RuleTable model class"""
        app_label = 'pypelib'
        db_table = 'pypelib_RuleTableModel'
    type = models.CharField(max_length = 16, default="") #terminal/non terminal
    uuid = models.CharField(max_length = 512, default="") # uuid
    name = models.TextField(max_length = 120, default="") # name
    # FIXME: set 'name' to 'unique', but that seems only possible with 'CharField'
    #name = models.CharField(max_length = 120, default="", unique=True) # name
    defaultParser = models.CharField(max_length = 64, default="", blank =True, null =True)
    defaultPersistence = models.CharField(max_length = 64, default="", blank =True, null =True)
    # NOTE(review): no default declared for this flag — confirm the intended
    # initial value, as BooleanField defaults vary across Django versions.
    defaultPersistenceFlag = models.BooleanField()
| fp7-ofelia/pypelib | src/pypelib/persistence/backends/django/RuleTableModel.py | Python | lgpl-3.0 | 1,069 |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Common / shared code for handling authentication against OpenStack identity
service (Keystone).
"""
import sys
import datetime
from libcloud.utils.py3 import httplib
from libcloud.utils.iso8601 import parse_date
from libcloud.common.base import ConnectionUserAndKey, Response
from libcloud.compute.types import (LibcloudError, InvalidCredsError,
MalformedResponseError)
# Use simplejson when installed, otherwise fall back to the stdlib module.
try:
    import simplejson as json
except ImportError:
    import json
# Default Keystone auth API version used when none is specified.
AUTH_API_VERSION = '1.1'
# Auth versions which contain token expiration information.
AUTH_VERSIONS_WITH_EXPIRES = [
    '1.1',
    '2.0',
    '2.0_apikey',
    '2.0_password',
    '3.0',
    '3.x_password'
]
# How many seconds to subtract from the auth token expiration time before
# testing if the token is still valid.
# The time is subtracted to account for the HTTP request latency and prevent
# user from getting "InvalidCredsError" if token is about to expire.
AUTH_TOKEN_EXPIRES_GRACE_SECONDS = 5
__all__ = [
    'OpenStackIdentityVersion',
    'OpenStackIdentityDomain',
    'OpenStackIdentityProject',
    'OpenStackIdentityUser',
    'OpenStackIdentityRole',
    'OpenStackServiceCatalog',
    'OpenStackServiceCatalogEntry',
    'OpenStackServiceCatalogEntryEndpoint',
    'OpenStackIdentityEndpointType',
    'OpenStackIdentityConnection',
    'OpenStackIdentity_1_0_Connection',
    'OpenStackIdentity_1_1_Connection',
    'OpenStackIdentity_2_0_Connection',
    'OpenStackIdentity_3_0_Connection',
    'get_class_for_auth_version'
]
class OpenStackIdentityEndpointType(object):
    """
    Enum class for openstack identity endpoint type.
    """
    INTERNAL = 'internal'
    EXTERNAL = 'external'
    ADMIN = 'admin'
class OpenStackIdentityTokenScope(object):
    """
    Enum class for openstack identity token scope.
    """
    PROJECT = 'project'
    DOMAIN = 'domain'
    UNSCOPED = 'unscoped'
class OpenStackIdentityVersion(object):
    """One API version advertised by the identity service."""
    def __init__(self, version, status, updated, url):
        self.version = version
        self.status = status
        self.updated = updated
        self.url = url
    def __repr__(self):
        details = (self.version, self.status, self.updated, self.url)
        return ('<OpenStackIdentityVersion version=%s, status=%s, '
                'updated=%s, url=%s>' % details)
class OpenStackIdentityDomain(object):
    """A domain within the identity service (Keystone v3)."""
    def __init__(self, id, name, enabled):
        self.id = id
        self.name = name
        self.enabled = enabled
    def __repr__(self):
        details = (self.id, self.name, self.enabled)
        return '<OpenStackIdentityDomain id=%s, name=%s, enabled=%s>' % details
class OpenStackIdentityProject(object):
    """A project (aka tenant) registered with the identity service."""
    def __init__(self, id, name, description, enabled, domain_id=None):
        self.id = id
        self.name = name
        self.description = description
        self.enabled = enabled
        self.domain_id = domain_id
    def __repr__(self):
        details = (self.id, self.domain_id, self.name, self.enabled)
        return ('<OpenStackIdentityProject id=%s, domain_id=%s, name=%s, '
                'enabled=%s>' % details)
class OpenStackIdentityRole(object):
    """A role which can be granted to identity service users."""
    def __init__(self, id, name, description, enabled):
        self.id = id
        self.name = name
        self.description = description
        self.enabled = enabled
    def __repr__(self):
        details = (self.id, self.name, self.description, self.enabled)
        return ('<OpenStackIdentityRole id=%s, name=%s, description=%s, '
                'enabled=%s>' % details)
class OpenStackIdentityUser(object):
    """A user account known to the identity service."""
    def __init__(self, id, domain_id, name, email, description, enabled):
        self.id = id
        self.domain_id = domain_id
        self.name = name
        self.email = email
        self.description = description
        self.enabled = enabled
    def __repr__(self):
        details = (self.id, self.domain_id, self.name, self.email,
                   self.enabled)
        return ('<OpenStackIdentityUser id=%s, domain_id=%s, name=%s, '
                'email=%s, enabled=%s>' % details)
class OpenStackServiceCatalog(object):
    """
    http://docs.openstack.org/api/openstack-identity-service/2.0/content/
    This class should be instantiated with the contents of the
    'serviceCatalog' in the auth response. This will do the work of figuring
    out which services actually exist in the catalog as well as split them up
    by type, name, and region if available
    """
    _auth_version = None
    _service_catalog = None
    def __init__(self, service_catalog, auth_version=AUTH_API_VERSION):
        """Parse *service_catalog* with the parser matching *auth_version*."""
        self._auth_version = auth_version
        # Check this way because there are a couple of different 2.0_*
        # auth types.
        if '3.x' in self._auth_version:
            entries = self._parse_service_catalog_auth_v3(
                service_catalog=service_catalog)
        elif '2.0' in self._auth_version:
            entries = self._parse_service_catalog_auth_v2(
                service_catalog=service_catalog)
        elif ('1.1' in self._auth_version) or ('1.0' in self._auth_version):
            entries = self._parse_service_catalog_auth_v1(
                service_catalog=service_catalog)
        else:
            raise LibcloudError('auth version "%s" not supported'
                                % (self._auth_version))
        # Force consistent ordering by sorting the entries
        entries = sorted(entries,
                         key=lambda x: x.service_type + (x.service_name or ''))
        self._entries = entries  # stores all the service catalog entries
    def get_entries(self):
        """
        Return all the entries for this service catalog.
        :rtype: ``list`` of :class:`.OpenStackServiceCatalogEntry`
        """
        return self._entries
    def get_catalog(self):
        """
        Deprecated in the favor of ``get_entries`` method.
        """
        return self.get_entries()
    def get_public_urls(self, service_type=None, name=None):
        """
        Retrieve all the available public (external) URLs for the provided
        service type and name.
        """
        endpoints = self.get_endpoints(service_type=service_type,
                                       name=name)
        result = []
        for endpoint in endpoints:
            endpoint_type = endpoint.endpoint_type
            if endpoint_type == OpenStackIdentityEndpointType.EXTERNAL:
                result.append(endpoint.url)
        return result
    def get_endpoints(self, service_type=None, name=None):
        """
        Retrieve all the endpoints for the provided service type and name.
        :rtype: ``list`` of :class:`.OpenStackServiceCatalogEntryEndpoint`
        """
        endpoints = []
        for entry in self._entries:
            # Note: "if XXX and YYY != XXX" comparison is used to support
            # partial lookups.
            # This allows user to pass in only one argument to the method (only
            # service_type or name), both of them or neither.
            if service_type and entry.service_type != service_type:
                continue
            if name and entry.service_name != name:
                continue
            for endpoint in entry.endpoints:
                endpoints.append(endpoint)
        return endpoints
    def get_endpoint(self, service_type=None, name=None, region=None,
                     endpoint_type=OpenStackIdentityEndpointType.EXTERNAL):
        """
        Retrieve a single endpoint using the provided criteria.
        Note: If no or more than one matching endpoint is found, an exception
        is thrown.
        """
        endpoints = []
        for entry in self._entries:
            if service_type and entry.service_type != service_type:
                continue
            if name and entry.service_name != name:
                continue
            for endpoint in entry.endpoints:
                if region and endpoint.region != region:
                    continue
                if endpoint_type and endpoint.endpoint_type != endpoint_type:
                    continue
                endpoints.append(endpoint)
        # Exactly one endpoint must match the criteria.
        if len(endpoints) == 1:
            return endpoints[0]
        elif len(endpoints) > 1:
            raise ValueError('Found more than 1 matching endpoint')
        else:
            raise LibcloudError('Could not find specified endpoint')
    def get_regions(self, service_type=None):
        """
        Retrieve a list of all the available regions.
        :param service_type: If specified, only return regions for this
                             service type.
        :type service_type: ``str``
        :rtype: ``list`` of ``str``
        """
        regions = set()
        for entry in self._entries:
            if service_type and entry.service_type != service_type:
                continue
            for endpoint in entry.endpoints:
                if endpoint.region:
                    regions.add(endpoint.region)
        return sorted(list(regions))
    def get_service_types(self, region=None):
        """
        Retrieve all the available service types.
        :param region: Optional region to retrieve service types for.
        :type region: ``str``
        :rtype: ``list`` of ``str``
        """
        service_types = set()
        for entry in self._entries:
            include = True
            # Only include a type when every requested-region check passes.
            for endpoint in entry.endpoints:
                if region and endpoint.region != region:
                    include = False
                    break
            if include:
                service_types.add(entry.service_type)
        return sorted(list(service_types))
    def get_service_names(self, service_type=None, region=None):
        """
        Retrieve list of service names that match service type and region.
        :type service_type: ``str``
        :type region: ``str``
        :rtype: ``list`` of ``str``
        """
        names = set()
        # Service names are only available in the 2.0 catalog format.
        if '2.0' not in self._auth_version:
            raise ValueError('Unsupported version: %s' % (self._auth_version))
        for entry in self._entries:
            if service_type and entry.service_type != service_type:
                continue
            include = True
            for endpoint in entry.endpoints:
                if region and endpoint.region != region:
                    include = False
                    break
            if include and entry.service_name:
                names.add(entry.service_name)
        return sorted(list(names))
    def _parse_service_catalog_auth_v1(self, service_catalog):
        """Parse a v1.x catalog: a mapping of service name -> endpoint list."""
        entries = []
        for service, endpoints in service_catalog.items():
            entry_endpoints = []
            for endpoint in endpoints:
                region = endpoint.get('region', None)
                public_url = endpoint.get('publicURL', None)
                private_url = endpoint.get('internalURL', None)
                if public_url:
                    entry_endpoint = OpenStackServiceCatalogEntryEndpoint(
                        region=region, url=public_url,
                        endpoint_type=OpenStackIdentityEndpointType.EXTERNAL)
                    entry_endpoints.append(entry_endpoint)
                if private_url:
                    entry_endpoint = OpenStackServiceCatalogEntryEndpoint(
                        region=region, url=private_url,
                        endpoint_type=OpenStackIdentityEndpointType.INTERNAL)
                    entry_endpoints.append(entry_endpoint)
            entry = OpenStackServiceCatalogEntry(service_type=service,
                                                 endpoints=entry_endpoints)
            entries.append(entry)
        return entries
    def _parse_service_catalog_auth_v2(self, service_catalog):
        """Parse a v2.0 catalog: a list of service dicts with endpoints."""
        entries = []
        for service in service_catalog:
            service_type = service['type']
            service_name = service.get('name', None)
            entry_endpoints = []
            for endpoint in service.get('endpoints', []):
                region = endpoint.get('region', None)
                public_url = endpoint.get('publicURL', None)
                private_url = endpoint.get('internalURL', None)
                if public_url:
                    entry_endpoint = OpenStackServiceCatalogEntryEndpoint(
                        region=region, url=public_url,
                        endpoint_type=OpenStackIdentityEndpointType.EXTERNAL)
                    entry_endpoints.append(entry_endpoint)
                if private_url:
                    entry_endpoint = OpenStackServiceCatalogEntryEndpoint(
                        region=region, url=private_url,
                        endpoint_type=OpenStackIdentityEndpointType.INTERNAL)
                    entry_endpoints.append(entry_endpoint)
            entry = OpenStackServiceCatalogEntry(service_type=service_type,
                                                 endpoints=entry_endpoints,
                                                 service_name=service_name)
            entries.append(entry)
        return entries
    def _parse_service_catalog_auth_v3(self, service_catalog):
        """Parse a v3.x catalog: endpoints carry an 'interface' attribute."""
        entries = []
        for item in service_catalog:
            service_type = item['type']
            service_name = item.get('name', None)
            entry_endpoints = []
            for endpoint in item['endpoints']:
                region = endpoint.get('region', None)
                url = endpoint['url']
                endpoint_type = endpoint['interface']
                # Map the v3 interface names onto our endpoint type enum.
                if endpoint_type == 'internal':
                    endpoint_type = OpenStackIdentityEndpointType.INTERNAL
                elif endpoint_type == 'public':
                    endpoint_type = OpenStackIdentityEndpointType.EXTERNAL
                elif endpoint_type == 'admin':
                    endpoint_type = OpenStackIdentityEndpointType.ADMIN
                entry_endpoint = OpenStackServiceCatalogEntryEndpoint(
                    region=region, url=url, endpoint_type=endpoint_type)
                entry_endpoints.append(entry_endpoint)
            entry = OpenStackServiceCatalogEntry(service_type=service_type,
                                                 service_name=service_name,
                                                 endpoints=entry_endpoints)
            entries.append(entry)
        return entries
class OpenStackServiceCatalogEntry(object):
    def __init__(self, service_type, endpoints=None, service_name=None):
        """
        :param service_type: Service type.
        :type service_type: ``str``
        :param endpoints: Endpoints belonging to this entry.
        :type endpoints: ``list``
        :param service_name: Optional service name.
        :type service_name: ``str``
        """
        self.service_type = service_type
        self.service_name = service_name
        # Keep a deterministic, URL-sorted endpoint list.
        self.endpoints = sorted(endpoints or [], key=lambda ep: ep.url or '')
    def __eq__(self, other):
        return ((self.service_type, self.endpoints, self.service_name) ==
                (other.service_type, other.endpoints, other.service_name))
    def __ne__(self, other):
        return not (self == other)
    def __repr__(self):
        return (('<OpenStackServiceCatalogEntry service_type=%s, '
                 'service_name=%s, endpoints=%s' %
                 (self.service_type, self.service_name, repr(self.endpoints))))
class OpenStackServiceCatalogEntryEndpoint(object):
    VALID_ENDPOINT_TYPES = [
        OpenStackIdentityEndpointType.INTERNAL,
        OpenStackIdentityEndpointType.EXTERNAL,
        OpenStackIdentityEndpointType.ADMIN,
    ]
    def __init__(self, region, url, endpoint_type='external'):
        """
        :param region: Endpoint region.
        :type region: ``str``
        :param url: Endpoint URL.
        :type url: ``str``
        :param endpoint_type: Endpoint type (external / internal / admin).
        :type endpoint_type: ``str``
        """
        if endpoint_type not in self.VALID_ENDPOINT_TYPES:
            raise ValueError('Invalid type: %s' % (endpoint_type))
        # TODO: Normalize / lowercase all the region names
        self.region = region
        self.url = url
        self.endpoint_type = endpoint_type
    def __eq__(self, other):
        return ((self.region, self.url, self.endpoint_type) ==
                (other.region, other.url, other.endpoint_type))
    def __ne__(self, other):
        return not (self == other)
    def __repr__(self):
        return (('<OpenStackServiceCatalogEntryEndpoint region=%s, url=%s, '
                 'type=%s' % (self.region, self.url, self.endpoint_type)))
class OpenStackAuthResponse(Response):
    """
    Response class for requests against the identity (auth) API.

    Auth error statuses (401, 500) are treated as "successful" at the HTTP
    layer so the driver can inspect the body and raise a precise error.
    """
    def success(self):
        return self.status in [httplib.OK, httplib.CREATED,
                               httplib.ACCEPTED, httplib.NO_CONTENT,
                               httplib.MULTIPLE_CHOICES,
                               httplib.UNAUTHORIZED,
                               httplib.INTERNAL_SERVER_ERROR]
    def parse_body(self):
        """Parse the body based on the response Content-Type.

        :return: Parsed JSON for ``application/json`` responses, otherwise
                 the raw body; ``None`` when the body is empty.
        :raises LibcloudError: If no content-type header is present.
        :raises MalformedResponseError: If a JSON body cannot be parsed.
        """
        if not self.body:
            return None
        # Header names may arrive in either case depending on transport.
        if 'content-type' in self.headers:
            key = 'content-type'
        elif 'Content-Type' in self.headers:
            key = 'Content-Type'
        else:
            raise LibcloudError('Missing content-type header',
                                driver=OpenStackIdentityConnection)
        content_type = self.headers[key]
        # Strip parameters, e.g. "application/json; charset=UTF-8".
        if content_type.find(';') != -1:
            content_type = content_type.split(';')[0]
        if content_type == 'application/json':
            try:
                data = json.loads(self.body)
            except Exception:
                # Was a bare "except:", which would also swallow
                # SystemExit / KeyboardInterrupt; narrowed to Exception.
                driver = OpenStackIdentityConnection
                raise MalformedResponseError('Failed to parse JSON',
                                             body=self.body,
                                             driver=driver)
        elif content_type == 'text/plain':
            data = self.body
        else:
            data = self.body
        return data
class OpenStackIdentityConnection(ConnectionUserAndKey):
"""
Base identity connection class which contains common / shared logic.
Note: This class shouldn't be instantiated directly.
"""
responseCls = OpenStackAuthResponse
timeout = None
auth_version = None
def __init__(self, auth_url, user_id, key, tenant_name=None,
domain_name='Default',
token_scope=OpenStackIdentityTokenScope.PROJECT,
timeout=None, parent_conn=None):
super(OpenStackIdentityConnection, self).__init__(user_id=user_id,
key=key,
url=auth_url,
timeout=timeout)
self.parent_conn = parent_conn
# enable tests to use the same mock connection classes.
if parent_conn:
self.conn_classes = parent_conn.conn_classes
self.driver = parent_conn.driver
else:
self.driver = None
self.auth_url = auth_url
self.tenant_name = tenant_name
self.domain_name = domain_name
self.token_scope = token_scope
self.timeout = timeout
self.urls = {}
self.auth_token = None
self.auth_token_expires = None
self.auth_user_info = None
def authenticated_request(self, action, params=None, data=None,
headers=None, method='GET', raw=False):
"""
Perform an authenticated request against the identity API.
"""
if not self.auth_token:
raise ValueError('Not to be authenticated to perform this request')
headers = headers or {}
headers['X-Auth-Token'] = self.auth_token
return self.request(action=action, params=params, data=data,
headers=headers, method=method, raw=raw)
def morph_action_hook(self, action):
(_, _, _, request_path) = self._tuple_from_url(self.auth_url)
if request_path == '':
# No path is provided in the auth_url, use action passed to this
# method.
return action
return request_path
def add_default_headers(self, headers):
headers['Accept'] = 'application/json'
headers['Content-Type'] = 'application/json; charset=UTF-8'
return headers
def is_token_valid(self):
"""
Return True if the current auth token is already cached and hasn't
expired yet.
:return: ``True`` if the token is still valid, ``False`` otherwise.
:rtype: ``bool``
"""
if not self.auth_token:
return False
if not self.auth_token_expires:
return False
expires = self.auth_token_expires - \
datetime.timedelta(seconds=AUTH_TOKEN_EXPIRES_GRACE_SECONDS)
time_tuple_expires = expires.utctimetuple()
time_tuple_now = datetime.datetime.utcnow().utctimetuple()
if time_tuple_now < time_tuple_expires:
return True
return False
def authenticate(self, force=False):
"""
Authenticate against the identity API.
:param force: Forcefully update the token even if it's already cached
and still valid.
:type force: ``bool``
"""
raise NotImplementedError('authenticate not implemented')
def list_supported_versions(self):
"""
Retrieve a list of all the identity versions which are supported by
this installation.
:rtype: ``list`` of :class:`.OpenStackIdentityVersion`
"""
response = self.request('/', method='GET')
result = self._to_versions(data=response.object['versions']['values'])
result = sorted(result, key=lambda x: x.version)
return result
def _to_versions(self, data):
result = []
for item in data:
version = self._to_version(data=item)
result.append(version)
return result
def _to_version(self, data):
try:
updated = parse_date(data['updated'])
except Exception:
updated = None
try:
url = data['links'][0]['href']
except IndexError:
url = None
version = OpenStackIdentityVersion(version=data['id'],
status=data['status'],
updated=updated,
url=url)
return version
def _is_authentication_needed(self, force=False):
"""
Determine if the authentication is needed or if the existing token (if
any exists) is still valid.
"""
if force:
return True
if self.auth_version not in AUTH_VERSIONS_WITH_EXPIRES:
return True
if self.is_token_valid():
return False
return True
def _to_projects(self, data):
result = []
for item in data:
project = self._to_project(data=item)
result.append(project)
return result
    def _to_project(self, data):
        # Build a single OpenStackIdentityProject from a raw API dict.
        # NOTE(review): 'id', 'name', 'description' and 'enabled' are
        # assumed to always be present in the payload (a missing key would
        # raise KeyError); only 'domain_id' is treated as optional.
        project = OpenStackIdentityProject(id=data['id'],
                                           name=data['name'],
                                           description=data['description'],
                                           enabled=data['enabled'],
                                           domain_id=data.get('domain_id',
                                                              None))
        return project
class OpenStackIdentity_1_0_Connection(OpenStackIdentityConnection):
    """
    Connection class for Keystone API v1.0.
    """
    responseCls = OpenStackAuthResponse
    name = 'OpenStack Identity API v1.0'
    auth_version = '1.0'

    def authenticate(self, force=False):
        """
        Authenticate using the v1.0 header based handshake
        (X-Auth-User / X-Auth-Key request headers).

        :param force: Re-authenticate even if a valid token is cached.
        :type force: ``bool``

        :raises InvalidCredsError: On HTTP 401.
        :raises MalformedResponseError: On any other unexpected status or
            when the response lacks an X-Auth-Token header.
        """
        if not self._is_authentication_needed(force=force):
            return self

        headers = {
            'X-Auth-User': self.user_id,
            'X-Auth-Key': self.key,
        }

        resp = self.request('/v1.0', headers=headers, method='GET')

        if resp.status == httplib.UNAUTHORIZED:
            # HTTP UNAUTHORIZED (401): auth failed
            raise InvalidCredsError()
        elif resp.status not in [httplib.NO_CONTENT, httplib.OK]:
            body = 'code: %s body:%s headers:%s' % (resp.status,
                                                    resp.body,
                                                    resp.headers)
            raise MalformedResponseError('Malformed response', body=body,
                                         driver=self.driver)
        else:
            headers = resp.headers
            # emulate the auth 1.1 URL list
            self.urls = {}
            self.urls['cloudServers'] = \
                [{'publicURL': headers.get('x-server-management-url', None)}]
            self.urls['cloudFilesCDN'] = \
                [{'publicURL': headers.get('x-cdn-management-url', None)}]
            self.urls['cloudFiles'] = \
                [{'publicURL': headers.get('x-storage-url', None)}]
            self.auth_token = headers.get('x-auth-token', None)
            self.auth_user_info = None

            if not self.auth_token:
                # Fixed: the message used a backslash line continuation
                # *inside* the string literal, embedding a long run of
                # spaces in the error text. Adjacent literals concatenate
                # cleanly instead.
                raise MalformedResponseError('Missing X-Auth-Token in '
                                             'response headers')

        return self
class OpenStackIdentity_1_1_Connection(OpenStackIdentityConnection):
    """
    Connection class for Keystone API v1.1.
    """
    responseCls = OpenStackAuthResponse
    name = 'OpenStack Identity API v1.1'
    auth_version = '1.1'

    def authenticate(self, force=False):
        """
        Authenticate using the v1.1 JSON credential document
        (username / key).

        :param force: Re-authenticate even if a valid token is cached.
        :type force: ``bool``

        :raises InvalidCredsError: On HTTP 401.
        :raises MalformedResponseError: When the response is not valid
            JSON or lacks the expected auth elements.
        """
        if not self._is_authentication_needed(force=force):
            return self

        reqbody = json.dumps({'credentials': {'username': self.user_id,
                                              'key': self.key}})
        resp = self.request('/v1.1/auth', data=reqbody, headers={},
                            method='POST')

        if resp.status == httplib.UNAUTHORIZED:
            # HTTP UNAUTHORIZED (401): auth failed
            raise InvalidCredsError()
        elif resp.status != httplib.OK:
            body = 'code: %s body:%s' % (resp.status, resp.body)
            raise MalformedResponseError('Malformed response', body=body,
                                         driver=self.driver)
        else:
            try:
                body = json.loads(resp.body)
            except Exception:
                e = sys.exc_info()[1]
                raise MalformedResponseError('Failed to parse JSON', e)

            try:
                expires = body['auth']['token']['expires']

                self.auth_token = body['auth']['token']['id']
                self.auth_token_expires = parse_date(expires)
                self.urls = body['auth']['serviceCatalog']
                self.auth_user_info = None
            except KeyError:
                e = sys.exc_info()[1]
                # Fixed: single, implicitly concatenated string literal -
                # the old backslash continuation embedded stray whitespace
                # in the error message.
                raise MalformedResponseError('Auth JSON response is missing '
                                             'required elements', e)

        return self
class OpenStackIdentity_2_0_Connection(OpenStackIdentityConnection):
    """
    Connection class for Keystone API v2.0.
    """
    responseCls = OpenStackAuthResponse
    # Fixed: previously read 'OpenStack Identity API v1.0' - a copy/paste
    # error from the 1.0 connection class.
    name = 'OpenStack Identity API v2.0'
    auth_version = '2.0'

    def authenticate(self, auth_type='api_key', force=False):
        """
        Authenticate against the v2.0 API.

        :param auth_type: Credential style to use - 'api_key' (RAX-KSKEY
                          extension) or 'password' (core Keystone).
        :type auth_type: ``str``

        :param force: Re-authenticate even if a valid token is cached.
        :type force: ``bool``

        :raises ValueError: On an unrecognized ``auth_type``.
        """
        if not self._is_authentication_needed(force=force):
            return self

        if auth_type == 'api_key':
            return self._authenticate_2_0_with_api_key()
        elif auth_type == 'password':
            return self._authenticate_2_0_with_password()
        else:
            raise ValueError('Invalid value for auth_type argument')

    def _authenticate_2_0_with_api_key(self):
        # API Key based authentication uses the RAX-KSKEY extension.
        # http://s.apache.org/oAi
        data = {'auth':
                {'RAX-KSKEY:apiKeyCredentials':
                 {'username': self.user_id, 'apiKey': self.key}}}
        if self.tenant_name:
            data['auth']['tenantName'] = self.tenant_name
        reqbody = json.dumps(data)
        return self._authenticate_2_0_with_body(reqbody)

    def _authenticate_2_0_with_password(self):
        # Password based authentication is the only 'core' authentication
        # method in Keystone at this time.
        # 'keystone' - http://s.apache.org/e8h
        data = {'auth':
                {'passwordCredentials':
                 {'username': self.user_id, 'password': self.key}}}
        if self.tenant_name:
            data['auth']['tenantName'] = self.tenant_name
        reqbody = json.dumps(data)
        return self._authenticate_2_0_with_body(reqbody)

    def _authenticate_2_0_with_body(self, reqbody):
        # POST the prepared credential document and cache the resulting
        # token, service catalog and user info on success.
        resp = self.request('/v2.0/tokens', data=reqbody,
                            headers={'Content-Type': 'application/json'},
                            method='POST')

        if resp.status == httplib.UNAUTHORIZED:
            raise InvalidCredsError()
        elif resp.status not in [httplib.OK,
                                 httplib.NON_AUTHORITATIVE_INFORMATION]:
            body = 'code: %s body: %s' % (resp.status, resp.body)
            raise MalformedResponseError('Malformed response', body=body,
                                         driver=self.driver)
        else:
            body = resp.object

            try:
                access = body['access']
                expires = access['token']['expires']

                self.auth_token = access['token']['id']
                self.auth_token_expires = parse_date(expires)
                self.urls = access['serviceCatalog']
                self.auth_user_info = access.get('user', {})
            except KeyError:
                e = sys.exc_info()[1]
                # Fixed: single, implicitly concatenated string literal -
                # the old backslash continuation embedded stray whitespace
                # in the error message.
                raise MalformedResponseError('Auth JSON response is missing '
                                             'required elements', e)

        return self

    def list_projects(self):
        """
        List the projects (tenants) available to the authenticated user.

        :rtype: ``list`` of :class:`.OpenStackIdentityProject`
        """
        response = self.authenticated_request('/v2.0/tenants', method='GET')
        result = self._to_projects(data=response.object['tenants'])
        return result

    def list_tenants(self):
        # Backward-compatible alias - "tenant" is the v2.0 era name for
        # "project".
        return self.list_projects()
class OpenStackIdentity_3_0_Connection(OpenStackIdentityConnection):
    """
    Connection class for Keystone API v3.x.
    """
    responseCls = OpenStackAuthResponse
    name = 'OpenStack Identity API v3.x'
    auth_version = '3.0'

    # Token scopes this connection knows how to request.
    VALID_TOKEN_SCOPES = [
        OpenStackIdentityTokenScope.PROJECT,
        OpenStackIdentityTokenScope.DOMAIN,
        OpenStackIdentityTokenScope.UNSCOPED
    ]

    def __init__(self, auth_url, user_id, key, tenant_name=None,
                 domain_name='Default',
                 token_scope=OpenStackIdentityTokenScope.PROJECT,
                 timeout=None, parent_conn=None):
        """
        :param tenant_name: Name of the project this user belongs to. Note:
                            When token_scope is set to project, this
                            argument controls which project the token is
                            scoped to.
        :type tenant_name: ``str``

        :param domain_name: Domain the user belongs to. Note: When
                            token_scope is set to domain, this argument
                            controls which domain the token is scoped to.
        :type domain_name: ``str``

        :param token_scope: Whether to scope a token to a "project", a
                            "domain" or "unscoped".
        :type token_scope: ``str``

        :raises ValueError: If token_scope is invalid or the arguments
            required by the selected scope are missing.
        """
        super(OpenStackIdentity_3_0_Connection,
              self).__init__(auth_url=auth_url,
                             user_id=user_id,
                             key=key,
                             tenant_name=tenant_name,
                             domain_name=domain_name,
                             token_scope=token_scope,
                             timeout=timeout,
                             parent_conn=parent_conn)
        if self.token_scope not in self.VALID_TOKEN_SCOPES:
            raise ValueError('Invalid value for "token_scope" argument: %s' %
                             (self.token_scope))

        if (self.token_scope == OpenStackIdentityTokenScope.PROJECT and
                (not self.tenant_name or not self.domain_name)):
            raise ValueError('Must provide tenant_name and domain_name '
                             'argument')
        elif (self.token_scope == OpenStackIdentityTokenScope.DOMAIN and
                not self.domain_name):
            raise ValueError('Must provide domain_name argument')

        self.auth_user_roles = None

    def authenticate(self, force=False):
        """
        Perform authentication against the v3 tokens endpoint.

        :param force: Re-authenticate even if a valid token is cached.
        :type force: ``bool``

        :raises InvalidCredsError: On HTTP 401.
        :raises MalformedResponseError: On an unexpected status code or a
            response missing the required auth elements.
        """
        if not self._is_authentication_needed(force=force):
            return self

        data = {
            'auth': {
                'identity': {
                    'methods': ['password'],
                    'password': {
                        'user': {
                            'domain': {
                                'name': self.domain_name
                            },
                            'name': self.user_id,
                            'password': self.key
                        }
                    }
                }
            }
        }

        if self.token_scope == OpenStackIdentityTokenScope.PROJECT:
            # Scope token to project (tenant)
            data['auth']['scope'] = {
                'project': {
                    'domain': {
                        'name': self.domain_name
                    },
                    'name': self.tenant_name
                }
            }
        elif self.token_scope == OpenStackIdentityTokenScope.DOMAIN:
            # Scope token to domain
            data['auth']['scope'] = {
                'domain': {
                    'name': self.domain_name
                }
            }
        elif self.token_scope == OpenStackIdentityTokenScope.UNSCOPED:
            pass
        else:
            raise ValueError('Token needs to be scoped either to project or '
                             'a domain')

        data = json.dumps(data)
        response = self.request('/v3/auth/tokens', data=data,
                                headers={'Content-Type': 'application/json'},
                                method='POST')

        if response.status == httplib.UNAUTHORIZED:
            # Invalid credentials
            raise InvalidCredsError()
        elif response.status in [httplib.OK, httplib.CREATED]:
            headers = response.headers

            try:
                body = json.loads(response.body)
            except Exception:
                e = sys.exc_info()[1]
                raise MalformedResponseError('Failed to parse JSON', e)

            try:
                roles = self._to_roles(body['token']['roles'])
            except Exception:
                # Roles are optional - fall back to an empty list instead
                # of failing the whole authentication. (Removed a dead
                # "e = sys.exc_info()[1]" assignment here.)
                roles = []

            try:
                expires = body['token']['expires_at']

                self.auth_token = headers['x-subject-token']
                self.auth_token_expires = parse_date(expires)
                # Note: catalog is not returned for unscoped tokens
                self.urls = body['token'].get('catalog', None)
                self.auth_user_info = None
                self.auth_user_roles = roles
            except KeyError:
                e = sys.exc_info()[1]
                raise MalformedResponseError('Auth JSON response is missing '
                                             'required elements', e)
        else:
            # Fixed: "body" used to be assigned at the tail of the success
            # branch above, so this raise referenced an undefined name and
            # crashed with NameError whenever the server returned an
            # unexpected status code.
            body = 'code: %s body:%s' % (response.status, response.body)
            raise MalformedResponseError('Malformed response', body=body,
                                         driver=self.driver)

        return self

    def list_domains(self):
        """
        List the available domains.

        :rtype: ``list`` of :class:`OpenStackIdentityDomain`
        """
        response = self.authenticated_request('/v3/domains', method='GET')
        result = self._to_domains(data=response.object['domains'])
        return result

    def list_projects(self):
        """
        List the available projects.

        Note: To perform this action, the user you are currently
        authenticated with needs to be an admin.

        :rtype: ``list`` of :class:`OpenStackIdentityProject`
        """
        response = self.authenticated_request('/v3/projects', method='GET')
        result = self._to_projects(data=response.object['projects'])
        return result

    def list_users(self):
        """
        List the available users.

        :rtype: ``list`` of :class:`.OpenStackIdentityUser`
        """
        response = self.authenticated_request('/v3/users', method='GET')
        result = self._to_users(data=response.object['users'])
        return result

    def list_roles(self):
        """
        List the available roles.

        :rtype: ``list`` of :class:`.OpenStackIdentityRole`
        """
        response = self.authenticated_request('/v3/roles', method='GET')
        result = self._to_roles(data=response.object['roles'])
        return result

    def get_domain(self, domain_id):
        """
        Retrieve information about a single domain.

        :param domain_id: ID of domain to retrieve information for.
        :type domain_id: ``str``

        :rtype: :class:`.OpenStackIdentityDomain`
        """
        response = self.authenticated_request('/v3/domains/%s' % (domain_id),
                                              method='GET')
        result = self._to_domain(data=response.object['domain'])
        return result

    def list_user_projects(self, user):
        """
        Retrieve all the projects user belongs to.

        :rtype: ``list`` of :class:`.OpenStackIdentityProject`
        """
        path = '/v3/users/%s/projects' % (user.id)
        response = self.authenticated_request(path, method='GET')
        result = self._to_projects(data=response.object['projects'])
        return result

    def list_user_domain_roles(self, domain, user):
        """
        Retrieve all the roles for a particular user on a domain.

        :rtype: ``list`` of :class:`.OpenStackIdentityRole`
        """
        # TODO: Also add "get users roles" and "get assginements" which are
        # available in 3.1 and 3.3
        path = '/v3/domains/%s/users/%s/roles' % (domain.id, user.id)
        response = self.authenticated_request(path, method='GET')
        result = self._to_roles(data=response.object['roles'])
        return result

    def grant_domain_role_to_user(self, domain, role, user):
        """
        Grant domain role to a user.

        Note: This function appears to be idempotent.

        :param domain: Domain to grant the role to.
        :type domain: :class:`.OpenStackIdentityDomain`

        :param role: Role to grant.
        :type role: :class:`.OpenStackIdentityRole`

        :param user: User to grant the role to.
        :type user: :class:`.OpenStackIdentityUser`

        :return: ``True`` on success.
        :rtype: ``bool``
        """
        path = ('/v3/domains/%s/users/%s/roles/%s' %
                (domain.id, user.id, role.id))
        response = self.authenticated_request(path, method='PUT')
        return response.status == httplib.NO_CONTENT

    def revoke_domain_role_from_user(self, domain, user, role):
        """
        Revoke domain role from a user.

        :param domain: Domain to revoke the role from.
        :type domain: :class:`.OpenStackIdentityDomain`

        :param role: Role to revoke.
        :type role: :class:`.OpenStackIdentityRole`

        :param user: User to revoke the role from.
        :type user: :class:`.OpenStackIdentityUser`

        :return: ``True`` on success.
        :rtype: ``bool``
        """
        path = ('/v3/domains/%s/users/%s/roles/%s' %
                (domain.id, user.id, role.id))
        response = self.authenticated_request(path, method='DELETE')
        return response.status == httplib.NO_CONTENT

    def grant_project_role_to_user(self, project, role, user):
        """
        Grant project role to a user.

        Note: This function appears to be idempotent.

        :param project: Project to grant the role to.
        :type project: :class:`.OpenStackIdentityDomain`

        :param role: Role to grant.
        :type role: :class:`.OpenStackIdentityRole`

        :param user: User to grant the role to.
        :type user: :class:`.OpenStackIdentityUser`

        :return: ``True`` on success.
        :rtype: ``bool``
        """
        path = ('/v3/projects/%s/users/%s/roles/%s' %
                (project.id, user.id, role.id))
        response = self.authenticated_request(path, method='PUT')
        return response.status == httplib.NO_CONTENT

    def revoke_project_role_from_user(self, project, role, user):
        """
        Revoke project role from a user.

        :param project: Project to revoke the role from.
        :type project: :class:`.OpenStackIdentityDomain`

        :param role: Role to revoke.
        :type role: :class:`.OpenStackIdentityRole`

        :param user: User to revoke the role from.
        :type user: :class:`.OpenStackIdentityUser`

        :return: ``True`` on success.
        :rtype: ``bool``
        """
        path = ('/v3/projects/%s/users/%s/roles/%s' %
                (project.id, user.id, role.id))
        response = self.authenticated_request(path, method='DELETE')
        return response.status == httplib.NO_CONTENT

    def create_user(self, email, password, name, description=None,
                    domain_id=None, default_project_id=None, enabled=True):
        """
        Create a new user account.

        :param email: User's mail address.
        :type email: ``str``

        :param password: User's password.
        :type password: ``str``

        :param name: User's name.
        :type name: ``str``

        :param description: Optional description.
        :type description: ``str``

        :param domain_id: ID of the domain to add the user to (optional).
        :type domain_id: ``str``

        :param default_project_id: ID of the default user project (optional).
        :type default_project_id: ``str``

        :param enabled: True to enable user after creation.
        :type enabled: ``bool``

        :return: Created user.
        :rtype: :class:`.OpenStackIdentityUser`
        """
        data = {
            'email': email,
            'password': password,
            'name': name,
            'enabled': enabled
        }

        # Only include the optional attributes the caller provided.
        if description:
            data['description'] = description

        if domain_id:
            data['domain_id'] = domain_id

        if default_project_id:
            data['default_project_id'] = default_project_id

        data = json.dumps({'user': data})
        response = self.authenticated_request('/v3/users', data=data,
                                              method='POST')

        user = self._to_user(data=response.object['user'])
        return user

    def enable_user(self, user):
        """
        Enable user account.

        Note: This operation appears to be idempotent.

        :param user: User to enable.
        :type user: :class:`.OpenStackIdentityUser`

        :return: User account which has been enabled.
        :rtype: :class:`.OpenStackIdentityUser`
        """
        data = {
            'enabled': True
        }
        data = json.dumps({'user': data})
        response = self.authenticated_request('/v3/users/%s' % (user.id),
                                              data=data,
                                              method='PATCH')

        user = self._to_user(data=response.object['user'])
        return user

    def disable_user(self, user):
        """
        Disable user account.

        Note: This operation appears to be idempotent.

        :param user: User to disable.
        :type user: :class:`.OpenStackIdentityUser`

        :return: User account which has been disabled.
        :rtype: :class:`.OpenStackIdentityUser`
        """
        data = {
            'enabled': False
        }
        data = json.dumps({'user': data})
        response = self.authenticated_request('/v3/users/%s' % (user.id),
                                              data=data,
                                              method='PATCH')

        user = self._to_user(data=response.object['user'])
        return user

    def _to_domains(self, data):
        # Convert a list of raw domain dicts into domain objects.
        result = []
        for item in data:
            domain = self._to_domain(data=item)
            result.append(domain)

        return result

    def _to_domain(self, data):
        domain = OpenStackIdentityDomain(id=data['id'],
                                         name=data['name'],
                                         enabled=data['enabled'])
        return domain

    def _to_users(self, data):
        # Convert a list of raw user dicts into user objects.
        result = []
        for item in data:
            user = self._to_user(data=item)
            result.append(user)

        return result

    def _to_user(self, data):
        user = OpenStackIdentityUser(id=data['id'],
                                     domain_id=data['domain_id'],
                                     name=data['name'],
                                     email=data['email'],
                                     description=data.get('description',
                                                          None),
                                     enabled=data['enabled'])
        return user

    def _to_roles(self, data):
        # Convert a list of raw role dicts into role objects.
        # (Renamed the misleading loop variable "user" -> "role".)
        result = []
        for item in data:
            role = self._to_role(data=item)
            result.append(role)

        return result

    def _to_role(self, data):
        role = OpenStackIdentityRole(id=data['id'],
                                     name=data['name'],
                                     description=data.get('description',
                                                          None),
                                     enabled=data.get('enabled', True))
        return role
def get_class_for_auth_version(auth_version):
    """
    Retrieve class for the provided auth version.
    """
    # Dispatch table - note that both "2.0" variants map to the same class.
    version_to_class = {
        '1.0': OpenStackIdentity_1_0_Connection,
        '1.1': OpenStackIdentity_1_1_Connection,
        '2.0': OpenStackIdentity_2_0_Connection,
        '2.0_apikey': OpenStackIdentity_2_0_Connection,
        '2.0_password': OpenStackIdentity_2_0_Connection,
        '3.x_password': OpenStackIdentity_3_0_Connection,
    }

    if auth_version not in version_to_class:
        raise LibcloudError('Unsupported Auth Version requested')

    return version_to_class[auth_version]
| wrigri/libcloud | libcloud/common/openstack_identity.py | Python | apache-2.0 | 48,080 |
# -*- coding: utf-8 -*-
#
# Copyright 2017 Ricequant, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from rqalpha.utils.logger import system_log
def import_mod(mod_name):
    """Import and return the module named *mod_name*.

    Failures are logged prominently before the original exception is
    re-raised, so the caller still sees the real error.
    """
    try:
        from importlib import import_module
        return import_module(mod_name)
    except Exception as e:
        banner = "*" * 30
        system_log.error(banner)
        system_log.error("Mod Import Error: {}, error: {}", mod_name, e)
        system_log.error(banner)
        raise
| xclxxl414/rqalpha | rqalpha/utils/package_helper.py | Python | apache-2.0 | 953 |
# Copyright (c) 2012 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from neutron_lib import constants as p_const
# Special vlan_tci value indicating flat network
FLAT_VLAN_TCI = '0x0000/0x1fff'
# Topic for tunnel notifications between the plugin and agent
TUNNEL = 'tunnel'
# Name prefixes for veth device or patch port pair linking the integration
# bridge with the physical bridge for a physical network
PEER_INTEGRATION_PREFIX = 'int-'
PEER_PHYSICAL_PREFIX = 'phy-'
# Nonexistent peer used to create patch ports without associating them, it
# allows to define flows before association
NONEXISTENT_PEER = 'nonexistent-peer'
# The different types of tunnels
TUNNEL_NETWORK_TYPES = [p_const.TYPE_GRE, p_const.TYPE_VXLAN,
                        p_const.TYPE_GENEVE]
# --- OpenFlow table IDs
# --- Integration bridge (int_br)
LOCAL_SWITCHING = 0
# The physical network types that support DVR routers
# (fixed typo: "pyhsical" -> "physical")
DVR_PHYSICAL_NETWORK_TYPES = [p_const.TYPE_VLAN, p_const.TYPE_FLAT]
# Various tables for DVR use of integration bridge flows
DVR_TO_SRC_MAC = 1
DVR_TO_SRC_MAC_PHYSICAL = 2
ARP_DVR_MAC_TO_DST_MAC = 3
ARP_DVR_MAC_TO_DST_MAC_PHYSICAL = 4
# Flows in this table disappear when OVS restarts; used to detect restarts.
# NOTE(review): inferred from the name - confirm against the agent code.
CANARY_TABLE = 23
# Table for ARP poison/spoofing prevention rules
ARP_SPOOF_TABLE = 24
# Table for MAC spoof filtering
MAC_SPOOF_TABLE = 25
LOCAL_EGRESS_TABLE = 30
LOCAL_IP_TABLE = 31
# packet rate limit table
PACKET_RATE_LIMIT = 59
# Table to decide whether further filtering is needed
TRANSIENT_TABLE = 60
LOCAL_MAC_DIRECT = 61
TRANSIENT_EGRESS_TABLE = 62
# Table for DHCP
DHCP_IPV4_TABLE = 77
DHCP_IPV6_TABLE = 78
# Tables used for ovs firewall
BASE_EGRESS_TABLE = 71
RULES_EGRESS_TABLE = 72
ACCEPT_OR_INGRESS_TABLE = 73
BASE_INGRESS_TABLE = 81
RULES_INGRESS_TABLE = 82
OVS_FIREWALL_TABLES = (
    BASE_EGRESS_TABLE,
    RULES_EGRESS_TABLE,
    ACCEPT_OR_INGRESS_TABLE,
    BASE_INGRESS_TABLE,
    RULES_INGRESS_TABLE,
)
# Tables for parties interacting with ovs firewall
ACCEPTED_EGRESS_TRAFFIC_TABLE = 91
ACCEPTED_INGRESS_TRAFFIC_TABLE = 92
DROPPED_TRAFFIC_TABLE = 93
ACCEPTED_EGRESS_TRAFFIC_NORMAL_TABLE = 94
# Every table present on the integration bridge.
INT_BR_ALL_TABLES = (
    LOCAL_SWITCHING,
    DVR_TO_SRC_MAC,
    DVR_TO_SRC_MAC_PHYSICAL,
    CANARY_TABLE,
    ARP_SPOOF_TABLE,
    MAC_SPOOF_TABLE,
    LOCAL_MAC_DIRECT,
    LOCAL_EGRESS_TABLE,
    LOCAL_IP_TABLE,
    PACKET_RATE_LIMIT,
    TRANSIENT_TABLE,
    TRANSIENT_EGRESS_TABLE,
    BASE_EGRESS_TABLE,
    RULES_EGRESS_TABLE,
    ACCEPT_OR_INGRESS_TABLE,
    DHCP_IPV4_TABLE,
    DHCP_IPV6_TABLE,
    BASE_INGRESS_TABLE,
    RULES_INGRESS_TABLE,
    ACCEPTED_EGRESS_TRAFFIC_TABLE,
    ACCEPTED_INGRESS_TRAFFIC_TABLE,
    DROPPED_TRAFFIC_TABLE)
# --- Tunnel bridge (tun_br)
# Various tables for tunneling flows
DVR_PROCESS = 1
PATCH_LV_TO_TUN = 2
GRE_TUN_TO_LV = 3
VXLAN_TUN_TO_LV = 4
GENEVE_TUN_TO_LV = 6
DVR_NOT_LEARN = 9
LEARN_FROM_TUN = 10
UCAST_TO_TUN = 20
ARP_RESPONDER = 21
FLOOD_TO_TUN = 22
# NOTE(vsaienko): transit table used by networking-bagpipe driver to
# mirror traffic to EVPN and standard tunnels to gateway nodes
BAGPIPE_FLOOD_TO_TUN_BROADCAST = 222
# Every table present on the tunnel bridge.
TUN_BR_ALL_TABLES = (
    LOCAL_SWITCHING,
    DVR_PROCESS,
    PATCH_LV_TO_TUN,
    GRE_TUN_TO_LV,
    VXLAN_TUN_TO_LV,
    GENEVE_TUN_TO_LV,
    DVR_NOT_LEARN,
    LEARN_FROM_TUN,
    UCAST_TO_TUN,
    ARP_RESPONDER,
    FLOOD_TO_TUN)
# --- Physical Bridges (phys_brs)
# Various tables for DVR use of physical bridge flows
DVR_PROCESS_PHYSICAL = 1
LOCAL_VLAN_TRANSLATION = 2
DVR_NOT_LEARN_PHYSICAL = 3
# Every table present on the physical bridges.
PHY_BR_ALL_TABLES = (
    LOCAL_SWITCHING,
    DVR_PROCESS_PHYSICAL,
    LOCAL_VLAN_TRANSLATION,
    DVR_NOT_LEARN_PHYSICAL)
# --- end of OpenFlow table IDs
# type for ARP reply in ARP header
ARP_REPLY = '0x2'
# Map tunnel types to tables number
TUN_TABLE = {p_const.TYPE_GRE: GRE_TUN_TO_LV,
             p_const.TYPE_VXLAN: VXLAN_TUN_TO_LV,
             p_const.TYPE_GENEVE: GENEVE_TUN_TO_LV}
# The default respawn interval for the ovsdb monitor
DEFAULT_OVSDBMON_RESPAWN = 30
# Represent invalid OF Port
OFPORT_INVALID = -1
# Action template for an OpenFlow ARP responder flow; %(mac)s / %(mac)#x /
# %(ip)#x are filled in by the agent when the flow is installed.
ARP_RESPONDER_ACTIONS = ('move:NXM_OF_ETH_SRC[]->NXM_OF_ETH_DST[],'
                         'mod_dl_src:%(mac)s,'
                         'load:0x2->NXM_OF_ARP_OP[],'
                         'move:NXM_NX_ARP_SHA[]->NXM_NX_ARP_THA[],'
                         'move:NXM_OF_ARP_SPA[]->NXM_OF_ARP_TPA[],'
                         'load:%(mac)#x->NXM_NX_ARP_SHA[],'
                         'load:%(ip)#x->NXM_OF_ARP_SPA[],'
                         'in_port')
# Represent ovs status
OVS_RESTARTED = 0
OVS_NORMAL = 1
OVS_DEAD = 2
EXTENSION_DRIVER_TYPE = 'ovs'
# ovs datapath types
OVS_DATAPATH_SYSTEM = 'system'
OVS_DATAPATH_NETDEV = 'netdev'
OVS_DPDK_VHOST_USER = 'dpdkvhostuser'
OVS_DPDK_VHOST_USER_CLIENT = 'dpdkvhostuserclient'
OVS_DPDK_PORT_TYPES = [OVS_DPDK_VHOST_USER, OVS_DPDK_VHOST_USER_CLIENT]
# default ovs vhost-user socket location
VHOST_USER_SOCKET_DIR = '/var/run/openvswitch'
MAX_DEVICE_RETRIES = 5
# OpenFlow version constants
OPENFLOW10 = "OpenFlow10"
OPENFLOW11 = "OpenFlow11"
OPENFLOW12 = "OpenFlow12"
OPENFLOW13 = "OpenFlow13"
OPENFLOW14 = "OpenFlow14"
OPENFLOW15 = "OpenFlow15"
OPENFLOW_MAX_PRIORITY = 65535
# A placeholder for dead vlans.
DEAD_VLAN_TAG = p_const.MAX_VLAN_TAG + 1
# callback resource for setting 'bridge_name' in the 'binding:vif_details'
OVS_BRIDGE_NAME = 'ovs_bridge_name'
# callback resource for notifying to ovsdb handler
OVSDB_RESOURCE = 'ovsdb'
# Used in ovs port 'external_ids' in order mark it for no cleanup when
# ovs_cleanup script is used.
SKIP_CLEANUP = 'skip_cleanup'
| mahak/neutron | neutron/plugins/ml2/drivers/openvswitch/agent/common/constants.py | Python | apache-2.0 | 6,043 |
from __future__ import absolute_import, division, print_function, unicode_literals
# Transaction types that represent cash movements.
CASH_TRANSACTION_TYPES = {'Cashflow', 'Coupon', 'Dividend', 'Payment'}
# All recognised transaction types - the security-movement types plus the
# cash types above.
TRANSACTION_TYPES = {'Allocation', 'Block', 'Exercise', 'Expiry', 'Journal', 'Maturity', 'Net',
                     'Novation', 'Split', 'Trade', 'Transfer'} | CASH_TRANSACTION_TYPES
# Actions driven by investors (fund subscriptions / redemptions).
TRANSACTION_INVESTOR_ACTIONS = {'Subscription', 'Redemption'}
# Actions generated by position lifecycle events.
TRANSACTION_LIFECYCLE_ACTIONS = {'Acquire', 'Remove'}
# All valid transaction actions: trading actions plus the two subsets
# above.
TRANSACTION_ACTIONS = {'Buy', 'Sell', 'Short Sell', 'Deliver', 'Receive'} | TRANSACTION_LIFECYCLE_ACTIONS | \
    TRANSACTION_INVESTOR_ACTIONS
# Statuses that mark a transaction as no longer active.
TRANSACTION_CANCEL_STATUSES = {'Cancelled', 'Netted', 'Novated'}
# All valid transaction statuses: the active ones plus the cancel subset.
TRANSACTION_STATUSES = {'New', 'Amended', 'Superseded'} | TRANSACTION_CANCEL_STATUSES
| nedlowe/amaas-core-sdk-python | amaascore/transactions/enums.py | Python | apache-2.0 | 767 |
# coding: utf-8
"""
OpenAPI spec version:
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from pprint import pformat
from six import iteritems
import re
class V1beta1ReplicaSetSpec(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    operations = [
    ]

    # The key is attribute name
    # and the value is attribute type.
    swagger_types = {
        'replicas': 'int',
        'selector': 'V1beta1LabelSelector',
        'template': 'V1PodTemplateSpec'
    }

    # The key is attribute name
    # and the value is json key in definition.
    attribute_map = {
        'replicas': 'replicas',
        'selector': 'selector',
        'template': 'template'
    }

    def __init__(self, replicas=None, selector=None, template=None):
        """
        V1beta1ReplicaSetSpec - a model defined in Swagger
        """
        self._replicas = replicas
        self._selector = selector
        self._template = template

    @property
    def replicas(self):
        """
        Gets the replicas of this V1beta1ReplicaSetSpec.
        Replicas is the number of desired replicas. This is a pointer to distinguish between explicit zero and unspecified. Defaults to 1. More info: http://releases.k8s.io/release-1.2/docs/user-guide/replication-controller.md#what-is-a-replication-controller

        :return: The replicas of this V1beta1ReplicaSetSpec.
        :rtype: int
        """
        return self._replicas

    @replicas.setter
    def replicas(self, replicas):
        """
        Sets the replicas of this V1beta1ReplicaSetSpec.
        Replicas is the number of desired replicas. This is a pointer to distinguish between explicit zero and unspecified. Defaults to 1. More info: http://releases.k8s.io/release-1.2/docs/user-guide/replication-controller.md#what-is-a-replication-controller

        :param replicas: The replicas of this V1beta1ReplicaSetSpec.
        :type: int
        """
        self._replicas = replicas

    @property
    def selector(self):
        """
        Gets the selector of this V1beta1ReplicaSetSpec.
        Selector is a label query over pods that should match the replica count. If the selector is empty, it is defaulted to the labels present on the pod template. More info: http://releases.k8s.io/release-1.2/docs/user-guide/labels.md#label-selectors

        :return: The selector of this V1beta1ReplicaSetSpec.
        :rtype: V1beta1LabelSelector
        """
        return self._selector

    @selector.setter
    def selector(self, selector):
        """
        Sets the selector of this V1beta1ReplicaSetSpec.
        Selector is a label query over pods that should match the replica count. If the selector is empty, it is defaulted to the labels present on the pod template. More info: http://releases.k8s.io/release-1.2/docs/user-guide/labels.md#label-selectors

        :param selector: The selector of this V1beta1ReplicaSetSpec.
        :type: V1beta1LabelSelector
        """
        self._selector = selector

    @property
    def template(self):
        """
        Gets the template of this V1beta1ReplicaSetSpec.
        Template is the object that describes the pod that will be created if insufficient replicas are detected. More info: http://releases.k8s.io/release-1.2/docs/user-guide/replication-controller.md#pod-template

        :return: The template of this V1beta1ReplicaSetSpec.
        :rtype: V1PodTemplateSpec
        """
        return self._template

    @template.setter
    def template(self, template):
        """
        Sets the template of this V1beta1ReplicaSetSpec.
        Template is the object that describes the pod that will be created if insufficient replicas are detected. More info: http://releases.k8s.io/release-1.2/docs/user-guide/replication-controller.md#pod-template

        :param template: The template of this V1beta1ReplicaSetSpec.
        :type: V1PodTemplateSpec
        """
        self._template = template

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}

        # Use dict.items() instead of six.iteritems() - it behaves the
        # same on Python 2 and 3 and drops the six dependency from this
        # method.
        for attr, _ in V1beta1ReplicaSetSpec.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        # Fixed: guard against comparison with foreign types. Previously
        # `other.__dict__` raised AttributeError for objects without a
        # __dict__ (e.g. ints) instead of simply returning False.
        if not isinstance(other, V1beta1ReplicaSetSpec):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
| detiber/lib_openshift | lib_openshift/models/v1beta1_replica_set_spec.py | Python | apache-2.0 | 6,193 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from cs.CsConfig import CsConfig
# Build the shared router configuration once at package import time so that
# every cs.* module uses the same logging setup.
config = CsConfig()
# Route all logging for the cs package to the file, level and format
# dictated by the router's CsConfig.
logging.basicConfig(filename=config.get_logger(),
                    level=config.get_level(),
                    format=config.get_format())
| GabrielBrascher/cloudstack | systemvm/debian/opt/cloud/bin/cs/__init__.py | Python | apache-2.0 | 1,000 |
# -*- coding: utf-8 -*-
'''
Created on Apr 27, 2016
@author: Aaron Ponti
'''
from ch.systemsx.cisd.openbis.dss.etl.dto.api.impl import MaximumIntensityProjectionGenerationAlgorithm
class GenericTIFFSeriesMaximumIntensityProjectionGenerationAlgorithm(MaximumIntensityProjectionGenerationAlgorithm):
    '''
    Custom MaximumIntensityProjectionGenerationAlgorithm for Generic TIFF Series
    that makes sure that the first timepoint in a series is registered for
    creation of the representative thumbnail.

    This runs under Jython: the parent class is the openBIS DSS Java class
    MaximumIntensityProjectionGenerationAlgorithm.
    '''

    def __init__(self, datasetTypeCode, width, height, filename):
        """
        Constructor.

        :param datasetTypeCode: openBIS dataset type code the thumbnail
                                is generated for.
        :param width: thumbnail width in pixels.
        :param height: thumbnail height in pixels.
        :param filename: name of the generated representative thumbnail.
        """
        # Call the parent base constructor
        MaximumIntensityProjectionGenerationAlgorithm.__init__(self,
            datasetTypeCode, width, height, filename)

    def imageToBeIgnored(self, image):
        """
        Overrides the parent imageToBeIgnored method. The selection of which
        series should be used to create the representative thumbnail is done
        in GenericTIFFSeriesCompositeDatasetConfig. Here we prevent the base
        MaximumIntensityProjectionGenerationAlgorithm.imageToBeIgnored() method
        to make a decision based on the timepoint (== 0), since we cannot know
        which is the first time point in a Generic TIFF Series.
        """
        # Never skip an image here; series selection happens upstream.
        return False
| aarpon/obit_microscopy_core_technology | core-plugins/microscopy/3/dss/drop-boxes/MicroscopyDropbox/GenericTIFFSeriesMaximumIntensityProjectionGenerationAlgorithm.py | Python | apache-2.0 | 1,343 |
"""Core functions used by the Thunder streaming feeder scripts, including asynchronous checking for new files.
"""
import errno
import os
import time
from thunder_streaming.feeder.utils.filenames import getFilenamePostfix, getFilenamePrefix
from thunder_streaming.feeder.utils.logger import global_logger
from thunder_streaming.feeder.utils.regex import RegexMatchToQueueName, RegexMatchToTimepointString
from thunder_streaming.feeder.utils.updating_walk import updating_walk as uw
def file_check_generator(source_dir, mod_buffer_time, max_files=-1, filename_predicate=None):
    """Generator function that polls the passed directory tree for new files, using the updating_walk.py logic.

    This generator will restart the underlying updating_walk at the last seen file if the updating walk runs
    out of available files.

    Each iteration yields a (possibly empty) list of file paths whose
    modification time is at least ``mod_buffer_time`` seconds in the past
    (so partially-written files are not picked up). ``max_files`` caps the
    batch size; the default -1 means "no limit".
    """
    next_batch_file, walker_restart_file = None, None
    walker = uw(source_dir, filefilterfunc=filename_predicate)
    while True:
        filebatch = []
        # With max_files == -1 this counts down through negative values and
        # stays truthy, i.e. the batch size is unbounded.
        files_left = max_files
        try:
            if not next_batch_file:
                next_batch_file = next(walker)
                walker_restart_file = next_batch_file

            # Age of the candidate file; only consume it once it has been
            # quiescent for at least mod_buffer_time seconds.
            delta = time.time() - os.stat(next_batch_file).st_mtime
            while delta > mod_buffer_time and files_left:
                filebatch.append(next_batch_file)
                files_left -= 1
                next_batch_file = None  # reset in case of exception on next line
                next_batch_file = next(walker)
                delta = time.time() - os.stat(next_batch_file).st_mtime
                walker_restart_file = next_batch_file

        except StopIteration:
            # no files left, restart after polling interval
            if not filebatch:
                global_logger.get().info("Out of files, waiting...")
            walker = uw(source_dir, walker_restart_file, filefilterfunc=filename_predicate)
        yield filebatch
def build_filecheck_generators(source_dir_or_dirs, mod_buffer_time, max_files=-1, filename_predicate=None):
    """Create one file_check_generator per source directory.

    ``source_dir_or_dirs`` may be a single directory path or a sequence of
    paths; a lone string is promoted to a one-element list.
    """
    if isinstance(source_dir_or_dirs, basestring):
        dirs = [source_dir_or_dirs]
    else:
        dirs = source_dir_or_dirs
    checkers = []
    for directory in dirs:
        checkers.append(
            file_check_generator(directory, mod_buffer_time,
                                 max_files=max_files,
                                 filename_predicate=filename_predicate))
    return checkers
def runloop(file_checkers, feeder, poll_time):
    """ Main program loop. This will check for new files in the passed input directories using file_check_generator,
    push any new files found into the passed Feeder subclass via its feed() method, wait for poll_time,
    and repeat forever.

    :param file_checkers: generators from build_filecheck_generators
    :param feeder: Feeder subclass instance receiving new file batches
    :param poll_time: seconds between successive polls of the directories
    """
    last_time = time.time()
    while True:
        for file_checker in file_checkers:
            # this should never throw StopIteration, will just yield an empty list if nothing is avail:
            filebatch = feeder.feed(next(file_checker))
            if filebatch:
                global_logger.get().info("Pushed %d files, last: %s", len(filebatch), os.path.basename(filebatch[-1]))

        removedfiles = feeder.clean()
        if removedfiles:
            global_logger.get().info("Removed %d temp files, last: %s", len(removedfiles), os.path.basename(removedfiles[-1]))

        next_time = last_time + poll_time
        try:
            time.sleep(next_time - time.time())
        # Modernized from the removed "except IOError, e" comma form; the
        # "as" spelling works on Python 2.6+ and Python 3.
        except IOError as e:
            if e.errno == errno.EINVAL:
                # passed a negative number, which is fine, just don't sleep
                pass
            else:
                # bare raise preserves the original traceback ("raise e"
                # would truncate it on Python 2)
                raise
        last_time = next_time
def get_parsing_functions(opts):
    """Return a (filename -> queue name, filename -> timepoint) pair.

    Regex-driven parsers are built when the corresponding regex file
    option is set; otherwise the default filename prefix/postfix parsers
    are used.
    """
    if opts.prefix_regex_file:
        qname_fcn = RegexMatchToQueueName.fromFile(opts.prefix_regex_file).queueName
    else:
        qname_fcn = getFilenamePrefix
    if opts.timepoint_regex_file:
        timepoint_fcn = RegexMatchToTimepointString.fromFile(opts.timepoint_regex_file).timepoint
    else:
        timepoint_fcn = getFilenamePostfix
    return qname_fcn, timepoint_fcn
# Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from glanceclient.common import progressbar
from glanceclient.common import utils
from glanceclient import exc
import json
import os
from os.path import expanduser
# Process-wide cache for the locally stored image schema.
IMAGE_SCHEMA = None


def get_image_schema():
    """Return the cached image schema, loading it from disk on first use.

    The schema is read from ~/.glanceclient/image_schema.json when that file
    exists; otherwise None is returned and the lookup is retried on the
    next call.
    """
    global IMAGE_SCHEMA
    if IMAGE_SCHEMA is None:
        schema_path = expanduser("~/.glanceclient/image_schema.json")
        if os.path.exists(schema_path) and os.path.isfile(schema_path):
            # file() was a Python-2-only builtin; open() behaves the same
            # here and also works on Python 3.
            with open(schema_path, "r") as f:
                schema_raw = f.read()
                IMAGE_SCHEMA = json.loads(schema_raw)
    return IMAGE_SCHEMA
@utils.schema_args(get_image_schema)
@utils.arg('--property', metavar="<key=value>", action='append',
           default=[], help=('Arbitrary property to associate with image.'
                             ' May be used multiple times.'))
def do_image_create(gc, args):
    """Create a new image."""
    schema = gc.schemas.get("image")
    # Normalize argparse attribute names to underscore form so they can be
    # matched against schema property names.
    _args = [(x[0].replace('-', '_'), x[1]) for x in vars(args).items()]
    # Keep only options the user actually supplied that are either the
    # generic 'property' option or a schema-declared core image property.
    fields = dict(filter(lambda x: x[1] is not None and
                         (x[0] == 'property' or
                          schema.is_core_property(x[0])),
                         _args))

    raw_properties = fields.pop('property', [])
    # Each --property is a "key=value" string; split on the first '=' only
    # so values may themselves contain '='.
    for datum in raw_properties:
        key, value = datum.split('=', 1)
        fields[key] = value

    image = gc.images.create(**fields)
    # Hide hypermedia/bookkeeping attributes from the printed result.
    ignore = ['self', 'access', 'file', 'schema']
    image = dict([item for item in image.iteritems()
                  if item[0] not in ignore])
    utils.print_dict(image)
@utils.arg('id', metavar='<IMAGE_ID>', help='ID of image to update.')
@utils.schema_args(get_image_schema, omit=['id'])
@utils.arg('--property', metavar="<key=value>", action='append',
           default=[], help=('Arbitrary property to associate with image.'
                             ' May be used multiple times.'))
@utils.arg('--remove-property', metavar="key", action='append', default=[],
           help="Name of arbitrary property to remove from the image")
def do_image_update(gc, args):
    """Update an existing image."""
    schema = gc.schemas.get("image")
    # Normalize argparse attribute names to underscore form.
    _args = [(x[0].replace('-', '_'), x[1]) for x in vars(args).items()]
    # Keep supplied options that are property add/remove operations or
    # core schema properties of the image.
    fields = dict(filter(lambda x: x[1] is not None and
                         (x[0] in ['property', 'remove_property'] or
                          schema.is_core_property(x[0])),
                         _args))

    raw_properties = fields.pop('property', [])
    # Split each --property "key=value" on the first '=' only.
    for datum in raw_properties:
        key, value = datum.split('=', 1)
        fields[key] = value

    remove_properties = fields.pop('remove_property', None)

    image_id = fields.pop('id')
    image = gc.images.update(image_id, remove_properties, **fields)
    # Hide hypermedia/bookkeeping attributes from the printed result.
    ignore = ['self', 'access', 'file', 'schema']
    image = dict([item for item in image.iteritems()
                  if item[0] not in ignore])
    utils.print_dict(image)
@utils.arg('--page-size', metavar='<SIZE>', default=None, type=int,
           help='Number of images to request in each paginated request.')
@utils.arg('--visibility', metavar='<VISIBILITY>',
           help='The visibility of the images to display.')
@utils.arg('--member-status', metavar='<MEMBER_STATUS>',
           help='The status of images to display.')
@utils.arg('--owner', metavar='<OWNER>',
           help='Display images owned by <OWNER>.')
@utils.arg('--checksum', metavar='<CHECKSUM>',
           help='Display images matching the checksum')
@utils.arg('--tag', metavar='<TAG>', action='append',
           help="Filter images by an user-defined tag.")
def do_image_list(gc, args):
    """List images you can access."""
    # Only forward the filters the user actually supplied.
    filters = {}
    for key in ('visibility', 'member_status', 'owner', 'checksum', 'tag'):
        value = getattr(args, key)
        if value is not None:
            filters[key] = value
    kwargs = {'filters': filters}
    if args.page_size is not None:
        kwargs['page_size'] = args.page_size
    images = gc.images.list(**kwargs)
    utils.print_list(images, ['ID', 'Name'])
@utils.arg('id', metavar='<IMAGE_ID>', help='ID of image to describe.')
def do_image_show(gc, args):
    """Describe a specific image."""
    image = gc.images.get(args.id)
    # Strip hypermedia/bookkeeping attributes before printing.
    hidden = ('self', 'access', 'file', 'schema')
    printable = dict((key, value) for key, value in image.iteritems()
                     if key not in hidden)
    utils.print_dict(printable)
@utils.arg('--image-id', metavar='<IMAGE_ID>', required=True,
           help='Image to display members of.')
def do_member_list(gc, args):
    """Describe sharing permissions by image."""
    rows = gc.image_members.list(args.image_id)
    utils.print_list(rows, ['Image ID', 'Member ID', 'Status'])
@utils.arg('image_id', metavar='<IMAGE_ID>',
           help='Image from which to remove member')
@utils.arg('member_id', metavar='<MEMBER_ID>',
           help='Tenant to remove as member')
def do_member_delete(gc, args):
    """Delete image member"""
    # Both identifiers are mandatory; bail out with a usage error otherwise.
    if args.image_id and args.member_id:
        gc.image_members.delete(args.image_id, args.member_id)
    else:
        utils.exit('Unable to delete member. Specify image_id and member_id')
@utils.arg('image_id', metavar='<IMAGE_ID>',
           help='Image from which to update member')
@utils.arg('member_id', metavar='<MEMBER_ID>',
           help='Tenant to update')
@utils.arg('member_status', metavar='<MEMBER_STATUS>',
           help='Updated status of member')
def do_member_update(gc, args):
    """Update the status of a member for a given image."""
    # All three identifiers are mandatory.
    if args.image_id and args.member_id and args.member_status:
        member = gc.image_members.update(args.image_id, args.member_id,
                                         args.member_status)
        utils.print_list([member], ['Image ID', 'Member ID', 'Status'])
    else:
        utils.exit('Unable to update member. Specify image_id, member_id and'
                   ' member_status')
@utils.arg('image_id', metavar='<IMAGE_ID>',
           help='Image on which to create member')
@utils.arg('member_id', metavar='<MEMBER_ID>',
           help='Tenant to add as member')
def do_member_create(gc, args):
    """Create member for a given image."""
    # Both identifiers are mandatory.
    if args.image_id and args.member_id:
        member = gc.image_members.create(args.image_id, args.member_id)
        utils.print_list([member], ['Image ID', 'Member ID', 'Status'])
    else:
        utils.exit('Unable to create member. Specify image_id and member_id')
@utils.arg('model', metavar='<MODEL>', help='Name of model to describe.')
def do_explain(gc, args):
    """Describe a specific model."""
    try:
        schema = gc.schemas.get(args.model)
    except exc.HTTPNotFound:
        utils.exit("Unable to find requested model '%s'" % args.model)
    else:
        # Render each schema property as a name/description row.
        utils.print_list(schema.properties,
                         ['Attribute', 'Description'],
                         {'Attribute': lambda prop: prop.name})
@utils.arg('--file', metavar='<FILE>',
           help='Local file to save downloaded image data to. '
                'If this is not specified the image data will be '
                'written to stdout.')
@utils.arg('id', metavar='<IMAGE_ID>', help='ID of image to download.')
@utils.arg('--progress', action='store_true', default=False,
           help='Show download progress bar.')
def do_image_download(gc, args):
    """Download a specific image."""
    body = gc.images.data(args.id)
    if args.progress:
        # Wrap the body iterator so a progress bar is rendered while the
        # chunks are being consumed.
        body = progressbar.VerboseIteratorWrapper(body, len(body))
    utils.save_image(body, args.file)
@utils.arg('--file', metavar='<FILE>',
           help=('Local file that contains disk image to be uploaded'
                 ' during creation. Alternatively, images can be passed'
                 ' to the client via stdin.'))
@utils.arg('id', metavar='<IMAGE_ID>',
           help='ID of image to upload data to.')
def do_image_upload(gc, args):
    """Upload data for a specific image."""
    # Reads from --file when given, otherwise falls back to stdin.
    image_data = utils.get_data_file(args)
    gc.images.upload(args.id, image_data)
@utils.arg('id', metavar='<IMAGE_ID>', help='ID of image to delete.')
def do_image_delete(gc, args):
    """Delete specified image."""
    # Deletion is immediate and unconfirmed; errors surface as HTTP errors.
    gc.images.delete(args.id)
@utils.arg('image_id', metavar='<IMAGE_ID>',
           help='Image to be updated with the given tag')
@utils.arg('tag_value', metavar='<TAG_VALUE>',
           help='Value of the tag')
def do_image_tag_update(gc, args):
    """Update an image with the given tag."""
    # Both arguments are mandatory.
    if args.image_id and args.tag_value:
        gc.image_tags.update(args.image_id, args.tag_value)
        updated = gc.images.get(args.image_id)
        utils.print_list([updated], ['ID', 'Tags'])
    else:
        utils.exit('Unable to update tag. Specify image_id and tag_value')
@utils.arg('image_id', metavar='<IMAGE_ID>',
           help='Image whose tag to be deleted')
@utils.arg('tag_value', metavar='<TAG_VALUE>',
           help='Value of the tag')
def do_image_tag_delete(gc, args):
    """Delete the tag associated with the given image."""
    # Both arguments are mandatory.
    if args.image_id and args.tag_value:
        gc.image_tags.delete(args.image_id, args.tag_value)
    else:
        utils.exit('Unable to delete tag. Specify image_id and tag_value')
| ntt-sic/python-glanceclient | glanceclient/v2/shell.py | Python | apache-2.0 | 10,082 |
# -*- coding: utf-8 -*-
import collections
import httplib as http
import pytz
from flask import request
from modularodm import Q
from framework.exceptions import HTTPError
from framework.auth.decorators import must_be_logged_in
from framework.auth.utils import privacy_info_handle
from framework.forms.utils import sanitize
from website import settings
from website.notifications.emails import notify
from website.filters import gravatar
from website.models import Guid, Comment
from website.project.decorators import must_be_contributor_or_public
from datetime import datetime
from website.project.model import has_anonymous_link
def resolve_target(node, guid):
    """Return the object a comment attaches to: the node itself when no
    guid is given, otherwise the referent of that guid.

    Raises HTTPError(400) when the guid cannot be resolved.
    """
    if not guid:
        return node
    guid_record = Guid.load(guid)
    if guid_record is None:
        raise HTTPError(http.BAD_REQUEST)
    return guid_record.referent
def collect_discussion(target, users=None):
    """Recursively gather all non-deleted comments under ``target``,
    grouped by the user who posted them.

    Returns a defaultdict mapping user -> list of that user's comments.
    """
    users = users or collections.defaultdict(list)
    for reply in getattr(target, 'commented', []):
        if reply.is_deleted:
            continue
        users[reply.user].append(reply)
        # Replies to a comment are attached to the comment itself.
        collect_discussion(reply, users=users)
    return users
@must_be_contributor_or_public
def comment_discussion(**kwargs):
    """Return the node's commenters ordered by comment count (most
    prolific first), with identity fields scrubbed for anonymous views."""
    node = kwargs['node'] or kwargs['project']
    auth = kwargs['auth']
    comments_by_user = collect_discussion(node)
    anonymous = has_anonymous_link(node, auth)

    # Sort users by comment frequency
    # TODO: Allow sorting by recency, combination of frequency and recency
    ranked_users = sorted(
        comments_by_user.keys(),
        key=lambda user: len(comments_by_user[user]),
        reverse=True,
    )

    discussion = []
    for user in ranked_users:
        discussion.append({
            'id': privacy_info_handle(user._id, anonymous),
            'url': privacy_info_handle(user.url, anonymous),
            'fullname': privacy_info_handle(user.fullname, anonymous,
                                            name=True),
            'isContributor': node.is_contributor(user),
            'gravatarUrl': privacy_info_handle(
                gravatar(user, use_ssl=True,
                         size=settings.GRAVATAR_SIZE_DISCUSSION),
                anonymous),
        })
    return {'discussion': discussion}
def serialize_comment(comment, auth, anonymous=False):
    """Build the JSON-serializable representation of one comment,
    scrubbing author identity when the view is anonymous."""
    commenter = comment.user
    author = {
        'id': privacy_info_handle(commenter._id, anonymous),
        'url': privacy_info_handle(commenter.url, anonymous),
        'name': privacy_info_handle(commenter.fullname, anonymous,
                                    name=True),
        'gravatarUrl': privacy_info_handle(
            gravatar(commenter, use_ssl=True,
                     size=settings.GRAVATAR_SIZE_DISCUSSION),
            anonymous),
    }
    return {
        'id': comment._id,
        'author': author,
        'dateCreated': comment.date_created.isoformat(),
        'dateModified': comment.date_modified.isoformat(),
        'content': comment.content,
        'hasChildren': bool(getattr(comment, 'commented', [])),
        'canEdit': comment.user == auth.user,
        'modified': comment.modified,
        'isDeleted': comment.is_deleted,
        'isAbuse': auth.user and auth.user._id in comment.reports,
    }
def serialize_comments(record, auth, anonymous=False):
    """Serialize every direct comment attached to ``record``."""
    serialized = []
    for reply in getattr(record, 'commented', []):
        serialized.append(serialize_comment(reply, auth, anonymous))
    return serialized
def kwargs_to_comment(kwargs, owner=False):
    """Load the comment referenced by kwargs['cid'].

    Raises HTTPError(400) when the comment does not exist and, when
    ``owner`` is set, HTTPError(403) unless the requesting user authored
    the comment.
    """
    comment = Comment.load(kwargs.get('cid'))
    if comment is None:
        raise HTTPError(http.BAD_REQUEST)
    if owner and kwargs['auth'].user != comment.user:
        raise HTTPError(http.FORBIDDEN)
    return comment
@must_be_logged_in
@must_be_contributor_or_public
def add_comment(**kwargs):
    """Create a comment on a node, or a reply when 'target' points at an
    existing comment.

    Responds 400 for missing/empty/over-length content or a disabled
    comment level, 403 when the user may not comment on this node.
    """
    auth = kwargs['auth']
    node = kwargs['node'] or kwargs['project']

    if not node.comment_level:
        raise HTTPError(http.BAD_REQUEST)

    if not node.can_comment(auth):
        raise HTTPError(http.FORBIDDEN)

    guid = request.json.get('target')
    target = resolve_target(node, guid)

    # Guard against a payload without 'content': .get() returns None and
    # the previous None.strip() raised AttributeError (HTTP 500) instead
    # of the intended 400 response.
    content = (request.json.get('content') or '').strip()
    content = sanitize(content)
    if not content:
        raise HTTPError(http.BAD_REQUEST)
    if len(content) > settings.COMMENT_MAXLENGTH:
        raise HTTPError(http.BAD_REQUEST)

    comment = Comment.create(
        auth=auth,
        node=node,
        target=target,
        user=auth.user,
        content=content,
    )
    comment.save()

    # Context handed to the notification templates.
    context = dict(
        node_type=node.project_or_component,
        timestamp=datetime.utcnow().replace(tzinfo=pytz.utc),
        commenter=auth.user,
        gravatar_url=auth.user.gravatar_url,
        content=content,
        target_user=target.user if is_reply(target) else None,
        parent_comment=target.content if is_reply(target) else "",
        title=node.title,
        node_id=node._id,
        url=node.absolute_url
    )
    sent_subscribers = notify(uid=node._id, event="comments", **context)

    if is_reply(target):
        # Also notify the author of the parent comment, unless they were
        # already reached by the node-level notification above.
        if target.user and target.user not in sent_subscribers:
            notify(uid=target.user._id, event='comment_replies', **context)

    return {
        'comment': serialize_comment(comment, auth)
    }, http.CREATED
def is_reply(target):
    # A comment whose target is itself a Comment is a reply (as opposed
    # to a top-level comment whose target is the node).
    return isinstance(target, Comment)
@must_be_contributor_or_public
def list_comments(auth, **kwargs):
    """Return serialized comments on a target plus the viewer's unread count.

    The unread count is only computed for contributors; their
    comments_viewed_timestamp store is lazily initialized here.
    """
    node = kwargs['node'] or kwargs['project']
    anonymous = has_anonymous_link(node, auth)
    guid = request.args.get('target')
    target = resolve_target(node, guid)
    serialized_comments = serialize_comments(target, auth, anonymous)
    n_unread = 0
    if node.is_contributor(auth.user):
        # Lazily create the per-user view-timestamp dict before querying
        # against it.
        if auth.user.comments_viewed_timestamp is None:
            auth.user.comments_viewed_timestamp = {}
            auth.user.save()
        n_unread = n_unread_comments(target, auth.user)
    return {
        'comments': serialized_comments,
        'nUnread': n_unread
    }
def n_unread_comments(node, user):
    """Return the number of unread comments on a node for a user."""
    # Fall back to a fixed epoch-era timestamp when the user has never
    # viewed comments on this node, so everything counts as unread.
    fallback = datetime(1970, 1, 1, 12, 0, 0)
    seen_at = user.comments_viewed_timestamp.get(node._id, fallback)
    query = (Q('node', 'eq', node) &
             Q('user', 'ne', user) &
             Q('date_created', 'gt', seen_at) &
             Q('date_modified', 'gt', seen_at))
    return Comment.find(query).count()
@must_be_logged_in
@must_be_contributor_or_public
def edit_comment(**kwargs):
    """Replace a comment's content; only its author may edit (enforced by
    kwargs_to_comment(owner=True)).

    Responds 400 for missing/empty/over-length content.
    """
    auth = kwargs['auth']
    comment = kwargs_to_comment(kwargs, owner=True)

    # Guard against a payload without 'content': .get() returns None and
    # the previous None.strip() raised AttributeError (HTTP 500) instead
    # of the intended 400 response.
    content = (request.json.get('content') or '').strip()
    content = sanitize(content)
    if not content:
        raise HTTPError(http.BAD_REQUEST)
    if len(content) > settings.COMMENT_MAXLENGTH:
        raise HTTPError(http.BAD_REQUEST)

    comment.edit(
        content=content,
        auth=auth,
        save=True
    )

    return serialize_comment(comment, auth)
@must_be_logged_in
@must_be_contributor_or_public
def delete_comment(**kwargs):
    """Soft-delete a comment; author-only (enforced by
    kwargs_to_comment(owner=True))."""
    auth = kwargs['auth']
    comment = kwargs_to_comment(kwargs, owner=True)
    comment.delete(auth=auth, save=True)
    return {}
@must_be_logged_in
@must_be_contributor_or_public
def undelete_comment(**kwargs):
    """Restore a previously soft-deleted comment; author-only (enforced
    by kwargs_to_comment(owner=True))."""
    auth = kwargs['auth']
    comment = kwargs_to_comment(kwargs, owner=True)
    comment.undelete(auth=auth, save=True)
    return {}
@must_be_logged_in
@must_be_contributor_or_public
def update_comments_timestamp(auth, **kwargs):
    """Mark all comments on the node as viewed by the current contributor.

    Returns {node_id: iso-timestamp} for contributors, {} for everyone else.
    """
    node = kwargs['node'] or kwargs['project']
    if node.is_contributor(auth.user):
        auth.user.comments_viewed_timestamp[node._id] = datetime.utcnow()
        auth.user.save()
        # NOTE(review): return value discarded; presumably invoked for its
        # lazy-initialization side effects, relying on the view decorator
        # to re-inject 'auth'. Verify against the decorator behavior.
        list_comments(**kwargs)
        return {node._id: auth.user.comments_viewed_timestamp[node._id].isoformat()}
    else:
        return {}
@must_be_logged_in
@must_be_contributor_or_public
def report_abuse(**kwargs):
    """Flag a comment as abusive on behalf of the logged-in user.

    Responds 400 when no category is supplied or the model rejects the
    report.
    """
    auth = kwargs['auth']
    user = auth.user

    comment = kwargs_to_comment(kwargs)

    payload = request.json
    category = payload.get('category')
    text = payload.get('text', '')
    if not category:
        raise HTTPError(http.BAD_REQUEST)

    try:
        comment.report_abuse(user, save=True, category=category, text=text)
    except ValueError:
        raise HTTPError(http.BAD_REQUEST)

    return {}
@must_be_logged_in
@must_be_contributor_or_public
def unreport_abuse(**kwargs):
    """Withdraw the logged-in user's abuse report on a comment.

    Responds 400 when the model rejects the withdrawal (e.g. no report
    from this user exists).
    """
    reporter = kwargs['auth'].user
    comment = kwargs_to_comment(kwargs)
    try:
        comment.unreport_abuse(reporter, save=True)
    except ValueError:
        raise HTTPError(http.BAD_REQUEST)
    return {}
| himanshuo/osf.io | website/project/views/comment.py | Python | apache-2.0 | 8,683 |
# Copyright (c) 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import os
from swift3.test.functional import Swift3FunctionalTestCase
from swift3.test.functional.s3_test_client import Connection, \
get_tester2_connection
from swift3.test.functional.utils import get_error_code
from swift3.etree import fromstring
class TestSwift3Acl(Swift3FunctionalTestCase):
    """Functional tests for the swift3 ACL sub-resource (?acl) on buckets
    and objects, covering the happy path and the error paths for bad
    credentials, missing resources and foreign (non-owner) accounts."""

    def setUp(self):
        super(TestSwift3Acl, self).setUp()
        self.bucket = 'bucket'
        self.obj = 'object'
        self.conn.make_request('PUT', self.bucket)
        # Second tester account, used to exercise AccessDenied paths.
        self.conn2 = get_tester2_connection()

    def test_acl(self):
        # Exercise PUT bucket ACL, GET bucket ACL and GET object ACL.
        self.conn.make_request('PUT', self.bucket, self.obj)
        query = 'acl'

        # PUT Bucket ACL
        headers = {'x-amz-acl': 'public-read'}
        status, headers, body = \
            self.conn.make_request('PUT', self.bucket, headers=headers,
                                   query=query)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self.assertEqual(headers['content-length'], '0')

        # GET Bucket ACL
        status, headers, body = \
            self.conn.make_request('GET', self.bucket, query=query)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        # TODO: Fix the response that last-modified must be in the response.
        # self.assertTrue(headers['last-modified'] is not None)
        self.assertEqual(headers['content-length'], str(len(body)))
        self.assertTrue(headers['content-type'] is not None)
        # The owner of the bucket must be the requesting tester account.
        elem = fromstring(body, 'AccessControlPolicy')
        owner = elem.find('Owner')
        self.assertEqual(owner.find('ID').text, self.conn.user_id)
        self.assertEqual(owner.find('DisplayName').text, self.conn.user_id)
        acl = elem.find('AccessControlList')
        self.assertTrue(acl.find('Grant') is not None)

        # GET Object ACL
        status, headers, body = \
            self.conn.make_request('GET', self.bucket, self.obj, query=query)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        # TODO: Fix the response that last-modified must be in the response.
        # self.assertTrue(headers['last-modified'] is not None)
        self.assertEqual(headers['content-length'], str(len(body)))
        self.assertTrue(headers['content-type'] is not None)
        elem = fromstring(body, 'AccessControlPolicy')
        owner = elem.find('Owner')
        self.assertEqual(owner.find('ID').text, self.conn.user_id)
        self.assertEqual(owner.find('DisplayName').text, self.conn.user_id)
        acl = elem.find('AccessControlList')
        self.assertTrue(acl.find('Grant') is not None)

    def test_put_bucket_acl_error(self):
        req_headers = {'x-amz-acl': 'public-read'}
        # Invalid secret key -> SignatureDoesNotMatch.
        aws_error_conn = Connection(aws_secret_key='invalid')
        status, headers, body = \
            aws_error_conn.make_request('PUT', self.bucket,
                                        headers=req_headers, query='acl')
        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

        # Unknown bucket -> NoSuchBucket.
        status, headers, body = \
            self.conn.make_request('PUT', 'nothing',
                                   headers=req_headers, query='acl')
        self.assertEqual(get_error_code(body), 'NoSuchBucket')

        # Non-owner account -> AccessDenied.
        status, headers, body = \
            self.conn2.make_request('PUT', self.bucket,
                                    headers=req_headers, query='acl')
        self.assertEqual(get_error_code(body), 'AccessDenied')

    def test_get_bucket_acl_error(self):
        # Invalid secret key -> SignatureDoesNotMatch.
        aws_error_conn = Connection(aws_secret_key='invalid')
        status, headers, body = \
            aws_error_conn.make_request('GET', self.bucket, query='acl')
        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

        # Unknown bucket -> NoSuchBucket.
        status, headers, body = \
            self.conn.make_request('GET', 'nothing', query='acl')
        self.assertEqual(get_error_code(body), 'NoSuchBucket')

        # Non-owner account -> AccessDenied.
        status, headers, body = \
            self.conn2.make_request('GET', self.bucket, query='acl')
        self.assertEqual(get_error_code(body), 'AccessDenied')

    def test_get_object_acl_error(self):
        self.conn.make_request('PUT', self.bucket, self.obj)

        # Invalid secret key -> SignatureDoesNotMatch.
        aws_error_conn = Connection(aws_secret_key='invalid')
        status, headers, body = \
            aws_error_conn.make_request('GET', self.bucket, self.obj,
                                        query='acl')
        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

        # Unknown object -> NoSuchKey.
        status, headers, body = \
            self.conn.make_request('GET', self.bucket, 'nothing', query='acl')
        self.assertEqual(get_error_code(body), 'NoSuchKey')

        # Non-owner account -> AccessDenied.
        status, headers, body = \
            self.conn2.make_request('GET', self.bucket, self.obj, query='acl')
        self.assertEqual(get_error_code(body), 'AccessDenied')
@unittest.skipIf(os.environ['AUTH'] == 'tempauth',
                 'v4 is supported only in keystone')
class TestSwift3AclSigV4(TestSwift3Acl):
    """Re-run every ACL test with AWS Signature Version 4 enabled via the
    S3_USE_SIGV4 environment flag."""

    @classmethod
    def setUpClass(cls):
        os.environ['S3_USE_SIGV4'] = "True"

    @classmethod
    def tearDownClass(cls):
        # Remove the flag so later test classes fall back to SigV2.
        del os.environ['S3_USE_SIGV4']
# Allow running this test module directly with the stdlib unittest runner.
if __name__ == '__main__':
    unittest.main()
| swiftstack/swift3-stackforge | swift3/test/functional/test_acl.py | Python | apache-2.0 | 5,823 |
# Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base test case for all tests.
To change behavoir only for tests that do not rely on Tempest, please
target the neutron.tests.base module instead.
There should be no non-test Neutron imports in this module to ensure
that the functional API tests can import Tempest without triggering
errors due to duplicate configuration definitions.
"""
import contextlib
import logging as std_logging
import os
import os.path
import random
import traceback
import eventlet.timeout
import fixtures
import mock
from oslo_utils import strutils
import testtools
from neutron.tests import post_mortem_debug
# Log line layout shared by basicConfig and the FakeLogger fixture below.
LOG_FORMAT = "%(asctime)s %(levelname)8s [%(name)s] %(message)s"
def get_rand_name(max_length=None, prefix='test'):
    """Return a pseudo-random name '<prefix><digits>', truncated to
    ``max_length`` characters when a limit is given."""
    suffix = str(random.randint(1, 0x7fffffff))
    name = '%s%s' % (prefix, suffix)
    if max_length is None:
        return name
    return name[:max_length]
def bool_from_env(key, strict=False, default=False):
    """Interpret the environment variable ``key`` as a boolean,
    returning ``default`` when it is unset or unparseable (non-strict)."""
    raw = os.environ.get(key)
    return strutils.bool_from_string(raw, strict=strict, default=default)
class SubBaseTestCase(testtools.TestCase):
    """Base test case wiring up post-mortem debugging, logging capture,
    timeouts, temp dirs and stdout/stderr capture shared by test suites."""

    def setUp(self):
        super(SubBaseTestCase, self).setUp()

        # Configure this first to ensure pm debugging support for setUp()
        debugger = os.environ.get('OS_POST_MORTEM_DEBUGGER')
        if debugger:
            self.addOnException(post_mortem_debug.get_exception_handler(
                debugger))

        if bool_from_env('OS_DEBUG'):
            _level = std_logging.DEBUG
        else:
            _level = std_logging.INFO
        capture_logs = bool_from_env('OS_LOG_CAPTURE')
        if not capture_logs:
            std_logging.basicConfig(format=LOG_FORMAT, level=_level)
        self.log_fixture = self.useFixture(
            fixtures.FakeLogger(
                format=LOG_FORMAT,
                level=_level,
                nuke_handlers=capture_logs,
            ))

        test_timeout = int(os.environ.get('OS_TEST_TIMEOUT', 0))
        if test_timeout == -1:
            test_timeout = 0
        if test_timeout > 0:
            self.useFixture(fixtures.Timeout(test_timeout, gentle=True))

        # If someone does use tempfile directly, ensure that it's cleaned up
        self.useFixture(fixtures.NestedTempfile())
        self.useFixture(fixtures.TempHomeDir())

        self.addCleanup(mock.patch.stopall)

        if bool_from_env('OS_STDOUT_CAPTURE'):
            stdout = self.useFixture(fixtures.StringStream('stdout')).stream
            self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
        if bool_from_env('OS_STDERR_CAPTURE'):
            stderr = self.useFixture(fixtures.StringStream('stderr')).stream
            self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))

        self.addOnException(self.check_for_systemexit)

    def check_for_systemexit(self, exc_info):
        """Fail the test when a SystemExit escaped during execution."""
        if isinstance(exc_info[1], SystemExit):
            self.fail("A SystemExit was raised during the test. %s"
                      % traceback.format_exception(*exc_info))

    @contextlib.contextmanager
    def assert_max_execution_time(self, max_execution_time=5):
        """Context manager failing the test if its body runs too long."""
        with eventlet.timeout.Timeout(max_execution_time, False):
            yield
            return
        self.fail('Execution of this test timed out')

    def assertOrderedEqual(self, expected, actual):
        """Compare two structures ignoring the order of nested lists."""
        expect_val = self.sort_dict_lists(expected)
        actual_val = self.sort_dict_lists(actual)
        self.assertEqual(expect_val, actual_val)

    def sort_dict_lists(self, dic):
        """Recursively sort every list value inside ``dic`` (in place)."""
        # items() instead of the Python-2-only iteritems(): consistent with
        # assertDictSupersetOf below and required for Python 3 support.
        for key, value in dic.items():
            if isinstance(value, list):
                dic[key] = sorted(value)
            elif isinstance(value, dict):
                dic[key] = self.sort_dict_lists(value)
        return dic

    def assertDictSupersetOf(self, expected_subset, actual_superset):
        """Checks that actual dict contains the expected dict.

        After checking that the arguments are of the right type, this checks
        that each item in expected_subset is in, and matches, what is in
        actual_superset. Separate tests are done, so that detailed info can
        be reported upon failure.
        """
        if not isinstance(expected_subset, dict):
            self.fail("expected_subset (%s) is not an instance of dict" %
                      type(expected_subset))
        if not isinstance(actual_superset, dict):
            self.fail("actual_superset (%s) is not an instance of dict" %
                      type(actual_superset))
        for k, v in expected_subset.items():
            self.assertIn(k, actual_superset)
            self.assertEqual(v, actual_superset[k],
                             "Key %(key)s expected: %(exp)r, actual %(act)r" %
                             {'key': k, 'exp': v, 'act': actual_superset[k]})
| cloudbase/neutron-virtualbox | neutron/tests/sub_base.py | Python | apache-2.0 | 5,363 |
from typing import Optional
from common.serializers.serialization import state_roots_serializer
from crypto.bls.bls_bft import BlsBft
from crypto.bls.bls_bft_replica import BlsBftReplica
from crypto.bls.bls_multi_signature import MultiSignature, MultiSignatureValue
from crypto.bls.indy_crypto.bls_crypto_indy_crypto import IndyCryptoBlsUtils
from plenum.common.constants import BLS_PREFIX, AUDIT_LEDGER_ID, TXN_PAYLOAD, \
TXN_PAYLOAD_DATA, AUDIT_TXN_LEDGER_ROOT, AUDIT_TXN_STATE_ROOT, AUDIT_TXN_PP_SEQ_NO
from plenum.common.messages.node_messages import PrePrepare, Prepare, Commit
from plenum.common.metrics_collector import MetricsCollector, NullMetricsCollector, measure_time, MetricsName
from plenum.common.types import f
from plenum.common.util import compare_3PC_keys
from plenum.server.consensus.utils import replica_name_to_node_name
from plenum.server.database_manager import DatabaseManager
from stp_core.common.log import getlogger
# Module-level logger shared by this BLS BFT replica implementation.
logger = getlogger()
class BlsBftReplicaPlenum(BlsBftReplica):
    """Replica-level BLS multi-signature logic.

    Collects BLS signature shares from COMMIT messages, combines them into
    multi-signatures once a quorum of shares is reached, validates BLS data
    carried by 3PC messages, and attaches the freshly calculated multi-sigs
    to the next PRE-PREPARE.
    """

    def __init__(self,
                 node_id,
                 bls_bft: BlsBft,
                 is_master,
                 database_manager: DatabaseManager,
                 metrics: MetricsCollector = NullMetricsCollector()):
        super().__init__(bls_bft, is_master)
        # Multi-sigs calculated for the last ordered batch; consumed (and
        # reset to None) by update_pre_prepare.
        self._all_bls_latest_multi_sigs = None
        self.node_id = node_id
        self._database_manager = database_manager
        # {3PC key: {ledger id (str): {node name: signature share}}}
        self._all_signatures = {}
        self.state_root_serializer = state_roots_serializer
        self.metrics = metrics

    def _can_process_ledger(self, ledger_id):
        # enable BLS for all ledgers
        return True

    # ----VALIDATE----

    @measure_time(MetricsName.BLS_VALIDATE_PREPREPARE_TIME)
    def validate_pre_prepare(self, pre_prepare: PrePrepare, sender):
        """Validate multi-sigs attached to a PRE-PREPARE.

        Returns PPR_BLS_MULTISIG_WRONG on the first invalid multi-sig,
        or None (implicitly) when everything checks out.
        """
        if f.BLS_MULTI_SIGS.nm in pre_prepare and pre_prepare.blsMultiSigs:
            multi_sigs = pre_prepare.blsMultiSigs
            for sig in multi_sigs:
                multi_sig = MultiSignature.from_list(*sig)
                if not self._validate_multi_sig(multi_sig):
                    return BlsBftReplica.PPR_BLS_MULTISIG_WRONG

    def validate_prepare(self, prepare: Prepare, sender):
        # PREPAREs carry no BLS data
        pass

    @measure_time(MetricsName.BLS_VALIDATE_COMMIT_TIME)
    def validate_commit(self, commit: Commit, sender, pre_prepare: PrePrepare):
        """Validate the per-ledger BLS signature shares in a COMMIT against
        the roots recorded in the matching audit-ledger transaction.

        Returns CM_BLS_SIG_WRONG on the first bad share, None otherwise.
        """
        if f.BLS_SIGS.nm not in commit:
            return

        audit_txn = self._get_correct_audit_transaction(pre_prepare)
        if not audit_txn:
            return
        audit_payload = audit_txn[TXN_PAYLOAD][TXN_PAYLOAD_DATA]

        for lid, sig in commit.blsSigs.items():
            lid = int(lid)
            # A signed ledger must have both its state and txn roots audited.
            if lid not in audit_payload[AUDIT_TXN_STATE_ROOT] or lid not in audit_payload[AUDIT_TXN_LEDGER_ROOT]:
                return BlsBftReplicaPlenum.CM_BLS_SIG_WRONG
            if not self._validate_signature(sender, sig,
                                            BlsBftReplicaPlenum._create_fake_pre_prepare_for_multi_sig(
                                                lid,
                                                audit_payload[AUDIT_TXN_STATE_ROOT][lid],
                                                audit_payload[AUDIT_TXN_LEDGER_ROOT][lid],
                                                pre_prepare
                                            )):
                return BlsBftReplicaPlenum.CM_BLS_SIG_WRONG

    # ----CREATE/UPDATE----

    @measure_time(MetricsName.BLS_UPDATE_PREPREPARE_TIME)
    def update_pre_prepare(self, pre_prepare_params, ledger_id):
        """Attach the most recently calculated multi-sigs to the PRE-PREPARE
        params, then clear them so they are sent only once."""
        if not self._can_process_ledger(ledger_id):
            return pre_prepare_params

        if self._all_bls_latest_multi_sigs is not None:
            # update BLS_MULTI_SIGS only (not BLS_MULTI_SIG)
            # Pass None for backward compatibility
            pre_prepare_params.append(None)
            pre_prepare_params.append([val.as_list() for val in self._all_bls_latest_multi_sigs])
            self._all_bls_latest_multi_sigs = None
        return pre_prepare_params

    def update_prepare(self, prepare_params, ledger_id):
        # Send BLS signature in COMMITs only
        return prepare_params

    @measure_time(MetricsName.BLS_UPDATE_COMMIT_TIME)
    def update_commit(self, commit_params, pre_prepare: PrePrepare):
        """Sign the roots recorded in the matching audit transaction and
        attach the per-ledger signatures to the COMMIT params."""
        ledger_id = pre_prepare.ledgerId
        state_root_hash = pre_prepare.stateRootHash
        if not self._can_process_ledger(ledger_id):
            return commit_params
        if not self._bls_bft.can_sign_bls():
            logger.debug("{}{} can not sign COMMIT {} for state {}: No BLS keys"
                         .format(BLS_PREFIX, self, commit_params, state_root_hash))
            return commit_params

        # update BLS_SIGS only (not BLS_SIG)
        # Use ' ' as BLS_SIG for backward-compatibility as BLS_SIG in COMMIT is optional but not Nullable
        commit_params.append(' ')

        last_audit_txn = self._get_correct_audit_transaction(pre_prepare)
        if last_audit_txn:
            res = {}
            payload_data = last_audit_txn[TXN_PAYLOAD][TXN_PAYLOAD_DATA]
            # Sign every ledger the audit txn recorded a state root for.
            for audit_lid in payload_data[AUDIT_TXN_STATE_ROOT].keys():
                fake_pp = BlsBftReplicaPlenum._create_fake_pre_prepare_for_multi_sig(
                    audit_lid,
                    payload_data[AUDIT_TXN_STATE_ROOT].get(audit_lid),
                    payload_data[AUDIT_TXN_LEDGER_ROOT].get(audit_lid),
                    pre_prepare
                )
                bls_signature = self._sign_state(fake_pp)
                logger.debug("{}{} signed COMMIT {} for state {} with sig {}"
                             .format(BLS_PREFIX, self, commit_params, state_root_hash, bls_signature))
                res[str(audit_lid)] = bls_signature
            commit_params.append(res)
        return commit_params

    # ----PROCESS----

    def process_pre_prepare(self, pre_prepare: PrePrepare, sender):
        # does not matter which ledger id is current PPR for
        # mult-sig is for domain ledger anyway
        self._save_multi_sig_shared(pre_prepare)

    def process_prepare(self, prepare: Prepare, sender):
        pass

    def process_commit(self, commit: Commit, sender):
        """Accumulate the sender's signature shares keyed by 3PC key and ledger."""
        key_3PC = (commit.viewNo, commit.ppSeqNo)
        if f.BLS_SIGS.nm in commit and commit.blsSigs is not None:
            if key_3PC not in self._all_signatures:
                self._all_signatures[key_3PC] = {}
            for ledger_id in commit.blsSigs.keys():
                if ledger_id not in self._all_signatures[key_3PC]:
                    self._all_signatures[key_3PC][ledger_id] = {}
                self._all_signatures[key_3PC][ledger_id][self.get_node_name(sender)] = commit.blsSigs[ledger_id]

    def process_order(self, key, quorums, pre_prepare):
        """On ordering, combine collected shares into multi-signatures.

        Calculation happens on every replica (master and backups) so they
        stay in sync, but the result is stored on the master only.
        """
        if not self._can_process_ledger(pre_prepare.ledgerId):
            return
        if not self._can_calculate_multi_sig(key, quorums):
            return

        # calculate signature always to keep master and non-master in sync
        # but save on master only
        all_bls_multi_sigs = self._calculate_all_multi_sigs(key, pre_prepare)
        if not self._is_master:
            return

        if all_bls_multi_sigs:
            for bls_multi_sig in all_bls_multi_sigs:
                self._save_multi_sig_local(bls_multi_sig)
        self._all_bls_latest_multi_sigs = all_bls_multi_sigs

    # ----GC----

    def gc(self, key_3PC):
        """Drop accumulated shares for all 3PC keys up to and including key_3PC."""
        keys_to_remove = []
        for key in self._all_signatures.keys():
            if compare_3PC_keys(key, key_3PC) >= 0:
                keys_to_remove.append(key)
        for key in keys_to_remove:
            self._all_signatures.pop(key, None)

    # ----MULT_SIG----

    def _create_multi_sig_value_for_pre_prepare(self, pre_prepare: PrePrepare, pool_state_root_hash):
        """Build the value object that is signed/verified for a PRE-PREPARE."""
        multi_sig_value = MultiSignatureValue(ledger_id=pre_prepare.ledgerId,
                                             state_root_hash=pre_prepare.stateRootHash,
                                             pool_state_root_hash=pool_state_root_hash,
                                             txn_root_hash=pre_prepare.txnRootHash,
                                             timestamp=pre_prepare.ppTime)
        return multi_sig_value

    def _validate_signature(self, sender, bls_sig, pre_prepare: PrePrepare):
        """Verify a single BLS share against the sender's registered key."""
        pool_root_hash = self._get_pool_root_hash(pre_prepare, serialize=False)
        sender_node = self.get_node_name(sender)
        pk = self._bls_bft.bls_key_register.get_key_by_name(sender_node, pool_root_hash)
        if not pk:
            return False
        pool_root_hash_ser = self._get_pool_root_hash(pre_prepare)
        message = self._create_multi_sig_value_for_pre_prepare(pre_prepare,
                                                              pool_root_hash_ser)
        result = self._bls_bft.bls_crypto_verifier.verify_sig(bls_sig, message.as_single_value(), pk)
        if not result:
            logger.info("Incorrect bls signature {} in commit for "
                        "{} public key: '{}' and message: '{}' from "
                        "pre-prepare: {}".format(bls_sig, sender,
                                                 IndyCryptoBlsUtils.bls_to_str(pk),
                                                 message, pre_prepare))
        return result

    def _validate_multi_sig(self, multi_sig: MultiSignature):
        """Verify a combined multi-signature against the participants' keys."""
        public_keys = []
        pool_root_hash = self.state_root_serializer.deserialize(
            multi_sig.value.pool_state_root_hash)
        for node_name in multi_sig.participants:
            bls_key = self._bls_bft.bls_key_register.get_key_by_name(node_name,
                                                                    pool_root_hash)
            # TODO: It's optional for now
            if bls_key:
                public_keys.append(bls_key)

        value = multi_sig.value.as_single_value()
        return self._bls_bft.bls_crypto_verifier.verify_multi_sig(multi_sig.signature,
                                                                  value,
                                                                  public_keys)

    def _sign_state(self, pre_prepare: PrePrepare):
        """Produce this node's BLS share for the given (possibly fake) PRE-PREPARE."""
        pool_root_hash = self._get_pool_root_hash(pre_prepare)
        message = self._create_multi_sig_value_for_pre_prepare(pre_prepare,
                                                              pool_root_hash).as_single_value()
        return self._bls_bft.bls_crypto_signer.sign(message)

    def _can_calculate_multi_sig(self,
                                 key_3PC,
                                 quorums) -> bool:
        """Return True when a quorum of shares was collected for every ledger."""
        if key_3PC not in self._all_signatures:
            return False

        sigs_for_request = self._all_signatures[key_3PC]
        sigs_invalid = list(
            filter(
                lambda item: not quorums.bls_signatures.is_reached(len(list(item[1].values()))),
                sigs_for_request.items()
            )
        )
        if sigs_invalid:
            for lid, sigs in sigs_invalid:
                logger.debug(
                    '{}Can not create bls signatures for batch {}: '
                    'There are only {} signatures for ledger {}, '
                    'while {} required for multi_signature'.format(BLS_PREFIX,
                                                                   key_3PC,
                                                                   len(list(sigs.values())),
                                                                   lid,
                                                                   quorums.bls_signatures.value)
                )
            return False

        return True

    def _calculate_all_multi_sigs(self, key_3PC, pre_prepare) -> Optional[list]:
        """Combine collected shares into one MultiSignature per ledger."""
        sigs_for_request = self._all_signatures.get(key_3PC)
        res = []
        if sigs_for_request:
            for lid in sigs_for_request:
                sig = sigs_for_request[lid]
                audit_txn = self._get_correct_audit_transaction(pre_prepare)
                if audit_txn:
                    audit_payload = audit_txn[TXN_PAYLOAD][TXN_PAYLOAD_DATA]
                    fake_pp = BlsBftReplicaPlenum. \
                        _create_fake_pre_prepare_for_multi_sig(int(lid),
                                                               audit_payload[AUDIT_TXN_STATE_ROOT][int(lid)],
                                                               audit_payload[AUDIT_TXN_LEDGER_ROOT][int(lid)],
                                                               pre_prepare)
                    res.append(self._calculate_single_multi_sig(sig, fake_pp))
        return res

    def _calculate_single_multi_sig(self, sigs_for_request, pre_prepare) -> Optional[MultiSignature]:
        """Aggregate one ledger's shares into a MultiSignature."""
        bls_signatures = list(sigs_for_request.values())
        participants = list(sigs_for_request.keys())

        sig = self._bls_bft.bls_crypto_verifier.create_multi_sig(bls_signatures)

        pool_root_hash_ser = self._get_pool_root_hash(pre_prepare)
        multi_sig_value = self._create_multi_sig_value_for_pre_prepare(pre_prepare,
                                                                      pool_root_hash_ser)
        return MultiSignature(signature=sig,
                              participants=participants,
                              value=multi_sig_value)

    def _get_pool_root_hash(self, pre_prepare, serialize=True):
        """Return the pool state root for the PRE-PREPARE, falling back to the
        committed root when the message does not carry one."""
        if f.POOL_STATE_ROOT_HASH.nm in pre_prepare:
            pool_root_hash = self.state_root_serializer.deserialize(pre_prepare.poolStateRootHash)
            pool_root_hash_ser = pre_prepare.poolStateRootHash
        else:
            pool_root_hash = self._bls_bft.bls_key_register.get_pool_root_hash_committed()
            pool_root_hash_ser = self.state_root_serializer.serialize(bytes(pool_root_hash))
        return pool_root_hash_ser if serialize else pool_root_hash

    def _save_multi_sig_local(self,
                              multi_sig: MultiSignature):
        self._bls_bft.bls_store.put(multi_sig)
        logger.debug("{}{} saved multi signature {} for root {} (locally calculated)"
                     .format(BLS_PREFIX, self, multi_sig,
                             multi_sig.value.state_root_hash))

    def _save_multi_sig_shared(self, pre_prepare: PrePrepare):
        """Persist multi-sigs that the Primary attached to a PRE-PREPARE."""
        if f.BLS_MULTI_SIGS.nm not in pre_prepare or pre_prepare.blsMultiSigs is None:
            return
        multi_sigs = pre_prepare.blsMultiSigs
        for sig in multi_sigs:
            multi_sig = MultiSignature.from_list(*sig)
            self._bls_bft.bls_store.put(multi_sig)
            logger.debug("{}{} saved multi signature {} for root {} (calculated by Primary)"
                         .format(BLS_PREFIX, self, multi_sig,
                                 multi_sig.value.state_root_hash))

    def _get_correct_audit_transaction(self, pp: PrePrepare):
        """Find the (possibly uncommitted) audit txn matching pp's ppSeqNo.

        Scans backwards from the newest uncommitted entry; returns None if the
        audit ledger is absent or no entry matches.
        """
        ledger = self._database_manager.get_ledger(AUDIT_LEDGER_ID)
        if ledger is None:
            return None
        seqNo = ledger.uncommitted_size
        for curSeqNo in reversed(range(1, seqNo + 1)):
            txn = ledger.get_by_seq_no_uncommitted(curSeqNo)
            if txn:
                payload = txn[TXN_PAYLOAD][TXN_PAYLOAD_DATA]
                if pp.ppSeqNo == payload[AUDIT_TXN_PP_SEQ_NO]:
                    return txn
        return None

    @staticmethod
    def _create_fake_pre_prepare_for_multi_sig(lid, state_root_hash, txn_root_hash, pre_prepare):
        """Clone a PRE-PREPARE with the given ledger's roots substituted in,
        so ledgers other than the batch's own can be signed/verified."""
        params = [
            pre_prepare.instId,
            pre_prepare.viewNo,
            pre_prepare.ppSeqNo,
            pre_prepare.ppTime,
            pre_prepare.reqIdr,
            pre_prepare.discarded,
            pre_prepare.digest,
            # doing it to work around the ledgers that are not in plenum -- it will fail the validation of pre-prepare
            1,
            state_root_hash,
            txn_root_hash,
            pre_prepare.sub_seq_no,
            pre_prepare.final,
            pre_prepare.poolStateRootHash,
            pre_prepare.auditTxnRootHash,
        ]
        pp = PrePrepare(*params)
        # Overwrite the placeholder ledger id set above.
        pp.ledgerId = lid
        return pp

    @staticmethod
    def get_node_name(replica_name: str):
        # TODO: Remove this wrapper
        return replica_name_to_node_name(replica_name)

    def __str__(self, *args, **kwargs):
        return self.node_id
| evernym/plenum | plenum/bls/bls_bft_replica_plenum.py | Python | apache-2.0 | 16,403 |
"""Base class for Tado entity."""
from homeassistant.helpers.entity import Entity
from .const import DEFAULT_NAME, DOMAIN, TADO_ZONE
class TadoDeviceEntity(Entity):
    """Base implementation for Tado device."""

    def __init__(self, device_info):
        """Initialize a Tado device."""
        super().__init__()
        self._device_info = device_info
        self.device_name = device_info["shortSerialNo"]
        self.device_id = device_info["serialNo"]

    @property
    def device_info(self):
        """Return the device_info of the device."""
        info = self._device_info
        return {
            "identifiers": {(DOMAIN, self.device_id)},
            "name": self.device_name,
            "manufacturer": DEFAULT_NAME,
            "sw_version": info["currentFwVersion"],
            "model": info["deviceType"],
            "via_device": (DOMAIN, info["serialNo"]),
        }

    @property
    def should_poll(self):
        """State is pushed by the integration; never poll."""
        return False
class TadoZoneEntity(Entity):
    """Base implementation for Tado zone."""

    def __init__(self, zone_name, home_id, zone_id):
        """Initialize a Tado zone."""
        super().__init__()
        self.zone_name = zone_name
        # Unique per-home identifier for this zone's device entry.
        self._device_zone_id = f"{home_id}_{zone_id}"

    @property
    def device_info(self):
        """Return the device_info of the device."""
        return {
            "identifiers": {(DOMAIN, self._device_zone_id)},
            "name": self.zone_name,
            "manufacturer": DEFAULT_NAME,
            "model": TADO_ZONE,
        }

    @property
    def should_poll(self):
        """State is pushed by the integration; never poll."""
        return False
| tboyce1/home-assistant | homeassistant/components/tado/entity.py | Python | apache-2.0 | 1,664 |
# ===============================================================================
# Copyright 2014 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
from __future__ import absolute_import
from __future__ import print_function
import os
import yaml
from pyface.confirmation_dialog import ConfirmationDialog
from pyface.constant import OK, CANCEL, YES
from pyface.file_dialog import FileDialog
from traits.api import (
HasTraits,
Str,
List,
Int,
Any,
Button,
Bool,
on_trait_change,
Instance,
)
from traitsui.api import (
View,
Item,
UItem,
HGroup,
InstanceEditor,
HSplit,
VGroup,
EnumEditor,
)
from traitsui.handler import Controller
from traitsui.menu import Action
from traitsui.table_column import ObjectColumn
from pychron.core.helpers.filetools import fileiter, add_extension
from pychron.core.ui.table_editor import myTableEditor
from pychron.core.ui.text_editor import myTextEditor
from pychron.core.yaml import yload
from pychron.envisage.icon_button_editor import icon_button_editor
from pychron.envisage.resources import icon
from pychron.experiment.automated_run.hop_util import split_hopstr
from pychron.loggable import Loggable
from pychron.paths import paths
# class NullInt(Int):
# default_value = None
class Position(HasTraits):
    """A single isotope/detector pairing within a peak hop."""

    detector = Str
    isotope = Str
    deflection = Int
    name = Str

    def to_yaml(self):
        """Return this position as a plain dict for YAML serialization."""
        return {
            "detector": self.detector,
            "isotope": self.isotope,
            "active": True,
            "deflection": self.deflection,
            "protect": False,
            "is_baseline": False,
        }

    def to_string(self):
        """Render as ``iso:det`` (plus ``:deflection`` when one is set)."""
        parts = [self.isotope, self.detector]
        if self.deflection:
            parts.append(str(self.deflection))
        return ":".join(parts)
class Hop(HasTraits):
    """One magnet position: a set of isotope/detector pairs measured together.

    ``counts`` is the number of measurements taken at this position and
    ``settle`` the delay (seconds) after the magnet move.
    """

    positions = List
    counts = Int
    settle = Int
    # Fix: was declared as ``isotope_label`` (singular) while the change
    # handler and the editor's table column both use ``isotopes_label``,
    # so the declared trait was dead and the real one created dynamically.
    isotopes_label = Str
    name = Str
    detectors = List(["A", "B"])

    add_position_button = Button
    remove_position_button = Button
    selected = Any
    error_message = Str

    def to_string(self):
        """Render as ``('iso:det[:defl], ...', counts, settle)``."""
        vs = [str(self.counts), str(self.settle)]
        hs = "'{}'".format(
            ", ".join(
                [p.to_string() for p in self.positions if p.isotope and p.detector]
            )
        )
        return "({}, {})".format(hs, ", ".join(vs))

    def to_yaml(self):
        """Return this hop as a dict for YAML serialization."""
        obj = {"counts": self.counts, "settle": self.settle}
        poss = [p for p in self.positions if p.isotope and p.detector]
        if poss:
            obj["cup_configuration"] = [p.to_yaml() for p in poss]
            # First position doubles as the magnet positioning reference.
            pp = poss[0]
            obj["positioning"] = {"detector": pp.detector, "isotope": pp.isotope}
        return obj

    def parse_hopstr(self, hs):
        """Populate positions from a legacy text hop string."""
        for is_baseline, iso, det, defl in split_hopstr(hs):
            p = Position(isotope=iso, detector=det, deflection=int(defl) if defl else 0)
            self.positions.append(p)
        self._handle_position_change()

    def validate_hop(self):
        """
        return true if no duplicates
        """
        self.error_message = ""
        n = len(self.positions)

        ps = {p.isotope for p in self.positions}
        dup_iso = len(set(ps)) < n
        if dup_iso:
            self.error_message = self._make_error_message("isotope")

        ds = {p.detector for p in self.positions}
        dup_det = len(ds) < n
        if dup_det:
            em = self._make_error_message("detector")
            if self.error_message:
                self.error_message = "{}; {}".format(self.error_message, em)
            else:
                self.error_message = em
        return not (dup_iso or dup_det)

    def _make_error_message(self, attr):
        """List the duplicated values of ``attr`` across positions."""
        dets = []
        ps = []
        for p in self.positions:
            det = getattr(p, attr)
            if det in dets:
                ps.append(det)
            dets.append(det)
        return "Multiple {}s: {}".format(attr.capitalize(), ", ".join(ps))

    def _add_position_button_fired(self):
        self.positions.append(Position())

    def _remove_position_button_fired(self):
        idx = self.positions.index(self.selected)
        self.positions.remove(self.selected)
        if len(self.positions) > 0:
            # NOTE: when the first row is removed, idx-1 == -1 selects the
            # last row (Python negative indexing) -- existing behavior.
            self.selected = self.positions[idx - 1]
        else:
            self.selected = None

    @on_trait_change("positions:isotope, positions[]")
    def _handle_position_change(self):
        # Keep the comma-separated summary shown in the hop table in sync.
        self.isotopes_label = ",".join([i.isotope for i in self.positions])

    def traits_view(self):
        from pychron.pychron_constants import ISOTOPES

        cols = [
            ObjectColumn(name="name", label="", width=20, editable=False),
            ObjectColumn(name="isotope", editor=EnumEditor(values=ISOTOPES)),
            ObjectColumn(name="detector", editor=EnumEditor(values=self.detectors)),
            ObjectColumn(
                name="deflection",
            ),
        ]
        v = View(
            VGroup(
                HGroup(
                    Item("counts", tooltip="Number of measurements at this position"),
                    Item(
                        "settle",
                        label="Settle (s)",
                        tooltip="Delay in seconds after magnet move and before measurement",
                    ),
                ),
                UItem(
                    "positions",
                    editor=myTableEditor(
                        columns=cols,
                        sortable=False,
                        clear_selection_on_dclicked=True,
                        selected="selected",
                    ),
                ),
                HGroup(
                    icon_button_editor(
                        "add_position_button",
                        "add",
                        tooltip="Add isotope/detector to measure",
                    ),
                    icon_button_editor(
                        "remove_position_button",
                        "delete",
                        tooltip="Remove selected isotope/detector",
                        enabled_when="selected",
                    ),
                ),
            )
        )
        return v
class HopSequence(HasTraits):
    """An ordered list of Hops making up a peak-hop sequence."""

    hops = List

    def to_string(self):
        """Render the whole sequence as one hop tuple per line."""
        return "\n".join([hi.to_string() for hi in self.hops])

    def to_yaml(self):
        """Return the sequence as a list of hop dicts."""
        return [hi.to_yaml() for hi in self.hops]

    def add_hop(self, idx):
        """Insert a copy of the hop at ``idx``, or append a blank hop if
        ``idx`` is None."""
        if idx is not None:
            h = self.hops[idx]
            hh = h.clone_traits()
            self.hops.insert(idx, hh)
        else:
            h = Hop()
            self.hops.append(h)

        self._label_hops()

    def _label_hops(self):
        # Renumber hops and their positions 1..n for display.
        # Fix: removed a leftover debug print of each hop name.
        for i, hi in enumerate(self.hops):
            hi.name = str(i + 1)
            for j, pi in enumerate(hi.positions):
                pi.name = str(j + 1)

    def remove_hop(self, idx):
        """Delete the hop at ``idx`` and renumber the remainder."""
        self.hops.pop(idx)
        self._label_hops()

    def label_hops(self):
        self._label_hops()
class HopEditorModel(Loggable):
    """Model for editing peak-hop sequences, persisted as YAML or legacy txt."""

    hop_sequence = Instance(HopSequence)
    selected = Any
    path = Str
    detectors = List
    add_hop_button = Button
    remove_hop_button = Button
    # saveable = Bool
    # saveasable = Bool
    text = Str
    dirty = Bool
    use_yaml = True

    def new(self):
        """Start an empty sequence."""
        self.hop_sequence = HopSequence()
        return True

    def open(self, p=None):
        """Open a hop file, prompting with a file dialog when needed.

        Returns True on a successful load, falsy otherwise.
        """
        if p is None:
            p = "/Users/ross/Pychrondata_dev/scripts/measurement/hops/hop.txt"

        if not os.path.isfile(p):
            p = ""
            dialog = FileDialog(action="open", default_directory=paths.hops_dir)
            if dialog.open() == OK:
                p = dialog.path

        if os.path.isfile(p):
            self.path = p
            # self.saveable = True
            # self.saveasable = True
            return self._load(p)

    def save(self):
        """Save to the current path, or delegate to save_as."""
        if self.path:
            if self._validate_sequence():
                self._save_file(self.path)
        else:
            self.save_as()

    def save_as(self):
        """Prompt for a path and save, adding the proper extension."""
        if self._validate_sequence():
            dialog = FileDialog(action="save as", default_directory=paths.hops_dir)
            if dialog.open() == OK:
                p = dialog.path
                p = add_extension(p, ".yaml" if self.use_yaml else ".txt")
                self._save_file(p)
                self.path = p

    def _load(self, p):
        """Populate the model from ``p``; returns True on success."""
        self.hop_sequence = hs = HopSequence()
        if p.endswith(".txt"):
            self.use_yaml = False
            with open(p, "r") as rfile:
                # NOTE(review): eval of file content -- hop files must be
                # trusted; consider ast.literal_eval.
                hops = [eval(l) for l in fileiter(rfile)]

            for i, (hopstr, cnt, settle) in enumerate(hops):
                h = Hop(
                    name=str(i + 1),
                    counts=cnt,
                    settle=settle,
                    detectors=self.detectors,
                )
                h.parse_hopstr(hopstr)
                hs.hops.append(h)
            hs.label_hops()
            if hs.hops:
                self.selected = hs.hops[0]

            with open(p, "r") as rfile:
                self.text = rfile.read()
            # Fix: report success for txt files too (was falling through
            # and returning None, which callers treat as failure).
            return True
        else:
            self.use_yaml = True
            with open(p, "r") as rfile:
                self.text = rfile.read()

            try:
                for i, hop in enumerate(yload(self.text)):
                    h = Hop(
                        name=str(i + 1),
                        counts=hop.get("counts", 0),
                        settle=hop.get("settle", 0),
                        detectors=self.detectors,
                    )
                    # Fix: Hop.to_yaml writes "cup_configuration" (singular);
                    # the old plural key is accepted for legacy files, and a
                    # missing key no longer crashes the loop.
                    cfgs = hop.get("cup_configuration",
                                   hop.get("cup_configurations")) or []
                    for cfg in cfgs:
                        pos = Position(
                            detector=cfg.get("detector", ""),
                            isotope=cfg.get("isotope", ""),
                            active=cfg.get("active", True),
                            is_baseline=cfg.get("is_baseline", False),
                            protect=cfg.get("protect", False),
                            # Fix: deflection is an Int trait; "" was invalid.
                            deflection=cfg.get("deflection", 0),
                        )
                        h.positions.append(pos)
                    hs.hops.append(h)
                hs.label_hops()
                return True
            except yaml.YAMLError:
                # Tolerate unparsable YAML while the user is editing.
                pass

    def _validate_sequence(self):
        """Return True when every hop is duplicate-free; warn otherwise."""
        hs = []
        for h in self.hop_sequence.hops:
            if not h.validate_hop():
                hs.append("Invalid Hop {}. {}".format(h.name, h.error_message))
        if hs:
            self.warning_dialog("\n".join(hs))
        else:
            return True

    def _save_file(self, p):
        self.info("saving hop to {}".format(p))
        with open(p, "w") as wfile:
            if self.use_yaml:
                yaml.dump(self.to_yaml(), wfile, default_flow_style=False)
            else:
                txt = self.to_string()
                wfile.write(txt)
                self.text = txt
        self.dirty = False

    def to_yaml(self):
        return self.hop_sequence.to_yaml()

    def to_string(self):
        header1 = "#hopstr ('iso:det[:defl][,iso:det....]', count, settle)"
        header2 = "#e.g ('Ar40:H1, Ar41:H2, Ar38:L1, Ar37:L2, Ar36:CDD:110', 15, 3)"
        return "\n".join((header1, header2, self.hop_sequence.to_string()))

    def to_text(self):
        """Text representation matching the current persistence format."""
        if self.use_yaml:
            return yaml.dump(self.to_yaml(), default_flow_style=False)
        else:
            return self.to_string()

    def _add_hop_button_fired(self):
        idx = None
        if self.selected:
            idx = self.hop_sequence.hops.index(self.selected)

        self.hop_sequence.add_hop(idx)
        self.dirty = True

    def _remove_hop_button_fired(self):
        hops = self.hop_sequence.hops
        idx = hops.index(self.selected)
        if len(hops) > 1:
            self.selected = hops[0]
        else:
            self.selected = None

        self.hop_sequence.remove_hop(idx)
        self.dirty = True
class HopEditorView(Controller):
    """TraitsUI controller/view for HopEditorModel.

    Presents the hop table, a detail pane for the selected hop, and a raw
    text preview; prompts to save on close when the model is dirty.
    """

    model = HopEditorModel
    title = Str("Peak Hops Editor")

    def close(self, info, is_ok):
        # Ask the user before discarding unsaved edits; CANCEL vetoes close.
        if self.model.dirty:
            dlg = ConfirmationDialog(
                message="Save changes to Hops file",
                cancel=True,
                default=CANCEL,
                title="Save Changes?",
            )
            ret = dlg.open()
            if ret == CANCEL:
                return False
            elif ret == YES:
                self.model.save()
        return True

    @on_trait_change(
        "model:hop_sequence:hops:[counts,settle, positions:[isotope,detector,deflection]]"
    )
    def _handle_edit(self):
        # Any edit marks the model dirty and refreshes the text preview.
        self.model.dirty = True
        self.model.text = self.model.to_text()

    @on_trait_change("model.[path,dirty]")
    def _handle_path_change(self):
        # Reflect the file name (with a '*' dirty marker) in the window title.
        p = self.model.path
        n = os.path.basename(p)
        if self.model.dirty:
            n = "*{}".format(n)

        d = os.path.dirname(p)
        d = d.replace(os.path.expanduser("~"), "")
        t = "{} - PeakHop Editor - {}".format(n, d)
        if not self.info:
            # View not realized yet; stash the title for traits_view.
            self.title = t
        else:
            self.info.ui.title = t

    def save(self, info):
        self.model.save()

    def save_as(self, info):
        self.model.save_as()

    def traits_view(self):
        # Hop summary table (left pane of the editor tab).
        cols = [
            ObjectColumn(name="name", label="", editable=False),
            ObjectColumn(name="counts"),
            ObjectColumn(name="settle", label="Settle (s)"),
            ObjectColumn(
                name="isotopes_label", editable=False, width=175, label="Isotopes"
            ),
        ]

        hgrp = VGroup(
            UItem(
                "object.hop_sequence.hops",
                editor=myTableEditor(
                    columns=cols,
                    clear_selection_on_dclicked=True,
                    sortable=False,
                    selected="selected",
                ),
            ),
            HGroup(
                icon_button_editor("add_hop_button", "add", tooltip="Add peak hop"),
                icon_button_editor(
                    "remove_hop_button",
                    "delete",
                    tooltip="Delete selected peak hop",
                    enabled_when="selected",
                ),
            ),
        )

        # Detail editor for the selected hop (right pane).
        sgrp = UItem("selected", style="custom", editor=InstanceEditor())

        grp = HSplit(hgrp, sgrp)
        save_action = Action(
            name="Save",
            image=icon("document-save"),
            enabled_when="object.saveable",
            action="save",
        )
        save_as_acion = Action(
            name="Save As",
            image=icon("document-save-as"),
            action="save_as",
            enabled_when="object.saveasable",
        )

        # Read-only raw text preview of the serialized hop file.
        teditor = myTextEditor(
            bgcolor="#F7F6D0",
            fontsize=12,
            fontsize_name="fontsize",
            wrap=False,
            tab_width=15,
        )
        v = View(
            VGroup(
                VGroup(grp, label="Editor"),
                VGroup(
                    UItem("object.text", editor=teditor, style="custom"), label="Text"
                ),
            ),
            # toolbar=ToolBar(),
            width=690,
            title=self.title,
            buttons=["OK", save_action, save_as_acion],
            resizable=True,
        )
        return v
if __name__ == "__main__":
    # Standalone demo: build paths under ~/PychronDev and open the editor
    # with a fresh (empty) hop sequence.
    root = os.path.join(os.path.expanduser("~"), "PychronDev")
    paths.build(root)

    m = HopEditorModel()
    m.detectors = ["H2", "H1", "CDD"]
    # m.open()
    m.new()
    h = HopEditorView(model=m)
    # m.new()
    h.configure_traits()
| USGSDenverPychron/pychron | pychron/pyscripts/hops_editor.py | Python | apache-2.0 | 17,028 |
'''
Integration Test for creating KVM VM with all nodes shutdown and recovered.
@author: Quarkonics
'''
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.operations.resource_operations as res_ops
import zstackwoodpecker.zstack_test.zstack_test_vm as test_vm_header
import test_stub
import time
import os
vm = None
def test():
    """Power off both management nodes, recover them, then verify that a
    basic VM can still be created through the recovered deployment."""
    global vm
    host_username = os.environ.get('nodeUserName')
    host_password = os.environ.get('nodePassword')
    zstack_ha_vip = os.environ.get('zstackHaVip')
    node1_ip = os.environ.get('node1Ip')
    node2_ip = os.environ.get('node2Ip')

    # Shut both nodes down via ssh.
    for node_ip in (node1_ip, node2_ip):
        test_util.test_logger("shutdown node: %s" % node_ip)
        test_lib.lib_execute_ssh_cmd(node_ip, host_username, host_password,
                                     "init 0", 180)

    # Power the nodes back on with the recovery script.
    for node_ip in (node1_ip, node2_ip):
        test_util.test_logger("recover node: %s" % node_ip)
        os.system('bash -ex %s %s' % (os.environ.get('nodeRecoverScript'), node_ip))

    # Recover HA from whichever node responds first.
    rsp = test_lib.lib_execute_ssh_cmd(node1_ip, host_username, host_password,
                                       "zstack-ctl recover_ha", 180)
    if not rsp:
        rsp = test_lib.lib_execute_ssh_cmd(node2_ip, host_username, host_password,
                                           "zstack-ctl recover_ha", 180)

    time.sleep(180)
    test_stub.exercise_connection(600)

    vm = test_stub.create_basic_vm()
    vm.check()
    vm.destroy()

    test_util.test_pass('After Recover Node with One command, Create VM Test Success')
#Will be called only if exception happens in test().
def error_cleanup():
    """Best-effort teardown invoked only when test() raises."""
    global vm
    if vm:
        try:
            vm.destroy()
        # Fix: bare ``except:`` also swallowed KeyboardInterrupt/SystemExit;
        # cleanup should stay best-effort but not trap interpreter exits.
        except Exception:
            pass
| zstackorg/zstack-woodpecker | integrationtest/vm/ha/test_all_nodes_recovery_with_one_cmd_create_vm.py | Python | apache-2.0 | 1,972 |
from __future__ import absolute_import
# Copyright (c) 2010-2019 openpyxl
from openpyxl.descriptors.serialisable import Serialisable
from openpyxl.descriptors import (
Typed,
Alias,
)
from openpyxl.descriptors.excel import (
ExtensionList,
)
from openpyxl.descriptors.sequence import (
MultiSequence,
MultiSequencePart,
)
from openpyxl.descriptors.nested import (
NestedBool,
)
from ._3d import _3DBase
from .area_chart import AreaChart, AreaChart3D
from .bar_chart import BarChart, BarChart3D
from .bubble_chart import BubbleChart
from .line_chart import LineChart, LineChart3D
from .pie_chart import PieChart, PieChart3D, ProjectedPieChart, DoughnutChart
from .radar_chart import RadarChart
from .scatter_chart import ScatterChart
from .stock_chart import StockChart
from .surface_chart import SurfaceChart, SurfaceChart3D
from .layout import Layout
from .shapes import GraphicalProperties
from .text import RichText
from .axis import (
NumericAxis,
TextAxis,
SeriesAxis,
DateAxis,
)
class DataTable(Serialisable):
    """Chart data-table (``c:dTable``) settings: which borders/outline/keys
    of the embedded table are shown, plus its shape and text properties."""

    tagname = "dTable"

    showHorzBorder = NestedBool(allow_none=True)
    showVertBorder = NestedBool(allow_none=True)
    showOutline = NestedBool(allow_none=True)
    showKeys = NestedBool(allow_none=True)
    spPr = Typed(expected_type=GraphicalProperties, allow_none=True)
    graphicalProperties = Alias('spPr')
    txPr = Typed(expected_type=RichText, allow_none=True)
    extLst = Typed(expected_type=ExtensionList, allow_none=True)

    # Serialization order of child elements (extLst deliberately excluded).
    __elements__ = ('showHorzBorder', 'showVertBorder', 'showOutline',
                    'showKeys', 'spPr', 'txPr')

    def __init__(self,
                 showHorzBorder=None,
                 showVertBorder=None,
                 showOutline=None,
                 showKeys=None,
                 spPr=None,
                 txPr=None,
                 extLst=None,
                 ):
        self.showHorzBorder = showHorzBorder
        self.showVertBorder = showVertBorder
        self.showOutline = showOutline
        self.showKeys = showKeys
        self.spPr = spPr
        self.txPr = txPr
class PlotArea(Serialisable):
    """Chart plot area (``c:plotArea``): holds the charts and their axes.

    Charts and axes are stored in multi-sequences so several chart types can
    share the plot area; to_tree/from_tree reconcile the axis references
    (axId) between the charts and the axis objects.
    """

    tagname = "plotArea"

    layout = Typed(expected_type=Layout, allow_none=True)
    dTable = Typed(expected_type=DataTable, allow_none=True)
    spPr = Typed(expected_type=GraphicalProperties, allow_none=True)
    graphicalProperties = Alias("spPr")
    extLst = Typed(expected_type=ExtensionList, allow_none=True)

    # at least one chart
    _charts = MultiSequence()
    areaChart = MultiSequencePart(expected_type=AreaChart, store="_charts")
    area3DChart = MultiSequencePart(expected_type=AreaChart3D, store="_charts")
    lineChart = MultiSequencePart(expected_type=LineChart, store="_charts")
    line3DChart = MultiSequencePart(expected_type=LineChart3D, store="_charts")
    stockChart = MultiSequencePart(expected_type=StockChart, store="_charts")
    radarChart = MultiSequencePart(expected_type=RadarChart, store="_charts")
    scatterChart = MultiSequencePart(expected_type=ScatterChart, store="_charts")
    pieChart = MultiSequencePart(expected_type=PieChart, store="_charts")
    pie3DChart = MultiSequencePart(expected_type=PieChart3D, store="_charts")
    doughnutChart = MultiSequencePart(expected_type=DoughnutChart, store="_charts")
    barChart = MultiSequencePart(expected_type=BarChart, store="_charts")
    bar3DChart = MultiSequencePart(expected_type=BarChart3D, store="_charts")
    ofPieChart = MultiSequencePart(expected_type=ProjectedPieChart, store="_charts")
    surfaceChart = MultiSequencePart(expected_type=SurfaceChart, store="_charts")
    surface3DChart = MultiSequencePart(expected_type=SurfaceChart3D, store="_charts")
    bubbleChart = MultiSequencePart(expected_type=BubbleChart, store="_charts")

    # axes
    _axes = MultiSequence()
    valAx = MultiSequencePart(expected_type=NumericAxis, store="_axes")
    catAx = MultiSequencePart(expected_type=TextAxis, store="_axes")
    dateAx = MultiSequencePart(expected_type=DateAxis, store="_axes")
    serAx = MultiSequencePart(expected_type=SeriesAxis, store="_axes")

    __elements__ = ('layout', '_charts', '_axes', 'dTable', 'spPr')

    def __init__(self,
                 layout=None,
                 dTable=None,
                 spPr=None,
                 _charts=(),
                 _axes=(),
                 extLst=None,
                 ):
        self.layout = layout
        self.dTable = dTable
        self.spPr = spPr
        self._charts = _charts
        self._axes = _axes

    def to_tree(self, tagname=None, idx=None, namespace=None):
        # Collect axes already registered, then pull in any axis a chart
        # references that is not yet part of the plot area (deduped by axId).
        axIds = set((ax.axId for ax in self._axes))
        for chart in self._charts:
            for id, axis in chart._axes.items():
                if id not in axIds:
                    setattr(self, axis.tagname, axis)
                    axIds.add(id)
        return super(PlotArea, self).to_tree(tagname)

    @classmethod
    def from_tree(cls, node):
        # Rewire each chart's x/y/z axis attributes from the parsed axes,
        # matching on axId.
        self = super(PlotArea, cls).from_tree(node)
        axes = dict((axis.axId, axis) for axis in self._axes)
        for chart in self._charts:
            if isinstance(chart, ScatterChart):
                # Scatter charts always use exactly two value axes.
                x, y = (axes[axId] for axId in chart.axId)
                chart.x_axis = x
                chart.y_axis = y
                continue
            for axId in chart.axId:
                axis = axes.get(axId)
                if axis is None and isinstance(chart, _3DBase):
                    # Series Axis can be optional
                    chart.z_axis = None
                    continue
                if axis.tagname in ("catAx", "dateAx"):
                    chart.x_axis = axis
                elif axis.tagname == "valAx":
                    chart.y_axis = axis
                elif axis.tagname == "serAx":
                    chart.z_axis = axis

        return self
| kawamon/hue | desktop/core/ext-py/openpyxl-2.6.4/openpyxl/chart/plotarea.py | Python | apache-2.0 | 5,861 |
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tensorflow_fold.util.proto."""
import os
# import google3
import tensorflow as tf
from tensorflow_fold.util import proto_tools
from tensorflow_fold.util import test3_pb2
from tensorflow_fold.util import test_pb2
from google.protobuf import text_format
# Make sure SerializedMessageToTree can see our proto files.
proto_tools.map_proto_source_tree_path("", os.getcwd())
# Note: Tests run in the bazel root directory, which we will use as the root for
# our source protos.
# Importing registers the message descriptors under their fully qualified
# names so serialized_message_to_tree() can resolve them.
proto_tools.import_proto_file("tensorflow_fold/util/test.proto")
proto_tools.import_proto_file("tensorflow_fold/util/test3.proto")
def MakeCyclicProto(message_str):
  """Parse *message_str* (text format) into a proto2 CyclicType message."""
  message = test_pb2.CyclicType()
  return text_format.Parse(message_str, message)
def MakeCyclicProto3(message_str):
  """Parse *message_str* (text format) into a proto3 CyclicType3 message."""
  message = test3_pb2.CyclicType3()
  return text_format.Parse(message_str, message)
def MakeOneAtomProto(message_str):
  """Parse *message_str* (text format) into a OneAtom message."""
  message = test_pb2.OneAtom()
  return text_format.Parse(message_str, message)
class ProtoTest(tf.test.TestCase):
  """Tests for proto_tools.serialized_message_to_tree()."""

  def testSerializedMessageToTree(self):
    # proto2: unset scalar fields come back as None, repeated as [].
    example = MakeCyclicProto(
        "some_same<"
        " many_int32: 1"
        " many_int32: 2"
        " some_same<"
        " many_int32: 3"
        " many_int32: 4"
        " some_bool: false"
        " >"
        ">"
        "some_enum: THAT")
    result = proto_tools.serialized_message_to_tree(
        "tensorflow.fold.CyclicType", example.SerializeToString())
    self.assertEqual(result["some_same"]["many_int32"], [1, 2])
    self.assertEqual(result["some_same"]["some_same"]["many_int32"], [3, 4])
    self.assertEqual(result["some_same"]["some_same"]["some_bool"], False)
    self.assertEqual(result["many_bool"], [])
    self.assertEqual(result["some_bool"], None)
    self.assertEqual(result["some_same"]["many_bool"], [])
    self.assertEqual(result["some_same"]["some_bool"], None)
    # Enums are expanded into a {name, index, number} dict.
    self.assertEqual(result["some_enum"]["name"], "THAT")
    self.assertEqual(result["some_enum"]["index"], 1)
    self.assertEqual(result["some_enum"]["number"], 1)

  def testSerializedMessageToTreeProto3(self):
    # proto3: scalars have no presence, so unset bools read as False,
    # not None (contrast with the proto2 test above).
    example = MakeCyclicProto3(
        "some_same<"
        " many_int32: 1"
        " many_int32: 2"
        " some_same<"
        " many_int32: 3"
        " many_int32: 4"
        " some_bool: false"
        " >"
        ">"
        "some_enum: THAT")
    result = proto_tools.serialized_message_to_tree(
        "tensorflow.fold.CyclicType3", example.SerializeToString())
    self.assertEqual(result["some_same"]["many_int32"], [1, 2])
    self.assertEqual(result["some_same"]["some_same"]["many_int32"], [3, 4])
    self.assertEqual(result["some_same"]["some_same"]["some_bool"], False)
    self.assertEqual(result["many_bool"], [])
    self.assertEqual(result["some_bool"], False)
    self.assertEqual(result["some_same"]["many_bool"], [])
    self.assertEqual(result["some_same"]["some_bool"], False)
    self.assertEqual(result["some_enum"]["name"], "THAT")
    self.assertEqual(result["some_enum"]["index"], 1)
    self.assertEqual(result["some_enum"]["number"], 1)

  def testSerializedMessageToTreeOneofEmpty(self):
    # With no member set, the oneof discriminator ("atom_type") is None.
    empty_proto = MakeOneAtomProto("").SerializeToString()
    empty_result = proto_tools.serialized_message_to_tree(
        "tensorflow.fold.OneAtom", empty_proto)
    self.assertEqual(empty_result["atom_type"], None)
    self.assertEqual(empty_result["some_int32"], None)
    self.assertEqual(empty_result["some_int64"], None)
    self.assertEqual(empty_result["some_uint32"], None)
    self.assertEqual(empty_result["some_uint64"], None)
    self.assertEqual(empty_result["some_double"], None)
    self.assertEqual(empty_result["some_float"], None)
    self.assertEqual(empty_result["some_bool"], None)
    self.assertEqual(empty_result["some_enum"], None)
    self.assertEqual(empty_result["some_string"], None)

  def testSerializedMessageToTreeOneof(self):
    # Setting one member names it in "atom_type"; siblings stay None.
    empty_proto = MakeOneAtomProto("some_string: \"x\"").SerializeToString()
    empty_result = proto_tools.serialized_message_to_tree(
        "tensorflow.fold.OneAtom", empty_proto)
    self.assertEqual(empty_result["atom_type"], "some_string")
    self.assertEqual(empty_result["some_int32"], None)
    self.assertEqual(empty_result["some_int64"], None)
    self.assertEqual(empty_result["some_uint32"], None)
    self.assertEqual(empty_result["some_uint64"], None)
    self.assertEqual(empty_result["some_double"], None)
    self.assertEqual(empty_result["some_float"], None)
    self.assertEqual(empty_result["some_bool"], None)
    self.assertEqual(empty_result["some_enum"], None)
    self.assertEqual(empty_result["some_string"], "x")

  def testNonConsecutiveEnum(self):
    # "index" is the declaration position, "number" the wire value;
    # they differ for non-consecutive enums.
    name = "tensorflow.fold.NonConsecutiveEnumMessage"
    msg = test_pb2.NonConsecutiveEnumMessage(
        the_enum=test_pb2.NonConsecutiveEnumMessage.THREE)
    self.assertEqual(
        {"the_enum": {"name": "THREE", "index": 1, "number": 3}},
        proto_tools.serialized_message_to_tree(name, msg.SerializeToString()))
    msg.the_enum = test_pb2.NonConsecutiveEnumMessage.SEVEN
    self.assertEqual(
        {"the_enum": {"name": "SEVEN", "index": 0, "number": 7}},
        proto_tools.serialized_message_to_tree(name, msg.SerializeToString()))
if __name__ == "__main__":
  # Discover and run the test cases above under the TF test runner.
  tf.test.main()
| pklfz/fold | tensorflow_fold/util/proto_test.py | Python | apache-2.0 | 5,810 |
"""
The Django User class is used to handle users and authentication.
https://docs.djangoproject.com/en/1.10/ref/contrib/auth/
User groups:
superadmin - Can access django admin page
admin - Can access regular admin pages
hacker - Hacker pages
mentor - Mentor pages
judge - Judge pages
user (implied when logged in) - User pages
"""
from .hackathon import Hackathon
from .hackathon_countdown import HackathonCountdown
from .hackathon_map import HackathonMap
from .hackathon_sponsor import HackathonSponsor
from .hackathon_update import HackathonUpdate
from .attendee_status import AttendeeStatus
from .schedule_item import ScheduleItem
from .school import School
from .anon_stat import AnonStat
from .scan_event import ScanEvent
from .scan_record import ScanRecord
from .user_info import UserInfo
from .hacker_info import HackerInfo
from .judge_info import JudgeInfo
from .mentor_info import MentorInfo
from .organizer_info import OrganizerInfo
from .help_request import HelpRequest
from .subscriber import Subscriber
from .wifi_cred import WifiCred
from .link_key import LinkKey
from .hackathon_prize import HackathonPrize
from .judging_expo import JudgingExpo
from .judging_criteria import JudgingCriteria
from .hack import Hack
from .judging_grade import JudgingGrade
from .judging_assignment import JudgingAssignment
from .preview_email import PreviewEmail
from .nomination import Nomination
# TODO OldParseUser
# TODO OldParseHacker
# TODO OldParseMentor
| andrewsosa/hackfsu_com | api/api/models/__init__.py | Python | apache-2.0 | 1,488 |
"""Treadmill bootstrap module.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import io
import errno
import json
import logging
import os
import sys
import subprocess
import tempfile
import pkg_resources
if os.name == 'posix':
import stat
from treadmill import bootstrap
from treadmill import fs
from treadmill import plugin_manager
from treadmill import supervisor
from treadmill import logging as tm_logging
from treadmill import yamlwrapper as yaml
from treadmill import subproc
from treadmill import utils
# Platform-dependent defaults: install root and the template sub-tree name
# ("windows" or "linux") that _install() expands.
if os.name == 'nt':
    DEFAULT_INSTALL_DIR = 'c:\\'
    PLATFORM = 'windows'
else:
    DEFAULT_INSTALL_DIR = '/var/lib'
    PLATFORM = 'linux'

_LOGGER = logging.getLogger(__name__)

# Name of the supervision control directory and of the YAML file that can
# stand in for it in the source templates.
_CONTROL_DIR_NAME = supervisor.ScanDir.control_dir_name()
_CONTROL_DIR_FILE = '{}.yml'.format(_CONTROL_DIR_NAME)
def _is_executable(filename):
"""Check if file is executable.
"""
# XXX: This is an ugly hack until we can replace bootstrap with
# a treadmill.supervisor based installation.
if os.path.basename(filename) in ['run', 'finish', 'app_start',
'SIGTERM', 'SIGHUP', 'SIGQUIT',
'SIGINT', 'SIGUSR1', 'SIGUSR2']:
return True
if filename.endswith('.sh'):
return True
return False
def _rename_file(src, dst):
    """Atomically move *src* over *dst*, then normalise permissions.

    On POSIX the destination is made world-readable, plus executable
    when _is_executable() says so; on other platforms only the rename
    happens.
    """
    fs.replace(src, dst)
    if os.name != 'posix':
        return
    mode = os.stat(dst).st_mode
    mode |= (stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
    if _is_executable(dst):
        mode |= (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
    os.chmod(dst, mode)
def _update(filename, content):
    """Write *content* to *filename*, but only when it differs.

    The file is first compared against the desired content; if it is
    missing or different, the new content is written to a temp file in
    the same directory and renamed into place (atomic on POSIX).
    """
    try:
        with io.open(filename) as existing:
            if existing.read() == content:
                return
    except OSError as os_err:
        # A missing file simply means we must write it.
        if os_err.errno != errno.ENOENT:
            raise
    except IOError as io_err:  # pylint: disable=duplicate-except
        if io_err.errno != errno.ENOENT:
            raise

    tmp_file = tempfile.NamedTemporaryFile(dir=os.path.dirname(filename),
                                           mode='w',
                                           prefix='.tmp',
                                           delete=False)
    with tmp_file:
        tmp_file.write(content)
    _rename_file(tmp_file.name, filename)
def _is_scan_dir(package, src_dir, dst_dir):
    """Return True when *src_dir*/*dst_dir* denote a supervision scan dir.

    A scan dir is recognised by an existing control directory at the
    destination, or by a control directory / control YAML file in the
    packaged source template.
    """
    if os.path.exists(os.path.join(dst_dir, _CONTROL_DIR_NAME)):
        return True

    package_name = package.__name__
    return (
        pkg_resources.resource_isdir(
            package_name, os.path.join(src_dir, _CONTROL_DIR_NAME)) or
        pkg_resources.resource_exists(
            package_name, os.path.join(src_dir, _CONTROL_DIR_FILE))
    )
def _run(script):
    """Hand control to the supervision *script*.

    On Windows the script is run as a child and its exit code becomes
    ours; on POSIX the current process is replaced via exec.
    """
    if os.name == 'nt':
        sys.exit(subprocess.call(script))
    utils.sane_execvp(script, [script])
def _install_services(scan_dir, package, src_dir, dst_dir, params, prefix_len,
                      rec=None):
    """Expand services in scan directory and install.

    :param scan_dir: supervisor.ScanDir the services are created under
    :param package: Python package object holding the source templates
    :param src_dir: template path inside the package
    :param dst_dir: installation root on disk
    :param params: interpolation parameters for the templates
    :param prefix_len: length of the path prefix to strip from resources
    :param rec: optional open file recording every created path
    """
    package_name = package.__name__
    contents = pkg_resources.resource_listdir(package_name, src_dir)

    for item in contents:
        # The control dir itself is handled by the caller, not as a service.
        if item in (_CONTROL_DIR_NAME, _CONTROL_DIR_FILE):
            continue
        resource_path = os.path.join(src_dir, item)
        if pkg_resources.resource_isdir(package_name,
                                        os.path.join(src_dir, item)):
            # Plain subdirectory: recurse with the generic installer.
            dst_path = os.path.join(dst_dir, resource_path[prefix_len:])
            fs.mkdir_safe(dst_path)
            if rec:
                rec.write('%s\n' % os.path.join(dst_path, ''))
            _install(
                package,
                os.path.join(src_dir, item),
                dst_dir,
                params,
                prefix_len=prefix_len,
                rec=rec
            )
        elif resource_path.endswith('.yml'):
            # A <name>.yml file expands into a full service directory
            # <name>/ ("[:-4]" strips the extension).
            dst_path = os.path.join(dst_dir, resource_path[prefix_len:-4])
            name = os.path.basename(dst_path)
            _LOGGER.info('Expand service (%s): %s => %s', name, resource_path,
                         dst_path)
            fs.mkdir_safe(dst_path)
            if rec:
                rec.write('%s\n' % os.path.join(dst_path, ''))
            service_conf_file = pkg_resources.resource_string(
                package_name,
                resource_path
            )
            if not service_conf_file:
                _LOGGER.warning('Service def was empty: %s', resource_path)
                continue
            service_conf = yaml.load(service_conf_file.decode('utf8'))
            service_conf = bootstrap.interpolate_service_conf(
                resource_path, service_conf, name, params)
            svc = supervisor.create_service(
                scan_dir,
                service_conf['name'],
                service_conf['command'],
                userid=service_conf['userid'],
                downed=service_conf['downed'],
                environ_dir=service_conf['environ_dir'],
                environ=service_conf['environ'],
                monitor_policy=service_conf['monitor_policy'],
                notification_fd=service_conf['notification_fd'],
                call_before_run=service_conf['call_before_run'],
                call_before_finish=service_conf['call_before_finish'],
                logger_args=service_conf['logger_args'],
                ionice_prio=0,
            )
            # Materialise the service's data files with the requested mode.
            for file in service_conf['data_dir']:
                permission = 0o644
                if file['executable']:
                    permission = 0o755
                fs.write_safe(
                    os.path.join(svc.data_dir, file['path']),
                    # Bind 'file' as a default to avoid the late-binding
                    # closure pitfall inside the loop.
                    lambda f, file=file: f.write(
                        file['content']
                    ),
                    mode='w',
                    permission=permission
                )
def _install_scan_dir(package, src_dir, dst_dir, params, prefix_len, rec=None):
    """Interpolate source directory as a scan directory containing service
    definitions.

    The control directory may either be packaged as a real directory
    (installed verbatim) or described by a YAML file (expanded via
    supervisor.create_scan_dir).
    """
    package_name = package.__name__

    src_control_dir = os.path.join(src_dir, _CONTROL_DIR_NAME)
    src_control_dir_file = os.path.join(src_dir, _CONTROL_DIR_FILE)
    dst_path = os.path.join(dst_dir, src_dir[prefix_len:])
    dst_control_dir = os.path.join(dst_path, _CONTROL_DIR_NAME)

    scan_dir = None

    if not os.path.exists(dst_control_dir):
        fs.mkdir_safe(dst_control_dir)
        if rec:
            rec.write('%s\n' % os.path.join(dst_control_dir, ''))

    if pkg_resources.resource_isdir(package_name, src_control_dir):
        # Control dir shipped as files: install it like any directory.
        _install(package, src_control_dir, dst_dir, params,
                 prefix_len=prefix_len, rec=rec)
    elif pkg_resources.resource_exists(package_name, src_control_dir_file):
        _LOGGER.info('Expand control dir: %s => %s', src_control_dir_file,
                     dst_control_dir)
        svscan_conf_file = pkg_resources.resource_string(
            package_name,
            src_control_dir_file
        )
        if svscan_conf_file:
            svscan_conf = yaml.load(svscan_conf_file.decode('utf8'))
        else:
            svscan_conf = {}
        scan_dir = supervisor.create_scan_dir(
            dst_path,
            svscan_conf.get('finish_timeout', 0),
            wait_cgroups=svscan_conf.get('wait_cgroups', None),
            kill_svc=svscan_conf.get('kill_svc', None)
        )
        scan_dir.write()

    if not scan_dir:
        # Neither form present (or dir form): wrap the destination as-is.
        scan_dir = supervisor.ScanDir(dst_path)

    _install_services(
        scan_dir,
        package,
        src_dir,
        dst_dir,
        params,
        prefix_len=prefix_len,
        rec=rec
    )
def _install(package, src_dir, dst_dir, params, prefix_len=None, rec=None):
    """Interpolate source directory into target directory with params.

    Walks the packaged template tree recursively: directories are
    created (and may trigger ownership changes or scan-dir expansion),
    files are rendered with *params* and written only when changed.

    :param prefix_len: length of the path prefix to strip from resource
        paths; defaults to the top-level src_dir on first call
    :param rec: optional open file recording every created path
    """
    package_name = package.__name__
    _LOGGER.info(
        'Installing package: %s %s %s', package_name, src_dir, dst_dir
    )
    contents = pkg_resources.resource_listdir(package_name, src_dir)

    if prefix_len is None:
        prefix_len = len(src_dir) + 1

    for item in contents:
        resource_path = os.path.join(src_dir, item)
        dst_path = os.path.join(dst_dir, resource_path[prefix_len:])
        if pkg_resources.resource_isdir(package_name,
                                        os.path.join(src_dir, item)):
            fs.mkdir_safe(dst_path)

            # Check directory ownership.
            owner_rsrc = os.path.join(resource_path, '.owner')
            if pkg_resources.resource_exists(package_name, owner_rsrc):
                owner = bootstrap.interpolate(
                    pkg_resources.resource_string(
                        package_name, owner_rsrc
                    ).decode(),
                    params
                ).strip()
                try:
                    _LOGGER.info('Setting owner: %r - %r', dst_path, owner)
                    (uid, gid) = utils.get_uid_gid(owner)
                    os.chown(dst_path, uid, gid)
                except (IOError, OSError) as err:
                    # Unknown user/group is tolerated; other errors are not.
                    if err.errno != errno.ENOENT:
                        raise

            if rec:
                rec.write('%s\n' % os.path.join(dst_path, ''))

            install_fn = _install
            # Test if is a scan dir first
            if _is_scan_dir(package, os.path.join(src_dir, item), dst_path):
                _LOGGER.info('Scan dir found: %s => %s', resource_path,
                             dst_path)
                install_fn = _install_scan_dir

            install_fn(
                package,
                os.path.join(src_dir, item),
                dst_dir,
                params,
                prefix_len=prefix_len,
                rec=rec
            )
        else:
            # Skip editor swap files and the ownership marker files.
            if resource_path.endswith('.swp'):
                continue
            if resource_path.endswith('.owner'):
                continue

            resource_str = pkg_resources.resource_string(
                package_name,
                resource_path
            )

            if rec:
                rec.write('%s\n' % dst_path)
            _update(dst_path,
                    bootstrap.render(resource_str.decode('utf8'), params))
def install(package, dst_dir, params, run=None, profile=None):
    """Installs the services.

    :param package: bootstrap plugin name (e.g. 'node', 'master')
    :param dst_dir: installation root
    :param params: interpolation parameters overriding plugin DEFAULTS
    :param run: optional script to exec after installation
    :param profile: optional profile extension of the base package
    """
    _LOGGER.info('install: %s - %s, profile: %s', package, dst_dir, profile)

    packages = [package]
    module = plugin_manager.load('treadmill.bootstrap', package)
    extension_module = None
    if profile:
        _LOGGER.info('Installing profile: %s', profile)
        extension_name = '{}.{}'.format(package, profile)
        packages.append(extension_name)
        try:
            extension_module = plugin_manager.load('treadmill.bootstrap',
                                                   extension_name)
        except KeyError:
            # A missing profile extension is not fatal.
            _LOGGER.info('Extension not defined: %s, profile: %s',
                         package, profile)

    subproc.load_packages(packages, lazy=False)

    # Store resolved aliases
    aliases_path = os.path.join(dst_dir, '.aliases.json')
    aliases = subproc.get_aliases()
    with io.open(aliases_path, 'w') as f_aliases:
        f_aliases.write(json.dumps(aliases))

    # Parameter precedence: plugin DEFAULTS < profile DEFAULTS <
    # aliases < caller params.
    defaults = {}
    defaults.update(getattr(module, 'DEFAULTS', {}))
    if extension_module:
        defaults.update(getattr(extension_module, 'DEFAULTS', {}))

    # TODO: this is ugly, error prone and should go away.
    #       aliases should be in default scope, everything else in _args.
    defaults['_alias'] = aliases
    defaults.update(aliases)
    defaults.update(params)
    defaults['aliases_path'] = aliases_path
    os.environ['TREADMILL_ALIASES_PATH'] = defaults['aliases_path']

    interpolated = bootstrap.interpolate(defaults, defaults)

    fs.mkdir_safe(dst_dir)
    # Record every installed path in .install for later cleanup.
    with io.open(os.path.join(dst_dir, '.install'), 'w') as rec:
        _install(module, PLATFORM, dst_dir, interpolated, rec=rec)
        if extension_module:
            _install(
                extension_module,
                '.'.join([profile, PLATFORM]), dst_dir, interpolated,
                rec=rec
            )

    # Extract logging configuration.
    logconf_dir = os.path.join(dst_dir, 'logging')
    fs.mkdir_safe(logconf_dir)
    tm_logging.write_configs(logconf_dir)

    # Write entry-point cache
    distributions = pkg_resources.AvailableDistributions()
    plugin_manager.dump_cache(
        os.path.join(dst_dir, 'plugins.json'), distributions
    )

    if run:
        _run(run)
def wipe(wipe_me, wipe_script):
    """Run *wipe_script* when the *wipe_me* flag file is present.

    The flag file acts as a one-shot request for a clean start; when it
    is absent the existing data is preserved.
    """
    if not os.path.exists(wipe_me):
        _LOGGER.info('Preserving data, no clean restart.')
        return
    _LOGGER.info('Requested clean start, calling: %s', wipe_script)
    subprocess.check_call(wipe_script)
| Morgan-Stanley/treadmill | lib/python/treadmill/bootstrap/install.py | Python | apache-2.0 | 13,260 |
#!/usr/bin/python
# Copyright 2017 Telstra Open Source
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from clean_topology import cleanup
from create_topology import create_topo
# Python 2 script: rebuild the test topology from scratch.
print "\n -- "
# Drop any topology left over from a previous run, then create the
# multi-path test topology from its JSON definition.
cleanup()
create_topo('multi-path-topology.json')
print "\n -- "
| nikitamarchenko/open-kilda | services/topology-engine/queue-engine/tests/smoke-tests/create-multi-path-topology.py | Python | apache-2.0 | 779 |
#!/usr/bin/env python2
'''Androguard Gui'''
import argparse
import sys
from androguard.core import androconf
from androguard.session import Session
from androguard.gui.mainwindow import MainWindow
from androguard.misc import init_print_colors
from PySide import QtCore, QtGui
from threading import Thread
class IpythonConsole(Thread):
    """Background thread hosting an embedded interactive IPython shell."""

    def __init__(self):
        Thread.__init__(self)

    def run(self):
        # Deferred imports: IPython is only required when the console
        # thread is actually started.
        from IPython.terminal.embed import InteractiveShellEmbed
        from traitlets.config import Config

        shell = InteractiveShellEmbed(
            config=Config(),
            banner1="Androguard version %s" % androconf.ANDROGUARD_VERSION)
        init_print_colors()
        shell()
if __name__ == '__main__':
    # Command line: optional APK/session to open, debug logging, and an
    # optional IPython console running alongside the GUI.
    parser = argparse.ArgumentParser(description="Androguard GUI")
    parser.add_argument("-d", "--debug", action="store_true", default=False)
    parser.add_argument("-i", "--input_file", default=None)
    parser.add_argument("-c", "--console", action="store_true", default=False)
    args = parser.parse_args()

    if args.debug:
        androconf.set_debug()

    # We need that to save huge sessions when leaving and avoid
    # RuntimeError: maximum recursion depth exceeded while pickling an object
    # or
    # RuntimeError: maximum recursion depth exceeded in cmp
    # http://stackoverflow.com/questions/2134706/hitting-maximum-recursion-depth-using-pythons-pickle-cpickle
    sys.setrecursionlimit(50000)

    session = Session(export_ipython=args.console)
    console = None
    if args.console:
        # Console runs in its own thread so the Qt event loop stays free.
        console = IpythonConsole()
        console.start()

    app = QtGui.QApplication(sys.argv)
    window = MainWindow(session=session, input_file=args.input_file)
    window.resize(1024, 768)
    window.show()

    sys.exit(app.exec_())
| revolutionaryG/androguard | androgui.py | Python | apache-2.0 | 1,817 |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""The Python datastore API used by app developers.
Defines Entity, Query, and Iterator classes, as well as methods for all of the
datastore's calls. Also defines conversions between the Python classes and
their PB counterparts.
The datastore errors are defined in the datastore_errors module. That module is
only required to avoid circular imports. datastore imports datastore_types,
which needs BadValueError, so it can't be defined in datastore.
"""
import logging
import re
import string
import sys
import traceback
from xml.sax import saxutils
from google.appengine.api import api_base_pb
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.datastore import datastore_index
from google.appengine.datastore import datastore_pb
from google.appengine.runtime import apiproxy_errors
from google.appengine.datastore import entity_pb
# Number of times RunInTransaction will retry on commit conflicts.
TRANSACTION_RETRIES = 3

# Hard cap on indexed properties per entity, enforced in Entity._ToPb().
_MAX_INDEXED_PROPERTIES = 5000

# Re-exported for convenience so callers can use datastore.Key directly.
Key = datastore_types.Key
typename = datastore_types.typename

# Map of active transactions (populated elsewhere in this module).
_txes = {}
def NormalizeAndTypeCheck(arg, types):
  """Normalizes and type checks the given argument.

  Args:
    arg: an instance, tuple, list, iterator, or generator of the given type(s)
    types: allowed type or tuple of types

  Returns:
    A (list, bool) tuple. The list is a normalized, shallow copy of the
    argument. The boolean is True if the argument was a sequence, False
    if it was a single object.

  Raises:
    AssertionError: types includes list or tuple.
    BadArgumentError: arg is not an instance or sequence of one of the given
      types.
  """
  if not isinstance(types, (list, tuple)):
    types = (types,)
  assert list not in types and tuple not in types

  # A single instance of an allowed type is wrapped in a list.
  if isinstance(arg, types):
    return ([arg], False)

  try:
    for val in arg:
      if isinstance(val, types):
        continue
      raise datastore_errors.BadArgumentError(
          'Expected one of %s; received %s (a %s).' %
          (types, val, typename(val)))
  except TypeError:
    # arg is neither an allowed instance nor iterable.
    raise datastore_errors.BadArgumentError(
        'Expected an instance or sequence of %s; received %s (a %s).' %
        (types, arg, typename(arg)))

  return (list(arg), True)
def NormalizeAndTypeCheckKeys(keys):
  """Normalizes and type checks that the given argument is a valid key or keys.

  A wrapper around NormalizeAndTypeCheck() that accepts strings, Keys, and
  Entities, and normalizes to Keys.

  Args:
    keys: a Key or sequence of Keys

  Returns:
    A (list of Keys, bool) tuple. See NormalizeAndTypeCheck.

  Raises:
    BadArgumentError: arg is not an instance or sequence of one of the given
      types.
  """
  keys, multiple = NormalizeAndTypeCheck(keys, (basestring, Entity, Key))
  normalized = [_GetCompleteKeyOrError(key) for key in keys]
  return (normalized, multiple)
def Put(entities):
  """Store one or more entities in the datastore.

  The entities may be new or previously existing. For new entities, Put() will
  fill in the app id and key assigned by the datastore.

  If the argument is a single Entity, a single Key will be returned. If the
  argument is a list of Entity, a list of Keys will be returned.

  Args:
    entities: Entity or list of Entities

  Returns:
    Key or list of Keys

  Raises:
    TransactionFailedError, if the Put could not be committed.
  """
  entities, multiple = NormalizeAndTypeCheck(entities, Entity)

  if multiple and not entities:
    return []

  for entity in entities:
    if not entity.kind() or not entity.app():
      raise datastore_errors.BadRequestError(
          'App and kind must not be empty, in entity: %s' % entity)

  req = datastore_pb.PutRequest()
  req.entity_list().extend([e._ToPb() for e in entities])

  keys = [e.key() for e in entities]
  tx = _MaybeSetupTransaction(req, keys)
  if tx:
    # Only keys that already have an id/name can conflict pre-commit.
    tx.RecordModifiedKeys([k for k in keys if k.has_id_or_name()])

  resp = datastore_pb.PutResponse()
  try:
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Put', req, resp)
  except apiproxy_errors.ApplicationError, err:
    raise _ToDatastoreError(err)

  # Sanity check: the datastore must return exactly one key per entity.
  keys = resp.key_list()
  num_keys = len(keys)
  num_entities = len(entities)
  if num_keys != num_entities:
    raise datastore_errors.InternalError(
        'Put accepted %d entities but returned %d keys.' %
        (num_entities, num_keys))

  # Copy the assigned keys back into the caller's entities.
  for entity, key in zip(entities, keys):
    entity._Entity__key._Key__reference.CopyFrom(key)

  if tx:
    tx.RecordModifiedKeys([e.key() for e in entities], error_on_repeat=False)

  if multiple:
    return [Key._FromPb(k) for k in keys]
  else:
    return Key._FromPb(resp.key(0))
def Get(keys):
  """Retrieves one or more entities from the datastore.

  Retrieves the entity or entities with the given key(s) from the datastore
  and returns them as fully populated Entity objects, as defined below. If
  there is an error, raises a subclass of datastore_errors.Error.

  If keys is a single key or string, an Entity will be returned, or
  EntityNotFoundError will be raised if no existing entity matches the key.

  However, if keys is a list or tuple, a list of entities will be returned
  that corresponds to the sequence of keys. It will include entities for keys
  that were found and None placeholders for keys that were not found.

  Args:
    # the primary key(s) of the entity(ies) to retrieve
    keys: Key or string or list of Keys or strings

  Returns:
    Entity or list of Entity objects
  """
  keys, multiple = NormalizeAndTypeCheckKeys(keys)

  if multiple and not keys:
    return []
  req = datastore_pb.GetRequest()
  req.key_list().extend([key._Key__reference for key in keys])
  _MaybeSetupTransaction(req, keys)

  resp = datastore_pb.GetResponse()
  try:
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Get', req, resp)
  except apiproxy_errors.ApplicationError, err:
    raise _ToDatastoreError(err)

  # Response groups line up 1:1 with requested keys; missing entities
  # become None placeholders.
  entities = []
  for group in resp.entity_list():
    if group.has_entity():
      entities.append(Entity._FromPb(group.entity()))
    else:
      entities.append(None)

  if multiple:
    return entities
  else:
    # Single-key form raises instead of returning None.
    if entities[0] is None:
      raise datastore_errors.EntityNotFoundError()
    return entities[0]
def Delete(keys):
  """Deletes one or more entities from the datastore. Use with care!

  Deletes the given entity(ies) from the datastore. You can only delete
  entities from your app. If there is an error, raises a subclass of
  datastore_errors.Error.

  Args:
    # the primary key(s) of the entity(ies) to delete
    keys: Key or string or list of Keys or strings

  Raises:
    TransactionFailedError, if the Delete could not be committed.
  """
  keys, multiple = NormalizeAndTypeCheckKeys(keys)

  # Deleting nothing is a no-op.
  if multiple and not keys:
    return

  req = datastore_pb.DeleteRequest()
  req.key_list().extend([key._Key__reference for key in keys])

  tx = _MaybeSetupTransaction(req, keys)
  if tx:
    tx.RecordModifiedKeys(keys)

  resp = api_base_pb.VoidProto()
  try:
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Delete', req, resp)
  except apiproxy_errors.ApplicationError, err:
    raise _ToDatastoreError(err)
class Entity(dict):
  """A datastore entity.

  Includes read-only accessors for app id, kind, and primary key. Also
  provides dictionary-style access to properties.
  """

  def __init__(self, kind, parent=None, _app=None, name=None):
    """Constructor. Takes the kind and transaction root, which cannot be
    changed after the entity is constructed, and an optional parent. Raises
    BadArgumentError or BadKeyError if kind is invalid or parent is not an
    existing Entity or Key in the datastore.

    Args:
      # this entity's kind
      kind: string
      # if provided, this entity's parent. Its key must be complete.
      parent: Entity or Key
      # if provided, this entity's name.
      name: string
    """
    ref = entity_pb.Reference()
    _app = datastore_types.ResolveAppId(_app)
    ref.set_app(_app)

    datastore_types.ValidateString(kind, 'kind',
                                   datastore_errors.BadArgumentError)

    if parent is not None:
      parent = _GetCompleteKeyOrError(parent)
      if _app != parent.app():
        raise datastore_errors.BadArgumentError(
            "_app %s doesn't match parent's app %s" % (_app, parent.app()))
      ref.CopyFrom(parent._Key__reference)

    # This entity becomes the last element of its key path.
    last_path = ref.mutable_path().add_element()
    last_path.set_type(kind.encode('utf-8'))

    if name is not None:
      datastore_types.ValidateString(name, 'name')
      # Leading digits are reserved for numeric ids.
      if name[0] in string.digits:
        raise datastore_errors.BadValueError('name cannot begin with a digit')
      last_path.set_name(name.encode('utf-8'))

    self.__key = Key._FromPb(ref)

  def app(self):
    """Returns the name of the application that created this entity, a
    string.
    """
    return self.__key.app()

  def kind(self):
    """Returns this entity's kind, a string.
    """
    return self.__key.kind()

  def key(self):
    """Returns this entity's primary key, a Key instance.
    """
    return self.__key

  def parent(self):
    """Returns this entity's parent, as a Key. If this entity has no parent,
    returns None.
    """
    return self.key().parent()

  def entity_group(self):
    """Returns this entitys's entity group as a Key.

    Note that the returned Key will be incomplete if this is a a root entity
    and its key is incomplete.
    """
    return self.key().entity_group()

  def __setitem__(self, name, value):
    """Implements the [] operator. Used to set property value(s).

    If the property name is the empty string or not a string, raises
    BadPropertyError. If the value is not a supported type, raises
    BadValueError.
    """
    datastore_types.ValidateProperty(name, value)
    dict.__setitem__(self, name, value)

  def setdefault(self, name, value):
    """If the property exists, returns its value. Otherwise sets it to value.

    If the property name is the empty string or not a string, raises
    BadPropertyError. If the value is not a supported type, raises
    BadValueError.
    """
    datastore_types.ValidateProperty(name, value)
    return dict.setdefault(self, name, value)

  def update(self, other):
    """Updates this entity's properties from the values in other.

    If any property name is the empty string or not a string, raises
    BadPropertyError. If any value is not a supported type, raises
    BadValueError.
    """
    # Routed through __setitem__ so every value is validated.
    for name, value in other.items():
      self.__setitem__(name, value)

  def copy(self):
    """The copy method is not supported.
    """
    raise NotImplementedError('Entity does not support the copy() method.')

  def ToXml(self):
    """Returns an XML representation of this entity. Atom and gd:namespace
    properties are converted to XML according to their respective schemas. For
    more information, see:

      http://www.atomenabled.org/developers/syndication/
      http://code.google.com/apis/gdata/common-elements.html

    This is *not* optimized. It shouldn't be used anywhere near code that's
    performance-critical.
    """
    xml = u'<entity kind=%s' % saxutils.quoteattr(self.kind())
    if self.__key.has_id_or_name():
      xml += ' key=%s' % saxutils.quoteattr(str(self.__key))
    xml += '>'
    if self.__key.has_id_or_name():
      xml += '\n <key>%s</key>' % self.__key.ToTagUri()

    properties = self.keys()
    if properties:
      properties.sort()
      xml += '\n ' + '\n '.join(self._PropertiesToXml(properties))

    xml += '\n</entity>\n'
    return xml

  def _PropertiesToXml(self, properties):
    """ Returns a list of the XML representations of each of the given
    properties. Ignores properties that don't exist in this entity.

    Arg:
      properties: string or list of strings

    Returns:
      list of strings
    """
    xml_properties = []

    for propname in properties:
      if not self.has_key(propname):
        continue

      propname_xml = saxutils.quoteattr(propname)

      values = self[propname]
      # Multi-valued properties produce one XML element per value.
      if not isinstance(values, list):
        values = [values]

      proptype = datastore_types.PropertyTypeName(values[0])
      proptype_xml = saxutils.quoteattr(proptype)

      escaped_values = self._XmlEscapeValues(propname)
      open_tag = u'<property name=%s type=%s>' % (propname_xml, proptype_xml)
      close_tag = u'</property>'
      xml_properties += [open_tag + val + close_tag for val in escaped_values]

    return xml_properties

  def _XmlEscapeValues(self, property):
    """ Returns a list of the XML-escaped string values for the given property.
    Raises an AssertionError if the property doesn't exist.

    Arg:
      property: string

    Returns:
      list of strings
    """
    assert self.has_key(property)
    xml = []

    values = self[property]
    if not isinstance(values, list):
      values = [values]

    for val in values:
      if hasattr(val, 'ToXml'):
        xml.append(val.ToXml())
      else:
        if val is None:
          xml.append('')
        else:
          xml.append(saxutils.escape(unicode(val)))

    return xml

  def _ToPb(self):
    """Converts this Entity to its protocol buffer representation. Not
    intended to be used by application developers.

    Returns:
      entity_pb.Entity
    """
    pb = entity_pb.EntityProto()
    pb.mutable_key().CopyFrom(self.key()._ToPb())

    # The entity group is rooted at the first element of the key path.
    group = pb.mutable_entity_group()
    if self.__key.has_id_or_name():
      root = pb.key().path().element(0)
      group.add_element().CopyFrom(root)

    # NOTE(review): 'properties' is rebound inside the loop below,
    # shadowing the sorted items list; harmless today but fragile.
    properties = self.items()
    properties.sort()
    for (name, values) in properties:
      properties = datastore_types.ToPropertyPb(name, values)
      if not isinstance(properties, list):
        properties = [properties]

      sample = values
      if isinstance(sample, list):
        sample = values[0]

      # Blob/Text values are unindexed and go into the raw property list.
      if isinstance(sample, (datastore_types.Blob, datastore_types.Text)):
        pb.raw_property_list().extend(properties)
      else:
        pb.property_list().extend(properties)

    if pb.property_size() > _MAX_INDEXED_PROPERTIES:
      raise datastore_errors.BadRequestError(
          'Too many indexed properties for entity %r.' % self.key())

    return pb

  @staticmethod
  def _FromPb(pb):
    """Static factory method. Returns the Entity representation of the
    given protocol buffer (datastore_pb.Entity). Not intended to be used by
    application developers.

    The Entity PB's key must be complete. If it isn't, an AssertionError is
    raised.

    Args:
      # a protocol buffer Entity
      pb: datastore_pb.Entity

    Returns:
      # the Entity representation of the argument
      Entity
    """
    assert pb.key().path().element_size() > 0

    # A complete key has either an id or a name, never both.
    last_path = pb.key().path().element_list()[-1]
    assert last_path.has_id() ^ last_path.has_name()
    if last_path.has_id():
      assert last_path.id() != 0
    else:
      assert last_path.has_name()
      assert last_path.name()

    e = Entity(unicode(last_path.type().decode('utf-8')))
    ref = e.__key._Key__reference
    ref.CopyFrom(pb.key())

    temporary_values = {}

    for prop_list in (pb.property_list(), pb.raw_property_list()):
      for prop in prop_list:
        if not prop.has_multiple():
          raise datastore_errors.Error(
              'Property %s is corrupt in the datastore; it\'s missing the '
              'multiple valued field.' % prop.name())
        try:
          value = datastore_types.FromPropertyPb(prop)
        except (AssertionError, AttributeError, TypeError, ValueError), e:
          # NOTE(review): 'e' shadows the entity above, and the format
          # arguments look swapped (class where the name should be) —
          # confirm the intended message ordering.
          raise datastore_errors.Error(
              'Property %s is corrupt in the datastore. %s: %s' %
              (e.__class__, prop.name(), e))

        multiple = prop.multiple()
        if multiple:
          value = [value]

        name = prop.name()
        cur_value = temporary_values.get(name)
        if cur_value is None:
          temporary_values[name] = value
        elif not multiple:
          raise datastore_errors.Error(
              'Property %s is corrupt in the datastore; it has multiple '
              'values, but is not marked as multiply valued.' % name)
        else:
          cur_value.extend(value)

    for name, value in temporary_values.iteritems():
      decoded_name = unicode(name.decode('utf-8'))

      datastore_types.ValidateReadProperty(decoded_name, value)

      dict.__setitem__(e, decoded_name, value)

    return e
class Query(dict):
  """A datastore query.

  (Instead of this, consider using appengine.ext.gql.Query! It provides a
  query language interface on top of the same functionality.)

  Queries are used to retrieve entities that match certain criteria, including
  app id, kind, and property filters. Results may also be sorted by properties.

  App id and kind are required. Only entities from the given app, of the given
  type, are returned. If an ancestor is set, with Ancestor(), only entities
  with that ancestor are returned.

  Property filters are used to provide criteria based on individual property
  values. A filter compares a specific property in each entity to a given
  value or list of possible values.

  An entity is returned if its property values match *all* of the query's
  filters. In other words, filters are combined with AND, not OR. If an
  entity does not have a value for a property used in a filter, it is not
  returned.

  Property filters map filter strings of the form '<property name> <operator>'
  to filter values. Use dictionary accessors to set property filters, like so:

  > query = Query('Person')
  > query['name ='] = 'Ryan'
  > query['age >='] = 21

  This query returns all Person entities where the name property is 'Ryan'
  and the age property is at least 21.

  Another way to build this query is:

  > query = Query('Person')
  > query.update({'name =': 'Ryan', 'age >=': 21})

  The supported operators are =, >, <, >=, and <=. Only one inequality
  filter may be used per query. Any number of equals filters may be used in
  a single Query.

  A filter value may be a list or tuple of values. This is interpreted as
  multiple filters with the same filter string and different values, all ANDed
  together. For example, this query returns everyone with the tags "google"
  and "app engine":

  > Query('Person', {'tag =': ('google', 'app engine')})

  Result entities can be returned in different orders. Use the Order()
  method to specify properties that results will be sorted by, and in which
  direction.

  Note that filters and orderings may be provided at any time before the query
  is run. When the query is fully specified, Run() runs the query and returns
  an iterator. The query results can be accessed through the iterator.

  A query object may be reused after it's been run. Its filters and
  orderings can be changed to create a modified query.

  If you know how many result entities you need, use Get() to fetch them:

  > query = Query('Person', {'age >': 21})
  > for person in query.Get(4):
  >   print 'I have four pints left. Have one on me, %s!' % person['name']

  If you don't know how many results you need, or if you need them all, you
  can get an iterator over the results by calling Run():

  > for person in Query('Person', {'age >': 21}).Run():
  >   print 'Have a pint on me, %s!' % person['name']

  Get() is more efficient than Run(), so use Get() whenever possible.

  Finally, the Count() method returns the number of result entities matched by
  the query. The returned count is cached; successive Count() calls will not
  re-scan the datastore unless the query is changed.
  """
  # Sort directions and query hints, mirrored from the datastore protocol
  # buffers so callers never have to import datastore_pb themselves.
  ASCENDING = datastore_pb.Query_Order.ASCENDING
  DESCENDING = datastore_pb.Query_Order.DESCENDING

  ORDER_FIRST = datastore_pb.Query.ORDER_FIRST
  ANCESTOR_FIRST = datastore_pb.Query.ANCESTOR_FIRST
  FILTER_FIRST = datastore_pb.Query.FILTER_FIRST

  # Maps filter operator strings to their protocol buffer enum values.
  OPERATORS = {'<': datastore_pb.Query_Filter.LESS_THAN,
               '<=': datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL,
               '>': datastore_pb.Query_Filter.GREATER_THAN,
               '>=': datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL,
               '=': datastore_pb.Query_Filter.EQUAL,
               '==': datastore_pb.Query_Filter.EQUAL,
              }
  INEQUALITY_OPERATORS = frozenset(['<', '<=', '>', '>='])

  # Parses a filter string: group 1 is the property name, group 3 is the
  # (optional) operator; group 2 is unused. Applied case-insensitively.
  FILTER_REGEX = re.compile(
    '^\s*([^\s]+)(\s+(%s)\s*)?$' % '|'.join(OPERATORS.keys()),
    re.IGNORECASE | re.UNICODE)

  # Per-instance query state. These class-level defaults are overwritten in
  # __init__ and by the filter/order mutators below.
  __kind = None
  __app = None
  __orderings = None
  __cached_count = None  # memoized Count() result; cleared on filter changes
  __hint = None
  __ancestor = None
  __filter_order = None  # maps filter string -> insertion-order index
  __filter_counter = 0   # next insertion-order index to hand out

  # At most one property may carry inequality filters; track which one, and
  # how many inequality filters are currently set on it.
  __inequality_prop = None
  __inequality_count = 0

  def __init__(self, kind, filters={}, _app=None):
    """Constructor.

    Raises BadArgumentError if kind is not a string. Raises BadValueError or
    BadFilterError if filters is not a dictionary of valid filters.

    Note: the mutable default for 'filters' is safe because it is only read
    (via update()), never mutated.

    Args:
      # kind is required. filters is optional; if provided, it's used
      # as an initial set of property filters.
      kind: string
      filters: dict
    """
    datastore_types.ValidateString(kind, 'kind',
                                   datastore_errors.BadArgumentError)

    self.__kind = kind
    self.__orderings = []
    self.__filter_order = {}
    # Routes through update() -> __setitem__ so each filter is validated.
    self.update(filters)

    self.__app = datastore_types.ResolveAppId(_app)
def Order(self, *orderings):
  """Specify how the query results should be sorted.

  Result entities will be sorted by the first property argument, then by the
  second, and so on. For example, this:

  > query = Query('Person')
  > query.Order('bday', ('age', Query.DESCENDING))

  sorts everyone in order of their birthday, starting with January 1.
  People with the same birthday are sorted by age, oldest to youngest.

  The direction for each sort property may be provided; if omitted, it
  defaults to ascending.

  Order() may be called multiple times. Each call resets the sort order
  from scratch.

  If an inequality filter exists in this Query it must be the first property
  passed to Order. Any number of sort orders may be used after the
  inequality filter property. Without inequality filters, any number of
  filters with different orders may be specified.

  Entities with multiple values for an order property are sorted by their
  lowest value.

  Note that a sort order implies an existence filter! In other words,
  Entities without the sort order property are filtered out, and *not*
  included in the query results.

  If the sort order property has different types in different entities - ie,
  if bob['id'] is an int and fred['id'] is a string - the entities will be
  grouped first by the property type, then sorted within type. No attempt is
  made to compare property values across types.

  Raises BadArgumentError if any argument is of the wrong format.

  Args:
    # the properties to sort by, in sort order. each argument may be either a
    # string or (string, direction) 2-tuple.

  Returns:
    # this query
    Query
  """
  orderings = list(orderings)

  # Normalize each ordering to a (property, direction) 2-tuple.
  # (enumerate replaces the original zip(orderings, range(len(orderings)))
  # construct; behavior is unchanged.)
  for i, order in enumerate(orderings):
    if not (isinstance(order, basestring) or
            (isinstance(order, tuple) and len(order) in [2, 3])):
      raise datastore_errors.BadArgumentError(
          'Order() expects strings or 2- or 3-tuples; received %s (a %s). ' %
          (order, typename(order)))

    if isinstance(order, basestring):
      order = (order,)

    datastore_types.ValidateString(order[0], 'sort order property',
                                   datastore_errors.BadArgumentError)
    property = order[0]

    direction = order[-1]
    if direction not in (Query.ASCENDING, Query.DESCENDING):
      if len(order) == 3:
        raise datastore_errors.BadArgumentError(
            'Order() expects Query.ASCENDING or DESCENDING; received %s' %
            str(direction))
      # A bare property name (or 2-tuple without a valid direction) sorts
      # ascending by default.
      direction = Query.ASCENDING

    orderings[i] = (property, direction)

  # The inequality-filter property, if any, must come first in the ordering.
  if (orderings and self.__inequality_prop and
      orderings[0][0] != self.__inequality_prop):
    raise datastore_errors.BadArgumentError(
        'First ordering property must be the same as inequality filter '
        'property, if specified for this query; received %s, expected %s' %
        (orderings[0][0], self.__inequality_prop))

  self.__orderings = orderings
  return self
def Hint(self, hint):
  """Sets a hint for how this query should run.

  The hint tells the datastore which index scan to start from:

  - FILTER_FIRST: scan the index for the first filter's property; best when
    that filter matches only a few results.
  - ANCESTOR_FIRST: scan everything below the ancestor; best when the
    ancestor has few descendants.
  - ORDER_FIRST: scan the sort-order property's index, which is already
    sorted; best for large result sets or when only a prefix is fetched.

  Note that hints are currently ignored in the v3 datastore!

  Arg:
    one of datastore.Query.[ORDER_FIRST, ANCESTOR_FIRST, FILTER_FIRST]

  Returns:
    # this query
    Query
  """
  valid_hints = (self.ORDER_FIRST, self.ANCESTOR_FIRST, self.FILTER_FIRST)
  if hint not in valid_hints:
    raise datastore_errors.BadArgumentError(
        'Query hint must be ORDER_FIRST, ANCESTOR_FIRST, or FILTER_FIRST.')
  self.__hint = hint
  return self
def Ancestor(self, ancestor):
  """Sets an ancestor for this query.

  Restricts the query to entities descended from the given entity: every
  result will have the ancestor as its parent, or parent's parent, etc.

  Raises BadArgumentError or BadKeyError if parent is not an existing Entity
  or Key in the datastore.

  Args:
    # the key must be complete
    ancestor: Entity or Key

  Returns:
    # this query
    Query
  """
  ancestor_key = _GetCompleteKeyOrError(ancestor)
  reference = datastore_pb.Reference()
  reference.CopyFrom(ancestor_key._Key__reference)
  self.__ancestor = reference
  return self
def Run(self):
  """Runs this query.

  If a filter string is invalid, raises BadFilterError. If a filter value is
  invalid, raises BadValueError. If an IN filter is provided, and a sort
  order on another property is provided, raises BadQueryError.

  If you know in advance how many results you want, use Get() instead. It's
  more efficient.

  Returns:
    # an iterator that provides access to the query results
    Iterator
  """
  # Delegate to _Run with no limit and no offset.
  return self._Run()
def _Run(self, limit=None, offset=None):
  """Runs this query, with an optional result limit and an optional offset.

  Identical to Run, with the extra optional limit and offset parameters.
  limit and offset must both be integers >= 0.

  This is not intended to be used by application developers. Use Get()
  instead!
  """
  # Queries are disallowed inside transactions: a query may touch arbitrary
  # entity groups, while a transaction is pinned to a single group.
  if _CurrentTransactionKey():
    raise datastore_errors.BadRequestError(
      "Can't query inside a transaction.")

  pb = self._ToPb(limit, offset)
  result = datastore_pb.QueryResult()

  try:
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'RunQuery', pb, result)
  except apiproxy_errors.ApplicationError, err:
    try:
      # _ToDatastoreError always raises. Catch NeedIndexError here so its
      # message can be augmented with the YAML for the missing composite
      # index; any other datastore error propagates unchanged.
      _ToDatastoreError(err)
    except datastore_errors.NeedIndexError, exc:
      yaml = datastore_index.IndexYamlForQuery(
        *datastore_index.CompositeIndexForQuery(pb)[1:-1])
      raise datastore_errors.NeedIndexError(
        str(exc) + '\nThis query needs this index:\n' + yaml)

  return Iterator._FromPb(result.cursor())
def Get(self, limit, offset=0):
  """Fetches and returns a maximum number of results from the query.

  Returns a list of up to 'limit' entities matching the query, in sort order
  if one was specified (otherwise the order is undefined). 'offset' entities
  matching the query are skipped before results are returned; the limit is
  applied after the offset. If fewer than 'limit' entities remain, all of
  the remaining entities are returned, so the list may be shorter than
  'limit'. If nothing is left, an empty list is returned.

  If you know in advance how many results you want, this method is more
  efficient than Run(), since it fetches all of the results at once and the
  datastore backend can apply the limit to the underlying scan.

  Args:
    # the maximum number of entities to return
    int or long
    # the number of entities to skip
    int or long

  Returns:
    # a list of entities
    [Entity, ...]
  """
  if not (isinstance(limit, (int, long)) and limit > 0):
    raise datastore_errors.BadArgumentError(
        'Argument to Get named \'limit\' must be an int greater than 0; '
        'received %s (a %s)' % (limit, typename(limit)))

  if not (isinstance(offset, (int, long)) and offset >= 0):
    raise datastore_errors.BadArgumentError(
        'Argument to Get named \'offset\' must be an int greater than or '
        'equal to 0; received %s (a %s)' % (offset, typename(offset)))

  iterator = self._Run(limit, offset)
  return iterator._Next(limit)
def Count(self, limit=None):
"""Returns the number of entities that this query matches. The returned
count is cached; successive Count() calls will not re-scan the datastore
unless the query is changed.
Args:
limit, a number. If there are more results than this, stop short and
just return this number. Providing this argument makes the count
operation more efficient.
Returns:
The number of results.
"""
if self.__cached_count:
return self.__cached_count
resp = api_base_pb.Integer64Proto()
try:
apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Count',
self._ToPb(limit=limit), resp)
except apiproxy_errors.ApplicationError, err:
raise _ToDatastoreError(err)
else:
self.__cached_count = resp.value()
return self.__cached_count
def __iter__(self):
  # Query deliberately refuses direct iteration (iterating the dict would
  # yield filter strings, not results); iterate over Run()'s Iterator.
  raise NotImplementedError(
    'Query objects should not be used as iterators. Call Run() first.')
def __setitem__(self, filter, value):
  """Implements the [] operator. Used to set filters.

  If the filter string is empty or not a string, raises BadFilterError. If
  the value is not a supported type, raises BadValueError.
  """
  if isinstance(value, tuple):
    value = list(value)

  datastore_types.ValidateProperty(' ', value, read_only=True)
  match = self._CheckFilter(filter, value)
  property = match.group(1)
  operator = match.group(3)

  dict.__setitem__(self, filter, value)

  if operator in self.INEQUALITY_OPERATORS:
    # Track the single property that carries inequality filters so
    # _CheckFilter and Order() can enforce the datastore's restrictions.
    if self.__inequality_prop is None:
      self.__inequality_prop = property
    else:
      assert self.__inequality_prop == property
    self.__inequality_count += 1

  if filter not in self.__filter_order:
    # Remember insertion order; _ToPb emits filters in this order.
    self.__filter_order[filter] = self.__filter_counter
    self.__filter_counter += 1

  # Any filter change invalidates the memoized Count() result.
  self.__cached_count = None
def setdefault(self, filter, value):
  """If the filter exists, returns its value. Otherwise sets it to value.

  If the property name is the empty string or not a string, raises
  BadPropertyError. If the value is not a supported type, raises
  BadValueError.
  """
  # Validate both the value and the filter string before touching the dict.
  datastore_types.ValidateProperty(' ', value)
  self._CheckFilter(filter, value)
  # NOTE(review): unlike __setitem__, this path does not update
  # __filter_order or the inequality bookkeeping, yet dict.setdefault can
  # insert a brand-new filter -- confirm callers only use setdefault for
  # existing filters before relying on it.
  self.__cached_count = None
  return dict.setdefault(self, filter, value)
def __delitem__(self, filter):
  """Implements the del [] operator. Used to remove filters.
  """
  dict.__delitem__(self, filter)
  del self.__filter_order[filter]
  self.__cached_count = None

  # Re-parse the removed filter string so the inequality bookkeeping can be
  # unwound if it carried an inequality operator.
  parsed = Query.FILTER_REGEX.match(filter)
  prop_name = parsed.group(1)
  op = parsed.group(3)

  if op in self.INEQUALITY_OPERATORS:
    assert self.__inequality_count >= 1
    assert prop_name == self.__inequality_prop
    self.__inequality_count -= 1
    if self.__inequality_count == 0:
      self.__inequality_prop = None
def update(self, other):
  """Updates this query's filters from the ones in other.

  If any filter string is invalid, raises BadFilterError. If any value is
  not a supported type, raises BadValueError.
  """
  # Route every entry through the [] operator so each filter is validated
  # and the filter-order/inequality bookkeeping stays consistent.
  for filter_str, filter_value in other.items():
    self[filter_str] = filter_value
def copy(self):
  """The copy method is not supported.
  """
  # Copying is deliberately unsupported; a copy would also have to
  # duplicate the private filter-order and inequality bookkeeping.
  raise NotImplementedError('Query does not support the copy() method.')
def _CheckFilter(self, filter, values):
  """Type check a filter string and list of values.

  Raises BadFilterError if the filter string is empty, not a string, or
  invalid. Raises BadValueError if the value type is not supported.

  Args:
    filter: String containing the filter text.
    values: List of associated filter values.

  Returns:
    re.MatchObject (never None) that matches the 'filter'. Group 1 is the
    property name, group 3 is the operator. (Group 2 is unused.)
  """
  try:
    match = Query.FILTER_REGEX.match(filter)
    if not match:
      raise datastore_errors.BadFilterError(
        'Could not parse filter string: %s' % str(filter))
  except TypeError:
    # A non-string filter makes re.match raise TypeError; report it the
    # same way as an unparseable filter string.
    raise datastore_errors.BadFilterError(
      'Could not parse filter string: %s' % str(filter))

  property = match.group(1)
  operator = match.group(3)
  if operator is None:
    # A bare property name means equality.
    operator = '='

  # Normalize values to a list for the type checks below.
  if isinstance(values, tuple):
    values = list(values)
  elif not isinstance(values, list):
    values = [values]
  # NOTE(review): an empty list/tuple of values reaches values[0] below and
  # raises IndexError rather than a Bad*Error -- confirm callers never pass
  # one before tightening this.
  if isinstance(values[0], datastore_types.Blob):
    raise datastore_errors.BadValueError(
      'Filtering on Blob properties is not supported.')
  if isinstance(values[0], datastore_types.Text):
    raise datastore_errors.BadValueError(
      'Filtering on Text properties is not supported.')

  if operator in self.INEQUALITY_OPERATORS:
    # Enforce the single-inequality-property rule and its interaction with
    # the first sort order (see Order()).
    if self.__inequality_prop and property != self.__inequality_prop:
      raise datastore_errors.BadFilterError(
        'Only one property per query may have inequality filters (%s).' %
        ', '.join(self.INEQUALITY_OPERATORS))
    elif len(self.__orderings) >= 1 and self.__orderings[0][0] != property:
      raise datastore_errors.BadFilterError(
        'Inequality operators (%s) must be on the same property as the '
        'first sort order, if any sort orders are supplied' %
        ', '.join(self.INEQUALITY_OPERATORS))
  elif property in datastore_types._SPECIAL_PROPERTIES:
    # Special properties get extra validation; filter values for the key
    # pseudo-property must themselves be Keys.
    if property == datastore_types._KEY_SPECIAL_PROPERTY:
      for value in values:
        if not isinstance(value, Key):
          raise datastore_errors.BadFilterError(
            '%s filter value must be a Key; received %s (a %s)' %
            (datastore_types._KEY_SPECIAL_PROPERTY, value, typename(value)))

  return match
def _ToPb(self, limit=None, offset=None):
  """Converts this Query to its protocol buffer representation. Not
  intended to be used by application developers. Enforced by hiding the
  datastore_pb classes.

  Args:
    # an upper bound on the number of results returned by the query.
    limit: int
    # number of results that match the query to skip. limit is applied
    # after the offset is fulfilled
    offset: int

  Returns:
    # the PB representation of this Query
    datastore_pb.Query
  """
  pb = datastore_pb.Query()

  pb.set_kind(self.__kind.encode('utf-8'))
  if self.__app:
    pb.set_app(self.__app.encode('utf-8'))
  if limit is not None:
    pb.set_limit(limit)
  if offset is not None:
    pb.set_offset(offset)
  if self.__ancestor:
    pb.mutable_ancestor().CopyFrom(self.__ancestor)

  # Only emit the hint when the query actually has the thing the hint
  # refers to (orderings, an ancestor, or at least one filter).
  if ((self.__hint == self.ORDER_FIRST and self.__orderings) or
      (self.__hint == self.ANCESTOR_FIRST and self.__ancestor) or
      (self.__hint == self.FILTER_FIRST and len(self) > 0)):
    pb.set_hint(self.__hint)

  # Emit filters in insertion order (recorded by __setitem__).
  ordered_filters = [(i, f) for f, i in self.__filter_order.iteritems()]
  ordered_filters.sort()

  for i, filter_str in ordered_filters:
    # Defensive: skip any bookkeeping entry whose filter is no longer in
    # the dict.
    if filter_str not in self:
      continue
    values = self[filter_str]
    match = self._CheckFilter(filter_str, values)
    name = match.group(1)

    props = datastore_types.ToPropertyPb(name, values)
    if not isinstance(props, list):
      props = [props]

    op = match.group(3)
    if op is None:
      op = '='

    # A list of values becomes one filter PB per value, ANDed together.
    for prop in props:
      filter = pb.add_filter()
      filter.set_op(self.OPERATORS[op])
      filter.add_property().CopyFrom(prop)

  for property, direction in self.__orderings:
    order = pb.add_order()
    order.set_property(property.encode('utf-8'))
    order.set_direction(direction)

  return pb
class Iterator(object):
  """An iterator over the results of a datastore query.

  Iterators are used to access the results of a Query. An iterator is
  obtained by building a Query, then calling Run() on it.

  Iterator implements Python's iterator protocol, so results can be accessed
  with the for and in statements:

  > it = Query('Person').Run()
  > for person in it:
  >   print 'Hi, %s!' % person['name']
  """
  def __init__(self, cursor):
    # cursor: the datastore-assigned cursor id identifying this result
    # stream on the backend.
    self.__cursor = cursor
    # Entities prefetched for next(); _Next() refuses to run while this is
    # non-empty (see _Next's docstring).
    self.__buffer = []
    self.__more_results = True

  def _Next(self, count):
    """Returns the next result(s) of the query.

    Not intended to be used by application developers. Use the python
    iterator protocol instead.

    This method returns the next entities from the list of resulting
    entities that matched the query. If the query specified a sort
    order, entities are returned in that order. Otherwise, the order
    is undefined.

    The argument specifies the number of entities to return. If it's
    greater than the number of remaining entities, all of the
    remaining entities are returned. In that case, the length of the
    returned list will be smaller than count.

    There is an internal buffer for use with the next() method. If
    this buffer is not empty, up to 'count' values are removed from
    this buffer and returned. It's best not to mix _Next() and
    next().

    The results are always returned as a list. If there are no results
    left, an empty list is returned.

    Args:
      # the number of entities to return; must be >= 1
      count: int or long

    Returns:
      # a list of entities
      [Entity, ...]
    """
    if not isinstance(count, (int, long)) or count <= 0:
      raise datastore_errors.BadArgumentError(
        'Argument to _Next must be an int greater than 0; received %s (a %s)' %
        (count, typename(count)))

    if self.__buffer:
      # next() has already prefetched entities; mixing the two would skip
      # or reorder results.
      raise datastore_errors.BadRequestError(
        'You can\'t mix next() and _Next()')

    if not self.__more_results:
      return []

    req = datastore_pb.NextRequest()
    req.set_count(count)
    req.mutable_cursor().CopyFrom(self._ToPb())
    result = datastore_pb.QueryResult()
    try:
      apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Next', req, result)
    except apiproxy_errors.ApplicationError, err:
      raise _ToDatastoreError(err)

    self.__more_results = result.more_results()

    ret = [Entity._FromPb(r) for r in result.result_list()]
    return ret

  # Number of entities next() prefetches per datastore round trip.
  _BUFFER_SIZE = 20

  def next(self):
    # Refill the prefetch buffer lazily; an empty refill means exhaustion.
    if not self.__buffer:
      self.__buffer = self._Next(self._BUFFER_SIZE)
    try:
      return self.__buffer.pop(0)
    except IndexError:
      raise StopIteration

  def __iter__(self): return self

  def _ToPb(self):
    """Converts this Iterator to its protocol buffer representation. Not
    intended to be used by application developers. Enforced by hiding the
    datastore_pb classes.

    Returns:
      # the PB representation of this Iterator
      datastore_pb.Cursor
    """
    pb = datastore_pb.Cursor()
    pb.set_cursor(self.__cursor)
    return pb

  @staticmethod
  def _FromPb(pb):
    """Static factory method. Returns the Iterator representation of the given
    protocol buffer (datastore_pb.Cursor). Not intended to be used by
    application developers. Enforced by not hiding the datastore_pb classes.

    Args:
      # a protocol buffer Cursor
      pb: datastore_pb.Cursor

    Returns:
      # the Iterator representation of the argument
      Iterator
    """
    return Iterator(pb.cursor())
class _Transaction(object):
"""Encapsulates a transaction currently in progress.
If we've sent a BeginTransaction call, then handle will be a
datastore_pb.Transaction that holds the transaction handle.
If we know the entity group for this transaction, it's stored in the
entity_group attribute, which is set by RecordModifiedKeys().
modified_keys is a set containing the Keys of all entities modified (ie put
or deleted) in this transaction. If an entity is modified more than once, a
BadRequestError is raised.
"""
def __init__(self):
"""Initializes modified_keys to the empty set."""
self.handle = None
self.entity_group = None
self.modified_keys = None
self.modified_keys = set()
def RecordModifiedKeys(self, keys, error_on_repeat=True):
"""Updates the modified keys seen so far.
Also sets entity_group if it hasn't yet been set.
If error_on_repeat is True and any of the given keys have already been
modified, raises BadRequestError.
Args:
keys: sequence of Keys
"""
keys, _ = NormalizeAndTypeCheckKeys(keys)
if keys and not self.entity_group:
self.entity_group = keys[0].entity_group()
keys = set(keys)
if error_on_repeat:
already_modified = self.modified_keys.intersection(keys)
if already_modified:
raise datastore_errors.BadRequestError(
"Can't update entity more than once in a transaction: %r" %
already_modified.pop())
self.modified_keys.update(keys)
def RunInTransaction(function, *args, **kwargs):
  """Runs a function inside a datastore transaction.

  Runs the user-provided function inside a full-featured, ACID datastore
  transaction. Every Put, Get, and Delete call in the function is made within
  the transaction. All entities involved in these calls must belong to the
  same entity group. Queries are not supported.

  The trailing arguments are passed to the function as positional arguments.
  If the function returns a value, that value will be returned by
  RunInTransaction. Otherwise, it will return None.

  The function may raise any exception to roll back the transaction instead of
  committing it. If this happens, the transaction will be rolled back and the
  exception will be re-raised up to RunInTransaction's caller.

  If you want to roll back intentionally, but don't have an appropriate
  exception to raise, you can raise an instance of datastore_errors.Rollback.
  It will cause a rollback, but will *not* be re-raised up to the caller.

  The function may be run more than once, so it should be idempotent. It
  should avoid side effects, and it shouldn't have *any* side effects that
  aren't safe to occur multiple times. This includes modifying the arguments,
  since they persist across invocations of the function. However, this doesn't
  include Put, Get, and Delete calls, of course.

  Example usage:

  > def decrement(key, amount=1):
  >   counter = datastore.Get(key)
  >   counter['count'] -= amount
  >   if counter['count'] < 0:    # don't let the counter go negative
  >     raise datastore_errors.Rollback()
  >   datastore.Put(counter)
  >
  > counter = datastore.Query('Counter', {'name': 'foo'})
  > datastore.RunInTransaction(decrement, counter.key(), amount=5)

  Transactions satisfy the traditional ACID properties. They are:

  - Atomic. All of a transaction's operations are executed or none of them are.

  - Consistent. The datastore's state is consistent before and after a
  transaction, whether it committed or rolled back. Invariants such as
  "every entity has a primary key" are preserved.

  - Isolated. Transactions operate on a snapshot of the datastore. Other
  datastore operations do not see intermediated effects of the transaction;
  they only see its effects after it has committed.

  - Durable. On commit, all writes are persisted to the datastore.

  Nested transactions are not supported.

  Args:
    # a function to be run inside the transaction
    function: callable
    # positional arguments to pass to the function
    args: variable number of any type

  Returns:
    the function's return value, if any

  Raises:
    TransactionFailedError, if the transaction could not be committed.
  """
  if _CurrentTransactionKey():
    raise datastore_errors.BadRequestError(
      'Nested transactions are not supported.')

  tx_key = None
  try:
    # This frame object serves as the transaction's registry key; see
    # _FindTransactionFrameInStack / _NewTransactionKey below.
    tx_key = _NewTransactionKey()
    tx = _Transaction()
    _txes[tx_key] = tx

    for i in range(0, TRANSACTION_RETRIES + 1):
      # Each attempt starts with a clean modified-keys set; the transaction
      # handle itself is (re)created lazily by _MaybeSetupTransaction.
      tx.modified_keys.clear()

      try:
        result = function(*args, **kwargs)
      except:
        # The user function failed. Roll back best-effort (a rollback
        # failure is only logged), then re-raise the original exception --
        # unless it was an intentional datastore_errors.Rollback, which is
        # swallowed and makes us return None.
        original_exception = sys.exc_info()

        if tx.handle:
          try:
            resp = api_base_pb.VoidProto()
            apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Rollback',
                                           tx.handle, resp)
          except:
            exc_info = sys.exc_info()
            logging.info('Exception sending Rollback:\n' +
                         ''.join(traceback.format_exception(*exc_info)))

        type, value, trace = original_exception
        if type is datastore_errors.Rollback:
          return
        else:
          # Python 2 three-argument raise: preserve the original traceback.
          raise type, value, trace

      if tx.handle:
        try:
          resp = api_base_pb.VoidProto()
          apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Commit',
                                         tx.handle, resp)
        except apiproxy_errors.ApplicationError, err:
          if (err.application_error ==
              datastore_pb.Error.CONCURRENT_TRANSACTION):
            # Another transaction beat us to this entity group; drop the
            # handle and retry the whole function.
            logging.warning('Transaction collision for entity group with '
                            'key %r. Retrying...', tx.entity_group)
            tx.handle = None
            tx.entity_group = None
            continue
          else:
            raise _ToDatastoreError(err)

      return result

    # All retries were consumed by concurrency conflicts.
    raise datastore_errors.TransactionFailedError(
      'The transaction could not be committed. Please try again.')

  finally:
    # Always unregister this transaction, committed or not.
    if tx_key in _txes:
      del _txes[tx_key]
    del tx_key
def _MaybeSetupTransaction(request, keys):
  """Begins a transaction, if necessary, and populates it in the request.

  If we're currently inside a transaction, this records the entity group,
  checks that the keys are all in that entity group, creates the transaction
  PB, and sends the BeginTransaction. It then populates the transaction handle
  in the request.

  Raises BadRequestError if the entity has a different entity group than the
  current transaction.

  Args:
    request: GetRequest, PutRequest, or DeleteRequest
    keys: sequence of Keys

  Returns:
    _Transaction if we're inside a transaction, otherwise None
  """
  assert isinstance(request, (datastore_pb.GetRequest, datastore_pb.PutRequest,
                              datastore_pb.DeleteRequest))
  tx_key = None

  try:
    tx_key = _CurrentTransactionKey()
    if tx_key:
      tx = _txes[tx_key]

      # Every key in this request must belong to the transaction's entity
      # group (pinned by the first group seen if not already known).
      groups = [k.entity_group() for k in keys]
      if tx.entity_group:
        expected_group = tx.entity_group
      else:
        expected_group = groups[0]
      for group in groups:
        # Incomplete (id-less, name-less) group keys compare equal
        # structurally, so identity is additionally required for them.
        if (group != expected_group or
            (not group.has_id_or_name() and group is not expected_group)):
          raise _DifferentEntityGroupError(expected_group, group)

      # Lazily send BeginTransaction on the first datastore operation
      # inside the transaction.
      if not tx.handle:
        tx.handle = datastore_pb.Transaction()
        req = api_base_pb.VoidProto()
        apiproxy_stub_map.MakeSyncCall('datastore_v3', 'BeginTransaction', req,
                                       tx.handle)

      request.mutable_transaction().CopyFrom(tx.handle)

      return tx

  finally:
    del tx_key
def _DifferentEntityGroupError(a, b):
  """Raises a BadRequestError that says the given entity groups are different.

  Includes the two entity groups in the message, formatted more clearly and
  concisely than repr(Key). Note that this function raises the error itself
  rather than returning it.

  Args:
    a, b are both Keys that represent entity groups.
  """
  def describe(key):
    # Prefer the name when present; otherwise fall back to the numeric id.
    if key.name():
      return 'name=%r' % key.name()
    return 'id=%r' % key.id()

  raise datastore_errors.BadRequestError(
      'Cannot operate on different entity groups in a transaction: '
      '(kind=%r, %s) and (kind=%r, %s).' % (a.kind(), describe(a),
                                            b.kind(), describe(b)))
def _FindTransactionFrameInStack():
"""Walks the stack to find a RunInTransaction() call.
Returns:
# this is the RunInTransaction() frame record, if found
frame record or None
"""
frame = sys._getframe()
filename = frame.f_code.co_filename
frame = frame.f_back.f_back
while frame:
if (frame.f_code.co_filename == filename and
frame.f_code.co_name == 'RunInTransaction'):
return frame
frame = frame.f_back
return None
_CurrentTransactionKey = _FindTransactionFrameInStack
_NewTransactionKey = sys._getframe
def _GetCompleteKeyOrError(arg):
  """Expects an Entity or a Key, and returns the corresponding Key. Raises
  BadArgumentError or BadKeyError if arg is a different type or is incomplete.

  (A string is also accepted and parsed as an encoded Key.)

  Args:
    arg: Entity or Key

  Returns:
    Key
  """
  if isinstance(arg, Key):
    key = arg
  elif isinstance(arg, basestring):
    key = Key(arg)
  elif isinstance(arg, Entity):
    key = arg.key()
  else:
    # The original guarded this branch with 'elif not isinstance(arg, Key)',
    # which is always true once the first branch has failed; a plain else
    # is equivalent and clearer.
    raise datastore_errors.BadArgumentError(
        'Expects argument to be an Entity or Key; received %s (a %s).' %
        (arg, typename(arg)))

  assert isinstance(key, Key)

  if not key.has_id_or_name():
    raise datastore_errors.BadKeyError('Key %r is not complete.' % key)

  return key
def _AddOrAppend(dictionary, key, value):
"""Adds the value to the existing values in the dictionary, if any.
If dictionary[key] doesn't exist, sets dictionary[key] to value.
If dictionary[key] is not a list, sets dictionary[key] to [old_value, value].
If dictionary[key] is a list, appends value to that list.
Args:
dictionary: a dict
key, value: anything
"""
if key in dictionary:
existing_value = dictionary[key]
if isinstance(existing_value, list):
existing_value.append(value)
else:
dictionary[key] = [existing_value, value]
else:
dictionary[key] = value
def _ToDatastoreError(err):
  """Converts an apiproxy.ApplicationError to an error in datastore_errors.

  Note: despite the name and the original docstring, this function *raises*
  the translated error rather than returning it.

  Args:
    err: apiproxy.ApplicationError

  Raises:
    A subclass of datastore_errors.Error (datastore_errors.Error itself for
    application error codes with no specific mapping).
  """
  # Map datastore_pb error codes onto the public exception hierarchy.
  errors = {
      datastore_pb.Error.BAD_REQUEST: datastore_errors.BadRequestError,
      datastore_pb.Error.CONCURRENT_TRANSACTION:
      datastore_errors.TransactionFailedError,
      datastore_pb.Error.INTERNAL_ERROR: datastore_errors.InternalError,
      datastore_pb.Error.NEED_INDEX: datastore_errors.NeedIndexError,
      datastore_pb.Error.TIMEOUT: datastore_errors.Timeout,
  }
  if err.application_error in errors:
    raise errors[err.application_error](err.error_detail)
  else:
    raise datastore_errors.Error(err.error_detail)
| wgrose/leanto | google/appengine/api/datastore.py | Python | apache-2.0 | 52,965 |
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common logging helpers."""
import logging
import requests
from google.cloud.logging.entries import LogEntry
from google.cloud.logging.entries import ProtobufEntry
from google.cloud.logging.entries import StructEntry
from google.cloud.logging.entries import TextEntry
# Prefer the GAPIC-generated severity enum when it is available; otherwise
# fall back to a local stand-in that defines the same numeric values.
try:
    from google.cloud.logging_v2.gapic.enums import LogSeverity
except ImportError:  # pragma: NO COVER

    class LogSeverity(object):
        """Map severities for non-GAPIC usage."""

        DEFAULT = 0
        DEBUG = 100
        INFO = 200
        NOTICE = 300
        WARNING = 400
        ERROR = 500
        CRITICAL = 600
        ALERT = 700
        EMERGENCY = 800


# Translation table from Python stdlib logging levels to LogSeverity values;
# used by _normalize_severity below.
_NORMALIZED_SEVERITIES = {
    logging.CRITICAL: LogSeverity.CRITICAL,
    logging.ERROR: LogSeverity.ERROR,
    logging.WARNING: LogSeverity.WARNING,
    logging.INFO: LogSeverity.INFO,
    logging.DEBUG: LogSeverity.DEBUG,
    logging.NOTSET: LogSeverity.DEFAULT,
}

# Base URL and required header for the GCE metadata server; queried by
# retrieve_metadata_server below.
METADATA_URL = "http://metadata.google.internal./computeMetadata/v1/"
METADATA_HEADERS = {"Metadata-Flavor": "Google"}
def entry_from_resource(resource, client, loggers):
    """Detect correct entry type from resource and instantiate.

    :type resource: dict
    :param resource: One entry resource from API response.

    :type client: :class:`~google.cloud.logging.client.Client`
    :param client: Client that owns the log entry.

    :type loggers: dict
    :param loggers:
        A mapping of logger fullnames -> loggers.  If the logger
        that owns the entry is not in ``loggers``, the entry
        will have a newly-created logger.

    :rtype: :class:`~google.cloud.logging.entries._BaseEntry`
    :returns: The entry instance, constructed via the resource
    """
    # The first recognized payload key determines the entry class; resources
    # with no recognized payload fall through to the plain LogEntry.
    payload_types = (
        ("textPayload", TextEntry),
        ("jsonPayload", StructEntry),
        ("protoPayload", ProtobufEntry),
    )
    for payload_key, entry_class in payload_types:
        if payload_key in resource:
            return entry_class.from_api_repr(resource, client, loggers)
    return LogEntry.from_api_repr(resource, client, loggers)
def retrieve_metadata_server(metadata_key):
    """Retrieve the metadata key in the metadata server.

    See: https://cloud.google.com/compute/docs/storing-retrieving-metadata

    :type metadata_key: str
    :param metadata_key: Key of the metadata which will form the url. You can
                         also supply query parameters after the metadata key.
                         e.g. "tags?alt=json"

    :rtype: str
    :returns: The value of the metadata key returned by the metadata server,
              or None when the key is absent / the server is unreachable.
    """
    try:
        response = requests.get(METADATA_URL + metadata_key,
                                headers=METADATA_HEADERS)
    except requests.exceptions.RequestException:
        # A connection failure means the attribute does not exist in the
        # metadata server (e.g. we are not running on GCE) -- report nothing.
        return None
    if response.status_code == requests.codes.ok:
        return response.text
    return None
def _normalize_severity(stdlib_level):
    """Normalize a Python stdlib severity to LogSeverity enum.

    :type stdlib_level: int
    :param stdlib_level: 'levelno' from a :class:`logging.LogRecord`

    :rtype: int
    :returns: Corresponding Stackdriver severity.
    """
    # Unknown levels pass through unchanged so callers never lose detail.
    if stdlib_level in _NORMALIZED_SEVERITIES:
        return _NORMALIZED_SEVERITIES[stdlib_level]
    return stdlib_level
| tseaver/google-cloud-python | logging/google/cloud/logging/_helpers.py | Python | apache-2.0 | 3,909 |
from bluesky.plans import count
from bluesky.callbacks import LiveTable

# Acceptance test: take a single reading ("count") of the pe1 detector and
# stream the resulting documents to a LiveTable.  ``RE`` (the RunEngine) and
# ``pe1`` are presumably injected by the IPython profile startup files --
# they are not defined in this script.  TODO confirm.
RE(count([pe1]), LiveTable([pe1]))
| NSLS-II-XPD/ipython_ophyd | profile_collection_germ/acceptance_tests/01-count-pe1.py | Python | bsd-2-clause | 109 |
# j4cDAC test code
#
# Copyright 2011 Jacob Potter
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import socket
import time
import struct
def pack_point(x, y, r, g, b, i = -1, u1 = 0, u2 = 0, flags = 0):
    """Pack some color values into a struct dac_point.

    Values must be specified for x, y, r, g, and b. If a value is not
    passed in for the other fields, i will default to max(r, g, b); the
    rest default to zero.
    """
    # A negative intensity means "derive it from the brightest channel".
    intensity = max(r, g, b) if i < 0 else i
    return struct.pack("<HhhHHHHHH", flags, x, y, r, g, b, intensity, u1, u2)
class ProtocolError(Exception):
    """Raised when the DAC sends a response that violates the protocol."""
class Status(object):
    """Represents a status response from the DAC."""

    def __init__(self, data):
        """Initialize from a chunk of data."""
        # The wire format is a 20-byte little-endian struct
        # (4 bytes + 4 shorts + 2 ints).
        self.protocol_version, self.le_state, self.playback_state, \
            self.source, self.le_flags, self.playback_flags, \
            self.source_flags, self.fullness, self.point_rate, \
            self.point_count = \
            struct.unpack("<BBBBHHHHII", data)

    def dump(self, prefix = " - "):
        """Dump to a string."""
        # NOTE(review): despite the docstring, this prints the lines rather
        # than returning a string.
        lines = [
            "Light engine: state %d, flags 0x%x" %
                (self.le_state, self.le_flags),
            "Playback: state %d, flags 0x%x" %
                (self.playback_state, self.playback_flags),
            "Buffer: %d points" %
                (self.fullness, ),
            "Playback: %d kpps, %d points played" %
                (self.point_rate, self.point_count),
            "Source: %d, flags 0x%x" %
                (self.source, self.source_flags)
        ]
        for l in lines:
            print prefix + l
class BroadcastPacket(object):
    """Represents a broadcast packet from the DAC."""

    def __init__(self, st):
        """Initialize from a chunk of data."""
        # Layout: 6-byte MAC, then HW rev / SW rev / buffer capacity /
        # max point rate, then a 20-byte embedded Status struct.
        self.mac = st[:6]
        self.hw_rev, self.sw_rev, self.buffer_capacity, \
            self.max_point_rate = struct.unpack("<HHHI", st[6:16])
        self.status = Status(st[16:36])

    def dump(self, prefix = " - "):
        """Dump to a string."""
        # NOTE(review): prints the lines rather than returning a string.
        lines = [
            "MAC: " + ":".join(
                "%02x" % (ord(o), ) for o in self.mac),
            "HW %d, SW %d" %
                (self.hw_rev, self.sw_rev),
            "Capabilities: max %d points, %d kpps" %
                (self.buffer_capacity, self.max_point_rate)
        ]
        for l in lines:
            print prefix + l
        #self.status.dump(prefix)
class DAC(object):
    """A connection to a DAC (j4cDAC over TCP)."""

    def read(self, l):
        """Read exactly length bytes from the connection."""
        # Accumulate into self.buf until at least l bytes are available.
        while l > len(self.buf):
            self.buf += self.conn.recv(4096)
        obuf = self.buf
        self.buf = obuf[l:]
        return obuf[:l]

    def readresp(self, cmd):
        """Read a response from the DAC."""
        # Responses are 22 bytes: ACK/NAK byte, echoed command byte, then a
        # 20-byte Status struct.
        data = self.read(22)
        response = data[0]
        cmdR = data[1]
        status = Status(data[2:])
        # status.dump()
        if cmdR != cmd:
            raise ProtocolError("expected resp for %r, got %r"
                % (cmd, cmdR))
        if response != "a":
            raise ProtocolError("expected ACK, got %r"
                % (response, ))
        self.last_status = status
        return status

    def __init__(self, host, port = 7765):
        """Connect to the DAC over TCP."""
        conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        conn.connect((host, port))
        #print "Connected to %s:%s" % (host, port)
        self.conn = conn
        self.buf = ""
        # Read the "hello" message
        first_status = self.readresp("?")
        #first_status.dump()

    def begin(self, lwm, rate):
        # 'b' command: begin playback, with low-water mark and point rate.
        cmd = struct.pack("<cHI", "b", lwm, rate)
        self.conn.sendall(cmd)
        return self.readresp("b")

    def update(self, lwm, rate):
        # 'u' command: update low-water mark / point rate.
        cmd = struct.pack("<cHI", "u", lwm, rate)
        self.conn.sendall(cmd)
        return self.readresp("u")

    def encode_point(self, point):
        # point is a tuple of pack_point arguments.
        try:
            return pack_point(*point)
        except Exception as e:
            ##print "Exception"
            #print point
            raise e

    def write(self, points):
        # 'd' command: a point-count header followed by the packed points.
        epoints = map(self.encode_point, points)
        cmd = struct.pack("<cH", "d", len(epoints))
        self.conn.sendall(cmd + "".join(epoints))
        return self.readresp("d")

    def prepare(self):
        self.conn.sendall("p")
        return self.readresp("p")

    def stop(self):
        self.conn.sendall("s")
        return self.readresp("s")

    def estop(self):
        # 0xFF: emergency stop.
        self.conn.sendall("\xFF")
        return self.readresp("\xFF")

    def clear_estop(self):
        self.conn.sendall("c")
        return self.readresp("c")

    def ping(self):
        self.conn.sendall("?")
        return self.readresp("?")

    def play_stream(self, stream):
        """Continuously feed points from ``stream`` to the DAC (never returns)."""
        # First, prepare the stream
        if self.last_status.playback_state == 2:
            raise Exception("already playing?!")
        elif self.last_status.playback_state == 0:
            self.prepare()
        started = 0
        while True:
            # How much room?  The device buffer is assumed to hold 1799
            # points -- TODO confirm against the DAC's reported capacity.
            cap = 1799 - self.last_status.fullness
            points = stream.read(cap)
            if cap < 100:
                # Buffer nearly full: back off briefly.
                # NOTE(review): cap is bumped *after* points were read with
                # the old value, so the += only affects the (commented-out)
                # logging below -- confirm intent.
                time.sleep(0.005)
                cap += 150
            # print "Writing %d points" % (cap, )
            t0 = time.time()
            self.write(points)
            t1 = time.time()
            # print "Took %f" % (t1 - t0, )
            if not started:
                # Start playback once the first batch is buffered.
                self.begin(0, 30000)
                started = 1
def find_dac():
    """Listen for broadcast packets."""
    # DACs announce themselves via UDP broadcasts on port 7654; decode each
    # announcement forever.  (This function never returns.)
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    s.bind(("0.0.0.0", 7654))
    while True:
        data, addr = s.recvfrom(1024)
        bp = BroadcastPacket(data)
        #print "Packet from %s: " % (addr, )
        #bp.dump()
def find_first_dac():
    """Return the IP address of the first DAC heard broadcasting on UDP 7654."""
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    s.bind(("0.0.0.0", 7654))
    data, addr = s.recvfrom(1024)
    # Parse (and discard) the packet before reporting the sender's address.
    bp = BroadcastPacket(data)
    #print "Packet from %s: " % (addr, )
    return addr[0]
| lightengine/lightstream | lightstream/oldlib/dac.py | Python | bsd-2-clause | 5,679 |
from django.conf import settings
from django.utils import translation
from geotrek.tourism import models as tourism_models
from geotrek.tourism.views import TouristicContentViewSet, TouristicEventViewSet
from geotrek.trekking.management.commands.sync_rando import Command as BaseCommand
# Register mapentity models
from geotrek.tourism import urls # NOQA
class Command(BaseCommand):
    """Extend trekking's ``sync_rando`` to also export tourism content/events."""

    def sync_content(self, lang, content):
        # Export the content's PDF plus every resized picture variant.
        self.sync_pdf(lang, content)
        for picture, resized in content.resized_pictures:
            self.sync_media_file(lang, resized)

    def sync_event(self, lang, event):
        # Same treatment for touristic events: PDF + resized pictures.
        self.sync_pdf(lang, event)
        for picture, resized in event.resized_pictures:
            self.sync_media_file(lang, resized)

    def sync_tourism(self, lang):
        # GeoJSON layers for both tourism collections.
        self.sync_geojson(lang, TouristicContentViewSet, 'touristiccontents')
        self.sync_geojson(lang, TouristicEventViewSet, 'touristicevents')
        # Only objects published in this language are exported.
        contents = tourism_models.TouristicContent.objects.existing().order_by('pk')
        contents = contents.filter(**{'published_{lang}'.format(lang=lang): True})
        for content in contents:
            self.sync_content(lang, content)
        events = tourism_models.TouristicEvent.objects.existing().order_by('pk')
        events = events.filter(**{'published_{lang}'.format(lang=lang): True})
        for event in events:
            self.sync_event(lang, event)

    def sync(self):
        super(Command, self).sync()
        # Static assets and pictograms are language-independent ('**').
        self.sync_static_file('**', 'tourism/touristicevent.svg')
        self.sync_pictograms('**', tourism_models.InformationDeskType)
        self.sync_pictograms('**', tourism_models.TouristicContentCategory)
        self.sync_pictograms('**', tourism_models.TouristicContentType)
        self.sync_pictograms('**', tourism_models.TouristicEventType)
        for lang in settings.MODELTRANSLATION_LANGUAGES:
            translation.activate(lang)
            self.sync_tourism(lang)
| johan--/Geotrek | geotrek/tourism/management/commands/sync_rando.py | Python | bsd-2-clause | 1,966 |
#!/usr/bin/env python
"""simple thread pool
@author: dn13(dn13@gmail.com)
@author: Fibrizof(dfang84@gmail.com)
"""
import threading
import Queue
import new
# BUG FIX: this was declared with ``def``, which creates a *function* taking a
# parameter named Exception -- raising it (as WorkerPool does) then fails,
# because only exception classes/instances can be raised.  It must be a class.
class WorkerPoolError(Exception):
    """Raised on worker-pool misuse (joined pool, duplicate registration)."""
    pass
class Task(threading.Thread):
    """Worker thread: pull callables off ``queue``, run them, and push return
    values onto ``result_queue``.

    A ``None`` item is a wake-up signal (enqueued by :meth:`cancel`) and is
    skipped rather than executed.
    """

    def __init__(self, queue, result_queue):
        threading.Thread.__init__(self)
        self.queue = queue                # inbound zero-argument callables
        self.result_queue = result_queue  # outbound return values
        self.running = True

    def cancel(self):
        """Ask the thread to exit; enqueue None so a blocked get() wakes up."""
        self.running = False
        self.queue.put(None)

    def run(self):
        while self.running:
            call = self.queue.get()
            if call:
                try:
                    result = call()  # was misspelled 'reslut'
                    self.result_queue.put(result)
                except Exception:
                    # Best effort: a failing task is dropped silently and the
                    # worker stays alive for the next item.  (Narrowed from a
                    # bare except so KeyboardInterrupt/SystemExit propagate.)
                    pass
            # Always acknowledge the item so queue.join() can complete.
            self.queue.task_done()
class WorkerPool( object ):
    """Fixed-size pool of Task worker threads fed through one shared queue."""

    def __init__( self, threadnum ):
        self.threadnum = threadnum
        self.q = Queue.Queue()          # inbound callables
        self.result_q = Queue.Queue()   # collected return values
        self.ts = [ Task(self.q, self.result_q) for i in range(threadnum) ]
        self._registfunctions = {}      # name -> function, see registtopool
        self.is_in_join = False
        for t in self.ts :
            # Daemon threads must not keep the process alive on exit.
            t.setDaemon(True)
            t.start()

    def __del__(self):
        try:
            # Calling cancel() twice on purpose: the first pass flips every
            # thread's ``running`` flag to False, the second gives each of
            # them one more queue wake-up signal.  (Kept as one method out of
            # laziness instead of splitting into two interfaces.)
            for t in self.ts:
                t.cancel()
            for t in self.ts:
                t.cancel()
        except:
            pass

    def __call__( self, work ):
        """Submit a zero-argument callable to the pool."""
        if not self.is_in_join:
            self.q.put( work )
        else:
            raise WorkerPoolError, 'Pool has been joined'

    def join( self ):
        """Block until every queued task has been processed."""
        self.is_in_join = True
        self.q.join()
        self.is_in_join = False
        return

    def runwithpool( self, _old ):
        """Decorator: calls to the wrapped function are queued on the pool."""
        def _new( *args, **kwargs ):
            self.q.put( lambda : _old( *args, **kwargs ) )
        return _new

    def registtopool( self, _old ):
        """Decorator: expose the function as an attribute of the pool.

        NOTE(review): a registered function is called directly (not queued)
        when accessed through the pool -- see __getattr__ below.
        """
        if _old.__name__ in self._registfunctions :
            raise WorkerPoolError, 'function name exists'
        self._registfunctions[_old.__name__] = _old
        return _old

    def get_all_result(self):
        """Drain and return everything currently in the result queue."""
        result_list = []
        while True:
            try:
                result_list.append(self.result_q.get_nowait())
            except Exception as e:
                # Empty exception from get_nowait(): stop once the queue
                # really is empty, otherwise retry.
                if 0 == self.result_q.qsize():
                    break
                else:
                    continue
        return result_list

    def __getattr__( self, name ):
        # Fall back to the registered-function table for unknown attributes.
        if name in self._registfunctions :
            return self._registfunctions[name]
        raise AttributeError, '%s not found' % name
if __name__ == '__main__' :
    # Smoke test exercising all three submission paths: the runwithpool
    # decorator, a registered function, and the pool's __call__.
    import thread
    p = WorkerPool(5)

    @p.runwithpool
    def foo( a ):
        print 'foo>', thread.get_ident(), '>', a
        return

    @p.registtopool
    def bar( b ):
        print 'bar>', thread.get_ident(), '>', b

    for i in range(10):
        foo(i)                    # queued via the decorator
        p.bar(i+100)              # direct call through __getattr__
        p( lambda : bar(200) )    # queued via __call__

    p.join()
| hackshel/py-aluminium | src/__furture__/simplepool.py | Python | bsd-3-clause | 3,289 |
import os
from django_extensions.management.jobs import DailyJob
import nbformat
from nbconvert.preprocessors import ExecutePreprocessor
from nbconvert import HTMLExporter
class Job(DailyJob):
    """Daily job: execute each notebook in lowfat/reports and render it to
    static HTML under lowfat/reports/html."""

    help = "Convert Jupyter Notebook in lowfat/reports to HTML page in lowfat/reports/html."

    def execute(self):
        # Start from a clean output directory.
        print("Cleaning lowfat/reports/html ...")
        old_reports = os.listdir("lowfat/reports/html")
        for old_report in old_reports:
            print("- Removing lowfat/reports/html/{}".format(old_report))
            os.remove("lowfat/reports/html/{}".format(old_report))
        print("Cleaning of lowfat/reports/html is complete.")
        notebook_filenames = os.listdir("lowfat/reports")
        for notebook_filename in notebook_filenames:
            # Only notebooks are processed; other files are skipped.
            if not notebook_filename.endswith(".ipynb"):
                continue
            print("Processing lowfat/reports/{}".format(notebook_filename))
            # Based on Executing notebooks, nbconvert Documentation by Jupyter Development Team.
            # https://nbconvert.readthedocs.io/en/latest/execute_api.html
            with open("lowfat/reports/{}".format(notebook_filename)) as file_:
                notebook = nbformat.read(file_, as_version=4)
            # Kernel is provided by https://github.com/django-extensions/django-extensions/
            execute_preprocessor = ExecutePreprocessor(timeout=600, kernel_name='django_extensions')
            execute_preprocessor.preprocess(notebook, {'metadata': {'path': '.'}})
            html_exporter = HTMLExporter()
            html_exporter.template_file = 'basic'
            (body, dummy_resources) = html_exporter.from_notebook_node(notebook)
            # NOTE(review): the output name keeps the ".ipynb" suffix, so the
            # file written is "<name>.ipynb.html".
            with open('lowfat/reports/html/{}.html'.format(notebook_filename), 'wt') as file_:
                file_.write(body)
| softwaresaved/fat | lowfat/jobs/daily/report.py | Python | bsd-3-clause | 1,861 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply: add the boolean 'immutable' column (default False)."""
        # Adding field 'Schema.immutable'
        db.add_column('tardis_portal_schema', 'immutable', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False)
    def backwards(self, orm):
        """Revert: drop the 'immutable' column again."""
        # Deleting field 'Schema.immutable'
        db.delete_column('tardis_portal_schema', 'immutable')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'tardis_portal.author_experiment': {
'Meta': {'ordering': "['order']", 'unique_together': "(('experiment', 'author'),)", 'object_name': 'Author_Experiment'},
'author': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Experiment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'tardis_portal.datafileparameter': {
'Meta': {'ordering': "['name']", 'object_name': 'DatafileParameter'},
'datetime_value': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.ParameterName']"}),
'numerical_value': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'parameterset': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.DatafileParameterSet']"}),
'string_value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'tardis_portal.datafileparameterset': {
'Meta': {'ordering': "['id']", 'object_name': 'DatafileParameterSet'},
'dataset_file': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Dataset_File']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'schema': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Schema']"})
},
'tardis_portal.dataset': {
'Meta': {'object_name': 'Dataset'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Experiment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'immutable': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'tardis_portal.dataset_file': {
'Meta': {'object_name': 'Dataset_File'},
'created_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'dataset': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Dataset']"}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'md5sum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'mimetype': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
'modification_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'protocol': ('django.db.models.fields.CharField', [], {'max_length': '10', 'blank': 'True'}),
'size': ('django.db.models.fields.CharField', [], {'max_length': '400', 'blank': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '400'})
},
'tardis_portal.datasetparameter': {
'Meta': {'ordering': "['name']", 'object_name': 'DatasetParameter'},
'datetime_value': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.ParameterName']"}),
'numerical_value': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'parameterset': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.DatasetParameterSet']"}),
'string_value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'tardis_portal.datasetparameterset': {
'Meta': {'ordering': "['id']", 'object_name': 'DatasetParameterSet'},
'dataset': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Dataset']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'schema': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Schema']"})
},
'tardis_portal.experiment': {
'Meta': {'object_name': 'Experiment'},
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'created_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'end_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'handle': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'institution_name': ('django.db.models.fields.CharField', [], {'default': "'Monash University'", 'max_length': '400'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'start_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
'update_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'tardis_portal.experimentacl': {
'Meta': {'ordering': "['experiment__id']", 'object_name': 'ExperimentACL'},
'aclOwnershipType': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'canDelete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'canRead': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'canWrite': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'effectiveDate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'entityId': ('django.db.models.fields.CharField', [], {'max_length': '320'}),
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Experiment']"}),
'expiryDate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'isOwner': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'pluginId': ('django.db.models.fields.CharField', [], {'max_length': '30'})
},
'tardis_portal.experimentparameter': {
'Meta': {'ordering': "['name']", 'object_name': 'ExperimentParameter'},
'datetime_value': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.ParameterName']"}),
'numerical_value': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'parameterset': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.ExperimentParameterSet']"}),
'string_value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'tardis_portal.experimentparameterset': {
'Meta': {'ordering': "['id']", 'object_name': 'ExperimentParameterSet'},
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Experiment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'schema': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Schema']"})
},
'tardis_portal.groupadmin': {
'Meta': {'object_name': 'GroupAdmin'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'tardis_portal.parametername': {
'Meta': {'ordering': "('order', 'name')", 'unique_together': "(('schema', 'name'),)", 'object_name': 'ParameterName'},
'choices': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'comparison_type': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'data_type': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'immutable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_searchable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '9999', 'null': 'True', 'blank': 'True'}),
'schema': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Schema']"}),
'units': ('django.db.models.fields.CharField', [], {'max_length': '60', 'blank': 'True'})
},
'tardis_portal.schema': {
'Meta': {'object_name': 'Schema'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'immutable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'namespace': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '255'}),
'subtype': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '1'})
},
'tardis_portal.token': {
'Meta': {'object_name': 'Token'},
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.Experiment']"}),
'expiry_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.date(2011, 10, 19)'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'tardis_portal.userauthentication': {
'Meta': {'object_name': 'UserAuthentication'},
'authenticationMethod': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'userProfile': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tardis_portal.UserProfile']"}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'tardis_portal.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'isDjangoAccount': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
}
}
complete_apps = ['tardis_portal']
| steveandroulakis/mytardis | tardis/tardis_portal/migrations/0005_auto__add_field_schema_immutable.py | Python | bsd-3-clause | 16,169 |
''' Display a variety of simple scatter marker shapes whose attributes
can be associated with data columns from ``ColumnDataSources``.
The full list of markers built into Bokeh is given below:
* :class:`~bokeh.models.markers.Asterisk`
* :class:`~bokeh.models.markers.Circle`
* :class:`~bokeh.models.markers.CircleCross`
* :class:`~bokeh.models.markers.CircleX`
* :class:`~bokeh.models.markers.Cross`
* :class:`~bokeh.models.markers.Diamond`
* :class:`~bokeh.models.markers.DiamondCross`
* :class:`~bokeh.models.markers.Hex`
* :class:`~bokeh.models.markers.InvertedTriangle`
* :class:`~bokeh.models.markers.Square`
* :class:`~bokeh.models.markers.SquareCross`
* :class:`~bokeh.models.markers.SquareX`
* :class:`~bokeh.models.markers.Triangle`
* :class:`~bokeh.models.markers.X`
Markers are all subclasses of ``Glyph``. Additionally, they all share the
same common interface providing fill and line properties provided by their
base class ``Marker``. Note that a few glyphs, ``Cross`` and ``X``, only
draw lines. For these the fill property values are ignored. Also note that
the ``Circle`` glyph has some additional properties such as ``radius`` that
other markers do not.
.. autoclass:: Marker
:members:
'''
from __future__ import absolute_import
from ..core.enums import enumeration
from ..core.has_props import abstract
from ..core.properties import AngleSpec, DistanceSpec, Enum, Include, NumberSpec, ScreenDistanceSpec
from ..core.property_mixins import FillProps, LineProps
from .glyphs import XYGlyph
@abstract
class Marker(XYGlyph):
    ''' Base class for glyphs that are simple markers with line and
    fill properties, located at an (x, y) location with a specified
    size.

    .. note::
        For simplicity, all markers have both line and fill properties
        declared, however some markers (`Asterisk`, `Cross`, `X`) only
        draw lines. For these markers, the fill values are simply
        ignored.

    '''

    # a canonical order for positional args that can be used for any
    # functions derived from this class
    _args = ('x', 'y', 'size', 'angle')

    x = NumberSpec(help="""
    The x-axis coordinates for the center of the markers.
    """)

    y = NumberSpec(help="""
    The y-axis coordinates for the center of the markers.
    """)

    # Size is a screen-space (pixel) distance, not a data-space one.
    size = ScreenDistanceSpec(default=4, help="""
    The size (diameter) values for the markers in screen space units.
    """)

    angle = AngleSpec(default=0.0, help="""
    The angles to rotate the markers.
    """)

    # Shared line/fill visual property groups, included without a prefix.
    line_props = Include(LineProps, use_prefix=False, help="""
    The %s values for the markers.
    """)

    fill_props = Include(FillProps, use_prefix=False, help="""
    The %s values for the markers.
    """)
class Asterisk(Marker):
    ''' A marker glyph that draws an asterisk ('*') at each point. '''
    __example__ = "examples/reference/models/Asterisk.py"
class Circle(Marker):
    ''' Render circle markers. '''
    __example__ = "examples/reference/models/Circle.py"
    # a canonical order for positional args that can be used for any
    # functions derived from this class
    # (overrides Marker._args: 'size' and 'angle' are dropped because circles
    # are rotation-invariant and are often sized via 'radius' instead)
    _args = ('x', 'y')
    radius = DistanceSpec(None, help="""
    The radius values for circle markers (in "data space" units, by default).
    .. note::
        Circle markers are slightly unusual in that they support specifying
        a radius in addition to a size. Only one of ``radius`` or ``size``
        should be given.
    .. warning::
        Note that ``Circle`` glyphs are always drawn as circles on the screen,
        even in cases where the data space aspect ratio is not 1-1. In all
        cases where radius values are specified, the "distance" for the radius
        is measured along the dimension specified by ``radius_dimension``. If
        the aspect ratio is very large or small, the drawn circles may appear
        much larger or smaller than expected. See :bokeh-issue:`626` for more
        information.
    """)
    # Which screen axis ('x' or 'y') the data-space radius is measured along.
    radius_dimension = Enum(enumeration('x', 'y'), help="""
    What dimension to measure circle radii along.
    When the data space aspect ratio is not 1-1, then the size of the drawn
    circles depends on what direction is used to measure the "distance" of
    the radius. This property allows that direction to be controlled.
    """)
class CircleCross(Marker):
    ''' A marker that draws a circle with a '+' cross through the center
    of each one.
    '''
    __example__ = "examples/reference/models/CircleCross.py"
class CircleX(Marker):
    ''' A marker that draws a circle with an 'X' cross through the center
    of each one.
    '''
    __example__ = "examples/reference/models/CircleX.py"
class Cross(Marker):
    ''' A marker glyph that draws a '+' cross at each point. '''
    __example__ = "examples/reference/models/Cross.py"
class Diamond(Marker):
    ''' A marker glyph that draws a diamond at each point. '''
    __example__ = "examples/reference/models/Diamond.py"
class DiamondCross(Marker):
    ''' A marker that draws a diamond with a '+' cross through the center
    of each one.
    '''
    __example__ = "examples/reference/models/DiamondCross.py"
class Hex(Marker):
    ''' A marker glyph that draws a hexagon at each point. '''
    __example__ = "examples/reference/models/Hex.py"
class InvertedTriangle(Marker):
    ''' A marker glyph that draws an upside-down triangle at each point. '''
    __example__ = "examples/reference/models/InvertedTriangle.py"
class Square(Marker):
    ''' A marker glyph that draws a square (optionally rotated via the
    inherited ``angle`` property) at each point.
    '''
    __example__ = "examples/reference/models/Square.py"
class SquareCross(Marker):
    ''' A marker that draws a square with a '+' cross through the center
    of each one.
    '''
    __example__ = "examples/reference/models/SquareCross.py"
class SquareX(Marker):
    ''' A marker that draws a square with an 'X' cross through the center
    of each one.
    '''
    __example__ = "examples/reference/models/SquareX.py"
class Triangle(Marker):
    ''' A marker glyph that draws a triangle at each point. '''
    __example__ = "examples/reference/models/Triangle.py"
class X(Marker):
    ''' A marker glyph that draws an 'X' cross at each point. '''
    __example__ = "examples/reference/models/X.py"
| Karel-van-de-Plassche/bokeh | bokeh/models/markers.py | Python | bsd-3-clause | 5,924 |
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Handles the "Console" unit format.
"""
from . import base, core, utils
class Console(base.Base):
    """
    Output-only format to display pretty formatting at the
    console.
    For example::
        >>> import astropy.units as u
        >>> print(u.Ry.decompose().to_string('console'))  # doctest: +FLOAT_CMP
         m^2 kg
        2.1798721*10^-18 ------
          s^2
    """
    # Multiplication sign and fraction-bar character used in the output.
    _times = "*"
    _line = "-"

    @classmethod
    def _get_unit_name(cls, unit):
        # The unit knows its own display name for each registered format.
        return unit.get_format_name('console')

    @classmethod
    def _format_superscript(cls, number):
        # Plain-text "superscript", e.g. 2 -> "^2".
        return '^{0}'.format(number)

    @classmethod
    def _format_unit_list(cls, units):
        """Format a sequence of ``(unit, power)`` pairs as ``"u1^p1 u2 ..."``."""
        out = []
        # Loop variable renamed from ``base`` so it no longer shadows the
        # ``base`` module imported at the top of this file.
        for base_unit, power in units:
            if power == 1:
                out.append(cls._get_unit_name(base_unit))
            else:
                out.append('{0}{1}'.format(
                    cls._get_unit_name(base_unit),
                    cls._format_superscript(
                        utils.format_power(power))))
        return ' '.join(out)

    @classmethod
    def format_exponential_notation(cls, val):
        """Format ``val`` in exponential notation, e.g. ``3.5*10^-7``."""
        m, ex = utils.split_mantissa_exponent(val)
        parts = []
        if m:
            parts.append(m)
        if ex:
            parts.append("10{0}".format(
                cls._format_superscript(ex)))
        return cls._times.join(parts)

    @classmethod
    def to_string(cls, unit):
        """Return the console representation of ``unit``."""
        if isinstance(unit, core.CompositeUnit):
            if unit.scale == 1:
                s = ''
            else:
                s = cls.format_exponential_notation(unit.scale)
            if len(unit.bases):
                positives, negatives = utils.get_grouped_by_powers(
                    unit.bases, unit.powers)
                if len(negatives):
                    # Negative powers present: render a three-line fraction
                    #   <scale> <positives>
                    #           ----------
                    #           <negatives>
                    if len(positives):
                        positives = cls._format_unit_list(positives)
                    else:
                        positives = '1'
                    negatives = cls._format_unit_list(negatives)
                    # Renamed from ``l``/``r`` (PEP 8 E741: ``l`` is easily
                    # confused with ``1``).
                    scale_width = len(s)
                    unit_width = max(len(positives), len(negatives))
                    f = "{{0:^{0}s}} {{1:^{1}s}}".format(scale_width,
                                                         unit_width)
                    lines = [
                        f.format('', positives),
                        f.format(s, cls._line * unit_width),
                        f.format('', negatives)
                    ]
                    s = '\n'.join(lines)
                else:
                    positives = cls._format_unit_list(positives)
                    s += positives
        elif isinstance(unit, core.NamedUnit):
            s = cls._get_unit_name(unit)
        return s
| funbaker/astropy | astropy/units/format/console.py | Python | bsd-3-clause | 2,766 |
from django.core.management.base import BaseCommand, CommandError
from django.db import IntegrityError
import olympia.core.logger
from olympia.access.models import Group, GroupUser
from olympia.users.models import UserProfile
class Command(BaseCommand):
    """Management command that adds an existing user to an access group."""
    help = 'Add a new user to a group.'
    log = olympia.core.logger.getLogger('z.users')

    def add_arguments(self, parser):
        # Positional arguments: the user comes first, then the group id.
        parser.add_argument('user', type=unicode, help='User id or email')
        parser.add_argument('group_id', type=int, help='Group id')

    def handle(self, *args, **options):
        user = options['user']
        group_id = options['group_id']
        do_adduser(user, group_id)
        message = 'Adding {user} to {group}\n'.format(
            user=user, group=group_id)
        self.log.info(message)
        self.stdout.write(message)
def do_adduser(user, group):
    """Add ``user`` (numeric id or email address) to the Group with pk ``group``.

    Raises ``CommandError`` when the user input is unrecognized, when the
    user or group does not exist, or when the user is already a member.
    """
    try:
        if '@' in user:
            user = UserProfile.objects.get(email=user)
        elif user.isdigit():
            user = UserProfile.objects.get(pk=user)
        else:
            raise CommandError('Unknown input for user.')
        group = Group.objects.get(pk=group)
        GroupUser.objects.create(user=user, group=group)
    # ``except X as e`` replaces the Python-2-only ``except X, e`` syntax;
    # it is valid on Python 2.6+ and Python 3 with identical behavior.
    except IntegrityError as e:
        raise CommandError('User is already in that group? %s' % e)
    except UserProfile.DoesNotExist:
        raise CommandError('User ({user}) does not exist.'.format(user=user))
    except Group.DoesNotExist:
        raise CommandError('Group ({group}) does not exist.'
                           .format(group=group))
| harikishen/addons-server | src/olympia/zadmin/management/commands/addusertogroup.py | Python | bsd-3-clause | 1,528 |
# -*- coding: utf-8 -*-
from werkzeug.contrib import wrappers
from werkzeug import routing
from werkzeug.wrappers import Request, Response
def test_reverse_slash_behavior():
    """Test ReverseSlashBehaviorRequestMixin"""
    class MyRequest(wrappers.ReverseSlashBehaviorRequestMixin, Request):
        pass
    req = MyRequest.from_values('/foo/bar', 'http://example.com/test')
    assert req.url == 'http://example.com/test/foo/bar'
    assert req.path == 'foo/bar'
    assert req.script_root == '/test/'
    # make sure the routing system works with the slashes in
    # reverse order as well.
    # (renamed from ``map``, which shadowed the builtin)
    url_map = routing.Map([routing.Rule('/foo/bar', endpoint='foo')])
    adapter = url_map.bind_to_environ(req.environ)
    assert adapter.match() == ('foo', {})
    adapter = url_map.bind(req.host, req.script_root)
    assert adapter.match(req.path) == ('foo', {})
def test_dynamic_charset_request_mixin():
    """Test DynamicCharsetRequestMixin"""
    class MyRequest(wrappers.DynamicCharsetRequestMixin, Request):
        pass

    # text/html without an explicit charset parameter falls back to latin1.
    request = MyRequest({'CONTENT_TYPE': 'text/html'})
    assert request.charset == 'latin1'

    # An explicit charset parameter wins.
    request = MyRequest({'CONTENT_TYPE': 'text/html; charset=utf-8'})
    assert request.charset == 'utf-8'

    # Non-text content types also fall back to latin1, for both properties.
    request = MyRequest({'CONTENT_TYPE': 'application/octet-stream'})
    assert request.charset == 'latin1'
    assert request.url_charset == 'latin1'

    # url_charset can be overridden on the class independently of charset.
    MyRequest.url_charset = 'utf-8'
    request = MyRequest({'CONTENT_TYPE': 'application/octet-stream'})
    assert request.charset == 'latin1'
    assert request.url_charset == 'utf-8'

    # Unrecognized charsets are resolved through the unknown_charset hook.
    request = MyRequest({'CONTENT_TYPE': 'text/plain; charset=x-weird-charset'})
    request.unknown_charset = lambda charset: "ascii"
    assert request.charset == 'ascii'
    assert request.url_charset == 'utf-8'
def test_dynamic_charset_response_mixin():
    """Test DynamicCharsetResponseMixin"""
    class MyResponse(wrappers.DynamicCharsetResponseMixin, Response):
        default_charset = 'utf-7'
    resp = MyResponse(mimetype='text/html')
    assert resp.charset == 'utf-7'
    resp.charset = 'utf-8'
    assert resp.charset == 'utf-8'
    assert resp.mimetype == 'text/html'
    assert resp.mimetype_params == {'charset': 'utf-8'}
    resp.mimetype_params['charset'] = 'iso-8859-15'
    assert resp.charset == 'iso-8859-15'
    resp.data = u'Hällo Wörld'
    assert ''.join(resp.iter_encoded()) == \
        u'Hällo Wörld'.encode('iso-8859-15')
    del resp.headers['content-type']
    try:
        resp.charset = 'utf-8'
    # ``except TypeError:`` replaces the Python-2-only ``except TypeError, e``
    # syntax; the bound exception object was never used.
    except TypeError:
        pass
    else:
        assert False, 'expected type error on charset setting without ct'
| r-kitaev/lucid-python-werkzeug | tests/contrib/test_wrappers.py | Python | bsd-3-clause | 2,682 |
from lib.common import helpers
class Module:
    """Empire module wrapper around PowerUpSQL's Get-SQLServerInfo."""

    def __init__(self, mainMenu, params=None):
        # ``params=None`` instead of a mutable default list; behavior for
        # existing callers (who pass a list or nothing) is unchanged.
        self.info = {
            'Name': 'Get-SQLServerInfo',
            'Author': ['@_nullbind', '@0xbadjuju'],
            'Description': ('Returns basic server and user information from target SQL Servers.'),
            'Background' : True,
            'OutputExtension' : None,
            'NeedsAdmin' : False,
            'OpsecSafe' : True,
            'Language' : 'powershell',
            'MinPSVersion' : '2',
            'MinLanguageVersion' : '2',
            'Comments': [
                'https://github.com/NetSPI/PowerUpSQL/blob/master/PowerUpSQL.ps1'
            ]
        }

        # any options needed by the module, settable during runtime
        self.options = {
            # format:
            #   value_name : {description, required, default_value}
            'Agent' : {
                'Description'   :   'Agent to run module on.',
                'Required'      :   True,
                'Value'         :   ''
            },
            'Username' : {
                'Description'   :   'SQL Server or domain account to authenticate with.',
                'Required'      :   False,
                'Value'         :   ''
            },
            'Password' : {
                'Description'   :   'SQL Server or domain account password to authenticate with.',
                'Required'      :   False,
                'Value'         :   ''
            },
            'Instance' : {
                'Description'   :   'SQL Server instance to connection to.',
                'Required'      :   False,
                'Value'         :   ''
            },
            'CheckAll' : {
                'Description'   :   'Check all systems retrieved by Get-SQLInstanceDomain',
                'Required'      :   False,
                'Value'         :   ''
            }
        }

        self.mainMenu = mainMenu

        # Apply any [name, value] pairs passed in, ignoring unknown names.
        for param in (params or []):
            option, value = param
            if option in self.options:
                self.options[option]['Value'] = value

    def generate(self, obfuscate=False, obfuscationCommand=""):
        """Build the PowerShell script for this module.

        Returns the script text, or "" when a source file cannot be read.
        """
        username = self.options['Username']['Value']
        password = self.options['Password']['Value']
        instance = self.options['Instance']['Value']
        check_all = self.options['CheckAll']['Value']

        # read in the common module source code
        moduleSource = self.mainMenu.installPath + "/data/module_source/situational_awareness/network/Get-SQLServerInfo.ps1"
        script = ""
        if obfuscate:
            helpers.obfuscate_module(moduleSource=moduleSource, obfuscationCommand=obfuscationCommand)
            moduleSource = moduleSource.replace("module_source", "obfuscated_module_source")
        try:
            with open(moduleSource, 'r') as source:
                script = source.read()
        except:
            print(helpers.color("[!] Could not read module source path at: " + str(moduleSource)))
            return ""

        # BUG FIX: scriptEnd was previously only bound inside the check_all
        # branch but appended to unconditionally, raising NameError whenever
        # CheckAll was left at its default ('').
        scriptEnd = ""
        if check_all:
            # BUG FIX: the path was missing the slash after installPath
            # ("...installPath + 'data/...'"), unlike moduleSource above.
            auxModuleSource = self.mainMenu.installPath + "/data/module_source/situational_awareness/network/Get-SQLInstanceDomain.ps1"
            if obfuscate:
                helpers.obfuscate_module(moduleSource=auxModuleSource, obfuscationCommand=obfuscationCommand)
                # BUG FIX: this previously rewrote moduleSource instead of
                # auxModuleSource, so the obfuscated aux script was never used.
                auxModuleSource = auxModuleSource.replace("module_source", "obfuscated_module_source")
            try:
                with open(auxModuleSource, 'r') as auxSource:
                    auxScript = auxSource.read()
                script += " " + auxScript
            except:
                print(helpers.color("[!] Could not read additional module source path at: " + str(auxModuleSource)))
            # Enumerate domain instances and pipe them into Get-SQLServerInfo.
            scriptEnd = " Get-SQLInstanceDomain "
            if username != "":
                scriptEnd += " -Username "+username
            if password != "":
                scriptEnd += " -Password "+password
            scriptEnd += " | "
        scriptEnd += " Get-SQLServerInfo"
        if username != "":
            scriptEnd += " -Username "+username
        if password != "":
            scriptEnd += " -Password "+password
        if instance != "" and not check_all:
            scriptEnd += " -Instance "+instance
        if obfuscate:
            scriptEnd = helpers.obfuscate(psScript=scriptEnd, obfuscationCommand=obfuscationCommand)
        script += scriptEnd
        return script
| frohoff/Empire | lib/modules/powershell/situational_awareness/network/get_sql_server_info.py | Python | bsd-3-clause | 4,512 |
"""
The :mod:`sklearn.pls` module implements Partial Least Squares (PLS).
"""
# Author: Edouard Duchesnay <edouard.duchesnay@cea.fr>
# License: BSD 3 clause
from ..base import BaseEstimator, RegressorMixin, TransformerMixin
from ..utils import check_arrays
from ..externals import six
import warnings
from abc import ABCMeta, abstractmethod
import numpy as np
from scipy import linalg
from ..utils import arpack
__all__ = ['PLSCanonical', 'PLSRegression', 'PLSSVD']
def _nipals_twoblocks_inner_loop(X, Y, mode="A", max_iter=500, tol=1e-06,
                                 norm_y_weights=False):
    """Inner loop of the iterative NIPALS algorithm.
    Provides an alternative to the svd(X'Y); returns the first left and right
    singular vectors of X'Y. See PLS for the meaning of the parameters. It is
    similar to the Power method for determining the eigenvectors and
    eigenvalues of a X'Y.
    """
    # Initialize the Y score with the first column of Y.
    y_score = Y[:, [0]]
    x_weights_old = 0
    ite = 1
    # Pseudo-inverses are computed lazily and cached; only needed in mode "B".
    X_pinv = Y_pinv = None
    # Inner loop of the Wold algo.
    while True:
        # 1.1 Update u: the X weights
        if mode == "B":
            if X_pinv is None:
                X_pinv = linalg.pinv(X)   # compute once pinv(X)
            x_weights = np.dot(X_pinv, y_score)
        else:  # mode A
            # Mode A regress each X column on y_score
            x_weights = np.dot(X.T, y_score) / np.dot(y_score.T, y_score)
        # 1.2 Normalize u
        x_weights /= np.sqrt(np.dot(x_weights.T, x_weights))
        # 1.3 Update x_score: the X latent scores
        x_score = np.dot(X, x_weights)
        # 2.1 Update y_weights
        if mode == "B":
            if Y_pinv is None:
                Y_pinv = linalg.pinv(Y)  # compute once pinv(Y)
            y_weights = np.dot(Y_pinv, x_score)
        else:
            # Mode A regress each Y column on x_score
            y_weights = np.dot(Y.T, x_score) / np.dot(x_score.T, x_score)
        ## 2.2 Normalize y_weights
        if norm_y_weights:
            y_weights /= np.sqrt(np.dot(y_weights.T, y_weights))
        # 2.3 Update y_score: the Y latent scores
        y_score = np.dot(Y, y_weights) / np.dot(y_weights.T, y_weights)
        ## y_score = np.dot(Y, y_weights) / np.dot(y_score.T, y_score) ## BUG
        # Converged when the change in x_weights drops below tol; with a
        # univariate Y (one column) a single pass is exact, so stop at once.
        x_weights_diff = x_weights - x_weights_old
        if np.dot(x_weights_diff.T, x_weights_diff) < tol or Y.shape[1] == 1:
            break
        if ite == max_iter:
            warnings.warn('Maximum number of iterations reached')
            break
        x_weights_old = x_weights
        ite += 1
    return x_weights, y_weights
def _svd_cross_product(X, Y):
C = np.dot(X.T, Y)
U, s, Vh = linalg.svd(C, full_matrices=False)
u = U[:, [0]]
v = Vh.T[:, [0]]
return u, v
def _center_scale_xy(X, Y, scale=True):
""" Center X, Y and scale if the scale parameter==True
Returns
-------
X, Y, x_mean, y_mean, x_std, y_std
"""
# center
x_mean = X.mean(axis=0)
X -= x_mean
y_mean = Y.mean(axis=0)
Y -= y_mean
# scale
if scale:
x_std = X.std(axis=0, ddof=1)
x_std[x_std == 0.0] = 1.0
X /= x_std
y_std = Y.std(axis=0, ddof=1)
y_std[y_std == 0.0] = 1.0
Y /= y_std
else:
x_std = np.ones(X.shape[1])
y_std = np.ones(Y.shape[1])
return X, Y, x_mean, y_mean, x_std, y_std
class _PLS(six.with_metaclass(ABCMeta), BaseEstimator, TransformerMixin,
           RegressorMixin):
    """Partial Least Squares (PLS)
    This class implements the generic PLS algorithm, constructors' parameters
    allow to obtain a specific implementation such as:
    - PLS2 regression, i.e., PLS 2 blocks, mode A, with asymmetric deflation
      and unnormalized y weights such as defined by [Tenenhaus 1998] p. 132.
      With univariate response it implements PLS1.
    - PLS canonical, i.e., PLS 2 blocks, mode A, with symmetric deflation and
      normalized y weights such as defined by [Tenenhaus 1998] (p. 132) and
      [Wegelin et al. 2000]. This parametrization implements the original Wold
      algorithm.
    We use the terminology defined by [Wegelin et al. 2000].
    This implementation uses the PLS Wold 2 blocks algorithm based on two
    nested loops:
        (i) The outer loop iterate over components.
        (ii) The inner loop estimates the weights vectors. This can be done
        with two algo. (a) the inner loop of the original NIPALS algo. or (b) a
        SVD on residuals cross-covariance matrices.
    Parameters
    ----------
    X : array-like of predictors, shape = [n_samples, p]
        Training vectors, where n_samples in the number of samples and
        p is the number of predictors.
    Y : array-like of response, shape = [n_samples, q]
        Training vectors, where n_samples in the number of samples and
        q is the number of response variables.
    n_components : int, number of components to keep. (default 2).
    scale : boolean, scale data? (default True)
    deflation_mode : str, "canonical" or "regression". See notes.
    mode : "A" classical PLS and "B" CCA. See notes.
    norm_y_weights: boolean, normalize Y weights to one? (default False)
    algorithm : string, "nipals" or "svd"
        The algorithm used to estimate the weights. It will be called
        n_components times, i.e. once for each iteration of the outer loop.
    max_iter : an integer, the maximum number of iterations (default 500)
        of the NIPALS inner loop (used only if algorithm="nipals")
    tol : non-negative real, default 1e-06
        The tolerance used in the iterative algorithm.
    copy : boolean
        Whether the deflation should be done on a copy. Let the default
        value to True unless you don't care about side effects.
    Attributes
    ----------
    `x_weights_` : array, [p, n_components]
        X block weights vectors.
    `y_weights_` : array, [q, n_components]
        Y block weights vectors.
    `x_loadings_` : array, [p, n_components]
        X block loadings vectors.
    `y_loadings_` : array, [q, n_components]
        Y block loadings vectors.
    `x_scores_` : array, [n_samples, n_components]
        X scores.
    `y_scores_` : array, [n_samples, n_components]
        Y scores.
    `x_rotations_` : array, [p, n_components]
        X block to latents rotations.
    `y_rotations_` : array, [q, n_components]
        Y block to latents rotations.
    coefs: array, [p, q]
        The coefficients of the linear model: Y = X coefs + Err
    References
    ----------
    Jacob A. Wegelin. A survey of Partial Least Squares (PLS) methods, with
    emphasis on the two-block case. Technical Report 371, Department of
    Statistics, University of Washington, Seattle, 2000.
    In French but still a reference:
    Tenenhaus, M. (1998). La regression PLS: theorie et pratique. Paris:
    Editions Technic.
    See also
    --------
    PLSCanonical
    PLSRegression
    CCA
    PLS_SVD
    """

    @abstractmethod
    def __init__(self, n_components=2, scale=True, deflation_mode="regression",
                 mode="A", algorithm="nipals", norm_y_weights=False,
                 max_iter=500, tol=1e-06, copy=True):
        self.n_components = n_components
        self.deflation_mode = deflation_mode
        self.mode = mode
        self.norm_y_weights = norm_y_weights
        self.scale = scale
        self.algorithm = algorithm
        self.max_iter = max_iter
        self.tol = tol
        self.copy = copy

    def fit(self, X, Y):
        # copy since this will contains the residuals (deflated) matrices
        X, Y = check_arrays(X, Y, dtype=np.float, copy=self.copy,
                            sparse_format='dense')
        if X.ndim != 2:
            raise ValueError('X must be a 2D array')
        if Y.ndim == 1:
            Y = Y.reshape((Y.size, 1))
        if Y.ndim != 2:
            raise ValueError('Y must be a 1D or a 2D array')
        n = X.shape[0]
        p = X.shape[1]
        q = Y.shape[1]
        if n != Y.shape[0]:
            raise ValueError(
                'Incompatible shapes: X has %s samples, while Y '
                'has %s' % (X.shape[0], Y.shape[0]))
        if self.n_components < 1 or self.n_components > p:
            raise ValueError('invalid number of components')
        if self.algorithm not in ("svd", "nipals"):
            raise ValueError("Got algorithm %s when only 'svd' "
                             "and 'nipals' are known" % self.algorithm)
        if self.algorithm == "svd" and self.mode == "B":
            raise ValueError('Incompatible configuration: mode B is not '
                             'implemented with svd algorithm')
        if not self.deflation_mode in ["canonical", "regression"]:
            raise ValueError('The deflation mode is unknown')
        # Scale (in place)
        X, Y, self.x_mean_, self.y_mean_, self.x_std_, self.y_std_\
            = _center_scale_xy(X, Y, self.scale)
        # Residuals (deflated) matrices
        Xk = X
        Yk = Y
        # Results matrices
        self.x_scores_ = np.zeros((n, self.n_components))
        self.y_scores_ = np.zeros((n, self.n_components))
        self.x_weights_ = np.zeros((p, self.n_components))
        self.y_weights_ = np.zeros((q, self.n_components))
        self.x_loadings_ = np.zeros((p, self.n_components))
        self.y_loadings_ = np.zeros((q, self.n_components))
        # NIPALS algo: outer loop, over components
        for k in range(self.n_components):
            #1) weights estimation (inner loop)
            # -----------------------------------
            if self.algorithm == "nipals":
                x_weights, y_weights = _nipals_twoblocks_inner_loop(
                    X=Xk, Y=Yk, mode=self.mode, max_iter=self.max_iter,
                    tol=self.tol, norm_y_weights=self.norm_y_weights)
            elif self.algorithm == "svd":
                x_weights, y_weights = _svd_cross_product(X=Xk, Y=Yk)
            # compute scores
            x_scores = np.dot(Xk, x_weights)
            if self.norm_y_weights:
                y_ss = 1
            else:
                y_ss = np.dot(y_weights.T, y_weights)
            y_scores = np.dot(Yk, y_weights) / y_ss
            # test for null variance
            if np.dot(x_scores.T, x_scores) < np.finfo(np.double).eps:
                warnings.warn('X scores are null at iteration %s' % k)
            #2) Deflation (in place)
            # ----------------------
            # Possible memory footprint reduction may done here: in order to
            # avoid the allocation of a data chunk for the rank-one
            # approximations matrix which is then subtracted to Xk, we suggest
            # to perform a column-wise deflation.
            #
            # - regress Xk's on x_score
            x_loadings = np.dot(Xk.T, x_scores) / np.dot(x_scores.T, x_scores)
            # - subtract rank-one approximations to obtain remainder matrix
            Xk -= np.dot(x_scores, x_loadings.T)
            if self.deflation_mode == "canonical":
                # - regress Yk's on y_score, then subtract rank-one approx.
                y_loadings = (np.dot(Yk.T, y_scores)
                              / np.dot(y_scores.T, y_scores))
                Yk -= np.dot(y_scores, y_loadings.T)
            if self.deflation_mode == "regression":
                # - regress Yk's on x_score, then subtract rank-one approx.
                y_loadings = (np.dot(Yk.T, x_scores)
                              / np.dot(x_scores.T, x_scores))
                Yk -= np.dot(x_scores, y_loadings.T)
            # 3) Store weights, scores and loadings # Notation:
            self.x_scores_[:, k] = x_scores.ravel()  # T
            self.y_scores_[:, k] = y_scores.ravel()  # U
            self.x_weights_[:, k] = x_weights.ravel()  # W
            self.y_weights_[:, k] = y_weights.ravel()  # C
            self.x_loadings_[:, k] = x_loadings.ravel()  # P
            self.y_loadings_[:, k] = y_loadings.ravel()  # Q
        # Such that: X = TP' + Err and Y = UQ' + Err
        # 4) rotations from input space to transformed space (scores)
        # T = X W(P'W)^-1 = XW* (W* : p x k matrix)
        # U = Y C(Q'C)^-1 = YC* (W* : q x k matrix)
        self.x_rotations_ = np.dot(
            self.x_weights_,
            linalg.inv(np.dot(self.x_loadings_.T, self.x_weights_)))
        if Y.shape[1] > 1:
            self.y_rotations_ = np.dot(
                self.y_weights_,
                linalg.inv(np.dot(self.y_loadings_.T, self.y_weights_)))
        else:
            self.y_rotations_ = np.ones(1)
        # NOTE: coefs are computed unconditionally (the historical
        # ``if True or ...`` guard is preserved to keep behavior identical).
        if True or self.deflation_mode == "regression":
            # Estimate regression coefficient
            # Regress Y on T
            # Y = TQ' + Err,
            # Then express in function of X
            # Y = X W(P'W)^-1Q' + Err = XB + Err
            # => B = W*Q' (p x q)
            self.coefs = np.dot(self.x_rotations_, self.y_loadings_.T)
            # Fold the (un)scaling back into coefs so that predict() only
            # needs to remove the mean from X.
            self.coefs = (1. / self.x_std_.reshape((p, 1)) * self.coefs *
                          self.y_std_)
        return self

    def transform(self, X, Y=None, copy=True):
        """Apply the dimension reduction learned on the train data.
        Parameters
        ----------
        X : array-like of predictors, shape = [n_samples, p]
            Training vectors, where n_samples in the number of samples and
            p is the number of predictors.
        Y : array-like of response, shape = [n_samples, q], optional
            Training vectors, where n_samples in the number of samples and
            q is the number of response variables.
        copy : boolean
            Whether to copy X and Y, or perform in-place normalization.
        Returns
        -------
        x_scores if Y is not given, (x_scores, y_scores) otherwise.
        """
        # Normalize
        if copy:
            Xc = (np.asarray(X) - self.x_mean_) / self.x_std_
            if Y is not None:
                Yc = (np.asarray(Y) - self.y_mean_) / self.y_std_
        else:
            # BUG FIX: this branch previously decremented an unbound ``Xc``
            # (and ``Yc``), raising NameError whenever copy=False was used.
            Xc = np.asarray(X)
            Xc -= self.x_mean_
            Xc /= self.x_std_
            if Y is not None:
                Yc = np.asarray(Y)
                Yc -= self.y_mean_
                Yc /= self.y_std_
        # Apply rotation
        x_scores = np.dot(Xc, self.x_rotations_)
        if Y is not None:
            y_scores = np.dot(Yc, self.y_rotations_)
            return x_scores, y_scores
        return x_scores

    def predict(self, X, copy=True):
        """Apply the dimension reduction learned on the train data.
        Parameters
        ----------
        X : array-like of predictors, shape = [n_samples, p]
            Training vectors, where n_samples in the number of samples and
            p is the number of predictors.
        copy : boolean
            Whether to copy X and Y, or perform in-place normalization.
        Notes
        -----
        This call requires the estimation of a p x q matrix, which may
        be an issue in high dimensional space.
        """
        # Normalize: only the mean is removed; self.coefs already includes
        # the 1/x_std_ scaling (see fit), so dividing here would double-scale.
        if copy:
            Xc = (np.asarray(X) - self.x_mean_)
        else:
            # BUG FIX: this branch previously referenced an unbound ``Xc``
            # (NameError) and additionally divided by x_std_, which would
            # have double-scaled the prediction.
            Xc = np.asarray(X)
            Xc -= self.x_mean_
        Ypred = np.dot(Xc, self.coefs)
        return Ypred + self.y_mean_

    def fit_transform(self, X, y=None, **fit_params):
        """Learn and apply the dimension reduction on the train data.
        Parameters
        ----------
        X : array-like of predictors, shape = [n_samples, p]
            Training vectors, where n_samples in the number of samples and
            p is the number of predictors.
        Y : array-like of response, shape = [n_samples, q], optional
            Training vectors, where n_samples in the number of samples and
            q is the number of response variables.
        copy : boolean
            Whether to copy X and Y, or perform in-place normalization.
        Returns
        -------
        x_scores if Y is not given, (x_scores, y_scores) otherwise.
        """
        return self.fit(X, y, **fit_params).transform(X, y)
class PLSRegression(_PLS):
    """PLS regression
    PLSRegression implements the PLS 2 blocks regression known as PLS2 or PLS1
    in case of one dimensional response.
    This class inherits from _PLS with mode="A", deflation_mode="regression",
    norm_y_weights=False and algorithm="nipals".
    Parameters
    ----------
    X : array-like of predictors, shape = [n_samples, p]
        Training vectors, where n_samples in the number of samples and
        p is the number of predictors.
    Y : array-like of response, shape = [n_samples, q]
        Training vectors, where n_samples in the number of samples and
        q is the number of response variables.
    n_components : int, (default 2)
        Number of components to keep.
    scale : boolean, (default True)
        whether to scale the data
    max_iter : an integer, (default 500)
        the maximum number of iterations of the NIPALS inner loop (used
        only if algorithm="nipals")
    tol : non-negative real
        Tolerance used in the iterative algorithm default 1e-06.
    copy : boolean, default True
        Whether the deflation should be done on a copy. Let the default
        value to True unless you don't care about side effect
    Attributes
    ----------
    `x_weights_` : array, [p, n_components]
        X block weights vectors.
    `y_weights_` : array, [q, n_components]
        Y block weights vectors.
    `x_loadings_` : array, [p, n_components]
        X block loadings vectors.
    `y_loadings_` : array, [q, n_components]
        Y block loadings vectors.
    `x_scores_` : array, [n_samples, n_components]
        X scores.
    `y_scores_` : array, [n_samples, n_components]
        Y scores.
    `x_rotations_` : array, [p, n_components]
        X block to latents rotations.
    `y_rotations_` : array, [q, n_components]
        Y block to latents rotations.
    coefs: array, [p, q]
        The coefficients of the linear model: Y = X coefs + Err
    Notes
    -----
    For each component k, find weights u, v that optimizes:
    ``max corr(Xk u, Yk v) * var(Xk u) var(Yk u)``, such that ``|u| = 1``
    Note that it maximizes both the correlations between the scores and the
    intra-block variances.
    The residual matrix of X (Xk+1) block is obtained by the deflation on
    the current X score: x_score.
    The residual matrix of Y (Yk+1) block is obtained by deflation on the
    current X score. This performs the PLS regression known as PLS2. This
    mode is prediction oriented.
    This implementation provides the same results that 3 PLS packages
    provided in the R language (R-project):
    - "mixOmics" with function pls(X, Y, mode = "regression")
    - "plspm " with function plsreg2(X, Y)
    - "pls" with function oscorespls.fit(X, Y)
    Examples
    --------
    >>> from sklearn.cross_decomposition import PLSCanonical, PLSRegression
    >>> X = [[0., 0., 1.], [1.,0.,0.], [2.,2.,2.], [2.,5.,4.]]
    >>> Y = [[0.1, -0.2], [0.9, 1.1], [6.2, 5.9], [11.9, 12.3]]
    >>> pls2 = PLSRegression(n_components=2)
    >>> pls2.fit(X, Y)
    ... # doctest: +NORMALIZE_WHITESPACE
    PLSRegression(copy=True, max_iter=500, n_components=2, scale=True,
            tol=1e-06)
    >>> Y_pred = pls2.predict(X)
    References
    ----------
    Jacob A. Wegelin. A survey of Partial Least Squares (PLS) methods, with
    emphasis on the two-block case. Technical Report 371, Department of
    Statistics, University of Washington, Seattle, 2000.
    In french but still a reference:
    Tenenhaus, M. (1998). La regression PLS: theorie et pratique. Paris:
    Editions Technic.
    """
    def __init__(self, n_components=2, scale=True,
                 max_iter=500, tol=1e-06, copy=True):
        # Configure the generic _PLS engine for PLS2 regression: mode "A",
        # asymmetric ("regression") deflation, unnormalized Y weights, and
        # the NIPALS inner loop (the _PLS default algorithm).
        _PLS.__init__(self, n_components=n_components, scale=scale,
                      deflation_mode="regression", mode="A",
                      norm_y_weights=False, max_iter=max_iter, tol=tol,
                      copy=copy)
class PLSCanonical(_PLS):
""" PLSCanonical implements the 2 blocks canonical PLS of the original Wold
algorithm [Tenenhaus 1998] p.204, referred as PLS-C2A in [Wegelin 2000].
This class inherits from PLS with mode="A" and deflation_mode="canonical",
norm_y_weights=True and algorithm="nipals", but svd should provide similar
results up to numerical errors.
Parameters
----------
X : array-like of predictors, shape = [n_samples, p]
Training vectors, where n_samples is the number of samples and
p is the number of predictors.
Y : array-like of response, shape = [n_samples, q]
Training vectors, where n_samples is the number of samples and
q is the number of response variables.
n_components : int, number of components to keep. (default 2).
scale : boolean, scale data? (default True)
algorithm : string, "nipals" or "svd"
The algorithm used to estimate the weights. It will be called
n_components times, i.e. once for each iteration of the outer loop.
max_iter : an integer, (default 500)
the maximum number of iterations of the NIPALS inner loop (used
only if algorithm="nipals")
tol : non-negative real, default 1e-06
the tolerance used in the iterative algorithm
copy : boolean, default True
Whether the deflation should be done on a copy. Let the default
value to True unless you don't care about side effect
Attributes
----------
`x_weights_` : array, shape = [p, n_components]
X block weights vectors.
`y_weights_` : array, shape = [q, n_components]
Y block weights vectors.
`x_loadings_` : array, shape = [p, n_components]
X block loadings vectors.
`y_loadings_` : array, shape = [q, n_components]
Y block loadings vectors.
`x_scores_` : array, shape = [n_samples, n_components]
X scores.
`y_scores_` : array, shape = [n_samples, n_components]
Y scores.
`x_rotations_` : array, shape = [p, n_components]
X block to latents rotations.
`y_rotations_` : array, shape = [q, n_components]
Y block to latents rotations.
Notes
-----
For each component k, find weights u, v that optimize::
max corr(Xk u, Yk v) * var(Xk u) var(Yk u), such that ``|u| = |v| = 1``
Note that it maximizes both the correlations between the scores and the
intra-block variances.
The residual matrix of X (Xk+1) block is obtained by the deflation on the
current X score: x_score.
The residual matrix of Y (Yk+1) block is obtained by deflation on the
current Y score. This performs a canonical symmetric version of the PLS
regression. But slightly different than the CCA. This is mostly used
for modeling.
This implementation provides the same results that the "plspm" package
provided in the R language (R-project), using the function plsca(X, Y).
Results are equal or colinear with the function
``pls(..., mode = "canonical")`` of the "mixOmics" package. The difference
relies in the fact that mixOmics implementation does not exactly implement
the Wold algorithm since it does not normalize y_weights to one.
Examples
--------
>>> from sklearn.cross_decomposition import PLSCanonical, PLSRegression
>>> X = [[0., 0., 1.], [1.,0.,0.], [2.,2.,2.], [2.,5.,4.]]
>>> Y = [[0.1, -0.2], [0.9, 1.1], [6.2, 5.9], [11.9, 12.3]]
>>> plsca = PLSCanonical(n_components=2)
>>> plsca.fit(X, Y)
... # doctest: +NORMALIZE_WHITESPACE
PLSCanonical(algorithm='nipals', copy=True, max_iter=500, n_components=2,
scale=True, tol=1e-06)
>>> X_c, Y_c = plsca.transform(X, Y)
References
----------
Jacob A. Wegelin. A survey of Partial Least Squares (PLS) methods, with
emphasis on the two-block case. Technical Report 371, Department of
Statistics, University of Washington, Seattle, 2000.
Tenenhaus, M. (1998). La regression PLS: theorie et pratique. Paris:
Editions Technic.
See also
--------
CCA
PLSSVD
"""
    def __init__(self, n_components=2, scale=True, algorithm="nipals",
                 max_iter=500, tol=1e-06, copy=True):
        """Configure a canonical PLS model.

        Delegates to the shared ``_PLS`` base with the settings that define
        the canonical variant: symmetric ("canonical") deflation, mode "A",
        and y-weights normalized to unit norm.

        Parameters
        ----------
        n_components : int, (default 2)
            Number of components to keep.
        scale : boolean, (default True)
            Whether to scale the data.
        algorithm : string, "nipals" or "svd"
            Algorithm used to estimate the weights of each iteration.
        max_iter : int, (default 500)
            Maximum number of iterations of the NIPALS inner loop.
        tol : non-negative real, (default 1e-06)
            Tolerance used in the iterative algorithm.
        copy : boolean, (default True)
            Whether to copy X and Y (deflation is performed in place).
        """
        # norm_y_weights=True is what distinguishes this from the
        # regression variant (see the class docstring's note on mixOmics).
        _PLS.__init__(self, n_components=n_components, scale=scale,
                      deflation_mode="canonical", mode="A",
                      norm_y_weights=True, algorithm=algorithm,
                      max_iter=max_iter, tol=tol, copy=copy)
class PLSSVD(BaseEstimator, TransformerMixin):
    """Partial Least Square SVD
    Simply perform a svd on the crosscovariance matrix: X'Y
    There are no iterative deflation here.
    Parameters
    ----------
    X : array-like of predictors, shape = [n_samples, p]
        Training vector, where n_samples is the number of samples and
        p is the number of predictors. X will be centered before any analysis.
    Y : array-like of response, shape = [n_samples, q]
        Training vector, where n_samples is the number of samples and
        q is the number of response variables. X will be centered before any
        analysis.
    n_components : int, (default 2).
        number of components to keep.
    scale : boolean, (default True)
        whether to scale X and Y.
    Attributes
    ----------
    `x_weights_` : array, [p, n_components]
        X block weights vectors.
    `y_weights_` : array, [q, n_components]
        Y block weights vectors.
    `x_scores_` : array, [n_samples, n_components]
        X scores.
    `y_scores_` : array, [n_samples, n_components]
        Y scores.
    See also
    --------
    PLSCanonical
    CCA
    """
    def __init__(self, n_components=2, scale=True, copy=True):
        # Bare parameter storage only; all work happens in fit().
        self.n_components = n_components
        self.scale = scale
        self.copy = copy
    def fit(self, X, Y):
        """Compute the SVD of the cross-covariance matrix X'Y.

        Centers (and optionally scales) X and Y in place, then keeps the
        first ``n_components`` left/right singular vectors of X'Y as the
        x/y weights, and the projections of X and Y onto them as scores.
        Returns ``self``.
        """
        # copy since this will contains the centered data
        X, Y = check_arrays(X, Y, dtype=np.float, copy=self.copy,
                            sparse_format='dense')
        n = X.shape[0]
        p = X.shape[1]
        if X.ndim != 2:
            raise ValueError('X must be a 2D array')
        if n != Y.shape[0]:
            raise ValueError(
                'Incompatible shapes: X has %s samples, while Y '
                'has %s' % (X.shape[0], Y.shape[0]))
        if self.n_components < 1 or self.n_components > p:
            raise ValueError('invalid number of components')
        # Scale (in place)
        X, Y, self.x_mean_, self.y_mean_, self.x_std_, self.y_std_ =\
            _center_scale_xy(X, Y, self.scale)
        # svd(X'Y)
        C = np.dot(X.T, Y)
        # The arpack svds solver only works if the number of extracted
        # components is smaller than rank(X) - 1. Hence, if we want to extract
        # all the components (C.shape[1]), we have to use another one. Else,
        # let's use arpacks to compute only the interesting components.
        if self.n_components == C.shape[1]:
            U, s, V = linalg.svd(C, full_matrices=False)
        else:
            U, s, V = arpack.svds(C, k=self.n_components)
        # linalg.svd/arpack.svds return V transposed; store it column-wise.
        V = V.T
        self.x_scores_ = np.dot(X, U)
        self.y_scores_ = np.dot(Y, V)
        self.x_weights_ = U
        self.y_weights_ = V
        return self
    def transform(self, X, Y=None):
        """Apply the dimension reduction learned on the train data."""
        # Same centering/scaling as fit(); transform never mutates X or Y.
        Xr = (X - self.x_mean_) / self.x_std_
        x_scores = np.dot(Xr, self.x_weights_)
        if Y is not None:
            Yr = (Y - self.y_mean_) / self.y_std_
            y_scores = np.dot(Yr, self.y_weights_)
            return x_scores, y_scores
        return x_scores
    def fit_transform(self, X, y=None, **fit_params):
        """Learn and apply the dimension reduction on the train data.
        Parameters
        ----------
        X : array-like of predictors, shape = [n_samples, p]
            Training vectors, where n_samples in the number of samples and
            p is the number of predictors.
        Y : array-like of response, shape = [n_samples, q], optional
            Training vectors, where n_samples in the number of samples and
            q is the number of response variables.
        Returns
        -------
        x_scores if Y is not given, (x_scores, y_scores) otherwise.
        """
        # NOTE(review): fit() requires a real Y (it reads Y.shape[0]) and
        # accepts no keyword arguments, so calling this with y=None or with
        # fit_params will raise — confirm whether y was meant to be optional.
        return self.fit(X, y, **fit_params).transform(X, y)
| depet/scikit-learn | sklearn/cross_decomposition/pls_.py | Python | bsd-3-clause | 28,547 |
import subprocess
def start(args, logfile, errfile):
    """Launch the Django app under the Python 3 gunicorn, MySQL backend.

    The server is started in the background from the ``django`` directory;
    its stdout/stderr are redirected to the supplied log files.
    """
    launch_cmd = ("$PY3_GUNICORN --pid=gunicorn.pid hello.wsgi:application"
                  " -c gunicorn_conf.py --env DJANGO_DB=mysql")
    subprocess.Popen(launch_cmd, cwd="django", shell=True,
                     stderr=errfile, stdout=logfile)
    return 0
def stop(logfile, errfile):
    """Terminate the gunicorn master recorded in gunicorn.pid."""
    kill_cmd = "kill `cat gunicorn.pid`"
    subprocess.call(kill_cmd, cwd="django", shell=True,
                    stderr=errfile, stdout=logfile)
    return 0
| torhve/FrameworkBenchmarks | frameworks/Python/django/setup_py3.py | Python | bsd-3-clause | 430 |
import os
from setuptools import setup, find_packages
# Long description shown on PyPI is taken straight from the README.
README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
# Single-source the version number from the package itself.
from watermarker import __version__
setup(
    name='django-watermark',
    version=__version__,
    # The bundled example project is excluded from the distribution.
    packages=find_packages(exclude=['example']),
    include_package_data=True,
    license='BSD License',
    description="Quick and efficient way to apply watermarks to images in Django.",
    long_description=README,
    keywords='django, watermark, image, photo, logo',
    url='http://github.com/bashu/django-watermark/',
    author='Josh VanderLinden',
    author_email='codekoala@gmail.com',
    maintainer='Basil Shubin',
    maintainer_email='basil.shubin@gmail.com',
    install_requires=[
        'django>=1.4',
        'django-appconf',
        'pillow',
        'six',
    ],
    classifiers=[
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'Intended Audience :: End Users/Desktop',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Topic :: Artistic Software',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Topic :: Multimedia :: Graphics'
    ],
    zip_safe=False
)
| lzanuz/django-watermark | setup.py | Python | bsd-3-clause | 1,660 |
# -*- coding: utf-8 -*-
"""Module for pull and push command."""
import abc
from base64 import b64decode
import six
from six.moves import configparser
from ..core.api import API
from ..core.commands import AbstractCommand
from ..core.models.terminal import clean_order
from .client.controllers import ApiController
from .client.cryptor import RNCryptor
from ..core.storage.strategies import RelatedGetStrategy, SyncSaveStrategy
@six.add_metaclass(abc.ABCMeta)
class CloudSynchronizationCommand(AbstractCommand):
    """Common machinery for the pull and push commands.

    Subclasses implement :meth:`process_sync`; this class handles the
    password prompt, credential validation and cryptor construction.
    """

    def extend_parser(self, parser):
        """Attach the shared ``--password`` option."""
        parser.add_argument('-p', '--password', metavar='PASSWORD')
        return parser

    @abc.abstractmethod
    def process_sync(self, api_controller):
        """Perform the actual synchronization work (subclass hook)."""
        pass

    def take_action(self, parsed_args):
        """Authenticate the user, build a cryptor and run the sync."""
        # Salts are read first so a broken config fails fast,
        # before the user is prompted for a password.
        enc_salt = b64decode(self.config.get('User', 'salt'))
        mac_salt = b64decode(self.config.get('User', 'hmac_salt'))
        password = parsed_args.password
        if password is None:
            password = self.prompt_password()
        self.validate_password(password)

        cryptor = RNCryptor()
        cryptor.password = password
        cryptor.encryption_salt = enc_salt
        cryptor.hmac_salt = mac_salt

        controller = ApiController(self.storage, self.config, cryptor)
        with self.storage:
            self.process_sync(controller)

    def validate_password(self, password):
        """Raise an error when password invalid."""
        # A successful login is the validation; failures propagate.
        API().login(self.config.get('User', 'username'), password)
class PushCommand(CloudSynchronizationCommand):
    """Push data to Termius cloud."""

    get_strategy = RelatedGetStrategy
    save_strategy = SyncSaveStrategy

    def process_sync(self, api_controller):
        """Upload local changes; a prior pull must have seeded the config."""
        missing_config_errors = (configparser.NoSectionError,
                                 configparser.NoOptionError)
        try:
            api_controller.put_setting()
            api_controller.post_bulk()
        except missing_config_errors:
            self.log.error('Call pull at first.')
        else:
            self.log.info('Push data to Termius cloud.')
class PullCommand(CloudSynchronizationCommand):
    """Pull data from Termius cloud."""

    save_strategy = SyncSaveStrategy

    def process_sync(self, controller):
        """Download remote settings, then the bulk entity payload."""
        controller.get_settings()
        controller.get_bulk()
        self.log.info('Pull data from Termius cloud.')
class FullCleanCommand(CloudSynchronizationCommand):
    """Pull, delete all data and push to Termius cloud."""

    get_strategy = RelatedGetStrategy
    save_strategy = SyncSaveStrategy
    # Models are wiped in dependency-safe order.
    supported_models = clean_order

    def process_sync(self, api_controller):
        """Fetch the remote state, wipe everything, then push the wipe."""
        api_controller.get_bulk()
        with self.storage:
            self.full_clean()
        api_controller.post_bulk()
        self.log.info('Full clean data from Termius cloud.')

    def full_clean(self):
        """Delete every stored instance of each supported model."""
        for model in self.supported_models:
            self.log.info('Start cleaning %s...', model)
            for instance in self.storage.get_all(model):
                self.storage.delete(instance)
        self.log.info('Complete cleaning')
class CryptoCommand(CloudSynchronizationCommand):
    """Encrypt or decrypt text with the account's RNCryptor settings."""

    def extend_parser(self, parser):
        """Add the operation flags and the positional text argument."""
        super(CryptoCommand, self).extend_parser(parser)
        parser.add_argument(
            '-d', '--decrypt',
            action='store_const', const='decrypt',
            dest='operation'
        )
        parser.add_argument(
            '-e', '--encrypt',
            action='store_const', const='encrypt',
            dest='operation'
        )
        parser.add_argument('text', nargs=1, metavar='TEXT', action='store')
        return parser

    def process_sync(self, api_controller):
        """Unused here; this command does no cloud sync."""
        pass

    def take_action(self, parsed_args):
        """Run the chosen crypto operation on each text chunk."""
        enc_salt = b64decode(self.config.get('User', 'salt'))
        mac_salt = b64decode(self.config.get('User', 'hmac_salt'))
        password = parsed_args.password
        if password is None:
            password = self.prompt_password()
        self.validate_password(password)

        cryptor = RNCryptor()
        cryptor.password = password
        cryptor.encryption_salt = enc_salt
        cryptor.hmac_salt = mac_salt

        # The operation name ('encrypt'/'decrypt') selects a cryptor method.
        operation = getattr(cryptor, parsed_args.operation)
        for chunk in parsed_args.text:
            self.app.stdout.write('{}\n'.format(operation(chunk)))
| EvgeneOskin/termius-cli | termius/cloud/commands.py | Python | bsd-3-clause | 4,879 |
import numpy as np
from scipy._lib._util import check_random_state
def rvs_ratio_uniforms(pdf, umax, vmin, vmax, size=1, c=0, random_state=None):
    """
    Generate random samples from a probability density function using the
    ratio-of-uniforms method.

    Parameters
    ----------
    pdf : callable
        A function with signature ``pdf(x)`` proportional to the target
        probability density function.
    umax : float
        Upper bound of the bounding rectangle in the u-direction,
        ``sup sqrt(pdf(x))``.
    vmin, vmax : float
        Lower/upper bounds of the bounding rectangle in the v-direction,
        ``inf/sup (x - c) * sqrt(pdf(x))``.
    size : int or tuple of ints, optional
        Number (or shape) of random variates to draw (default 1).
    c : float, optional
        Shift parameter of the ratio-of-uniforms method (default 0);
        typically zero or the mode of `pdf`.
    random_state : {None, int, `numpy.random.Generator`,
                    `numpy.random.RandomState`}, optional
        Seed or generator used to draw the uniform variates.

    Returns
    -------
    rvs : ndarray
        Random variates distributed according to `pdf`.

    Notes
    -----
    With ``A = {(u, v) : 0 < u <= sqrt(pdf(v/u + c))}``, if ``(U, V)`` is
    uniform on ``A`` then ``V/U + c`` has density proportional to `pdf`
    (Devroye 1986; Kinderman & Monahan 1977). Candidate pairs are drawn
    uniformly on the rectangle ``[0, umax] x [vmin, vmax]`` and accepted
    when they fall in ``A``; the expected acceptance rate is
    ``area(A) / area(R)``. If not a single variate is accepted within
    50000 candidate draws, a RuntimeError is raised. If the rectangle
    does not enclose ``A``, the samples follow a *different* distribution;
    a goodness-of-fit check (e.g. `~scipy.stats.kstest`) is recommended.

    References
    ----------
    .. [1] L. Devroye, "Non-Uniform Random Variate Generation",
       Springer-Verlag, 1986.
    .. [2] W. Hoermann and J. Leydold, "Generating generalized inverse
       Gaussian random variates", Statistics and Computing, 24(4),
       p. 547--557, 2014.
    .. [3] A.J. Kinderman and J.F. Monahan, "Computer Generation of Random
       Variables Using the Ratio of Uniform Deviates",
       ACM Transactions on Mathematical Software, 3(3), p. 257--260, 1977.

    Examples
    --------
    >>> from scipy import stats
    >>> rng = np.random.default_rng()
    >>> f = lambda x: np.exp(-x**2 / 2)
    >>> v_bound = np.sqrt(f(np.sqrt(2))) * np.sqrt(2)
    >>> umax, vmin, vmax = np.sqrt(f(0)), -v_bound, v_bound
    >>> rvs = stats.rvs_ratio_uniforms(f, umax, vmin, vmax, size=2500,
    ...                                random_state=rng)
    """
    if vmin >= vmax:
        raise ValueError("vmin must be smaller than vmax.")
    if umax <= 0:
        raise ValueError("umax must be positive.")

    out_shape = tuple(np.atleast_1d(size))
    total = np.prod(out_shape)  # flat count; reshaped on return

    rng = check_random_state(random_state)
    samples = np.zeros(total)
    filled, attempt = 0, 1

    # Expected runtime is finite; bail out if nothing at all is accepted
    # after ~50000 candidate draws (probability of a false alarm is tiny).
    while filled < total:
        need = total - filled
        # Candidate pair: u on [0, umax], v on [vmin, vmax].
        u = umax * rng.uniform(size=need)
        v = rng.uniform(vmin, vmax, size=need)
        # Accept when (u, v) lies inside the region A.
        candidates = v / u + c
        accepted = (u ** 2 <= pdf(candidates))
        n_accepted = np.sum(accepted)
        if n_accepted > 0:
            samples[filled:(filled + n_accepted)] = candidates[accepted]
            filled += n_accepted
        if (filled == 0) and (attempt * total >= 50000):
            msg = ("Not a single random variate could be generated in {} "
                   "attempts. The ratio of uniforms method does not appear "
                   "to work for the provided parameters. Please check the "
                   "pdf and the bounds.".format(attempt * total))
            raise RuntimeError(msg)
        attempt += 1

    return np.reshape(samples, out_shape)
| WarrenWeckesser/scipy | scipy/stats/_rvs_sampling.py | Python | bsd-3-clause | 7,177 |
from kraken.core.maths import Vec3
from kraken.core.maths.xfo import Xfo
from kraken.core.objects.components.base_example_component import BaseExampleComponent
from kraken.core.objects.attributes.attribute_group import AttributeGroup
from kraken.core.objects.attributes.scalar_attribute import ScalarAttribute
from kraken.core.objects.attributes.bool_attribute import BoolAttribute
from kraken.core.objects.attributes.string_attribute import StringAttribute
from kraken.core.objects.constraints.pose_constraint import PoseConstraint
from kraken.core.objects.component_group import ComponentGroup
from kraken.core.objects.hierarchy_group import HierarchyGroup
from kraken.core.objects.locator import Locator
from kraken.core.objects.joint import Joint
from kraken.core.objects.ctrlSpace import CtrlSpace
from kraken.core.objects.control import Control
from kraken.core.objects.operators.kl_operator import KLOperator
from kraken.core.profiler import Profiler
from kraken.helpers.utility_methods import logHierarchy
class FabriceClavicle(BaseExampleComponent):
    """Clavicle Component Base

    Declares the input/output ports shared by the guide and rig variants:
    one spine-end input, one clavicle output, and the standard
    drawDebug/rigScale attribute inputs.
    """
    def __init__(self, name='clavicle', parent=None):
        super(FabriceClavicle, self).__init__(name, parent)
        # ===========
        # Declare IO
        # ===========
        # Declare Inputs Xfos
        # Transform driven by the parent spine component's end.
        self.spineEndInputTgt = self.createInput('spineEnd', dataType='Xfo', parent=self.inputHrcGrp).getTarget()
        # Declare Output Xfos
        # Transform that downstream components (e.g. an arm) attach to.
        self.clavicleOutputTgt = self.createOutput('clavicle', dataType='Xfo', parent=self.outputHrcGrp).getTarget()
        # Declare Input Attrs
        self.drawDebugInputAttr = self.createInput('drawDebug', dataType='Boolean', value=False, parent=self.cmpInputAttrGrp).getTarget()
        self.rigScaleInputAttr = self.createInput('rigScale', dataType='Float', value=1.0, parent=self.cmpInputAttrGrp).getTarget()
        # Declare Output Attrs
class FabriceClavicleGuide(FabriceClavicle):
    """Clavicle Component Guide

    Interactive placement rig: a single cube control whose transform and
    curve shape are persisted and later handed to the rig component.
    """
    def __init__(self, name='clavicle', parent=None):
        Profiler.getInstance().push("Construct Clavicle Guide Component:" + name)
        super(FabriceClavicleGuide, self).__init__(name, parent)
        # =========
        # Controls
        # =========
        # Guide Controls
        guideSettingsAttrGrp = AttributeGroup("GuideSettings", parent=self)
        self.clavicleCtrl = Control('clavicle', parent=self.ctrlCmpGrp, shape="cube")
        self.clavicleCtrl.alignOnXAxis()
        self.clavicleCtrl.scalePoints(Vec3(1.0, 0.25, 0.25))
        # Default placement: left-side clavicle at shoulder height.
        data = {
            "name": name,
            "location": "L",
            "clavicleXfo": Xfo(Vec3(0.1322, 15.403, -0.5723)),
            'clavicleCtrlCrvData': self.clavicleCtrl.getCurveData()
        }
        self.loadData(data)
        Profiler.getInstance().pop()
    # =============
    # Data Methods
    # =============
    def saveData(self):
        """Save the data for the component to be persisted.
        Return:
        The JSON data object
        """
        data = super(FabriceClavicleGuide, self).saveData()
        # Persist the control transform and its (possibly user-edited) shape.
        data['clavicleXfo'] = self.clavicleCtrl.xfo
        data['clavicleCtrlCrvData'] = self.clavicleCtrl.getCurveData()
        return data
    def loadData(self, data):
        """Load a saved guide representation from persisted data.
        Arguments:
        data -- object, The JSON data object.
        Return:
        True if successful.
        """
        super(FabriceClavicleGuide, self).loadData( data )
        self.clavicleCtrl.xfo = data['clavicleXfo']
        self.clavicleCtrl.setCurveData(data['clavicleCtrlCrvData'])
        return True
    def getRigBuildData(self):
        """Returns the Guide data used by the Rig Component to define the layout of the final rig..
        Return:
        The JSON rig data object.
        """
        data = super(FabriceClavicleGuide, self).getRigBuildData()
        data['clavicleXfo'] = self.clavicleCtrl.xfo
        data['clavicleCtrlCrvData'] = self.clavicleCtrl.getCurveData()
        return data
    # ==============
    # Class Methods
    # ==============
    @classmethod
    def getComponentType(cls):
        """Enables introspection of the class prior to construction to determine if it is a guide component.
        Return:
        The true if this component is a guide component.
        """
        return 'Guide'
    @classmethod
    def getRigComponentClass(cls):
        """Returns the corresponding rig component class for this guide component class
        Return:
        The rig component class.
        """
        return FabriceClavicleRig
class FabriceClavicleRig(FabriceClavicle):
    """Clavicle Component

    Final animation rig: one clavicle control constrained to the spine end,
    driving the output transform and a single deformer joint via a
    PoseConstraintSolver KL operator.
    """
    def __init__(self, name='Clavicle', parent=None):
        Profiler.getInstance().push("Construct Clavicle Rig Component:" + name)
        super(FabriceClavicleRig, self).__init__(name, parent)
        # =========
        # Controls
        # =========
        # Clavicle
        self.clavicleCtrlSpace = CtrlSpace('clavicle', parent=self.ctrlCmpGrp)
        self.clavicleCtrl = Control('clavicle', parent=self.clavicleCtrlSpace, shape="cube")
        self.clavicleCtrl.alignOnXAxis()
        # ==========
        # Deformers
        # ==========
        deformersLayer = self.getOrCreateLayer('deformers')
        # BUGFIX: the component group must be stored on self; previously it
        # was assigned to a local variable while addItem() read the
        # never-assigned attribute self.defCmpGrp (AttributeError).
        self.defCmpGrp = ComponentGroup(self.getName(), self, parent=deformersLayer)
        self.addItem('defCmpGrp', self.defCmpGrp)
        self.clavicleDef = Joint('clavicle', parent=self.defCmpGrp)
        self.clavicleDef.setComponent(self)
        # ==============
        # Constrain I/O
        # ==============
        # Constraint inputs: control space follows the spine end.
        clavicleInputConstraint = PoseConstraint('_'.join([self.clavicleCtrl.getName(), 'To', self.spineEndInputTgt.getName()]))
        clavicleInputConstraint.setMaintainOffset(True)
        clavicleInputConstraint.addConstrainer(self.spineEndInputTgt)
        self.clavicleCtrlSpace.addConstraint(clavicleInputConstraint)
        # Constraint outputs: output target follows the control.
        clavicleConstraint = PoseConstraint('_'.join([self.clavicleOutputTgt.getName(), 'To', self.clavicleCtrl.getName()]))
        clavicleConstraint.addConstrainer(self.clavicleCtrl)
        self.clavicleOutputTgt.addConstraint(clavicleConstraint)
        # ===============
        # Add Canvas Ops
        # ===============
        # Add Deformer Canvas Op: drives the joint from the output transform.
        self.defConstraintOp = KLOperator('defConstraint', 'PoseConstraintSolver', 'Kraken')
        self.addOperator(self.defConstraintOp)
        # Add Att Inputs
        self.defConstraintOp.setInput('drawDebug', self.drawDebugInputAttr)
        self.defConstraintOp.setInput('rigScale', self.rigScaleInputAttr)
        # Add Xfo Inputs
        self.defConstraintOp.setInput('constrainer', self.clavicleOutputTgt)
        # Add Xfo Outputs
        self.defConstraintOp.setOutput('constrainee', self.clavicleDef)
        Profiler.getInstance().pop()

    def loadData(self, data=None):
        """Load a saved guide representation from persisted data.

        Arguments:
        data -- object, The JSON data object (expects 'clavicleXfo' and
                'clavicleCtrlCrvData' keys from the guide's build data).

        Return:
        True if successful.
        """
        super(FabriceClavicleRig, self).loadData( data )
        self.clavicleCtrlSpace.xfo = data['clavicleXfo']
        self.clavicleCtrl.xfo = data['clavicleXfo']
        self.clavicleCtrl.setCurveData(data['clavicleCtrlCrvData'])
        # Set IO Xfos so the rig starts in the guided pose.
        self.spineEndInputTgt.xfo = data['clavicleXfo']
        self.clavicleOutputTgt.xfo = data['clavicleXfo']
        # Eval Operators to settle the deformer joint.
        self.defConstraintOp.evaluate()
# Register both variants with the Kraken system so they appear in the
# component library (guide for layout, rig for the final build).
from kraken.core.kraken_system import KrakenSystem
ks = KrakenSystem.getInstance()
ks.registerComponent(FabriceClavicleGuide)
ks.registerComponent(FabriceClavicleRig)
| goshow-jp/Kraken | Python/kraken_components/fabrice/fabrice_clavicle.py | Python | bsd-3-clause | 7,805 |
from sympy import I, sqrt, log, exp, sin, asin, factorial
from sympy.core import Symbol, S, Rational, Integer, Dummy, Wild, Pow
from sympy.core.facts import InconsistentAssumptions
from sympy import simplify
from sympy.core.compatibility import range
from sympy.utilities.pytest import raises, XFAIL
def test_symbol_unset():
    # integer=True fixes the listed facts; is_number stays False for symbols.
    x = Symbol('x', real=True, integer=True)
    for fact, expected in [('real', True), ('integer', True),
                           ('imaginary', False), ('noninteger', False),
                           ('number', False)]:
        assert getattr(x, 'is_' + fact) is expected
def test_zero():
    # Full assumption table for the integer 0.
    z = Integer(0)
    facts = [
        ('commutative', True), ('integer', True), ('rational', True),
        ('algebraic', True), ('transcendental', False), ('real', True),
        ('complex', True), ('noninteger', False), ('irrational', False),
        ('imaginary', False), ('positive', False), ('negative', False),
        ('nonpositive', True), ('nonnegative', True), ('even', True),
        ('odd', False), ('finite', True), ('infinite', False),
        ('comparable', True), ('prime', False), ('composite', False),
        ('number', True),
    ]
    for fact, expected in facts:
        assert getattr(z, 'is_' + fact) is expected
def test_one():
    # Full assumption table for the integer 1.
    one = Integer(1)
    facts = [
        ('commutative', True), ('integer', True), ('rational', True),
        ('algebraic', True), ('transcendental', False), ('real', True),
        ('complex', True), ('noninteger', False), ('irrational', False),
        ('imaginary', False), ('positive', True), ('negative', False),
        ('nonpositive', False), ('nonnegative', True), ('even', False),
        ('odd', True), ('finite', True), ('infinite', False),
        ('comparable', True), ('prime', False), ('number', True),
        ('composite', False),  # issue 8807
    ]
    for fact, expected in facts:
        assert getattr(one, 'is_' + fact) is expected
def test_negativeone():
    # Full assumption table for the integer -1.
    neg_one = Integer(-1)
    facts = [
        ('commutative', True), ('integer', True), ('rational', True),
        ('algebraic', True), ('transcendental', False), ('real', True),
        ('complex', True), ('noninteger', False), ('irrational', False),
        ('imaginary', False), ('positive', False), ('negative', True),
        ('nonpositive', True), ('nonnegative', False), ('even', False),
        ('odd', True), ('finite', True), ('infinite', False),
        ('comparable', True), ('prime', False), ('composite', False),
        ('number', True),
    ]
    for fact, expected in facts:
        assert getattr(neg_one, 'is_' + fact) is expected
def test_infinity():
    # Full assumption table for positive infinity.
    oo = S.Infinity
    facts = [
        ('commutative', True), ('integer', None), ('rational', None),
        ('algebraic', None), ('transcendental', None), ('real', True),
        ('complex', True), ('noninteger', None), ('irrational', None),
        ('imaginary', False), ('positive', True), ('negative', False),
        ('nonpositive', False), ('nonnegative', True), ('even', None),
        ('odd', None), ('finite', False), ('infinite', True),
        ('comparable', True), ('prime', False), ('composite', None),
        ('number', True),
    ]
    for fact, expected in facts:
        assert getattr(oo, 'is_' + fact) is expected
def test_neg_infinity():
    # Full assumption table for negative infinity.
    minus_oo = S.NegativeInfinity
    facts = [
        ('commutative', True), ('integer', None), ('rational', None),
        ('algebraic', None), ('transcendental', None), ('real', True),
        ('complex', True), ('noninteger', None), ('irrational', None),
        ('imaginary', False), ('positive', False), ('negative', True),
        ('nonpositive', True), ('nonnegative', False), ('even', None),
        ('odd', None), ('finite', False), ('infinite', True),
        ('comparable', True), ('prime', False), ('composite', False),
        ('number', True),
    ]
    for fact, expected in facts:
        assert getattr(minus_oo, 'is_' + fact) is expected
def test_nan():
    # NaN is commutative and a number, non-comparable, everything else unknown.
    nan = S.NaN
    facts = [
        ('commutative', True), ('integer', None), ('rational', None),
        ('algebraic', None), ('transcendental', None), ('real', None),
        ('complex', None), ('noninteger', None), ('irrational', None),
        ('imaginary', None), ('positive', None), ('negative', None),
        ('nonpositive', None), ('nonnegative', None), ('even', None),
        ('odd', None), ('finite', None), ('infinite', None),
        ('comparable', False), ('prime', None), ('composite', None),
        ('number', True),
    ]
    for fact, expected in facts:
        assert getattr(nan, 'is_' + fact) is expected
def test_pos_rational():
    # Full assumption table for the rational 3/4.
    r = Rational(3, 4)
    facts = [
        ('commutative', True), ('integer', False), ('rational', True),
        ('algebraic', True), ('transcendental', False), ('real', True),
        ('complex', True), ('noninteger', True), ('irrational', False),
        ('imaginary', False), ('positive', True), ('negative', False),
        ('nonpositive', False), ('nonnegative', True), ('even', False),
        ('odd', False), ('finite', True), ('infinite', False),
        ('comparable', True), ('prime', False), ('composite', False),
    ]
    for fact, expected in facts:
        assert getattr(r, 'is_' + fact) is expected
    # Every positive rational shares the same sign facts.
    for q in (Rational(1, 4), Rational(5, 4), Rational(5, 3)):
        assert q.is_positive is True
        assert q.is_negative is False
        assert q.is_nonpositive is False
        assert q.is_nonnegative is True
def test_neg_rational():
    # Every negative rational shares the same sign facts.
    for q in (Rational(-3, 4), Rational(-1, 4),
              Rational(-5, 4), Rational(-5, 3)):
        assert q.is_positive is False
        assert q.is_nonpositive is True
        assert q.is_negative is True
        assert q.is_nonnegative is False
def test_pi():
    # Pi: a positive transcendental (hence irrational) real constant.
    pi = S.Pi
    facts = [
        ('commutative', True), ('integer', False), ('rational', False),
        ('algebraic', False), ('transcendental', True), ('real', True),
        ('complex', True), ('noninteger', True), ('irrational', True),
        ('imaginary', False), ('positive', True), ('negative', False),
        ('nonpositive', False), ('nonnegative', True), ('even', False),
        ('odd', False), ('finite', True), ('infinite', False),
        ('comparable', True), ('prime', False), ('composite', False),
    ]
    for fact, expected in facts:
        assert getattr(pi, 'is_' + fact) is expected
def test_E():
    # E: same fact table as Pi — positive transcendental real constant.
    e = S.Exp1
    facts = [
        ('commutative', True), ('integer', False), ('rational', False),
        ('algebraic', False), ('transcendental', True), ('real', True),
        ('complex', True), ('noninteger', True), ('irrational', True),
        ('imaginary', False), ('positive', True), ('negative', False),
        ('nonpositive', False), ('nonnegative', True), ('even', False),
        ('odd', False), ('finite', True), ('infinite', False),
        ('comparable', True), ('prime', False), ('composite', False),
    ]
    for fact, expected in facts:
        assert getattr(e, 'is_' + fact) is expected
def test_I():
    # I: purely imaginary algebraic unit; no real/sign facts apply.
    i = S.ImaginaryUnit
    facts = [
        ('commutative', True), ('integer', False), ('rational', False),
        ('algebraic', True), ('transcendental', False), ('real', False),
        ('complex', True), ('noninteger', False), ('irrational', False),
        ('imaginary', True), ('positive', False), ('negative', False),
        ('nonpositive', False), ('nonnegative', False), ('even', False),
        ('odd', False), ('finite', True), ('infinite', False),
        ('comparable', False), ('prime', False), ('composite', False),
    ]
    for fact, expected in facts:
        assert getattr(i, 'is_' + fact) is expected
def test_symbol_real():
    # issue 3848: real=False forces all real-derived facts to False.
    a = Symbol('a', real=False)
    for fact in ('real', 'integer', 'negative', 'positive',
                 'nonnegative', 'nonpositive', 'zero'):
        assert getattr(a, 'is_' + fact) is False
def test_symbol_zero():
    # zero=True pins the entire sign family.
    z = Symbol('x', zero=True)
    for fact, expected in [('positive', False), ('nonpositive', True),
                           ('negative', False), ('nonnegative', True),
                           ('zero', True), ('nonzero', False),
                           ('finite', True)]:
        assert getattr(z, 'is_' + fact) is expected
def test_symbol_positive():
    # positive=True pins the entire sign family.
    p = Symbol('x', positive=True)
    for fact, expected in [('positive', True), ('nonpositive', False),
                           ('negative', False), ('nonnegative', True),
                           ('zero', False), ('nonzero', True)]:
        assert getattr(p, 'is_' + fact) is expected
def test_neg_symbol_positive():
    # Negating a positive symbol flips the sign family.
    neg = -Symbol('x', positive=True)
    for fact, expected in [('positive', False), ('nonpositive', True),
                           ('negative', True), ('nonnegative', False),
                           ('zero', False), ('nonzero', True)]:
        assert getattr(neg, 'is_' + fact) is expected
def test_symbol_nonpositive():
x = Symbol('x', nonpositive=True)
assert x.is_positive is False
assert x.is_nonpositive is True
assert x.is_negative is None
assert x.is_nonnegative is None
assert x.is_zero is None
assert x.is_nonzero is None
def test_neg_symbol_nonpositive():
x = -Symbol('x', nonpositive=True)
assert x.is_positive is None
assert x.is_nonpositive is None
assert x.is_negative is False
assert x.is_nonnegative is True
assert x.is_zero is None
assert x.is_nonzero is None
# positive=False alone says nothing about the rest of the sign lattice:
# x might be negative, zero, or not even real.
def test_symbol_falsepositive():
    x = Symbol('x', positive=False)
    assert x.is_positive is False
    assert x.is_nonpositive is None
    assert x.is_negative is None
    assert x.is_nonnegative is None
    assert x.is_zero is None
    assert x.is_nonzero is None
# Under negation only is_negative can be ruled out.
def test_neg_symbol_falsepositive():
    x = -Symbol('x', positive=False)
    assert x.is_positive is None
    assert x.is_nonpositive is None
    assert x.is_negative is False
    assert x.is_nonnegative is None
    assert x.is_zero is None
    assert x.is_nonzero is None
# Adding real=True tightens "not positive" into "nonpositive".
def test_symbol_falsepositive_real():
    x = Symbol('x', positive=False, real=True)
    assert x.is_positive is False
    assert x.is_nonpositive is True
    assert x.is_negative is None
    assert x.is_nonnegative is None
    assert x.is_zero is None
    assert x.is_nonzero is None
# And the negation of a real non-positive symbol is known nonnegative.
def test_neg_symbol_falsepositive_real():
    x = -Symbol('x', positive=False, real=True)
    assert x.is_positive is None
    assert x.is_nonpositive is None
    assert x.is_negative is False
    assert x.is_nonnegative is True
    assert x.is_zero is None
    assert x.is_nonzero is None
# nonnegative=False implies nonzero (x cannot be 0) but not a definite sign.
def test_symbol_falsenonnegative():
    x = Symbol('x', nonnegative=False)
    assert x.is_positive is False
    assert x.is_nonpositive is None
    assert x.is_negative is None
    assert x.is_nonnegative is False
    assert x.is_zero is False
    assert x.is_nonzero is True
# Expected deductions for the negated case; the engine does not make them
# yet, hence the XFAIL marker and the inline "currently returns None" notes.
@XFAIL
def test_neg_symbol_falsenonnegative():
    x = -Symbol('x', nonnegative=False)
    assert x.is_positive is None
    assert x.is_nonpositive is False # this currently returns None
    assert x.is_negative is False # this currently returns None
    assert x.is_nonnegative is None
    assert x.is_zero is False # this currently returns None
    assert x.is_nonzero is True # this currently returns None
# real=True plus nonnegative=False pins the symbol as strictly negative.
def test_symbol_falsenonnegative_real():
    x = Symbol('x', nonnegative=False, real=True)
    assert x.is_positive is False
    assert x.is_nonpositive is True
    assert x.is_negative is True
    assert x.is_nonnegative is False
    assert x.is_zero is False
    assert x.is_nonzero is True
# ... and its negation as strictly positive.
def test_neg_symbol_falsenonnegative_real():
    x = -Symbol('x', nonnegative=False, real=True)
    assert x.is_positive is True
    assert x.is_nonpositive is False
    assert x.is_negative is False
    assert x.is_nonnegative is True
    assert x.is_zero is False
    assert x.is_nonzero is True
# Primality of explicit Integers: only positive integers > 1 with no proper
# divisors qualify; negatives, 0 and 1 are all non-prime.
def test_prime():
    assert S(-1).is_prime is False
    assert S(-2).is_prime is False
    assert S(-4).is_prime is False
    assert S(0).is_prime is False
    assert S(1).is_prime is False
    assert S(2).is_prime is True
    assert S(17).is_prime is True
    assert S(4).is_prime is False
# Compositeness: positive non-prime integers greater than 1 only.
def test_composite():
    assert S(-1).is_composite is False
    assert S(-2).is_composite is False
    assert S(-4).is_composite is False
    assert S(0).is_composite is False
    assert S(2).is_composite is False
    assert S(17).is_composite is False
    assert S(4).is_composite is True
    x = Dummy(integer=True, positive=True, prime=False)
    assert x.is_composite is None # x could be 1
    assert (x + 1).is_composite is None
# prime=True implies positive integer; prime=False implies nothing else.
def test_prime_symbol():
    x = Symbol('x', prime=True)
    assert x.is_prime is True
    assert x.is_integer is True
    assert x.is_positive is True
    assert x.is_negative is False
    assert x.is_nonpositive is False
    assert x.is_nonnegative is True
    x = Symbol('x', prime=False)
    assert x.is_prime is False
    assert x.is_integer is None
    assert x.is_positive is None
    assert x.is_negative is None
    assert x.is_nonpositive is None
    assert x.is_nonnegative is None
# A noncommutative symbol cannot lie in any commutative number field.
def test_symbol_noncommutative():
    x = Symbol('x', commutative=True)
    assert x.is_complex is None
    x = Symbol('x', commutative=False)
    assert x.is_integer is False
    assert x.is_rational is False
    assert x.is_algebraic is False
    assert x.is_irrational is False
    assert x.is_real is False
    assert x.is_complex is False
# Grab-bag of assumption-propagation checks: integer/parity combinations,
# read-only assumption attributes, and algebraic vs transcendental exclusion.
def test_other_symbol():
    x = Symbol('x', integer=True)
    assert x.is_integer is True
    assert x.is_real is True
    x = Symbol('x', integer=True, nonnegative=True)
    assert x.is_integer is True
    assert x.is_nonnegative is True
    assert x.is_negative is False
    assert x.is_positive is None
    x = Symbol('x', integer=True, nonpositive=True)
    assert x.is_integer is True
    assert x.is_nonpositive is True
    assert x.is_positive is False
    assert x.is_negative is None
    x = Symbol('x', odd=True)
    assert x.is_odd is True
    assert x.is_even is False
    assert x.is_integer is True
    # odd=False does NOT imply even or even integer: x may be non-integer.
    x = Symbol('x', odd=False)
    assert x.is_odd is False
    assert x.is_even is None
    assert x.is_integer is None
    x = Symbol('x', even=True)
    assert x.is_even is True
    assert x.is_odd is False
    assert x.is_integer is True
    x = Symbol('x', even=False)
    assert x.is_even is False
    assert x.is_odd is None
    assert x.is_integer is None
    x = Symbol('x', integer=True, nonnegative=True)
    assert x.is_integer is True
    assert x.is_nonnegative is True
    x = Symbol('x', integer=True, nonpositive=True)
    assert x.is_integer is True
    assert x.is_nonpositive is True
    # assumption properties are computed, never assignable
    with raises(AttributeError):
        x.is_real = False
    x = Symbol('x', algebraic=True)
    assert x.is_transcendental is False
    x = Symbol('x', transcendental=True)
    assert x.is_algebraic is False
    assert x.is_rational is False
    assert x.is_integer is False
# Querying an assumption (is_comparable) must not change an expression's hash
# relative to a structurally equal expression built in a different order.
def test_issue_3825():
    """catch: hash instability"""
    x = Symbol("x")
    y = Symbol("y")
    a1 = x + y
    a2 = y + x
    a2.is_comparable
    h1 = hash(a1)
    h2 = hash(a2)
    assert h1 == h2
# Realness of a cube-root expression is allowed to be True or undecided.
def test_issue_4822():
    z = (-1)**Rational(1, 3)*(1 - I*sqrt(3))
    assert z.is_real in [True, None]
# even=True and integer=True+odd=False describe the same symbol, so both the
# equality and the hash must agree.
def test_hash_vs_typeinfo():
    """seemingly different typeinfo, but in fact equal"""
    # the following two are semantically equal
    x1 = Symbol('x', even=True)
    x2 = Symbol('x', integer=True, odd=False)
    assert hash(x1) == hash(x2)
    assert x1 == x2
# Conversely, genuinely different assumptions must make unequal symbols.
def test_hash_vs_typeinfo_2():
    """different typeinfo should mean !eq"""
    # the following two are semantically different
    x = Symbol('x')
    x1 = Symbol('x', even=True)
    assert x != x1
    assert hash(x) != hash(x1) # This might fail with very low probability
# Deducing an assumption (which internally evaluates the expression) must
# leave both the hash and equality with an expanded copy intact.
def test_hash_vs_eq():
    """catch: different hash for equal objects"""
    a = 1 + S.Pi # important: do not fold it into a Number instance
    ha = hash(a) # it should be Add/Mul/... to trigger the bug
    a.is_positive # this uses .evalf() and deduces it is positive
    assert a.is_positive is True
    # be sure that hash stayed the same
    assert ha == hash(a)
    # now b should be the same expression
    b = a.expand(trig=True)
    hb = hash(b)
    assert a == b
    assert ha == hb
# Sign deduction for sums that mix infinite, finite and unknown terms.
def test_Add_is_pos_neg():
    # these cover lines not covered by the rest of tests in core
    n = Symbol('n', negative=True, infinite=True)
    nn = Symbol('n', nonnegative=True, infinite=True)
    np = Symbol('n', nonpositive=True, infinite=True)
    p = Symbol('p', positive=True, infinite=True)
    r = Dummy(real=True, finite=False)
    x = Symbol('x')
    xf = Symbol('xb', finite=True)
    assert (n + p).is_positive is None
    assert (n + x).is_positive is None
    assert (p + x).is_positive is None
    assert (n + p).is_negative is None
    assert (n + x).is_negative is None
    assert (p + x).is_negative is None
    # an infinite term dominates any finite one
    assert (n + xf).is_positive is False
    assert (p + xf).is_positive is True
    assert (n + xf).is_negative is True
    assert (p + xf).is_negative is False
    assert (x - S.Infinity).is_negative is None # issue 7798
    # issue 8046, 16.2
    assert (p + nn).is_positive
    assert (n + np).is_negative
    assert (p + r).is_positive is None
# A sum of imaginary terms with compatible coefficients stays imaginary.
def test_Add_is_imaginary():
    nn = Dummy(nonnegative=True)
    assert (I*nn + I).is_imaginary # issue 8046, 17
def test_Add_is_algebraic():
    """Closure of the algebraic numbers under addition, and its limits."""
    a = Symbol('a', algebraic=True)
    # Was ``Symbol('a', ...)``: that made ``b`` the very same symbol as ``a``,
    # so ``a + b`` collapsed to ``2*a`` and the sum of two *distinct*
    # algebraic symbols was never exercised.
    b = Symbol('b', algebraic=True)
    na = Symbol('na', algebraic=False)
    nb = Symbol('nb', algebraic=False)
    x = Symbol('x')
    # algebraic + algebraic is algebraic (the algebraic numbers form a field)
    assert (a + b).is_algebraic
    # non-algebraic + non-algebraic can cancel, so the result is unknown
    assert (na + nb).is_algebraic is None
    # algebraic + transcendental is always transcendental
    assert (a + na).is_algebraic is False
    assert (a + x).is_algebraic is None
    assert (na + x).is_algebraic is None
def test_Mul_is_algebraic():
    """Closure of the algebraic numbers under multiplication, and its limits."""
    a = Symbol('a', algebraic=True)
    # Was ``Symbol('a', ...)`` — same misnaming as in test_Add_is_algebraic,
    # which reduced ``a*b`` to ``a**2``.
    b = Symbol('b', algebraic=True)
    na = Symbol('na', algebraic=False)
    an = Symbol('an', algebraic=True, nonzero=True)
    nb = Symbol('nb', algebraic=False)
    x = Symbol('x')
    assert (a*b).is_algebraic
    assert (na*nb).is_algebraic is None
    # a could be 0, and 0 times anything is algebraic, hence None
    assert (a*na).is_algebraic is None
    # a nonzero algebraic times a transcendental is transcendental
    assert (an*na).is_algebraic is False
    assert (a*x).is_algebraic is None
    assert (na*x).is_algebraic is None
# Algebraicity of powers, including the Gelfond-Schneider theorem cases.
def test_Pow_is_algebraic():
    e = Symbol('e', algebraic=True)
    assert Pow(1, e, evaluate=False).is_algebraic
    assert Pow(0, e, evaluate=False).is_algebraic
    a = Symbol('a', algebraic=True)
    na = Symbol('na', algebraic=False)
    ia = Symbol('ia', algebraic=True, irrational=True)
    ib = Symbol('ib', algebraic=True, irrational=True)
    r = Symbol('r', rational=True)
    x = Symbol('x')
    assert (a**r).is_algebraic
    assert (a**x).is_algebraic is None
    assert (na**r).is_algebraic is False
    assert (ia**r).is_algebraic
    # Gelfond-Schneider: algebraic**(algebraic irrational) is transcendental
    assert (ia**ib).is_algebraic is False
    assert (a**e).is_algebraic is None
    # Gelfond-Schneider constant:
    assert Pow(2, sqrt(2), evaluate=False).is_algebraic is False
    assert Pow(S.GoldenRatio, sqrt(3), evaluate=False).is_algebraic is False
# Finiteness of products mixing finite, infinite, zero and unknown factors;
# 0*oo is NaN, hence the comparisons against S.NaN.is_infinite.
def test_Mul_is_infinite():
    x = Symbol('x')
    f = Symbol('f', finite=True)
    i = Symbol('i', infinite=True)
    z = Dummy(zero=True)
    nzf = Dummy(finite=True, zero=False)
    from sympy import Mul
    assert (x*f).is_finite is None
    assert (x*i).is_finite is None
    assert (f*i).is_finite is False
    assert (x*f*i).is_finite is None
    assert (z*i).is_finite is False
    assert (nzf*i).is_finite is False
    assert (z*f).is_finite is True
    assert Mul(0, f, evaluate=False).is_finite is True
    assert Mul(0, i, evaluate=False).is_finite is False
    assert (x*f).is_infinite is None
    assert (x*i).is_infinite is None
    assert (f*i).is_infinite is None
    assert (x*f*i).is_infinite is None
    assert (z*i).is_infinite is S.NaN.is_infinite
    assert (nzf*i).is_infinite is True
    assert (z*f).is_infinite is False
    assert Mul(0, f, evaluate=False).is_infinite is False
    assert Mul(0, i, evaluate=False).is_infinite is S.NaN.is_infinite
# Rationality deductions for surds, exp/log, powers and trig functions.
def test_special_is_rational():
    i = Symbol('i', integer=True)
    i2 = Symbol('i2', integer=True)
    ni = Symbol('ni', integer=True, nonzero=True)
    r = Symbol('r', rational=True)
    rn = Symbol('r', rational=True, nonzero=True)
    nr = Symbol('nr', irrational=True)
    x = Symbol('x')
    assert sqrt(3).is_rational is False
    assert (3 + sqrt(3)).is_rational is False
    assert (3*sqrt(3)).is_rational is False
    assert exp(3).is_rational is False
    assert exp(ni).is_rational is False
    assert exp(rn).is_rational is False
    assert exp(x).is_rational is None
    assert exp(log(3), evaluate=False).is_rational is True
    assert log(exp(3), evaluate=False).is_rational is True
    assert log(3).is_rational is False
    assert log(ni + 1).is_rational is False
    assert log(rn + 1).is_rational is False
    assert log(x).is_rational is None
    assert (sqrt(3) + sqrt(5)).is_rational is None
    assert (sqrt(3) + S.Pi).is_rational is False
    assert (x**i).is_rational is None
    assert (i**i).is_rational is True
    assert (i**i2).is_rational is None
    assert (r**i).is_rational is None
    assert (r**r).is_rational is None
    assert (r**x).is_rational is None
    assert (nr**i).is_rational is None # issue 8598
    assert (nr**Symbol('z', zero=True)).is_rational
    assert sin(1).is_rational is False
    assert sin(ni).is_rational is False
    assert sin(rn).is_rational is False
    assert sin(x).is_rational is None
    assert asin(r).is_rational is False
    assert sin(asin(3), evaluate=False).is_rational is True
@XFAIL
def test_issue_6275():
    x = Symbol('x')
    # x*0 automatically evaluates to 0 even though, were x infinite, the
    # product would really be nan. This parallels x/x => 1 even though the
    # quotient is nan at x = 0. The test records the desired behavior.
    assert isinstance(x*0, type(0*S.Infinity))
    if 0*S.Infinity is S.NaN:
        b = Symbol('b', finite=None)
        assert (b*0).is_zero is None
# Assumption kwargs are coerced to bool (1 -> True, 0 -> False); None drops
# the assumption, except commutative=None which must be rejected.
def test_sanitize_assumptions():
    # issue 6666
    for cls in (Symbol, Dummy, Wild):
        x = cls('x', real=1, positive=0)
        assert x.is_real is True
        assert x.is_positive is False
        assert cls('', real=True, positive=None).is_positive is None
        raises(ValueError, lambda: cls('', commutative=None))
    raises(ValueError, lambda: Symbol._sanitize(dict(commutative=None)))
# An expression that is exactly zero but not syntactically 0: comparisons
# simplify to false while .equals(0) proves the identity.
def test_special_assumptions():
    e = -3 - sqrt(5) + (-sqrt(10)/2 - sqrt(2)/2)**2
    assert simplify(e < 0) is S.false
    assert simplify(e > 0) is S.false
    assert (e == 0) is False # it's not a literal 0
    assert e.equals(0) is True
# real=True contradicts commutative=False and must be rejected outright.
def test_inconsistent():
    # cf. issues 5795 and 5545
    raises(InconsistentAssumptions, lambda: Symbol('x', real=True,
        commutative=False))
# |(-1)**(imaginary exponent)| collapses to a real value.
def test_issue_6631():
    assert ((-1)**(I)).is_real is True
    assert ((-1)**(I*2)).is_real is True
    assert ((-1)**(I/2)).is_real is True
    assert ((-1)**(I*S.Pi)).is_real is True
    assert (I**(I + 2)).is_real is True
def test_issue_2730():
    assert (1/(1 + I)).is_real is False
# is_imaginary across sums of imaginary terms with various sign assumptions.
def test_issue_4149():
    assert (3 + I).is_complex
    assert (3 + I).is_imaginary is False
    assert (3*I + S.Pi*I).is_imaginary
    # as Zero.is_imaginary is False, see issue 7649
    y = Symbol('y', real=True)
    assert (3*I + S.Pi*I + y*I).is_imaginary is None
    p = Symbol('p', positive=True)
    assert (3*I + S.Pi*I + p*I).is_imaginary
    n = Symbol('n', negative=True)
    assert (-3*I - S.Pi*I + n*I).is_imaginary
    i = Symbol('i', imaginary=True)
    assert ([(i**a).is_imaginary for a in range(4)] ==
            [False, True, False, True])
    # tests from the PR #7887:
    e = S("-sqrt(3)*I/2 + 0.866025403784439*I")
    assert e.is_real is False
    assert e.is_imaginary
# The square root of a negative symbol is imaginary.
def test_issue_2920():
    n = Symbol('n', negative=True)
    assert sqrt(n).is_imaginary
def test_issue_7899():
    x = Symbol('x', real=True)
    assert (I*x).is_real is None
    assert ((x - I)*(x - 1)).is_zero is None
    assert ((x - I)*(x - 1)).is_real is None
# Desired: an integer minus a noninteger can never be zero (not yet deduced).
@XFAIL
def test_issue_7993():
    x = Dummy(integer=True)
    y = Dummy(noninteger=True)
    assert (x - y).is_zero is False
# zero=True contradicts being infinite/non-finite.
def test_issue_8075():
    raises(InconsistentAssumptions, lambda: Dummy(zero=True, finite=False))
    raises(InconsistentAssumptions, lambda: Dummy(zero=True, infinite=True))
def test_issue_8642():
    x = Symbol('x', real=True, integer=False)
    assert (x*2).is_integer is None
# Sign deductions for shifted positive/nonnegative/prime/even dummies.
def test_issues_8632_8633_8638_8675_8992():
    p = Dummy(integer=True, positive=True)
    nn = Dummy(integer=True, nonnegative=True)
    assert (p - S.Half).is_positive
    assert (p - 1).is_nonnegative
    assert (nn + 1).is_positive
    assert (-p + 1).is_nonpositive
    assert (-nn - 1).is_negative
    prime = Dummy(prime=True)
    assert (prime - 2).is_nonnegative
    assert (prime - 3).is_nonnegative is None
    even = Dummy(positive=True, even=True)
    assert (even - 2).is_nonnegative
    p = Dummy(positive=True)
    assert (p/(p + 1) - 1).is_negative
    assert ((p + 2)**3 - S.Half).is_positive
    n = Dummy(negative=True)
    assert (n - 3).is_nonpositive
# factorial(n) >= 1 for nonnegative integer n resolves to a boolean.
def test_issue_9115():
    n = Dummy('n', integer=True, nonnegative=True)
    assert (factorial(n) >= 1) == True
    assert (factorial(n) < 1) == False
# Division by a symbol known to be zero, and 0 times a non-finite symbol,
# both give NaN.
def test_issue_9165():
    z = Symbol('z', zero=True)
    f = Symbol('f', finite=False)
    assert 0/z == S.NaN
    assert 0*(1/z) == S.NaN
    assert 0*f == S.NaN
| sahilshekhawat/sympy | sympy/core/tests/test_assumptions.py | Python | bsd-3-clause | 27,164 |
###############################################################################
##
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the University of Utah nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
# Package metadata read by the VisTrails package manager at load time.
name = "R"
# Canonical, reverse-DNS package identifier.
identifier = "org.vistrails.vistrails.rpy"
version = "0.1.2"
# Identifiers this package was previously published under, kept so old
# .vt files referencing them still resolve to this package.
old_identifiers = ["edu.utah.sci.vistrails.rpy"]
| Nikea/VisTrails | vistrails/packages/rpy/__init__.py | Python | bsd-3-clause | 2,002 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-01-15 18:28
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 1.9 makemigrations: redefines the
    # ``Site.address1`` field of the ``ims`` app.
    dependencies = [
        ('ims', '0010_auto_20151227_1147'),
    ]
    operations = [
        migrations.AlterField(
            model_name='site',
            name='address1',
            # The ``b''`` byte-string literals were emitted under Python 2;
            # they are left untouched so the recorded migration state matches
            # what Django originally serialized.
            field=models.CharField(blank=True, default=b'', help_text=b'First street address of this site', max_length=50),
        ),
    ]
| grovesr/django-ims | ims/migrations/0011_auto_20160115_1328.py | Python | bsd-3-clause | 517 |
'''
This file holds globally useful utility classes and functions, i.e., classes and
functions that are generic enough not to be specific to one app.
'''
import logging
import os
import re
import sys
from datetime import tzinfo, timedelta
from django.conf import settings
# Setup logging support.
# NOTE(review): ``LOGGER.warn`` below is the deprecated alias of ``warning``;
# kept as-is to preserve the module byte-for-byte.
LOGGER = logging.getLogger(__name__)
LOGGER.addHandler(settings.LOG_HANDLER)
# try to import the `fcntl` module for locking support through the `Lock` class
# below
# ``fcntl`` exists only on Unix-like systems; when the import fails, the
# ``Lock`` class degrades to a silent no-op (see below).
try:
    import fcntl
except ImportError:
    LOGGER.warn("Locking support is not available for your (non-Unix?) system. "
        "Using multiple processes might not be safe.")
def get_class_by_name(module_name, class_name):
    '''
    Look up a class by name on an already-imported module.

    Given a module name (e.g., 'metashare.resedit.admin') and a class name
    (e.g., 'ContactSMI'), return the corresponding type object. Raises
    AttributeError when the module defines no such attribute.
    '''
    module = sys.modules[module_name]
    try:
        return getattr(module, class_name)
    except AttributeError:
        raise AttributeError(
            "Module '{0}' has no class '{1}'".format(module_name, class_name))
def verify_subclass(subclass, superclass):
    '''
    Ensure that `subclass` really derives from `superclass`.

    Returns nothing on success; raises TypeError otherwise.
    '''
    if issubclass(subclass, superclass):
        return
    raise TypeError('class {0} is not a subclass of class {1}'.format(subclass, superclass))
def prettify_camel_case_string(cc_str):
    '''
    Turn a camelCase string into a human-readable title.

    For example, "speechAnnotation" becomes "Speech Annotation". Only ASCII
    letter boundaries are recognized; underscores become spaces and the
    literal substring "AtA" is special-cased as "At a". Strings of length
    0 or 1 are returned unchanged.
    '''
    if len(cc_str) <= 1:
        return cc_str
    # normalize separators and the historical "AtA" special case
    text = cc_str.replace('_', ' ').replace('AtA', 'At a')
    # insert a space before each lowerUpper camel-case boundary
    text = re.sub(r'(..)(?=[A-Z][a-z])', r'\1 ', text)
    # capitalize the first letter of every token
    words = [token[0].upper() + token[1:] for token in text.split()]
    return ' '.join(words)
def create_breadcrumb_template_params(model, action):
    '''
    Build the context dictionary used by breadcrumb templates.

    Reads ``app_label`` and ``verbose_name`` from the model's ``_meta``
    options and pairs them with the given action string.
    '''
    meta = model._meta
    return {
        'app_label': meta.app_label,
        'verbose_name': meta.verbose_name,
        'action': action,
    }
class Lock():
    """
    Each instance of this class can be used to acquire an exclusive, system-wide
    (multi-process) lock on a particular name.
    This class will only work on Unix systems viz. systems that provide the
    `fcntl` module. On other systems the class will silently do nothing.
    """
    def __init__(self, lock_name):
        """
        Create a `Lock` object which can create an exclusive lock on the given
        name.
        """
        # The lock file lives in settings.LOCK_DIR; opening in 'w' mode
        # creates it if needed. When `fcntl` failed to import at module load,
        # `handle` stays None and acquire/release become no-ops.
        if 'fcntl' in sys.modules:
            self.handle = open(os.path.join(settings.LOCK_DIR, lock_name), 'w')
        else:
            self.handle = None
    def acquire(self):
        """
        Acquire a lock on the name for which this `Lock` was created.
        """
        # flock() takes an advisory lock: it only guards against other
        # cooperating processes that flock() the same file. This call blocks
        # until the exclusive lock is granted.
        if self.handle:
            fcntl.flock(self.handle, fcntl.LOCK_EX)
    def release(self):
        """
        Release any lock on the name for which this `Lock` was created.
        """
        if self.handle:
            fcntl.flock(self.handle, fcntl.LOCK_UN)
    def __del__(self):
        # Closing the handle also drops any lock this process still holds on
        # the file.
        if self.handle:
            self.handle.close()
class SimpleTimezone(tzinfo):
    """
    A fixed-offset timezone whose name and DST adjustment are unknown.

    `offset` is the UTC offset in minutes; tzname() and dst() deliberately
    return None because that information is not available.
    """
    def __init__(self, offset):
        self._offset = timedelta(minutes=offset)

    def utcoffset(self, dt):
        return self._offset

    def tzname(self, dt):
        return None

    def dst(self, dt):
        return None
# -*- coding: utf-8 -*-
"""
Werkzeug
========
Werkzeug started as simple collection of various utilities for WSGI
applications and has become one of the most advanced WSGI utility
modules. It includes a powerful debugger, full featured request and
response objects, HTTP utilities to handle entity tags, cache control
headers, HTTP dates, cookie handling, file uploads, a powerful URL
routing system and a bunch of community contributed addon modules.
Werkzeug is unicode aware and doesn't enforce a specific template
engine, database adapter or anything else. It doesn't even enforce
a specific way of handling requests and leaves all that up to the
developer. It's most useful for end user applications which should work
on as many server environments as possible (such as blogs, wikis,
bulletin boards, etc.).
Details and example applications are available on the
`Werkzeug website <http://werkzeug.pocoo.org/>`_.
Features
--------
- unicode awareness
- request and response objects
- various utility functions for dealing with HTTP headers such as
`Accept` and `Cache-Control` headers.
- thread local objects with proper cleanup at request end
- an interactive debugger
- A simple WSGI server with support for threading and forking
with an automatic reloader.
- a flexible URL routing system with REST support.
- fully WSGI compatible
Development Version
-------------------
The Werkzeug development version can be installed by cloning the git
repository from `github`_::
git clone git@github.com:mitsuhiko/werkzeug.git
.. _github: http://github.com/mitsuhiko/werkzeug
"""
# Prefer setuptools; fall back to plain distutils when it is unavailable.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
setup(
    name='Werkzeug',
    version='0.10-dev',
    url='http://werkzeug.pocoo.org/',
    license='BSD',
    author='Armin Ronacher',
    author_email='armin.ronacher@active-4.com',
    description='The Swiss Army knife of Python web development',
    # Reuse the module docstring above as the long description shown on PyPI.
    long_description=__doc__,
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ],
    packages=['werkzeug', 'werkzeug.debug', 'werkzeug.contrib',
              'werkzeug.testsuite', 'werkzeug.testsuite.contrib'],
    include_package_data=True,
    # ``python setup.py test`` runs the bundled test suite.
    test_suite='werkzeug.testsuite.suite',
    zip_safe=False,
    platforms='any'
)
| 0x19/werkzeug | setup.py | Python | bsd-3-clause | 2,732 |
from django.db import models
from .mixins import FileMixin
class BaseCategory(models.Model):
    # Abstract base for the category models below: a unique title, listed
    # alphabetically.
    title = models.CharField(max_length=100, unique=True)
    class Meta:
        abstract = True
        ordering = ('title',)
    def __str__(self):
        return self.title
class FileCategory(BaseCategory):
    class Meta(BaseCategory.Meta):
        verbose_name_plural = 'file categories'
class File(FileMixin, models.Model):
    # NOTE(review): ForeignKey without on_delete implies Django < 2.0, where
    # it defaulted to CASCADE — confirm the project's Django version before
    # adding the argument.
    category = models.ForeignKey(FileCategory)
    title = models.CharField(max_length=100, db_index=True)
    file = models.FileField(upload_to='assets/file')
    # populated automatically; presumably FileMixin fills it on save — verify
    # in .mixins
    file_size = models.PositiveIntegerField(default=0, editable=False)
    created_at = models.DateTimeField(auto_now_add=True)
    modified_at = models.DateTimeField(auto_now=True)
class ImageCategory(BaseCategory):
    class Meta(BaseCategory.Meta):
        verbose_name_plural = 'image categories'
class Image(FileMixin, models.Model):
    # Unlike File.category, an image's category is optional.
    category = models.ForeignKey(ImageCategory, blank=True, null=True)
    title = models.CharField(max_length=100, db_index=True)
    # height_field/width_field let Django cache the image dimensions in the
    # two columns below instead of re-reading the file.
    file = models.ImageField(
        'Image', upload_to='assets/image', height_field='image_height', width_field='image_width'
    )
    image_height = models.PositiveIntegerField(editable=False)
    image_width = models.PositiveIntegerField(editable=False)
    file_size = models.PositiveIntegerField(default=0, editable=False)
    created_at = models.DateTimeField(auto_now_add=True)
    modified_at = models.DateTimeField(auto_now=True)
| developersociety/django-glitter | glitter/assets/models.py | Python | bsd-3-clause | 1,508 |
# -*- coding: utf-8 -*-
#
# django-notes documentation build configuration file, created by
# sphinx-quickstart on Wed May 13 10:34:42 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.doctest', 'sphinx.ext.todo', 'sphinx.ext.coverage']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'django-notes'
copyright = u'2010, Roman Dolgiy'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# Pull the release string straight from the source tree so the docs can be
# built without installing django-notes first. (A leftover debugging import,
# ``from pdb import set_trace``, was removed here: it was never used and a
# debugger import has no place in a docs build configuration.)
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from django_notes import __version__ as release
version = '.'.join(release.split('.')[:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'django-notes-doc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'django-notes.tex', u'django-notes Documentation',
u'Roman Dolgiy', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
| BraveToasterStudio/django-notes | docs/conf.py | Python | bsd-3-clause | 6,470 |
import copy
from types import GeneratorType
class MergeDict(object):
    """
    A simple class for creating new "virtual" dictionaries that actually look
    up values in more than one dictionary, passed in the constructor.

    If a key appears in more than one of the given dictionaries, only the
    first occurrence will be used.
    """
    def __init__(self, *dicts):
        self.dicts = dicts

    def __getitem__(self, key):
        for dict_ in self.dicts:
            try:
                return dict_[key]
            except KeyError:
                pass
        # Include the missing key in the exception; the original raised a
        # bare ``KeyError``, which made lookup failures hard to diagnose.
        raise KeyError(key)

    def __copy__(self):
        # Shallow: the member dictionaries are shared with the copy.
        return self.__class__(*self.dicts)

    def get(self, key, default=None):
        """Return self[key], or *default* when no member dict has the key."""
        try:
            return self[key]
        except KeyError:
            return default

    def getlist(self, key):
        """
        Return the value list for *key* from the first member that contains it
        (members are expected to be MultiValueDict-like), or [] if none do.
        """
        for dict_ in self.dicts:
            # Test membership on the mapping itself; ``key in dict_.keys()``
            # built an intermediate key list on Python 2 for no benefit.
            if key in dict_:
                return dict_.getlist(key)
        return []

    def iteritems(self):
        """Yield (key, value) pairs; the first occurrence of a key wins."""
        seen = set()
        for dict_ in self.dicts:
            # ``items()`` exists on both Python 2 and 3 mappings, whereas the
            # original ``dict_.iteritems()`` call broke whenever a member was
            # a plain Python 3 dict. The yielded pairs are identical.
            for item in dict_.items():
                k, v = item
                if k in seen:
                    continue
                seen.add(k)
                yield item

    def iterkeys(self):
        for k, v in self.iteritems():
            yield k

    def itervalues(self):
        for k, v in self.iteritems():
            yield v

    def items(self):
        return list(self.iteritems())

    def keys(self):
        return list(self.iterkeys())

    def values(self):
        return list(self.itervalues())

    def has_key(self, key):
        for dict_ in self.dicts:
            if key in dict_:
                return True
        return False

    __contains__ = has_key

    __iter__ = iterkeys

    def copy(self):
        """Returns a copy of this object."""
        return self.__copy__()

    def __str__(self):
        '''
        Returns something like

            "{'key1': 'val1', 'key2': 'val2', 'key3': 'val3'}"

        instead of the generic "<object meta-data>" inherited from object.
        '''
        return str(dict(self.items()))

    def __repr__(self):
        '''
        Returns something like

            MergeDict({'key1': 'val1', 'key2': 'val2'}, {'key3': 'val3'})

        instead of generic "<object meta-data>" inherited from object.
        '''
        dictreprs = ', '.join(repr(d) for d in self.dicts)
        return '%s(%s)' % (self.__class__.__name__, dictreprs)
class SortedDict(dict):
    """
    A dictionary that keeps its keys in the order in which they're inserted.
    """
    def __new__(cls, *args, **kwargs):
        # keyOrder is created in __new__ (not only __init__) so it exists
        # even when instances are built without running __init__, e.g. via
        # copy.deepcopy or unpickling.
        instance = super(SortedDict, cls).__new__(cls, *args, **kwargs)
        instance.keyOrder = []
        return instance
    def __init__(self, data=None):
        if data is None:
            data = {}
        elif isinstance(data, GeneratorType):
            # Unfortunately we need to be able to read a generator twice. Once
            # to get the data into self with our super().__init__ call and a
            # second time to setup keyOrder correctly
            data = list(data)
        super(SortedDict, self).__init__(data)
        if isinstance(data, dict):
            # A plain dict carries no meaningful order; just adopt whatever
            # order its keys() yields (a list under Python 2).
            self.keyOrder = data.keys()
        else:
            self.keyOrder = []
            seen = set()
            # Sequence of pairs: keep first-occurrence order, skip duplicates.
            for key, value in data:
                if key not in seen:
                    self.keyOrder.append(key)
                    seen.add(key)
    def __deepcopy__(self, memo):
        # Rebuild from (key, deep-copied value) pairs so the copy preserves
        # the key order.
        return self.__class__([(key, copy.deepcopy(value, memo))
                               for key, value in self.iteritems()])
    def __setitem__(self, key, value):
        # Only record the key the first time it is assigned; reassignment
        # keeps the original position.
        if key not in self:
            self.keyOrder.append(key)
        super(SortedDict, self).__setitem__(key, value)
    def __delitem__(self, key):
        super(SortedDict, self).__delitem__(key)
        self.keyOrder.remove(key)
    def __iter__(self):
        return iter(self.keyOrder)
    def pop(self, k, *args):
        result = super(SortedDict, self).pop(k, *args)
        try:
            self.keyOrder.remove(k)
        except ValueError:
            # Key wasn't in the dictionary in the first place. No problem.
            pass
        return result
    def popitem(self):
        # dict.popitem picks an arbitrary entry; keyOrder is kept in sync
        # with whatever it removed.
        result = super(SortedDict, self).popitem()
        self.keyOrder.remove(result[0])
        return result
    def items(self):
        return zip(self.keyOrder, self.values())
    def iteritems(self):
        for key in self.keyOrder:
            yield key, self[key]
    def keys(self):
        # Return a copy so callers can't mutate our internal ordering.
        return self.keyOrder[:]
    def iterkeys(self):
        return iter(self.keyOrder)
    def values(self):
        return map(self.__getitem__, self.keyOrder)
    def itervalues(self):
        for key in self.keyOrder:
            yield self[key]
    def update(self, dict_):
        # Goes through __setitem__, so new keys are appended in iteration
        # order of dict_.
        for k, v in dict_.iteritems():
            self[k] = v
    def setdefault(self, key, default):
        if key not in self:
            self.keyOrder.append(key)
        return super(SortedDict, self).setdefault(key, default)
    def value_for_index(self, index):
        """Returns the value of the item at the given zero-based index."""
        return self[self.keyOrder[index]]
    def insert(self, index, key, value):
        """Inserts the key, value pair before the item with the given index."""
        if key in self.keyOrder:
            n = self.keyOrder.index(key)
            del self.keyOrder[n]
            # Removing the old position shifted the target left by one if
            # the key used to live before the insertion point.
            if n < index:
                index -= 1
        self.keyOrder.insert(index, key)
        super(SortedDict, self).__setitem__(key, value)
    def copy(self):
        """Returns a copy of this object."""
        # This way of initializing the copy means it works for subclasses, too.
        obj = self.__class__(self)
        obj.keyOrder = self.keyOrder[:]
        return obj
    def __repr__(self):
        """
        Replaces the normal dict.__repr__ with a version that returns the keys
        in their sorted order.
        """
        return '{%s}' % ', '.join(['%r: %r' % (k, v) for k, v in self.items()])
    def clear(self):
        super(SortedDict, self).clear()
        self.keyOrder = []
class MultiValueDictKeyError(KeyError):
    """Raised by MultiValueDict.__getitem__ for a missing key."""
class MultiValueDict(dict):
    """
    A subclass of dictionary customized to handle multiple values for the
    same key.
    >>> d = MultiValueDict({'name': ['Adrian', 'Simon'], 'position': ['Developer']})
    >>> d['name']
    'Simon'
    >>> d.getlist('name')
    ['Adrian', 'Simon']
    >>> d.get('lastname', 'nonexistent')
    'nonexistent'
    >>> d.setlist('lastname', ['Holovaty', 'Willison'])
    This class exists to solve the irritating problem raised by cgi.parse_qs,
    which returns a list for every key, even though most Web forms submit
    single name-value pairs.
    """
    def __init__(self, key_to_list_mapping=()):
        # Every stored value is a *list* of values for that key.
        super(MultiValueDict, self).__init__(key_to_list_mapping)
    def __repr__(self):
        return "<%s: %s>" % (self.__class__.__name__,
                             super(MultiValueDict, self).__repr__())
    def __getitem__(self, key):
        """
        Returns the last data value for this key, or [] if it's an empty list;
        raises KeyError if not found.
        """
        try:
            list_ = super(MultiValueDict, self).__getitem__(key)
        except KeyError:
            raise MultiValueDictKeyError("Key %r not found in %r" % (key, self))
        try:
            return list_[-1]
        except IndexError:
            return []
    def __setitem__(self, key, value):
        # Plain assignment replaces the whole list with a one-element list.
        super(MultiValueDict, self).__setitem__(key, [value])
    def __copy__(self):
        # Shallow-ish copy: the per-key lists are sliced so the copy's
        # lists are independent, though the values themselves are shared.
        return self.__class__([
            (k, v[:])
            for k, v in self.lists()
        ])
    def __deepcopy__(self, memo=None):
        if memo is None:
            memo = {}
        result = self.__class__()
        memo[id(self)] = result
        # Bypass our own __setitem__ so the full value lists are preserved.
        for key, value in dict.items(self):
            dict.__setitem__(result, copy.deepcopy(key, memo),
                             copy.deepcopy(value, memo))
        return result
    def __getstate__(self):
        # Pickle support: stash the full value lists under '_data'.
        obj_dict = self.__dict__.copy()
        obj_dict['_data'] = dict([(k, self.getlist(k)) for k in self])
        return obj_dict
    def __setstate__(self, obj_dict):
        # Pickle support: restore the value lists saved by __getstate__.
        data = obj_dict.pop('_data', {})
        for k, v in data.items():
            self.setlist(k, v)
        self.__dict__.update(obj_dict)
    def get(self, key, default=None):
        """
        Returns the last data value for the passed key. If key doesn't exist
        or value is an empty list, then default is returned.
        """
        try:
            val = self[key]
        except KeyError:
            return default
        if val == []:
            return default
        return val
    def getlist(self, key):
        """
        Returns the list of values for the passed key. If key doesn't exist,
        then an empty list is returned.
        """
        try:
            return super(MultiValueDict, self).__getitem__(key)
        except KeyError:
            return []
    def setlist(self, key, list_):
        # Stores list_ as given (no copy) as the full value list for key.
        super(MultiValueDict, self).__setitem__(key, list_)
    def setdefault(self, key, default=None):
        if key not in self:
            self[key] = default
        return self[key]
    def setlistdefault(self, key, default_list=()):
        # The immutable tuple default is safe here; it is stored as given
        # only when the key is absent.
        if key not in self:
            self.setlist(key, default_list)
        return self.getlist(key)
    def appendlist(self, key, value):
        """Appends an item to the internal list associated with key."""
        # Builds a new list rather than mutating the stored one in place.
        self.setlistdefault(key, [])
        super(MultiValueDict, self).__setitem__(key, self.getlist(key) + [value])
    def items(self):
        """
        Returns a list of (key, value) pairs, where value is the last item in
        the list associated with the key.
        """
        return [(key, self[key]) for key in self.keys()]
    def iteritems(self):
        """
        Yields (key, value) pairs, where value is the last item in the list
        associated with the key.
        """
        for key in self.keys():
            yield (key, self[key])
    def lists(self):
        """Returns a list of (key, list) pairs."""
        return super(MultiValueDict, self).items()
    def iterlists(self):
        """Yields (key, list) pairs."""
        return super(MultiValueDict, self).iteritems()
    def values(self):
        """Returns a list of the last value on every key list."""
        return [self[key] for key in self.keys()]
    def itervalues(self):
        """Yield the last value on every key list."""
        for key in self.iterkeys():
            yield self[key]
    def copy(self):
        """Returns a shallow copy of this object."""
        return copy.copy(self)
    def update(self, *args, **kwargs):
        """
        update() extends rather than replaces existing key lists.
        Also accepts keyword args.
        """
        if len(args) > 1:
            raise TypeError("update expected at most 1 arguments, got %d" % len(args))
        if args:
            other_dict = args[0]
            if isinstance(other_dict, MultiValueDict):
                for key, value_list in other_dict.lists():
                    self.setlistdefault(key, []).extend(value_list)
            else:
                try:
                    for key, value in other_dict.items():
                        self.setlistdefault(key, []).append(value)
                except TypeError:
                    raise ValueError("MultiValueDict.update() takes either a MultiValueDict or dictionary")
        for key, value in kwargs.iteritems():
            self.setlistdefault(key, []).append(value)
class DotExpandedDict(dict):
    """
    A special dictionary constructor that takes a dictionary in which the keys
    may contain dots to specify inner dictionaries. It's confusing, but this
    example should make sense.
    >>> d = DotExpandedDict({'person.1.firstname': ['Simon'], \
            'person.1.lastname': ['Willison'], \
            'person.2.firstname': ['Adrian'], \
            'person.2.lastname': ['Holovaty']})
    >>> d
    {'person': {'1': {'lastname': ['Willison'], 'firstname': ['Simon']}, '2': {'lastname': ['Holovaty'], 'firstname': ['Adrian']}}}
    >>> d['person']
    {'1': {'lastname': ['Willison'], 'firstname': ['Simon']}, '2': {'lastname': ['Holovaty'], 'firstname': ['Adrian']}}
    >>> d['person']['1']
    {'lastname': ['Willison'], 'firstname': ['Simon']}
    # Gotcha: Results are unpredictable if the dots are "uneven":
    >>> DotExpandedDict({'c.1': 2, 'c.2': 3, 'c': 1})
    {'c': 1}
    """
    def __init__(self, key_to_list_mapping):
        for k, v in key_to_list_mapping.items():
            current = self
            bits = k.split('.')
            # Walk (creating as needed) the chain of nested dicts named by
            # all but the last dot-separated component.
            for bit in bits[:-1]:
                current = current.setdefault(bit, {})
            # Now assign value to current position
            try:
                current[bits[-1]] = v
            except TypeError: # Special-case if current isn't a dict.
                # Rebinding the local name leaves ``self`` untouched, so the
                # value is silently dropped -- this is the "uneven dots"
                # gotcha documented above.
                current = {bits[-1]: v}
class ImmutableList(tuple):
    """
    A tuple-like object that raises useful errors when it is asked to mutate.
    Example::
        >>> a = ImmutableList(range(5), warning="You cannot mutate this.")
        >>> a[3] = '4'
        Traceback (most recent call last):
        ...
        AttributeError: You cannot mutate this.
    """
    def __new__(cls, *args, **kwargs):
        # Pull out our own keyword argument before delegating to tuple,
        # which would reject it.
        warning = kwargs.pop('warning', 'ImmutableList object is immutable.')
        instance = tuple.__new__(cls, *args, **kwargs)
        instance.warning = warning
        return instance
    def complain(self, *args, **kwargs):
        # Surface the configured warning: raise it directly when it is an
        # exception instance, otherwise wrap it in an AttributeError.
        if isinstance(self.warning, Exception):
            raise self.warning
        raise AttributeError(self.warning)
    # All list mutation functions complain.
    __delitem__ = complain
    __delslice__ = complain
    __iadd__ = complain
    __imul__ = complain
    __setitem__ = complain
    __setslice__ = complain
    append = complain
    extend = complain
    insert = complain
    pop = complain
    remove = complain
    sort = complain
    reverse = complain
class DictWrapper(dict):
    """
    A dict whose lookups for keys beginning with ``prefix`` strip the prefix
    and pass the stored value through ``func`` before returning it.
    Used by the SQL construction code to ensure that values are correctly
    quoted before being used.
    """
    def __init__(self, data, func, prefix):
        super(DictWrapper, self).__init__(data)
        self.func = func
        self.prefix = prefix
    def __getitem__(self, key):
        """
        Look up ``key`` after removing ``self.prefix`` if present; apply
        ``self.func`` to the value only when the prefix was present.
        """
        transform = key.startswith(self.prefix)
        if transform:
            key = key[len(self.prefix):]
        value = super(DictWrapper, self).__getitem__(key)
        return self.func(value) if transform else value
| skevy/django | django/utils/datastructures.py | Python | bsd-3-clause | 15,444 |
from pyglet.window import key, mouse
from pyglet.libs.darwin.quartzkey import keymap, charmap
from pyglet.libs.darwin.cocoapy import *
# Resolve the AppKit NSTrackingArea class once via the Objective-C bridge;
# PygletView.updateTrackingAreas uses it to install mouse-tracking regions.
NSTrackingArea = ObjCClass('NSTrackingArea')
# Event data helper functions.
def getMouseDelta(nsevent):
    """Return the event's mouse movement as an integer (dx, dy) pair."""
    return int(nsevent.deltaX()), int(nsevent.deltaY())
def getMousePosition(self, nsevent):
    """Convert the event's window location to integer view coordinates.
    Also records the position on the window, which
    BaseWindow.draw_mouse_cursor relies on being up to date.
    """
    point = self.convertPoint_fromView_(nsevent.locationInWindow(), None)
    x, y = int(point.x), int(point.y)
    # Must record mouse position for BaseWindow.draw_mouse_cursor to work.
    self._window._mouse_x = x
    self._window._mouse_y = y
    return x, y
def getModifiers(nsevent):
    """Translate the event's Cocoa modifier flags into pyglet modifiers."""
    # Each Cocoa mask contributes one or more pyglet modifier bits; the
    # option key reports as both MOD_ALT and MOD_OPTION.
    flag_map = (
        (NSAlphaShiftKeyMask, key.MOD_CAPSLOCK),
        (NSShiftKeyMask, key.MOD_SHIFT),
        (NSControlKeyMask, key.MOD_CTRL),
        (NSAlternateKeyMask, key.MOD_ALT | key.MOD_OPTION),
        (NSCommandKeyMask, key.MOD_COMMAND),
        (NSFunctionKeyMask, key.MOD_FUNCTION),
    )
    modifierFlags = nsevent.modifierFlags()
    modifiers = 0
    for mask, modifier in flag_map:
        if modifierFlags & mask:
            modifiers |= modifier
    return modifiers
def getSymbol(nsevent):
    """Map the event's hardware key code to a pyglet key symbol."""
    return keymap[nsevent.keyCode()]
class PygletView_Implementation(object):
    """Objective-C implementation of the 'PygletView' NSView subclass.
    Registers methods on the bridged class that translate Cocoa keyboard
    and mouse events into pyglet window events.
    """
    PygletView = ObjCSubclass('NSView', 'PygletView')
    @PygletView.method(b'@'+NSRectEncoding+PyObjectEncoding)
    def initWithFrame_cocoaWindow_(self, frame, window):
        # The tracking area is used to get mouseEntered, mouseExited, and cursorUpdate
        # events so that we can custom set the mouse cursor within the view.
        self._tracking_area = None
        self = ObjCInstance(send_super(self, 'initWithFrame:', frame, argtypes=[NSRect]))
        if not self:
            return None
        # CocoaWindow object.
        self._window = window
        self.updateTrackingAreas()
        # Create an instance of PygletTextView to handle text events.
        # We must do this because NSOpenGLView doesn't conform to the
        # NSTextInputClient protocol by default, and the insertText: method will
        # not do the right thing with respect to translating key sequences like
        # "Option-e", "e" if the protocol isn't implemented. So the easiest
        # thing to do is to subclass NSTextView which *does* implement the
        # protocol and let it handle text input.
        PygletTextView = ObjCClass('PygletTextView')
        self._textview = PygletTextView.alloc().initWithCocoaWindow_(window)
        # Add text view to the responder chain.
        self.addSubview_(self._textview)
        return self
    @PygletView.method('v')
    def dealloc(self):
        # Drop Python-side references and release owned ObjC objects before
        # handing destruction off to the superclass.
        self._window = None
        #send_message(self.objc_self, 'removeFromSuperviewWithoutNeedingDisplay')
        self._textview.release()
        self._textview = None
        self._tracking_area.release()
        self._tracking_area = None
        send_super(self, 'dealloc')
    @PygletView.method('v')
    def updateTrackingAreas(self):
        # This method is called automatically whenever the tracking areas need to be
        # recreated, for example when window resizes.
        if self._tracking_area:
            self.removeTrackingArea_(self._tracking_area)
            self._tracking_area.release()
            self._tracking_area = None
        tracking_options = NSTrackingMouseEnteredAndExited | NSTrackingActiveInActiveApp | NSTrackingCursorUpdate
        frame = self.frame()
        self._tracking_area = NSTrackingArea.alloc().initWithRect_options_owner_userInfo_(
            frame,                # rect
            tracking_options,     # options
            self,                 # owner
            None)                 # userInfo
        self.addTrackingArea_(self._tracking_area)
    @PygletView.method('B')
    def canBecomeKeyView(self):
        # Report that this view may receive keyboard focus.
        return True
    @PygletView.method('B')
    def isOpaque(self):
        # Declare the view fully opaque.
        return True
    ## Event responders.
    # This method is called whenever the view changes size.
    @PygletView.method(b'v'+NSSizeEncoding)
    def setFrameSize_(self, size):
        send_super(self, 'setFrameSize:', size, argtypes=[NSSize])
        # This method is called when view is first installed as the
        # contentView of window. Don't do anything on first call.
        # This also helps ensure correct window creation event ordering.
        if not self._window.context.canvas:
            return
        width, height = int(size.width), int(size.height)
        self._window.switch_to()
        self._window.context.update_geometry()
        self._window.dispatch_event("on_resize", width, height)
        self._window.dispatch_event("on_expose")
        # Can't get app.event_loop.enter_blocking() working with Cocoa, because
        # when mouse clicks on the window's resize control, Cocoa enters into a
        # mini-event loop that only responds to mouseDragged and mouseUp events.
        # This means that using NSTimer to call idle() won't work. Our kludge
        # is to override NSWindow's nextEventMatchingMask_etc method and call
        # idle() from there.
        if self.inLiveResize():
            from pyglet import app
            if app.event_loop is not None:
                app.event_loop.idle()
    # NOTE(review): the pyglet* prefix suggests these key handlers are
    # forwarded from elsewhere in the Cocoa event plumbing -- confirm.
    @PygletView.method('v@')
    def pygletKeyDown_(self, nsevent):
        symbol = getSymbol(nsevent)
        modifiers = getModifiers(nsevent)
        self._window.dispatch_event('on_key_press', symbol, modifiers)
    @PygletView.method('v@')
    def pygletKeyUp_(self, nsevent):
        symbol = getSymbol(nsevent)
        modifiers = getModifiers(nsevent)
        self._window.dispatch_event('on_key_release', symbol, modifiers)
    @PygletView.method('v@')
    def pygletFlagsChanged_(self, nsevent):
        # Handles on_key_press and on_key_release events for modifier keys.
        # Note that capslock is handled differently than other keys; it acts
        # as a toggle, so on_key_release is only sent when it's turned off.
        # TODO: Move these constants somewhere else.
        # Undocumented left/right modifier masks found by experimentation:
        NSLeftShiftKeyMask = 1 << 1
        NSRightShiftKeyMask = 1 << 2
        NSLeftControlKeyMask = 1 << 0
        NSRightControlKeyMask = 1 << 13
        NSLeftAlternateKeyMask = 1 << 5
        NSRightAlternateKeyMask = 1 << 6
        NSLeftCommandKeyMask = 1 << 3
        NSRightCommandKeyMask = 1 << 4
        maskForKey = { key.LSHIFT : NSLeftShiftKeyMask,
                       key.RSHIFT : NSRightShiftKeyMask,
                       key.LCTRL : NSLeftControlKeyMask,
                       key.RCTRL : NSRightControlKeyMask,
                       key.LOPTION : NSLeftAlternateKeyMask,
                       key.ROPTION : NSRightAlternateKeyMask,
                       key.LCOMMAND : NSLeftCommandKeyMask,
                       key.RCOMMAND : NSRightCommandKeyMask,
                       key.CAPSLOCK : NSAlphaShiftKeyMask,
                       key.FUNCTION : NSFunctionKeyMask }
        symbol = getSymbol(nsevent)
        # Ignore this event if symbol is not a modifier key. We must check this
        # because e.g., we receive a flagsChanged message when using CMD-tab to
        # switch applications, with symbol == "a" when command key is released.
        if symbol not in maskForKey:
            return
        modifiers = getModifiers(nsevent)
        modifierFlags = nsevent.modifierFlags()
        # The key's mask bit tells us whether this transition was a press
        # (bit now set) or a release (bit now clear).
        if symbol and modifierFlags & maskForKey[symbol]:
            self._window.dispatch_event('on_key_press', symbol, modifiers)
        else:
            self._window.dispatch_event('on_key_release', symbol, modifiers)
    # Overriding this method helps prevent system beeps for unhandled events.
    @PygletView.method('B@')
    def performKeyEquivalent_(self, nsevent):
        # Let arrow keys and certain function keys pass through the responder
        # chain so that the textview can handle on_text_motion events.
        modifierFlags = nsevent.modifierFlags()
        if modifierFlags & NSNumericPadKeyMask:
            return False
        if modifierFlags & NSFunctionKeyMask:
            ch = cfstring_to_string(nsevent.charactersIgnoringModifiers())
            if ch in (NSHomeFunctionKey, NSEndFunctionKey,
                      NSPageUpFunctionKey, NSPageDownFunctionKey):
                return False
        # Send the key equivalent to the main menu to perform menu items.
        NSApp = ObjCClass('NSApplication').sharedApplication()
        NSApp.mainMenu().performKeyEquivalent_(nsevent)
        # Indicate that we've handled the event so system won't beep.
        return True
    @PygletView.method('v@')
    def mouseMoved_(self, nsevent):
        if self._window._mouse_ignore_motion:
            self._window._mouse_ignore_motion = False
            return
        # Don't send on_mouse_motion events if we're not inside the content rectangle.
        if not self._window._mouse_in_window:
            return
        x, y = getMousePosition(self, nsevent)
        dx, dy = getMouseDelta(nsevent)
        self._window.dispatch_event('on_mouse_motion', x, y, dx, dy)
    @PygletView.method('v@')
    def scrollWheel_(self, nsevent):
        x, y = getMousePosition(self, nsevent)
        scroll_x, scroll_y = getMouseDelta(nsevent)
        self._window.dispatch_event('on_mouse_scroll', x, y, scroll_x, scroll_y)
    # Mouse button handlers: the left/right/middle variants below all follow
    # the same pattern, differing only in the reported button constant.
    @PygletView.method('v@')
    def mouseDown_(self, nsevent):
        x, y = getMousePosition(self, nsevent)
        buttons = mouse.LEFT
        modifiers = getModifiers(nsevent)
        self._window.dispatch_event('on_mouse_press', x, y, buttons, modifiers)
    @PygletView.method('v@')
    def mouseDragged_(self, nsevent):
        x, y = getMousePosition(self, nsevent)
        dx, dy = getMouseDelta(nsevent)
        buttons = mouse.LEFT
        modifiers = getModifiers(nsevent)
        self._window.dispatch_event('on_mouse_drag', x, y, dx, dy, buttons, modifiers)
    @PygletView.method('v@')
    def mouseUp_(self, nsevent):
        x, y = getMousePosition(self, nsevent)
        buttons = mouse.LEFT
        modifiers = getModifiers(nsevent)
        self._window.dispatch_event('on_mouse_release', x, y, buttons, modifiers)
    @PygletView.method('v@')
    def rightMouseDown_(self, nsevent):
        x, y = getMousePosition(self, nsevent)
        buttons = mouse.RIGHT
        modifiers = getModifiers(nsevent)
        self._window.dispatch_event('on_mouse_press', x, y, buttons, modifiers)
    @PygletView.method('v@')
    def rightMouseDragged_(self, nsevent):
        x, y = getMousePosition(self, nsevent)
        dx, dy = getMouseDelta(nsevent)
        buttons = mouse.RIGHT
        modifiers = getModifiers(nsevent)
        self._window.dispatch_event('on_mouse_drag', x, y, dx, dy, buttons, modifiers)
    @PygletView.method('v@')
    def rightMouseUp_(self, nsevent):
        x, y = getMousePosition(self, nsevent)
        buttons = mouse.RIGHT
        modifiers = getModifiers(nsevent)
        self._window.dispatch_event('on_mouse_release', x, y, buttons, modifiers)
    @PygletView.method('v@')
    def otherMouseDown_(self, nsevent):
        x, y = getMousePosition(self, nsevent)
        buttons = mouse.MIDDLE
        modifiers = getModifiers(nsevent)
        self._window.dispatch_event('on_mouse_press', x, y, buttons, modifiers)
    @PygletView.method('v@')
    def otherMouseDragged_(self, nsevent):
        x, y = getMousePosition(self, nsevent)
        dx, dy = getMouseDelta(nsevent)
        buttons = mouse.MIDDLE
        modifiers = getModifiers(nsevent)
        self._window.dispatch_event('on_mouse_drag', x, y, dx, dy, buttons, modifiers)
    @PygletView.method('v@')
    def otherMouseUp_(self, nsevent):
        x, y = getMousePosition(self, nsevent)
        buttons = mouse.MIDDLE
        modifiers = getModifiers(nsevent)
        self._window.dispatch_event('on_mouse_release', x, y, buttons, modifiers)
    @PygletView.method('v@')
    def mouseEntered_(self, nsevent):
        x, y = getMousePosition(self, nsevent)
        self._window._mouse_in_window = True
        # Don't call self._window.set_mouse_platform_visible() from here.
        # Better to do it from cursorUpdate:
        self._window.dispatch_event('on_mouse_enter', x, y)
    @PygletView.method('v@')
    def mouseExited_(self, nsevent):
        x, y = getMousePosition(self, nsevent)
        self._window._mouse_in_window = False
        if not self._window._is_mouse_exclusive:
            self._window.set_mouse_platform_visible()
        self._window.dispatch_event('on_mouse_leave', x, y)
    @PygletView.method('v@')
    def cursorUpdate_(self, nsevent):
        # Called when mouse cursor enters view. Unlike mouseEntered:,
        # this method will be called if the view appears underneath a
        # motionless mouse cursor, as can happen during window creation,
        # or when switching into fullscreen mode.
        # BUG: If the mouse enters the window via the resize control at the
        # the bottom right corner, the resize control will set the cursor
        # to the default arrow and screw up our cursor tracking.
        self._window._mouse_in_window = True
        if not self._window._is_mouse_exclusive:
            self._window.set_mouse_platform_visible()
# Retrieve the 'PygletView' class registered above so the rest of the Cocoa
# backend can alloc/init PygletView instances.
PygletView = ObjCClass('PygletView')
| mammadori/pyglet | pyglet/window/cocoa/pyglet_view.py | Python | bsd-3-clause | 13,557 |
# Generate "lininteg_ext.i": SWIG %pythonappend directives that record the
# coefficient argument(s) passed to each linear-form-integrator constructor
# on the wrapped instance (as self._coeff).

# Recipes tried in order: (substring of the __init__ signature line,
# Python line to append after the matching constructor). Order matters --
# earlier, more specific patterns must win.
SIGNATURE_RECIPES = (
    ("*args", " self._coeff = args"),
    (", QG", " self._coeff = QG"),
    (", QF)", " self._coeff = QF"),
    (", F)", " self._coeff = F"),
    (", f, s=1.0, ir=None)", " self._coeff = (f, ir)"),
    (", uD_, lambda_, mu_, alpha_, kappa_)", " self._coeff = uD_"),
)


def build_pythonappend(lines):
    """Return the lines of the SWIG interface file generated from the
    source lines of the serial lininteg wrapper.

    Raises AssertionError when a constructor signature matches no recipe
    (an explicit raise, so it is not stripped under ``python -O``).
    """
    out = ["namespace mfem {"]
    cname = None
    for line in lines:
        if line.startswith("class"):
            # Class name: the token after 'class', up to the base-class paren.
            cname = (line.split(' ')[1]).split('(')[0]
        if line.startswith(" def __init__"):
            pp = ""
            for pattern, recipe in SIGNATURE_RECIPES:
                if line.find(pattern) != -1:
                    pp = recipe
                    break
            else:
                # A no-argument constructor takes no coefficient; anything
                # else is a signature we don't know how to handle.
                if line.find("(self)") == -1:
                    print(cname)
                    print(line)
                    raise AssertionError("No recipe for this pattern ")
            if pp != "":
                out.append("%pythonappend " + cname + "::" + cname + " %{")
                out.append(pp)
                out.append("%}")
    out.append("}")
    return out


def main():
    """Read the serial lininteg wrapper and write lininteg_ext.i."""
    # 'with' guarantees the files are closed even if processing fails.
    with open("../_ser/lininteg.py", 'r') as fid:
        out = build_pythonappend(fid.readlines())
    with open("lininteg_ext.i", "w") as fid:
        fid.write("\n".join(out))


if __name__ == '__main__':
    main()
| mfem/PyMFEM | mfem/common/generate_lininteg_ext.py | Python | bsd-3-clause | 1,212 |