repo_name stringlengths 5 100 | ref stringlengths 12 67 | path stringlengths 4 244 | copies stringlengths 1 8 | content stringlengths 0 1.05M ⌀ |
|---|---|---|---|---|
leiyangleon/FSH | refs/heads/master | scripts/write_file_type.py | 1 | # write_file_type.py
# Tracy Whelen, Microwave Remote Sensing Lab, University of Massachusetts
# November 17, 2015
# Yang Lei, Jet Propulsion Labortary, California Institute of Technology
# May 18, 2017
# This script writes the input array (the tree height map or diff_height map) to a file, with the file type depending on input parameters.
# Current output types are: .gif, .json, .kml, .mat, .tif (input without the "." so that ".kml" becomes "kml")
#!/usr/bin/python
from numpy import *
import scipy.io as sio
import json
from osgeo import gdal, osr
import simplekml
from PIL import Image
import os.path
import read_geo_data as rgd
import pdb
# Define write_file_type function
# Input parameters are the array, output type (ex. stand height, diff_height, etc), output filename, output directory, and file type (inputted as a string of the file extension)
def write_file_type(data, outtype, filename, directory, filetype, coords, ref_file=""):
    """Write `data` (a stand-height or diff-height map) into `directory`.

    Parameters:
        data      -- 2D numpy array to write.
        outtype   -- "stand_height" or "diff_height"; selects the filename
                     suffix (_fsh / _diff) and where geolocation is read from.
        filename  -- base filename (suffixes are appended as needed).
        directory -- output directory (trailing separator expected).
        filetype  -- output format: "gif", "json", "kml", "mat" or "tif"
                     (extension given WITHOUT the leading dot).
        coords    -- (north, south, west, east) corner coordinates.
        ref_file  -- reference geo file, used for diff_height products.
    """
    # print "orig filename = " + filename
    # Derive the output basename: collapse a duplicated "_255_255" suffix,
    # keep an already-recognized suffix, otherwise append one from outtype.
    if(filename[-8:] == "_255_255"):
        outfilename = filename[:-4]
    elif((filename[-4:] == "_fsh") or (filename[-5:] == "_diff") or (filename[-4:] == "_255")):
        outfilename = filename
    else:
        if(outtype == "stand_height"):
            outfilename = filename + "_fsh"
        elif(outtype == "diff_height"):
            outfilename = filename + "_diff"
    # print "outfilename = " + outfilename + "\n"
    # Use if/else to determine the desired type of file output
    # Create .gif output
    if(filetype == "gif"):
        # Check if a 0-255 .tif with the same filename already exists, and if not create it.
        if (os.path.isfile(directory + outfilename + "_255.tif") == True):
            gif_img = Image.open(directory + outfilename + "_255.tif")
        else:
            # Set array in a 0-255 range for gif/kml
            # Get dimensions of array and then flatten for use with nonzero()
            (row, col) = data.shape
            data = data.flatten()
            # Get the nonzero indices and min/max
            nz_IND = nonzero(data)
            nz_min = data[nz_IND[0]].min()
            nz_max = data[nz_IND[0]].max()
            # Set the scaled values: nonzero pixels map to 1..256 so that 0
            # stays reserved as the transparent/background value in the gif.
            data255 = data.copy()
            data255[nz_IND[0]] = (data[nz_IND[0]] - nz_min) * (255 / (nz_max - nz_min)) + 1
            # Reshape the array of scaled values
            data255 = reshape(data255, (row, col))
            data = reshape(data, (row, col))
            # Write 0-255 .tif (recursive call into the "tif" branch below)
            write_file_type(data255, outtype, outfilename + "_255", directory, "tif", coords, ref_file)
            gif_img = Image.open(directory + outfilename + "_255.tif")
        # Create the .gif; palette index 0 is rendered transparent
        gif_img.save(directory + outfilename + "_255.gif", "GIF", transparency=0)
    # Create .json output
    elif(filetype == "json"):
        jsonfile = open(directory + outfilename + '.json', 'w')
        json.dump([data.tolist()], jsonfile)
        jsonfile.close()
    # Create .kml output
    elif(filetype == "kml"):
        # Determine the realname based on whether a single image or a pair is processed
        if(filename[3] == "_"):  # pair
            realname = filename[:31]
        else:
            realname = filename[:23]
##        realname = filename
        # Read geo location information in from a text or geotiff file depending on outtype
        if(outtype == "stand_height"):
            (width, lines, north, west, lat_step, long_step) = rgd.read_geo_data(realname + "_geo.txt", directory)
            # NOTE(review): north/west read above are immediately overwritten
            # by the corner coordinates passed in -- confirm the file read is
            # still needed here.
            north = coords[0]
            west = coords[2]
            south = coords[1]
            east = coords[3]
        elif(outtype == "diff_height"):
            (width, lines, north, west, lat_step, long_step) = rgd.read_geo_data(ref_file, directory[:-10])
            south = north + (lat_step * lines)
            east = west + (long_step * width)
##        lat_step = -2.77777777778 * (10**-4)
##        long_step = 2.77777777778 * (10**-4)
        # Check if a .gif with the same filename does not already exist then create it.
        if (os.path.isfile(directory + outfilename + "_255.gif") == False):
            write_file_type(data, outtype, outfilename, directory, "gif", coords, ref_file)
        # Create the .kml ground overlay referencing the gif
        kml = simplekml.Kml()
        arraykml = kml.newgroundoverlay(name=outfilename)
        arraykml.icon.href = directory + outfilename + "_255.gif"
        arraykml.latlonbox.north = north
        arraykml.latlonbox.south = south
        arraykml.latlonbox.east = east
        arraykml.latlonbox.west = west
        kml.save(directory + outfilename + "_255.kml")
    # Create .mat output
    elif(filetype == "mat"):
        sio.savemat(directory + outfilename + '.mat', {'data':data})
    # Create .tif output
    elif(filetype == "tif"):
        # Determine the realname based on whether a single image or a pair is processed
        if(filename[3] == "_"):  # pair
            realname = filename[:31]
        else:
            realname = filename[:23]
##        realname = filename
        # Read geo location information in from a text or geotiff file depending on outtype
        if(outtype == "stand_height"):
            (cols, rows, corner_lat, corner_long, lat_step, long_step) = rgd.read_geo_data(realname + "_geo.txt", directory)
            corner_lat = coords[0]
            corner_long = coords[2]
            lat_step = -2.77777777778 * (10**-4)
            long_step = 2.77777777778 * (10**-4)
        elif(outtype == "diff_height"):
            (cols, rows, corner_lat, corner_long, lat_step, long_step) = rgd.read_geo_data(ref_file, directory[:-10])
            selffile_data = sio.loadmat(directory[:-10] + "output/" + "self.mat")
            image1 = selffile_data['I1']
            # NOTE(review): cols comes from shape[0] and rows from shape[1];
            # numpy shape is (rows, cols), so these look transposed -- confirm
            # against how I1 is stored in self.mat.
            cols = int(image1.shape[0])
            rows = int(image1.shape[1])
            lat_step = -2.77777778 * (10**-4)
            long_step = 2.77777778 * (10**-4)
#        pdb.set_trace()
        # Create the GeoTiff
        # NOTE(review): Create() with no band/type arguments defaults to a
        # single GDT_Byte band, which truncates float height values; the
        # commented call below requests GDT_Float32 -- confirm which is
        # intended (the gif path above expects the _255 tif to be byte-like).
        driver = gdal.GetDriverByName('GTiff')
        outRaster = driver.Create(directory + outfilename + ".tif", cols, rows)
#        outRaster = driver.Create(directory + outfilename + ".tif", cols, rows, 1, gdal.GDT_Float32)
        # GeoTransform = (top-left lon, W-E step, 0, top-left lat, 0, N-S step)
        outRaster.SetGeoTransform([corner_long, long_step, 0, corner_lat, 0, lat_step])
        outband = outRaster.GetRasterBand(1)
        outband.WriteArray(data)
        outRasterSRS = osr.SpatialReference()
        outRasterSRS.ImportFromEPSG(4326)
        outRaster.SetProjection(outRasterSRS.ExportToWkt())
        outband.FlushCache()
    else:
        # Error message
        print "Error: The selected file type is invalid. Please try again and choose a different output format."
        print "You selected %s" % filetype
        print "File types available: .gif, .json, .kml, .mat, .tif -- input without the ., such as kml instead of .kml\n"
|
SohKai/ChronoLogger | refs/heads/master | web/flask/lib/python2.7/site-packages/pip-1.5.4-py2.7.egg/pip/_vendor/requests/packages/charade/universaldetector.py | 200 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
import sys
import codecs
from .latin1prober import Latin1Prober # windows-1252
from .mbcsgroupprober import MBCSGroupProber # multi-byte character sets
from .sbcsgroupprober import SBCSGroupProber # single-byte character sets
from .escprober import EscCharSetProber # ISO-2122, etc.
import re
MINIMUM_THRESHOLD = 0.20
ePureAscii = 0
eEscAscii = 1
eHighbyte = 2


class UniversalDetector:
    """Coordinates BOM sniffing and a group of charset probers over a stream.

    Usage: call feed() with successive byte chunks, then close(); the verdict
    is in ``self.result`` -- {'encoding': name-or-None, 'confidence': float}.
    """

    def __init__(self):
        # Any byte with the high bit set => not pure ASCII.
        self._highBitDetector = re.compile(b'[\x80-\xFF]')
        # ESC or '~{' => escape-sequence encodings (ISO-2022 family, HZ).
        self._escDetector = re.compile(b'(\033|~{)')
        self._mEscCharSetProber = None
        self._mCharSetProbers = []
        self.reset()

    def reset(self):
        """Reset all state so the detector can be reused on a new stream."""
        self.result = {'encoding': None, 'confidence': 0.0}
        self.done = False
        self._mStart = True
        self._mGotData = False
        self._mInputState = ePureAscii
        self._mLastChar = b''
        if self._mEscCharSetProber:
            self._mEscCharSetProber.reset()
        for prober in self._mCharSetProbers:
            prober.reset()

    def feed(self, aBuf):
        """Consume the next chunk of bytes; may set self.result / self.done."""
        if self.done:
            return
        aLen = len(aBuf)
        if not aLen:
            return
        if not self._mGotData:
            # If the data starts with BOM, we know it is UTF.
            # BUGFIX: the original compared against codecs.BOM, which is the
            # 2-byte native-endian UTF-16 BOM; a 3-byte slice can never equal
            # it, so UTF-8 BOMs went undetected.  codecs.BOM_UTF8 is the
            # correct EF BB BF signature.
            if aBuf[:3] == codecs.BOM_UTF8:
                # EF BB BF  UTF-8 with BOM
                self.result = {'encoding': "UTF-8", 'confidence': 1.0}
            # UTF-32 BOMs are checked before UTF-16 because FF FE 00 00
            # starts with the UTF-16 LE BOM FF FE.
            elif aBuf[:4] in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
                # FF FE 00 00  UTF-32, little-endian BOM
                # 00 00 FE FF  UTF-32, big-endian BOM
                self.result = {'encoding': "UTF-32", 'confidence': 1.0}
            elif aBuf[:4] == b'\xFE\xFF\x00\x00':
                # FE FF 00 00  UCS-4, unusual octet order BOM (3412)
                self.result = {
                    'encoding': "X-ISO-10646-UCS-4-3412",
                    'confidence': 1.0
                }
            elif aBuf[:4] == b'\x00\x00\xFF\xFE':
                # 00 00 FF FE  UCS-4, unusual octet order BOM (2143)
                self.result = {
                    'encoding': "X-ISO-10646-UCS-4-2143",
                    'confidence': 1.0
                }
            elif aBuf[:2] == codecs.BOM_LE or aBuf[:2] == codecs.BOM_BE:
                # FF FE  UTF-16, little endian BOM
                # FE FF  UTF-16, big endian BOM
                self.result = {'encoding': "UTF-16", 'confidence': 1.0}
        self._mGotData = True
        if self.result['encoding'] and (self.result['confidence'] > 0.0):
            self.done = True
            return
        # Escalate the input state as soon as high-bit or escape bytes appear.
        if self._mInputState == ePureAscii:
            if self._highBitDetector.search(aBuf):
                self._mInputState = eHighbyte
            elif ((self._mInputState == ePureAscii) and
                    self._escDetector.search(self._mLastChar + aBuf)):
                self._mInputState = eEscAscii
        # Keep the last byte so escape sequences split across chunks match.
        self._mLastChar = aBuf[-1:]
        if self._mInputState == eEscAscii:
            if not self._mEscCharSetProber:
                self._mEscCharSetProber = EscCharSetProber()
            if self._mEscCharSetProber.feed(aBuf) == constants.eFoundIt:
                self.result = {
                    'encoding': self._mEscCharSetProber.get_charset_name(),
                    'confidence': self._mEscCharSetProber.get_confidence()
                }
                self.done = True
        elif self._mInputState == eHighbyte:
            if not self._mCharSetProbers:
                self._mCharSetProbers = [MBCSGroupProber(), SBCSGroupProber(),
                                         Latin1Prober()]
            for prober in self._mCharSetProbers:
                if prober.feed(aBuf) == constants.eFoundIt:
                    self.result = {'encoding': prober.get_charset_name(),
                                   'confidence': prober.get_confidence()}
                    self.done = True
                    break

    def close(self):
        """Finish detection and return self.result (best guess so far)."""
        if self.done:
            return
        if not self._mGotData:
            if constants._debug:
                sys.stderr.write('no data received!\n')
            return
        self.done = True
        if self._mInputState == ePureAscii:
            self.result = {'encoding': 'ascii', 'confidence': 1.0}
            return self.result
        if self._mInputState == eHighbyte:
            proberConfidence = None
            maxProberConfidence = 0.0
            maxProber = None
            # Pick the prober with the highest confidence above the threshold.
            for prober in self._mCharSetProbers:
                if not prober:
                    continue
                proberConfidence = prober.get_confidence()
                if proberConfidence > maxProberConfidence:
                    maxProberConfidence = proberConfidence
                    maxProber = prober
            if maxProber and (maxProberConfidence > MINIMUM_THRESHOLD):
                self.result = {'encoding': maxProber.get_charset_name(),
                               'confidence': maxProber.get_confidence()}
                return self.result
        if constants._debug:
            sys.stderr.write('no probers hit minimum threshhold\n')
            for prober in self._mCharSetProbers[0].mProbers:
                if not prober:
                    continue
                sys.stderr.write('%s confidence = %s\n' %
                                 (prober.get_charset_name(),
                                  prober.get_confidence()))
|
lyft/incubator-airflow | refs/heads/master | airflow/contrib/hooks/mongo_hook.py | 5 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use `airflow.providers.mongo.hooks.mongo`."""
import warnings
# pylint: disable=unused-import
from airflow.providers.mongo.hooks.mongo import MongoHook # noqa
warnings.warn(
"This module is deprecated. Please use `airflow.providers.mongo.hooks.mongo`.",
DeprecationWarning, stacklevel=2
)
|
benrudolph/commcare-hq | refs/heads/master | custom/ewsghana/forms.py | 2 | from django import forms
class InputStockForm(forms.Form):
    """Form for entering stock figures for a single product."""

    # Rendered as a hidden input; required.
    product_id = forms.CharField(widget=forms.HiddenInput())
    # Rendered as a hidden input; optional.
    product = forms.CharField(widget=forms.HiddenInput(), required=False)
    # Non-negative integer; optional.
    stock_on_hand = forms.IntegerField(min_value=0, required=False)
    # Non-negative integer; optional, pre-filled with 0.
    receipts = forms.IntegerField(min_value=0, initial=0, required=False)
    # Free-text unit label; optional.
    units = forms.CharField(required=False)
    # Rendered as a hidden input; optional.
    monthly_consumption = forms.IntegerField(required=False, widget=forms.HiddenInput())
|
FHannes/intellij-community | refs/heads/master | python/testData/refactoring/extractmethod/ElseBody.before.py | 83 | def foo():
for arg in sys.argv[1:]:
try:
f = open(arg, 'r')
except IOError:
print('cannot open', arg)
else:
<selection>length = len(f.readlines()) #<---extract something from here
print("hi from else")</selection>
#anything else you need |
MichaelNedzelsky/intellij-community | refs/heads/master | python/testData/refactoring/rename/renameUpdatesImportReferences/after/baz.py | 336 | def f():
pass
|
Coburn37/js-beautify | refs/heads/master | python/cssbeautifier/__init__.py | 7 | #
# The MIT License (MIT)
# Copyright (c) 2007-2017 Einar Lielmanis, Liam Newman, and contributors.
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import print_function
import sys
import re
import copy
from jsbeautifier.__version__ import __version__
from cssbeautifier.css.options import BeautifierOptions
from cssbeautifier.css.beautifier import Beautifier
def default_options():
    """Return a new BeautifierOptions instance with all default settings."""
    return BeautifierOptions()
def beautify(string, opts=None):
    """Beautify the CSS source *string* and return the formatted text.

    opts -- a BeautifierOptions; when omitted a fresh default set is used.

    BUGFIX: the previous signature used ``opts=default_options()``, which is
    evaluated once at import time, so every caller shared (and could mutate)
    a single options object.
    """
    if opts is None:
        opts = default_options()
    b = Beautifier(string, opts)
    return b.beautify()
def beautify_file(file_name, opts=None):
    """Beautify the CSS file *file_name* ('-' reads stdin) and return the text.

    opts -- a BeautifierOptions; when omitted a fresh default set is used
    (the old ``opts=default_options()`` default shared one mutable object
    across all calls).

    BUGFIX: the file handle opened here was previously never closed; it is
    now managed with a ``with`` block (stdin is left open).
    """
    if opts is None:
        opts = default_options()
    if file_name == '-':  # stdin
        content = sys.stdin.read()
    else:
        with open(file_name) as stream:
            content = stream.read()
    b = Beautifier(content, opts)
    return b.beautify()
def usage(stream=sys.stdout):
    """Write the usage banner to *stream*.

    Returns 1 when *stream* is stderr (error invocation), 0 otherwise, so the
    value can be used directly as a process exit status.
    """
    print("cssbeautifier.py@" + __version__ + """
CSS beautifier (http://jsbeautifier.org/)
""", file=stream)
    return 1 if stream == sys.stderr else 0
|
yskmt/texteditpad | refs/heads/master | texteditpad.py | 1 | """Simple textbox editing widget with Emacs-like keybindings."""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
from builtins import chr
from builtins import range
from builtins import object
import sys
import curses
import curses.ascii
# from six.moves import range
import locale
locale.setlocale(locale.LC_ALL, '')
def rectangle(win, uly, ulx, lry, lrx):
    """Draw a rectangle with corners at the provided upper-left
    and lower-right coordinates.
    """
    # Horizontal and vertical edges (corners excluded by the +1 offsets).
    win.hline(uly, ulx + 1, curses.ACS_HLINE, lrx - ulx - 1)
    win.hline(lry, ulx + 1, curses.ACS_HLINE, lrx - ulx - 1)
    win.vline(uly + 1, ulx, curses.ACS_VLINE, lry - uly - 1)
    win.vline(uly + 1, lrx, curses.ACS_VLINE, lry - uly - 1)
    # The four corner glyphs.
    for y, x, glyph in ((uly, ulx, curses.ACS_ULCORNER),
                        (uly, lrx, curses.ACS_URCORNER),
                        (lry, lrx, curses.ACS_LRCORNER),
                        (lry, ulx, curses.ACS_LLCORNER)):
        win.addch(y, x, glyph)
class Textbox(object):
    """Editing widget using the interior of a window object.
    Supports the following Emacs-like key bindings:
    Ctrl-A Go to left edge of window.
    Ctrl-B Cursor left, wrapping to previous line if appropriate.
    Ctrl-D Delete character under cursor.
    Ctrl-E Go to right edge (stripspaces off) or end of line (stripspaces on).
    Ctrl-F Cursor right, wrapping to next line when appropriate.
    Ctrl-G Terminate, returning the window contents.
    Ctrl-H Delete character backward.
    Ctrl-J Terminate if the window is 1 line, otherwise insert newline.
    Ctrl-K If line is blank, delete it, otherwise clear to end of line.
    Ctrl-L Refresh screen.
    Ctrl-N Cursor down; move down one line.
    Ctrl-O Insert a blank line at cursor location.
    Ctrl-P Cursor up; move up one line.
    Move operations do nothing if the cursor is at an edge where the movement
    is not possible. The following synonyms are supported where possible:
    KEY_LEFT = Ctrl-B, KEY_RIGHT = Ctrl-F, KEY_UP = Ctrl-P, KEY_DOWN = Ctrl-N
    KEY_BACKSPACE = Ctrl-h

    Terminology: a "vline" (virtual line) is one logical text line (an entry
    of self.text); each vline occupies one or more physical window rows when
    it wraps at the window width.
    """

    def __init__(self, win, stdscr=0, text='', n_sc=1,
                 insert_mode=True, resize_mode=False):
        self.win = win
        self.stdscr = stdscr
        self.insert_mode = insert_mode
        self.resize_mode = resize_mode
        self.lastcmd = None
        self.text = text.split('\n')
        # number of physical (wrapped) rows occupied by each virtual line
        self.lcount = [1]
        self.ppos = (0, 0)  # physical position of the cursor
        self.vpos = (0, 0)  # virtual position of the cursor
        self.vptl = (0, 0)  # virtual position of the top-left corner
        self.n_sc = n_sc  # how many units to scroll each time
        (self.maxy, self.maxx) = self._getmaxyx()
        (self.height, self.width) = (self.maxy + 1, self.maxx + 1)
        self.refresh()
        win.keypad(1)

    def _getmaxyx(self):
        """Return the largest addressable (y, x) cell: getmaxyx() minus one."""
        (maxy, maxx) = self.win.getmaxyx()
        return maxy - 1, maxx - 1

    def _addch(self, y, x, ch):
        "self.win.addch fix: problem at lower-right corner"
        # TODO: unicode support: ch.encode('utf-8')?
        # NOTE(review): the bare except also swallows errors other than the
        # curses lower-right-corner error -- confirm before narrowing it.
        try:
            self.win.addch(y, x, ch.encode())
        except:
            pass

    def do_command(self, ch):
        "Process a single editing command. Returns 0 to terminate, 1 otherwise."
        self.nlines = sum(self.lcount)
        self.lastcmd = ch
        if curses.ascii.isprint(ch):
            if self._insert_printable_char(ch) == 0:
                curses.beep()
        elif ch == curses.KEY_RESIZE:
            self.refresh()
        elif ch == curses.ascii.SOH:  # ^a
            self.move_front()
        elif ch == curses.ascii.ENQ:  # ^e
            self.move_end()
        elif ch in (curses.ascii.STX, curses.KEY_LEFT):  # ^b <-
            self.move_left()
        elif ch in (curses.ascii.ACK, curses.KEY_RIGHT):  # ^f ->
            self.move_right()
        elif ch in (curses.ascii.SO, curses.KEY_DOWN):  # ^n down
            self.move_down()
        elif ch in (curses.ascii.DLE, curses.KEY_UP):  # ^p up
            self.move_up()
        elif ch == curses.ascii.NL:  # ^j
            if self.height == 1:
                return 0
            else:
                if self.ppos[0] == self.maxy:
                    self.scroll(self.n_sc)
                self.newline()
        elif ch == curses.ascii.SI:  # ^o
            if self.ppos[0] == self.maxy:
                self.scroll(self.n_sc)
            self.newline()
        elif ch == curses.ascii.EOT:  # ^d
            self.delete()
        elif ch in (curses.ascii.BS, curses.KEY_BACKSPACE, curses.ascii.DEL):
            if (self.vpos[0] == 0) and (self.vpos[1] == 0):
                curses.beep()
            else:
                # move one left and delete the character there
                self.move_left()
                self.delete()
        elif ch == curses.ascii.VT:  # ^k
            if len(self.text[self.vpos[0]]) == 0:
                # blank vline: remove it entirely
                self.delete()
            else:
                self.clear_right()
        elif ch == curses.ascii.FF:  # ^l
            self.refresh()
        elif ch == curses.ascii.HT:  # ^i
            self.insert_mode = not self.insert_mode
        elif ch == curses.ascii.BEL:  # ^g
            return 0
        return 1

    def _insert_printable_char(self, ch):
        """Insert (or overwrite, depending on insert_mode) chr(ch) at vpos."""
        trailingstr = self.text[self.vpos[0]][self.vpos[1]:]
        # overwrite mode: replace the character under the cursor
        if not self.insert_mode:
            self.text[self.vpos[0]] \
                = self.text[self.vpos[0]][:self.vpos[1]] + chr(ch) \
                + trailingstr[1:]
            # BUGFIX: pass the str itself; _addch() does the encoding.  The
            # original passed chr(ch).encode(), so _addch()'s ch.encode()
            # raised AttributeError on bytes, which the bare except swallowed,
            # and overwritten characters were never drawn.
            self._addch(self.ppos[0], self.ppos[1], chr(ch))
            self.ppos = self.win.getyx()
            self.vpos = (self.vpos[0], self.vpos[1] + 1)
            return 1
        # insert mode: update the text
        self.text[self.vpos[0]] \
            = self.text[self.vpos[0]][:self.vpos[1]] + chr(ch) \
            + trailingstr
        # update line count
        self.lcount[self.vpos[0]] = len(
            self.text[self.vpos[0]]) // self.width + 1
        self.nlines = sum(self.lcount)
        # redraw!
        if self.ppos[0] == self.maxy and self.ppos[1] == self.maxx:
            self.scroll(self.n_sc)
            (backy, backx) = self.win.getyx()
        else:
            (backy, backx) = self.win.getyx()
            self.redraw_vlines(self.vpos, self.ppos)
        # update cursor position, wrapping to the next physical row if needed
        if backx + 1 == self.width:
            self.ppos = (backy + 1, 0)
        else:
            self.ppos = (backy, backx + 1)
        self.vpos = (self.vpos[0], self.vpos[1] + 1)
        self.win.move(*self.ppos)
        return 1

    def redraw_vlines(self, vpos, ppos):
        "Redraw vlines starting from vpos to the end at ppos"
        # clear the region being redrawn
        for i in range(ppos[1], self.width):
            self._addch(ppos[0], i, ' ')
        for l in range(ppos[0] + 1, self.height):
            self.clear_line(l)
        # now draw each character
        ln = ppos[0]
        cn = ppos[1] % self.width
        # first vline: continuation of the vline the cursor is on
        for j in range(vpos[1], len(self.text[vpos[0]])):
            self._addch(ln, cn, self.text[vpos[0]][j])
            if cn + 1 == self.width:
                ln += 1
                if ln == self.height:
                    break
            cn = (cn + 1) % self.width
        cn = 0
        ln += 1
        # the rest of the vlines, stopping at the bottom of the window
        for i in range(vpos[0] + 1, len(self.text)):
            for j in range(len(self.text[i])):
                if ln == self.height:
                    return
                self._addch(ln, cn, self.text[i][j])
                if cn + 1 == self.width:
                    ln += 1
                cn = (cn + 1) % self.width
            cn = 0
            ln += 1
        return

    def move_front(self):
        """Move the cursor to column 0 of the current physical row."""
        self.ppos = (self.ppos[0], 0)
        self.vpos = (
            self.vpos[0], self.vpos[1] // self.width * self.width)
        self.win.move(self.ppos[0], self.ppos[1])

    def move_end(self):
        """Move to the end of the current physical row (or of the vline)."""
        # not on the last physical row of this vline
        if (self.vpos[1] // self.width + 1) < self.lcount[self.vpos[0]]:
            self.ppos = (self.ppos[0], self.maxx)
            self.vpos = (self.vpos[0],
                         int((self.vpos[1] // self.width + 1) * self.width - 1))
        # on the last physical row of the vline
        else:
            self.ppos = (self.ppos[0],
                         len(self.text[self.vpos[0]]) % self.width)
            self.vpos = (self.vpos[0], len(self.text[self.vpos[0]]))
        self.win.move(self.ppos[0], self.ppos[1])

    def move_left(self):
        """Move one character left, wrapping to the previous row/vline."""
        if self.ppos[1] > 0:
            self.ppos = (self.ppos[0], self.ppos[1] - 1)
            self.vpos = (self.vpos[0], self.vpos[1] - 1)
        # no space to move
        elif self.vpos[0] == 0 and self.vpos[1] == 0:
            curses.beep()
            return
        else:
            if self.ppos[0] == 0:
                self.scroll(-self.n_sc)
            # move up to the previous vline
            if self.vpos[1] == 0:
                ll = len(self.text[self.vpos[0] - 1])
                self.vpos = (self.vpos[0] - 1, ll)
                self.ppos = (self.ppos[0] - 1, ll % (self.width))
            # move up within the same vline
            else:
                self.vpos = (self.vpos[0], self.vpos[1] - 1)
                self.ppos = (self.ppos[0] - 1, self.maxx)
        self.win.move(self.ppos[0], self.ppos[1])

    def move_right(self):
        """Move one character right, wrapping to the next row/vline."""
        ll = len(self.text[self.vpos[0]])
        if (self.ppos[1] < self.maxx) and (self.vpos[1] < ll):
            self.ppos = (self.ppos[0], self.ppos[1] + 1)
            self.vpos = (self.vpos[0], self.vpos[1] + 1)
        # no space to move
        elif ((self.vpos[0] + 1) == len(self.text)) \
                and (self.vpos[1] == ll):
            curses.beep()
            return
        else:
            if self.ppos[0] == self.maxy:
                self.scroll(self.n_sc)
            # move down to the next vline
            if self.vpos[1] == ll:
                self.vpos = (self.vpos[0] + 1, 0)
                self.ppos = (self.ppos[0] + 1, 0)
            # move down within the same vline
            else:
                self.vpos = (self.vpos[0], self.vpos[1] + 1)
                self.ppos = (self.ppos[0] + 1, 0)
        self.win.move(self.ppos[0], self.ppos[1])

    def move_down(self):
        """Move one physical row down, clamping the column to the row length."""
        # no more space to move down
        if (self.vpos[0] + 1) == len(self.text) \
                and (self.vpos[1] // self.width + 1) == self.lcount[self.vpos[0]]:
            curses.beep()
            return
        else:
            # cursor at the bottom: scroll down
            if self.ppos[0] == self.maxy:
                self.scroll(self.n_sc)
            # within the same vline
            if (self.vpos[1] // self.width + 1) < self.lcount[self.vpos[0]]:
                ll = len(self.text[self.vpos[0]])
                vpos1 = min(self.vpos[1] + self.width, ll)
                self.vpos = (
                    self.vpos[0], vpos1)
                self.ppos = (self.ppos[0] + 1,
                             vpos1 % self.width)
            # move to the next vline
            else:
                ll = len(self.text[self.vpos[0] + 1])
                vpos1 = min(self.vpos[1] % self.width, ll)
                self.vpos = (self.vpos[0] + 1,
                             vpos1)
                self.ppos = (self.ppos[0] + 1, vpos1 % self.width)
            self.win.move(self.ppos[0], self.ppos[1])

    def move_up(self):
        """Move one physical row up, clamping the column to the row length."""
        # cursor at the top
        if self.ppos[0] == 0:
            if self.vpos[0] == 0 and self.vpos[1] < self.width:
                curses.beep()
                return
            else:
                self.scroll(-self.n_sc)
        # move to the previous vline
        if self.vpos[1] < self.width:
            ll = len(self.text[self.vpos[0] - 1])
            vpos1 = min(int((ll // self.width) * self.width) + self.vpos[1],
                        ll)
            self.vpos = (self.vpos[0] - 1, vpos1)
            self.ppos = (self.ppos[0] - 1,
                         min(self.ppos[1], ll % self.width))
        # within the same vline
        else:
            self.vpos = (self.vpos[0], self.vpos[1] - self.width)
            self.ppos = (self.ppos[0] - 1, self.ppos[1])
        self.win.move(self.ppos[0], self.ppos[1])

    def scroll(self, n):
        "Scroll down/up by n units (positive for down)"
        # scroll up to the previous vline
        if (self.vptl[1] + self.width * n) < 0:
            # BUGFIX: floor division.  With ``from __future__ import
            # division`` the original ``/`` produced a float column, which
            # later crashes range() in redraw_vlines(); the equivalent
            # computation in move_front() already uses ``//``.
            self.vptl = (self.vptl[0] + n,
                         len(self.text[self.vptl[0] + n]) // self.width * self.width)
        # scroll up/down within the same vline
        elif (self.vptl[1] + self.width * n) <= len(self.text[self.vptl[0]]):
            self.vptl = (self.vptl[0], self.vptl[1] + self.width * n)
        # scroll down to the next vline
        else:
            self.vptl = (self.vptl[0] + n, 0)
        self.ppos = (self.ppos[0] - n, self.ppos[1])
        self.redraw_vlines(self.vptl, (0, 0))
        self.win.move(*self.ppos)

    def delat(self, vpos):
        "Delete the character at virtual position vpos"
        # delete within a vline
        if vpos[1] < len(self.text[vpos[0]]):
            self.text[vpos[0]] \
                = self.text[vpos[0]][:vpos[1]] \
                + self.text[vpos[0]][vpos[1] + 1:]
        # delete at the end of a vline: join with the following vline
        else:
            self.text[vpos[0]] \
                += self.text[vpos[0] + 1]
            self.text.pop(vpos[0] + 1)
            self.lcount.pop(vpos[0] + 1)
        self.lcount[vpos[0]] = len(self.text[vpos[0]]) // self.width + 1
        self.nlines = sum(self.lcount)
        for i in range(self.ppos[1], self.width):
            self._addch(self.ppos[0], i, ' ')
        self.redraw_vlines(vpos, self.ppos)

    def delete(self):
        """Delete the character under the cursor (beep at end of text)."""
        if (self.vpos[0] == len(self.text) - 1) \
                and (self.vpos[1] == len(self.text[self.vpos[0]])):
            curses.beep()
        else:
            backy, backx = self.ppos
            self.delat(self.vpos)
            self.ppos = (backy, backx)
            self.win.move(self.ppos[0], self.ppos[1])

    def clear_line(self, ln):
        "Clear one physical row at row number ln"
        for i in range(self.width):
            self._addch(ln, i, ' ')

    def clear_right(self):
        "Clear everything right of the cursor on the current vline."
        backy, backx = self.ppos
        # truncate the text at the cursor
        self.text[self.vpos[0]] \
            = self.text[self.vpos[0]][:self.vpos[1]]
        # update line count
        self.lcount[self.vpos[0]] = len(
            self.text[self.vpos[0]]) // self.width + 1
        self.nlines = sum(self.lcount)
        # redraw the vlines
        self.redraw_vlines(self.vpos, self.ppos)
        # set the cursor back
        self.ppos = (backy, backx)
        self.win.move(self.ppos[0], self.ppos[1])

    def newline(self):
        "Insert a new line. Move lines below down by one."
        # split the current vline at the cursor
        self.text.insert(
            self.vpos[0] + 1, self.text[self.vpos[0]][self.vpos[1]:])
        self.text[self.vpos[0]] = self.text[
            self.vpos[0]][:self.vpos[1]]
        # update the line counts
        self.lcount.insert(self.vpos[0] + 1,
                           len(self.text[self.vpos[0] + 1]) // self.width + 1)
        self.lcount[self.vpos[0]] = len(
            self.text[self.vpos[0]]) // self.width + 1
        self.nlines = sum(self.lcount)
        # clear the right part of the physical row
        for c in range(self.ppos[1], self.width):
            self._addch(self.ppos[0], c, ' ')
        # move physical and virtual cursors
        self.ppos = (self.ppos[0] + 1, 0)
        backy, backx = self.ppos
        self.vpos = (self.vpos[0] + 1, 0)
        # redraw the bottom lines
        self.redraw_vlines(self.vpos, self.ppos)
        # move the cursor position back
        self.ppos = (backy, backx)
        self.win.move(*self.ppos)

    def refresh(self):
        """Recompute geometry and redraw the whole widget."""
        # NOTE: texteditpad does not take care of the region outside
        # the Textbox. You need to manually erase characters there
        if self.resize_mode:
            # resize/move window to fit the new screen size; cursor and
            # viewport are reset to the top-left
            # self.stdscr.clear()
            # self.stdscr.refresh()
            ymax, xmax = self.win.getmaxyx()
            self.height, self.width = (ymax + 1, xmax + 1)
            self.vpos = (0, 0)
            self.ppos = (0, 0)
            self.vptl = (0, 0)
            self.lcount = [1] * len(self.text)
            for i in range(len(self.text)):
                self.lcount[i] = len(self.text[i]) // self.width + 1
            self.nlines = sum(self.lcount)
            # ymax, xmax = self.stdscr.getmaxyx()
            # ncols, nlines = xmax - 5, ymax - 3
            # self.win.resize(nlines, ncols)
            # uly, ulx = 2, 2
            # self.win.mvwin(uly, ulx)
        self.win.refresh()
        # recalculate the line count from the actual window size
        (self.maxy, self.maxx) = self._getmaxyx()
        (self.height, self.width) = (self.maxy + 1, self.maxx + 1)
        self.lcount = list([(len(x) // self.width + 1) for x in self.text])
        self.nlines = sum(self.lcount)
        # redraw the text edit box
        self.redraw_vlines(self.vptl, (0, 0))
        # replace the cursor
        self.win.move(*self.ppos)

    def edit(self, validate=None, debug_mode=False):
        "Edit in the widget window and collect the results."
        while 1:
            # unicode support: currently disabled
            if sys.version_info > (3, 3):
                # ch = self.win.get_wch()
                ch = self.win.getch()
            else:
                ch = self.win.getch()
            if validate:
                ch = validate(ch)
            if not ch:
                continue
            if not self.do_command(ch):
                break
            if debug_mode:
                # show key code and cursor coordinates on the bottom row
                (backy, backx) = self.win.getyx()
                maxy, maxx = self._getmaxyx()
                self.win.addstr(maxy, 0, ' ' * maxx)
                self.win.addstr(maxy, 0, '%d %d %d %d %d'
                                % (ch, self.vpos[0], self.vpos[1],
                                   self.ppos[0], self.ppos[1]))
                # self.win.addstr(maxy, 0, str(self.lnbg))
                self.win.refresh()
                self.win.move(backy, backx)
        return '\n'.join(self.text)
class EscapePressed(Exception):
    """Raised by validate() when the ESC key is pressed, aborting the edit."""
    pass
def validate(ch):
    """Pre-filter a key code before it reaches the Textbox.

    Raises EscapePressed on ESC; maps DEL to KEY_BACKSPACE (iTerm sends DEL
    for the backspace key); every other code passes through unchanged.
    """
    if ch == curses.ascii.ESC:
        raise EscapePressed
    if ch == curses.ascii.DEL:
        return curses.KEY_BACKSPACE
    return ch
if __name__ == '__main__':
    def test_editbox(stdscr):
        """Interactive demo: open a bordered 40x20 Textbox and edit until ^G/ESC."""
        # NOTE(review): testtext is read but never passed to Textbox below
        # (the widget starts with text='') -- confirm whether that is intended.
        with open("texteditpad.py", "r") as testfile:
            testtext = testfile.read()
        curses.use_default_colors()
        ymax, xmax = stdscr.getmaxyx()
        # ncols, nlines = xmax - 5, ymax - 3
        ncols, nlines = 40, 20
        uly, ulx = 2, 2
        stdscr.addstr(uly - 2, ulx, "Use Ctrl-G to end editing.")
        win = curses.newwin(nlines, ncols, uly, ulx)
        # draw the frame one cell outside the editing window
        rectangle(stdscr, uly - 1, ulx - 1, uly + nlines, ulx + ncols)
        stdscr.refresh()
        try:
            out = Textbox(win, stdscr=stdscr, text='',
                          resize_mode=True, insert_mode=True)\
                .edit(validate=validate, debug_mode=False)
        except EscapePressed:
            # ESC aborts editing; report no contents
            out = None
        return out

    # curses.wrapper handles terminal setup/teardown around the demo
    text = curses.wrapper(test_editbox)
    print('Contents of text box:')
    print(text)
|
titienmiami/mmc.repository | refs/heads/master | plugin.video.tvalacarta/tvalacarta/channels/meristation.py | 1 | # -*- coding: iso-8859-1 -*-
#------------------------------------------------------------
# tvalacarta - XBMC Plugin
# Canal para Meristation
# http://blog.tvalacarta.info/plugin-xbmc/tvalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
import sys
import xbmc
import xbmcgui
import xbmcplugin
from core import scrapertools
import binascii
from platformcode.xbmc import xbmctools
try:
pluginhandle = int( sys.argv[ 1 ] )
except:
pluginhandle = ""
xbmc.output("[meristation.py] init")
DEBUG = True
CHANNELNAME = "Meristation"
CHANNELCODE = "meristation"
def mainlist(params,url,category):
    """Build the channel's top-level menu in XBMC.

    Adds the fixed folder entries (latest videos, listings by console /
    alphabet / genre, and search), then finalizes the plugin directory.
    """
    xbmc.output("[meristation.py] mainlist")
    # Add the folder entries to the XBMC listing
    xbmctools.addnewfolder( CHANNELCODE , "ultimosvideos" , CHANNELNAME , "Últimos vídeos" , "http://www.meristation.com/v3/GEN_videos.php" , "" , "" )
    xbmctools.addnewfolder( CHANNELCODE , "listaconsolas" , CHANNELNAME , "Listado por consola", "http://www.meristation.com/v3/GEN_videos.php" , "" , "" )
    xbmctools.addnewfolder( CHANNELCODE , "listaletras" , CHANNELNAME , "Listado alfabético" , "http://www.meristation.com/v3/GEN_videos.php" , "" , "" )
    xbmctools.addnewfolder( CHANNELCODE , "listageneros" , CHANNELNAME , "Listado por género" , "http://www.meristation.com/v3/GEN_videos.php" , "" , "" )
    xbmctools.addnewfolder( CHANNELCODE , "search" , CHANNELNAME , "Buscar" , "" , "" , "" )
    # Label (top-right)...
    xbmcplugin.setPluginCategory( handle=pluginhandle, category=category )
    # Disable sorting...
    xbmcplugin.addSortMethod( handle=pluginhandle, sortMethod=xbmcplugin.SORT_METHOD_NONE )
    # End of directory...
    xbmcplugin.endOfDirectory( handle=pluginhandle, succeeded=True )
def search(params,url,category):
    """Prompt for a search term and list the matching videos."""
    xbmc.output("[meristation.py] search")
    kb = xbmc.Keyboard('')
    kb.doModal()
    if not kb.isConfirmed():
        return
    query = kb.getText()
    if len(query) == 0:
        return
    # Spaces become '+' so the term can travel in the query string.
    query = query.replace(" ", "+")
    search_url = "http://www.meristation.com/v3/resultado_busqueda.php?busca=" + query + "&tipo=10&palabras=1&pic=GEN"
    searchresults(params, search_url, category)
def searchresults(params,url,category):
    """Parse a search-results page and add one playable entry per video.

    Each result row yields (detail URL, title, platform, publisher); the
    title shown in XBMC combines the three text fields.
    """
    xbmc.output("[meristation.py] searchresults")
    # Download the page (the site requires the session cookies).
    xbmc.output("[meristation.py] url="+url)
    data = scrapertools.downloadpagewithcookies(url)
    #xbmc.output(data)
    # Sample of the HTML each result row has (reference for the patterns below):
    '''
    onMouseOut="this.style.background='#ffffff'">
    <td class="tabla_borde_down" valign="top" width="250">
    <font face="Arial, Helvetica, sans-serif" size="2">
    <a href="des_videos.php?pic=WII&idj=cw45ba12c3a8156&COD=cw4b002ff355067" class="mslink9">
    <b>MeriStation TV Noticias 3x11</b></a></font>
    <font face="Arial, Helvetica, sans-serif" size="2">
    <a href="WII_portada.php" class="mslink8">
    <font color="#3366CC"><b>WII</b></font></a><span class="mstrucos"></span>
    <br>
    <a href="empresa.php?pic=GEN&id=cw428d365050c81" class="mslink9">
    Nintendo</a></font>
    <font face="Arial, Helvetica, sans-serif" size="2"></font>
    <font face="Arial, Helvetica, sans-serif" size="2">
    </font>
    </td>
    <td class="tabla_borde_down" valign="top" width="100">
    <font face="Arial, Helvetica, sans-serif" size="2">
    <a href="GEN_.php" class="mslink9">
    Simulador</a></font>
    <font face="Arial, Helvetica, sans-serif" size="2"></font><br>
    <span class=fecha>
    16/11/09 </span>
    </td>
    <td class="tabla_borde_down" valign="top" width="200">
    <a href="shopping.php?idj=cw45ba12c3a8156" target="_blank">
    <img src="imgs/icono_busqueda_carrito1.gif" width="22" height="20" alt="Comprar" border="0"></a>
    <a href="listado_imagenes.php?pic=WII&idj=cw45ba12c3a8156">
    <img src="imgs/icono_busqueda_imagenes.gif" width="22" height="20" alt="Galería de Imágenes" border="0"></a>
    <a href="des_avances.php?pic=WII&pes=1&idj=cw45ba12c3a8156" >
    <img src="imgs/icono_busqueda_avances.gif" width="22" height="20" alt="Avance" border="0"></a>
    <a href="des_videos.php?pic=WII&pes=1&idj=cw45ba12c3a8156" >
    <img src="imgs/icono_busqueda_videos.gif" width="22" height="20" alt="Vídeos" border="0"></a>
    </td>
    <td class="tabla_borde_down" width="50" valign="top" align="center">
    <font face="Arial, Helvetica, sans-serif" size="2">
    <b>--</b></a></font>
    </td>
    '''
    # Outer pass: one match per <tr> result row.
    patron = '<tr onMouseOver="this.style.background =\'\' "; this.style.cursor = \'hand\'"(.*?)</tr>'
    matches = re.compile(patron,re.DOTALL).findall(data)
    scrapertools.printMatches(matches)
    for match in matches:
        # Inner pass: capture detail URL, title, platform and publisher.
        patron2 = '<td class="tabla_borde_down" valign="top" width="250">[^<]+'
        patron2 += '<font face="Arial, Helvetica, sans-serif" size="2">[^<]+'
        patron2 += '<a href="([^"]+)" class="mslink9">[^<]+'
        patron2 += '<b>([^<]+)</b></a></font>[^<]+'
        patron2 += '<font face="Arial, Helvetica, sans-serif" size="2">[^<]+'
        patron2 += '<a href="[^"]+" class="mslink8">[^<]+'
        patron2 += '<font color="[^"]+"><b>([^<]+)</b></font></a><span class="mstrucos"></span>[^<]+'
        patron2 += '<br>[^<]+'
        patron2 += '<a href="empresa.php[^"]+" class="mslink9">([^<]+)</a></font>[^<]+'
        matches2 = re.compile(patron2,re.DOTALL).findall(match)
        for match2 in matches2:
            # Video attributes: "Title [platform] [publisher]".
            scrapedtitle = match2[1].strip()+" ["+match2[2].strip()+"] ["+match2[3].strip()+"]"
            scrapedurl = urlparse.urljoin(url,match2[0])
            scrapedthumbnail = ""
            scrapedplot = ""
            if (DEBUG): xbmc.output("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]")
            # Add the playable entry to the XBMC listing.
            xbmctools.addnewvideo( CHANNELCODE , "play" , category , "Directo" , scrapedtitle , scrapedurl , scrapedthumbnail , scrapedplot )
    # Label (top-right)...
    xbmcplugin.setPluginCategory( handle=int( sys.argv[ 1 ] ), category=category )
    # Disable sorting...
    xbmcplugin.addSortMethod( handle=int( sys.argv[ 1 ] ), sortMethod=xbmcplugin.SORT_METHOD_NONE )
    # End of directory...
    xbmcplugin.endOfDirectory( handle=int( sys.argv[ 1 ] ), succeeded=True )
def letraresults(params,url,category):
    """List the videos of one letter/genre/console page, plus paging.

    Each row yields (detail URL, title, platform, publisher, genre, date);
    a second pattern looks for the "Siguiente" (next page) link and adds a
    folder that re-enters this same function.
    """
    xbmc.output("[meristation.py] letraresults")
    # Download the page (the site requires the session cookies).
    xbmc.output("[meristation.py] url="+url)
    data = scrapertools.downloadpagewithcookies(url)
    #xbmc.output(data)
    # Sample of the HTML each row has (reference for the pattern below):
    '''
    <tr>
    <td valign="top"><font face="Arial, Helvetica, sans-serif" size="2">
    <a href="des_videos.php?id=cw4b30babc22255&idj=cw4a697e97d6fe4&pic=GEN" class="mslink9"><b>Army Of Two: 40th Day</b></a></font>
    <font face="Arial, Helvetica, sans-serif" size="2">
    <a href="PS3_portada.php" class="mslink8">
    <font color="#999999"><b>PS3</b></font></a><span class="mstrucos"></span>
    <br>
    <a href="empresa.php?pic=GEN&id=cw45772dad6567c" class="mslink9">
    EA Montreal</a></font>
    <font face="Arial, Helvetica, sans-serif" size="2"></font>
    <font face="Arial, Helvetica, sans-serif" size="2">
    </font></td>
    <td valign="top" width="100">
    <font face="Arial, Helvetica, sans-serif" size="2">
    <a href="GEN_accion.php" class="mslink9">
    Acción</a></font><font face="Arial, Helvetica, sans-serif" size="2"></font><br>
    <span class=fecha>20/12/09</span> </td>
    <td width="50" valign="top" align="center">
    <font face="Arial, Helvetica, sans-serif" size="2">
    <span class="mslink9"><b>1232</b></span></font></td>
    </tr>
    '''
    # One match per video row: URL, title, platform, publisher, genre, date.
    patron = '<tr>[^<]+'
    patron += '<td valign="top"><font face="Arial, Helvetica, sans-serif" size="2">[^<]+'
    patron += '<a href="([^"]+)" class="mslink9"><b>([^<]+)</b></a></font>[^<]+'
    patron += '<font face="Arial, Helvetica, sans-serif" size="2">[^<]+'
    patron += '<a href="[^"]+" class="mslink8">[^<]+'
    patron += '<font color="[^"]+"><b>([^<]+)</b></font></a><span class="mstrucos"></span>[^<]+'
    patron += '<br>[^<]+'
    patron += '<a href="[^"]+" class="mslink9">([^<]+)</a></font>[^<]+'
    patron += '<font face="Arial, Helvetica, sans-serif" size="2"></font>[^<]+'
    patron += '<font face="Arial, Helvetica, sans-serif" size="2"> [^<]+'
    patron += '</font></td>[^<]+'
    patron += '<td valign="top" width="100">[^<]+'
    patron += '<font face="Arial, Helvetica, sans-serif" size="2">[^<]+'
    patron += '<a href="[^"]+" class="mslink9">([^<]+)</a></font><font face="Arial, Helvetica, sans-serif" size="2"></font><br>[^<]+'
    patron += '<span class=fecha>([^<]+)</span>'
    matches = re.compile(patron,re.DOTALL).findall(data)
    for match in matches:
        # Video attributes: "Title [platform] [publisher] [genre]".
        scrapedtitle = match[1].strip()+" ["+match[2].strip()+"] ["+match[3].strip()+"] ["+match[4].strip()+"]"
        scrapedurl = urlparse.urljoin(url,match[0])
        scrapedthumbnail = ""
        scrapedplot = ""
        if (DEBUG): xbmc.output("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]")
        # Add the playable entry to the XBMC listing.
        xbmctools.addnewvideo( CHANNELCODE , "play" , category , "Directo" , scrapedtitle , scrapedurl , scrapedthumbnail , scrapedplot )
    # Pagination: look for the "Siguiente" (next page) link.
    patron = '<a href="([^"]+)" class="mslink9">[^<]+<b>Siguiente</b></a></font> <img src="imgs/flecha_derecha.gif" width="4" height="6">'
    matches = re.compile(patron,re.DOTALL).findall(data)
    for match in matches:
        # Folder that loads the next page through this same function.
        scrapedtitle = "Página siguiente"
        scrapedurl = urlparse.urljoin(url,match)
        scrapedthumbnail = ""
        scrapedplot = ""
        if (DEBUG): xbmc.output("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]")
        xbmctools.addnewfolder( CHANNELCODE , "letraresults" , CHANNELNAME , scrapedtitle , scrapedurl , scrapedthumbnail , scrapedplot )
    # Label (top-right)...
    xbmcplugin.setPluginCategory( handle=int( sys.argv[ 1 ] ), category=category )
    # Disable sorting...
    xbmcplugin.addSortMethod( handle=int( sys.argv[ 1 ] ), sortMethod=xbmcplugin.SORT_METHOD_NONE )
    # End of directory...
    xbmcplugin.endOfDirectory( handle=int( sys.argv[ 1 ] ), succeeded=True )
def ultimosvideos(params,url,category):
    """List the "latest videos" page: a featured block plus a ranking table.

    Two different HTML layouts are scraped from the same page; both produce
    playable entries.
    """
    xbmc.output("[meristation.py] ultimosvideos")
    # Download the page (the site requires the session cookies).
    xbmc.output("[meristation.py] url="+url)
    data = scrapertools.downloadpagewithcookies(url)
    #xbmc.output(data)
    # Section 1: featured/recent videos.
    xbmc.output("[meristation.py] recientes")
    '''
    <td valign="top" align="center">
    <a href="des_videos.php?id=cw4b39e7ef51a6f&pic=PC&idj=cw49a26c7a07937" class="mslink9news"><b>Mass Effect 2 </b></a> <span class="mslink8">|</span>
    <a href="PC_portada.php" class="mslink8"><b><font color="#990066">PC</font></b></a><span class="fecha"></span>
    <br>
    <div class=fecha> 29 Dic 2009 | <font face="Arial, Helvetica, sans-serif" size="2" color="#000000">
    <span class="fecha"><a href="PC_rol.php" class="mslink8news">
    <font color="#666666">Rol</font></a></span></font>
    </div>
    </td>
    '''
    # Captures: detail URL, title, platform, date, genre.
    patron = '<td valign="top" align="center">[^<]+'
    patron += '<a href="([^"]+)" class="mslink9news"><b>([^<]+)</b></a> <span class="mslink8">.</span>[^<]+'
    patron += '<a href="[^"]+" class="mslink8"><b><font color="[^"]+">([^<]+)</font></b></a><span class="fecha"></span>[^<]+'
    patron += '<br>[^<]+'
    patron += '<div class=fecha>([^\|]+)\| <font face="Arial, Helvetica, sans-serif" size="2" color="#000000">[^<]+'
    patron += '<span class="fecha"><a href="[^"]+" class="mslink8news">[^<]+'
    patron += '<font color="[^"]+">([^<]+)</font>'
    matches = re.compile(patron,re.DOTALL).findall(data)
    scrapertools.printMatches(matches)
    for match in matches:
        # Video attributes: "Title (platform) (date) (genre)".
        scrapedtitle = match[1]+" ("+match[2]+")"+" ("+match[3].strip()+")"+" ("+match[4]+")"
        scrapedurl = urlparse.urljoin(url,match[0])
        scrapedthumbnail = ""
        scrapedplot = ""
        if (DEBUG): xbmc.output("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]")
        # Add the playable entry to the XBMC listing.
        xbmctools.addnewvideo( CHANNELCODE , "play" , category , "Directo" , scrapedtitle , scrapedurl , scrapedthumbnail , scrapedplot )
    # Section 2: ranking table of latest videos.
    xbmc.output("[meristation.py] ultimos")
    '''
    <tr valign="middle" bgcolor="#F2F2F2">
    <td width="15" valgin="middle"><font face="Arial, Helvetica, sans-serif" size="2"><img src="imgs/trucos/top10_mantiene.gif" width="11" height="10"></font></td>
    <td width="55" valign="middle"><span class=fecha>11/12/09</span></td>
    <td width="230" valign="middle"><a href="des_videos.php?id=cw4b27546152101&pic=360&idj=cw4a1fa3d144f98" class="mslink9news"><b>BRINK, Ciudad Parte 1</b></a></td>
    <td width="55" valign="middle">
    <div align="center"><font face="Arial, Helvetica, sans-serif" size="2" color="#000000"><span class="fecha"><a href="360_portada.php" class="mslink8"><font color="#99CC00"><b>360</b></font></a></span></font> </div>
    </td>
    <td width="75" valign="middle"><font face="Arial, Helvetica, sans-serif" size="2" color="#000000"><a href="360_accion.php" class="mslink8news">Acción</a></font></td>
    </tr>
    '''
    # Captures: date, detail URL, title, platform, genre.
    patron = '<tr valign="middle" bgcolor="[^"]+">[^<]+'
    patron += '<td width="15" valgin="middle"><font face="Arial, Helvetica, sans-serif" size="2"><img[^>]+></font></td>[^<]+'
    patron += '<td width="55" valign="middle"><span class=fecha>([^<]+)</span></td>[^<]+'
    patron += '<td width="230" valign="middle"><a href="([^"]+)" class="mslink9news"><b>([^<]+)</b></a></td>[^<]+'
    patron += '<td width="55" valign="middle"> [^<]+'
    patron += '<div align="center"><font face="Arial, Helvetica, sans-serif" size="2" color="#000000"><span class="fecha"><a href="[^"]+" class="mslink8"><font color="[^"]+"><b>([^<]+)</b></font></a></span></font> </div>[^<]+'
    patron += '</td>[^<]+'
    patron += '<td width="75" valign="middle"><font face="Arial, Helvetica, sans-serif" size="2" color="#000000"><a href="[^"]+" class="mslink8news">([^<]+)</a></font></td>[^<]+'
    matches = re.compile(patron,re.DOTALL).findall(data)
    scrapertools.printMatches(matches)
    for match in matches:
        # Video attributes: "Title (date) (platform) (genre)".
        scrapedtitle = match[2]+" ("+match[0]+")"+" ("+match[3]+")"+" ("+match[4]+")"
        scrapedurl = urlparse.urljoin(url,match[1])
        scrapedthumbnail = ""
        scrapedplot = ""
        if (DEBUG): xbmc.output("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]")
        # Add the playable entry to the XBMC listing.
        xbmctools.addnewvideo( CHANNELCODE , "play" , category , "Directo" , scrapedtitle , scrapedurl , scrapedthumbnail , scrapedplot )
    # Label (top-right)...
    xbmcplugin.setPluginCategory( handle=int( sys.argv[ 1 ] ), category=category )
    # Disable sorting...
    xbmcplugin.addSortMethod( handle=int( sys.argv[ 1 ] ), sortMethod=xbmcplugin.SORT_METHOD_NONE )
    # End of directory...
    xbmcplugin.endOfDirectory( handle=int( sys.argv[ 1 ] ), succeeded=True )
def listaletras(params,url,category):
    """Alphabetical index: one folder per letter link on the page."""
    xbmc.output("[meristation.py] listaalfabetica")
    # --------------------------------------------------------
    # Fetch the page (cached).
    # --------------------------------------------------------
    data = scrapertools.cachePage(url)
    #xbmc.output(data)
    # --------------------------------------------------------
    # Extract the letter links (folders).
    # --------------------------------------------------------
    letter_links = re.compile('<a href="([^"]+)" class="mr_link11negro">([^<]+)</a>', re.DOTALL).findall(data)
    if DEBUG: scrapertools.printMatches(letter_links)
    for link, label in letter_links:
        folder_url = urlparse.urljoin(url, link)
        thumb = ""
        plot = ""
        if DEBUG:
            xbmc.output("title=["+label+"], url=["+folder_url+"], thumbnail=["+thumb+"]")
        # Each letter opens a letraresults listing.
        xbmctools.addnewfolder( CHANNELCODE , "letraresults" , CHANNELNAME , label , folder_url , thumb, plot )
    # Label (top-right), disable sorting, close the listing.
    xbmcplugin.setPluginCategory( handle=pluginhandle, category=category )
    xbmcplugin.addSortMethod( handle=pluginhandle, sortMethod=xbmcplugin.SORT_METHOD_NONE )
    xbmcplugin.endOfDirectory( handle=pluginhandle, succeeded=True )
def listageneros(params,url,category):
    """Genre index: one folder per <option> entry on the page."""
    xbmc.output("[meristation.py] listaporgenero")
    # --------------------------------------------------------
    # Fetch the page (cached).
    # --------------------------------------------------------
    data = scrapertools.cachePage(url)
    #xbmc.output(data)
    # --------------------------------------------------------
    # Extract the genre options (folders).
    # --------------------------------------------------------
    genre_options = re.compile('<option value="([^"]+)">([^<]+)</option>', re.DOTALL).findall(data)
    if DEBUG: scrapertools.printMatches(genre_options)
    for value, label in genre_options:
        # Strip non-breaking spaces and surrounding whitespace from the label.
        clean_label = label.replace(" ","").strip()
        folder_url = urlparse.urljoin(url, value)
        thumb = ""
        plot = ""
        if DEBUG:
            xbmc.output("title=["+clean_label+"], url=["+folder_url+"], thumbnail=["+thumb+"]")
        # Each genre opens a letraresults listing.
        xbmctools.addnewfolder( CHANNELCODE , "letraresults" , CHANNELNAME , clean_label , folder_url , thumb, plot )
    # Label (top-right), disable sorting, close the listing.
    xbmcplugin.setPluginCategory( handle=pluginhandle, category=category )
    xbmcplugin.addSortMethod( handle=pluginhandle, sortMethod=xbmcplugin.SORT_METHOD_NONE )
    xbmcplugin.endOfDirectory( handle=pluginhandle, succeeded=True )
def listaconsolas(params,url,category):
    """Console index: one folder per platform link on the main videos page."""
    xbmc.output("[meristation.py] listaporconsola")
    # The incoming url is ignored: the console list always lives here.
    url = 'http://www.meristation.com/v3/GEN_videos.php'
    # --------------------------------------------------------
    # Fetch the page (cached).
    # --------------------------------------------------------
    data = scrapertools.cachePage(url)
    #xbmc.output(data)
    # --------------------------------------------------------
    # Extract the console links (folders).
    # --------------------------------------------------------
    console_links = re.compile('<a href="([^"]+)" class="mslink8">[^<]+<font color="[^"]+"><b>([^<]+)</b></font></a><span class="mstrucos">', re.DOTALL).findall(data)
    if DEBUG: scrapertools.printMatches(console_links)
    for link, label in console_links:
        folder_url = urlparse.urljoin(url, link)
        thumb = ""
        plot = ""
        if DEBUG:
            xbmc.output("title=["+label+"], url=["+folder_url+"], thumbnail=["+thumb+"]")
        # Each console opens its own submenu (detalleconsola).
        xbmctools.addnewfolder( CHANNELCODE , "detalleconsola" , CHANNELNAME , label , folder_url , thumb, plot )
    # Label (top-right), disable sorting, close the listing.
    xbmcplugin.setPluginCategory( handle=pluginhandle, category=category )
    xbmcplugin.addSortMethod( handle=pluginhandle, sortMethod=xbmcplugin.SORT_METHOD_NONE )
    xbmcplugin.endOfDirectory( handle=pluginhandle, succeeded=True )
def detalleconsola(params,url,category):
    """Submenu for one console: latest videos plus alphabetical and genre
    listings, all scoped to the console's own video page.

    params["title"] carries the (URL-quoted) console name picked in
    listaconsolas; it is appended to each folder label.
    """
    # Fix: this trace used to say "mainlist" (copy/paste from that function),
    # which made the debug log misleading.
    xbmc.output("[meristation.py] detalleconsola")
    title = urllib.unquote_plus( params.get("title") )
    # One folder per listing type for this console.
    xbmctools.addnewfolder( CHANNELCODE , "ultimosvideos" , CHANNELNAME , "Últimos vídeos "+title , url , "" , "" )
    xbmctools.addnewfolder( CHANNELCODE , "listaletras" , CHANNELNAME , "Listado alfabético "+title , url , "" , "" )
    xbmctools.addnewfolder( CHANNELCODE , "listageneros" , CHANNELNAME , "Listado por género "+title , url , "" , "" )
    # Label (top-right)...
    xbmcplugin.setPluginCategory( handle=pluginhandle, category=category )
    # Disable sorting...
    xbmcplugin.addSortMethod( handle=pluginhandle, sortMethod=xbmcplugin.SORT_METHOD_NONE )
    # End of directory...
    xbmcplugin.endOfDirectory( handle=pluginhandle, succeeded=True )
def play(params,url,category):
    """Resolve a video detail URL to the real media location and play it.

    The video id is extracted from the detail-page URL (three URL layouts
    are tried in turn), then the player XML is fetched and its <location>
    tag gives the direct stream URL.
    """
    xbmc.output("[meristation.py] play")
    title = unicode( xbmc.getInfoLabel( "ListItem.Title" ), "utf-8" )
    thumbnail = urllib.unquote_plus( params.get("thumbnail") )
    plot = unicode( xbmc.getInfoLabel( "ListItem.Plot" ), "utf-8" )
    server = "Directo"
    # Known URL layouts (reference):
    # detail page:
    # http://www.meristation.com/v3/des_videos.php?pic=WII&idj=cw49944ba621067&COD=cw4a8d04e8e355d
    # page with the video:
    # http://www.meristation.com/v3/des_videos.php?id=cw4a8d04e8e355d&c=1&pic=WII&idj=cw49944ba621067
    # download URL:
    # http://www.meristation.com/v3/des_videos.php?id=cw4a8d04e8e355d&c=1&pic=WII&idj=cw49944ba621067
    # player XML:
    # http://www.meristation.com/v3/video_player.php?vid=cw48fc48c0d0da9&res=alta&format=xml&version=1.5.002
    # Extract the video code: first try the trailing &COD= form...
    xbmc.output("[meristation.py] url="+url)
    patron = 'http\://www.meristation.com/v3/des_videos.php.*?\&COD\=([^$]+)$'
    matches = re.compile(patron,re.DOTALL).findall(url)
    scrapertools.printMatches(matches)
    if len(matches)==0:
        # ...then a generic id=...& parameter anywhere in the URL...
        patron = 'id\=([^\&]+)\&'
        matches = re.compile(patron,re.DOTALL).findall(url)
        scrapertools.printMatches(matches)
        if len(matches)==0:
            # ...and finally a trailing &id= parameter.
            patron = 'http\://www.meristation.com/v3/des_videos.php.*?\&id\=([^$]+)$'
            matches = re.compile(patron,re.DOTALL).findall(url)
            scrapertools.printMatches(matches)
            if len(matches)==0:
                # No recognisable id: tell the user the video is unavailable.
                xbmctools.alertnodisponible()
                return
    # Fetch the player XML for this video id.
    xbmc.output("[meristation.py] vid="+matches[0])
    url = 'http://www.meristation.com/v3/video_player.php?id='+matches[0]+'&format=xml'
    xbmc.output("[meristation.py] url="+url)
    data = scrapertools.downloadpagewithcookies(url)
    xbmc.output(data[:200])
    # The <location> tag holds the direct stream URL.
    patron = '<location>([^<]+)</location>'
    matches = re.compile(patron,re.DOTALL).findall(data)
    scrapertools.printMatches(matches)
    if len(matches)==0:
        return
    url = matches[0]
    # Escape spaces so the player gets a valid URL.
    url = url.replace(" ","%20")
    xbmctools.playvideo(CHANNELCODE,server,url,category,title,thumbnail,plot)
|
joebowen/ChannelWorm | refs/heads/master | channelworm/ion_channel/urls.py | 1 | """channelworm URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from ion_channel import views
from ion_channel.views import *
urlpatterns = [
    # Landing page.
    url(r'^$', views.index, name='index'),
    # Experiment CRUD (class-based views).
    url(r'^experiment/create/$', ExperimentCreate.as_view(), name='experiment-create'),
    url(r'^home$', ExperimentList.as_view(), name='home'),
    url(r'^experiment$', ExperimentList.as_view(), name='experiment-index'),
    url(r'^experiment/update/(?P<pk>[0-9]+)$', ExperimentUpdate.as_view(), name='experiment-update'),
    url(r'^experiment/delete/(?P<pk>[0-9]+)$', ExperimentDelete.as_view(), name='experiment-delete'),
    # Ion-channel model CRUD.
    url(r'^channel_model$', IonChannelList.as_view(), name='ion-channel-index'),
    url(r'^channel_model/create$', IonChannelCreate.as_view(), name='ion-channel-create'),
    url(r'^channel_model/update/(?P<pk>[0-9]+)$', IonChannelUpdate.as_view(), name='ion-channel-update'),
    url(r'^channel_model/delete/(?P<pk>[0-9]+)$', IonChannelDelete.as_view(), name='ion-channel-delete'),
    # Patch-clamp experiment CRUD.
    url(r'^patch_clamp$', PatchClampList.as_view(), name='patch-clamp-index'),
    url(r'^patch_clamp/create$', PatchClampCreate.as_view(), name='patch-clamp-create'),
    url(r'^patch_clamp/update/(?P<pk>[0-9]+)$', PatchClampUpdate.as_view(), name='patch-clamp-update'),
    url(r'^patch_clamp/delete/(?P<pk>[0-9]+)$', PatchClampDelete.as_view(), name='patch-clamp-delete'),
    # Graph CRUD.
    url(r'^graph$', GraphList.as_view(), name='graph-index'),
    url(r'^graph/create$', GraphCreate.as_view(), name='graph-create'),
    url(r'^graph/update/(?P<pk>[0-9]+)$', GraphUpdate.as_view(), name='graph-update'),
    url(r'^graph/delete/(?P<pk>[0-9]+)$', GraphDelete.as_view(), name='graph-delete'),
    # Digitized graph data: listing is per graph; creation is a function view.
    url(r'^graph_data/(?P<graph_id>[0-9]+)$', GraphDataList.as_view(), name='graph-data-index'),
    url(r'^graph_data/create$', save_graph_data, name='graph-data-create'),
    url(r'^graph_data/delete/(?P<graph_id>[0-9]+)/(?P<pk>[0-9]+)/$', GraphDataDelete.as_view(), name='graph-data-delete'),
]
|
ihipi/Sick-Beard | refs/heads/development | lib/hachoir_core/field/integer.py | 90 | """
Integer field classes:
- UInt8, UInt16, UInt24, UInt32, UInt64: unsigned integer of 8, 16, 32, 64 bits ;
- Int8, Int16, Int24, Int32, Int64: signed integer of 8, 16, 32, 64 bits.
"""
from lib.hachoir_core.field import Bits, FieldError
class GenericInteger(Bits):
    """
    Generic integer class used to generate other classes.

    Wraps a Bits field and decodes its raw bits as a signed or unsigned
    integer using the parent field set's endianness.
    """
    def __init__(self, parent, name, signed, size, description=None):
        # Only widths between 8 and 256 bits are supported by the reader.
        if not (8 <= size <= 256):
            raise FieldError("Invalid integer size (%s): have to be in 8..256" % size)
        Bits.__init__(self, parent, name, size, description)
        self.signed = signed

    def createValue(self):
        # Decode the bits at this field's absolute position; endianness
        # comes from the parent field set.
        return self._parent.stream.readInteger(
            self.absolute_address, self.signed, self._size, self._parent.endian)
def integerFactory(name, is_signed, size, doc):
    """Build a fixed-width integer field class (used for UInt8, Int16, ...)."""
    class Integer(GenericInteger):
        __doc__ = doc
        static_size = size

        def __init__(self, parent, name, description=None):
            GenericInteger.__init__(self, parent, name, is_signed, size, description)

    # Give the generated class its public name before returning it.
    Integer.__name__ = name
    return Integer
# Concrete field classes: one per (signedness, width) combination.
UInt8 = integerFactory("UInt8", False, 8, "Unsigned integer of 8 bits")
UInt16 = integerFactory("UInt16", False, 16, "Unsigned integer of 16 bits")
UInt24 = integerFactory("UInt24", False, 24, "Unsigned integer of 24 bits")
UInt32 = integerFactory("UInt32", False, 32, "Unsigned integer of 32 bits")
UInt64 = integerFactory("UInt64", False, 64, "Unsigned integer of 64 bits")
Int8 = integerFactory("Int8", True, 8, "Signed integer of 8 bits")
Int16 = integerFactory("Int16", True, 16, "Signed integer of 16 bits")
Int24 = integerFactory("Int24", True, 24, "Signed integer of 24 bits")
Int32 = integerFactory("Int32", True, 32, "Signed integer of 32 bits")
Int64 = integerFactory("Int64", True, 64, "Signed integer of 64 bits")
|
nikitasingh981/scikit-learn | refs/heads/master | sklearn/utils/tests/test_extmath.py | 19 | # Authors: Olivier Grisel <olivier.grisel@ensta.org>
# Mathieu Blondel <mathieu@mblondel.org>
# Denis Engemann <denis-alexander.engemann@inria.fr>
#
# License: BSD 3 clause
import numpy as np
from scipy import sparse
from scipy import linalg
from scipy import stats
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_false
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_warns
from sklearn.utils.testing import skip_if_32bit
from sklearn.utils.testing import SkipTest
from sklearn.utils.fixes import np_version
from sklearn.utils.extmath import density
from sklearn.utils.extmath import logsumexp
from sklearn.utils.extmath import norm, squared_norm
from sklearn.utils.extmath import randomized_svd
from sklearn.utils.extmath import row_norms
from sklearn.utils.extmath import weighted_mode
from sklearn.utils.extmath import cartesian
from sklearn.utils.extmath import log_logistic
from sklearn.utils.extmath import fast_dot, _fast_dot
from sklearn.utils.extmath import svd_flip
from sklearn.utils.extmath import _incremental_mean_and_var
from sklearn.utils.extmath import _deterministic_vector_sign_flip
from sklearn.utils.extmath import softmax
from sklearn.utils.extmath import stable_cumsum
from sklearn.datasets.samples_generator import make_low_rank_matrix
def test_density():
    # density() must give the same answer for a dense array and any
    # sparse representation of it.
    rng = np.random.RandomState(0)
    X = rng.randint(10, size=(10, 5))
    X[1, 2] = 0
    X[5, 3] = 0
    expected = density(X)
    sparse_formats = (sparse.csr_matrix, sparse.csc_matrix,
                      sparse.coo_matrix, sparse.lil_matrix)
    for to_sparse in sparse_formats:
        assert_equal(density(to_sparse(X)), expected)
def test_uniform_weights():
    # With all-ones weights, weighted_mode must reproduce scipy.stats.mode.
    rng = np.random.RandomState(0)
    x = rng.randint(10, size=(10, 5))
    uniform = np.ones(x.shape)
    for axis in (None, 0, 1):
        expected_mode, expected_score = stats.mode(x, axis)
        got_mode, got_score = weighted_mode(x, uniform, axis)
        assert_array_equal(expected_mode, got_mode)
        assert_array_equal(expected_score, got_score)
def test_random_weights():
    # Set this up so that each row should have a weighted mode of 6,
    # with a score that is easily reproduced.
    mode_result = 6
    rng = np.random.RandomState(0)
    x = rng.randint(mode_result, size=(100, 10))
    w = rng.random_sample(x.shape)
    # Force the first five columns of every row to the target value and
    # boost their weights so that 6 always wins the weighted vote.
    x[:, :5] = mode_result
    w[:, :5] += 1
    mode, score = weighted_mode(x, w, axis=1)
    assert_array_equal(mode, mode_result)
    # The winning score is exactly the per-row sum of the boosted weights.
    assert_array_almost_equal(score.ravel(), w[:, :5].sum(1))
def test_logsumexp():
    # Adding many tiny numbers in log space should round-trip through exp
    # to match the direct sum, both flat and per-axis.
    x = np.array([1e-40] * 1000000)
    logx = np.log(x)
    assert_almost_equal(np.exp(logsumexp(logx)), x.sum())
    X = np.vstack((x, x))
    logX = np.vstack((logx, logx))
    for axis in (0, 1):
        assert_array_almost_equal(np.exp(logsumexp(logX, axis=axis)),
                                  X.sum(axis=axis))
def test_randomized_svd_low_rank():
    # Check that extmath.randomized_svd is consistent with linalg.svd.
    n_samples = 100
    n_features = 500
    rank = 5
    k = 10
    # Generate a matrix X of approximate effective rank `rank` and no noise
    # component (very structured signal):
    X = make_low_rank_matrix(n_samples=n_samples, n_features=n_features,
                             effective_rank=rank, tail_strength=0.0,
                             random_state=0)
    assert_equal(X.shape, (n_samples, n_features))
    # Compute the singular values of X using the slow exact method.
    U, s, V = linalg.svd(X, full_matrices=False)
    for normalizer in ['auto', 'LU', 'QR']:  # 'none' would not be stable
        # Compute the singular values of X using the fast approximate method.
        Ua, sa, Va = \
            randomized_svd(X, k, power_iteration_normalizer=normalizer,
                           random_state=0)
        assert_equal(Ua.shape, (n_samples, k))
        assert_equal(sa.shape, (k,))
        assert_equal(Va.shape, (k, n_features))
        # Ensure that the singular values of both methods are equal up to the
        # real rank of the matrix.
        assert_almost_equal(s[:k], sa)
        # Check the singular vectors too (while not checking the sign).
        assert_almost_equal(np.dot(U[:, :k], V[:k, :]), np.dot(Ua, Va))
        # Check the sparse matrix representation.
        X = sparse.csr_matrix(X)
        # Compute the singular values of X using the fast approximate method.
        Ua, sa, Va = \
            randomized_svd(X, k, power_iteration_normalizer=normalizer,
                           random_state=0)
        assert_almost_equal(s[:rank], sa[:rank])
def test_norm_squared_norm():
    # norm/squared_norm must agree with numpy's flattened norm, even for
    # large-magnitude, offset data (numerical stability check).
    X = np.random.RandomState(42).randn(50, 63)
    X *= 100  # check stability
    X += 200
    flat_norm = np.linalg.norm(X.ravel())
    assert_almost_equal(flat_norm, norm(X))
    assert_almost_equal(norm(X) ** 2, squared_norm(X), decimal=6)
    assert_almost_equal(np.linalg.norm(X), np.sqrt(squared_norm(X)), decimal=6)
def test_row_norms():
    # row_norms must match the explicit per-row sum of squares for both
    # dense and CSR inputs, in float32 and float64.
    X = np.random.RandomState(42).randn(100, 100)
    for dtype in (np.float32, np.float64):
        # Looser tolerance for single precision.
        precision = 4 if dtype is np.float32 else 5
        X = X.astype(dtype)
        expected_sq = (X ** 2).sum(axis=1)
        for mat in (X, sparse.csr_matrix(X, dtype=dtype)):
            assert_array_almost_equal(expected_sq,
                                      row_norms(mat, squared=True), precision)
            assert_array_almost_equal(np.sqrt(expected_sq),
                                      row_norms(mat), precision)
def test_randomized_svd_low_rank_with_noise():
    # Check that extmath.randomized_svd can handle noisy matrices.
    n_samples = 100
    n_features = 500
    rank = 5
    k = 10
    # Generate a matrix X with structure approximate rank `rank` and an
    # important noisy component.
    X = make_low_rank_matrix(n_samples=n_samples, n_features=n_features,
                             effective_rank=rank, tail_strength=0.1,
                             random_state=0)
    assert_equal(X.shape, (n_samples, n_features))
    # Compute the singular values of X using the slow exact method.
    _, s, _ = linalg.svd(X, full_matrices=False)
    for normalizer in ['auto', 'none', 'LU', 'QR']:
        # Compute the singular values of X using the fast approximate
        # method without the iterated power method.
        _, sa, _ = randomized_svd(X, k, n_iter=0,
                                  power_iteration_normalizer=normalizer,
                                  random_state=0)
        # The approximation does not tolerate the noise:
        assert_greater(np.abs(s[:k] - sa).max(), 0.01)
        # Compute the singular values of X using the fast approximate
        # method with iterated power method.
        _, sap, _ = randomized_svd(X, k,
                                   power_iteration_normalizer=normalizer,
                                   random_state=0)
        # The iterated power method is helping getting rid of the noise:
        assert_almost_equal(s[:k], sap, decimal=3)
def test_randomized_svd_infinite_rank():
    # Check that extmath.randomized_svd can handle noisy matrices.
    n_samples = 100
    n_features = 500
    rank = 5
    k = 10
    # Let us try again without 'low_rank component': just regularly but slowly
    # decreasing singular values: the rank of the data matrix is infinite.
    X = make_low_rank_matrix(n_samples=n_samples, n_features=n_features,
                             effective_rank=rank, tail_strength=1.0,
                             random_state=0)
    assert_equal(X.shape, (n_samples, n_features))
    # Compute the singular values of X using the slow exact method.
    _, s, _ = linalg.svd(X, full_matrices=False)
    for normalizer in ['auto', 'none', 'LU', 'QR']:
        # Compute the singular values of X using the fast approximate method
        # without the iterated power method.
        _, sa, _ = randomized_svd(X, k, n_iter=0,
                                  power_iteration_normalizer=normalizer)
        # The approximation does not tolerate the noise:
        assert_greater(np.abs(s[:k] - sa).max(), 0.1)
        # Compute the singular values of X using the fast approximate method
        # with iterated power method.
        _, sap, _ = randomized_svd(X, k, n_iter=5,
                                   power_iteration_normalizer=normalizer)
        # The iterated power method is still managing to get most of the
        # structure at the requested rank.
        assert_almost_equal(s[:k], sap, decimal=3)
def test_randomized_svd_transpose_consistency():
    # Check that transposing the design matrix has limited impact.
    n_samples = 100
    n_features = 500
    rank = 4
    k = 10
    X = make_low_rank_matrix(n_samples=n_samples, n_features=n_features,
                             effective_rank=rank, tail_strength=0.5,
                             random_state=0)
    assert_equal(X.shape, (n_samples, n_features))
    # Same decomposition with the three transpose policies, plus the exact SVD
    # as reference.
    U1, s1, V1 = randomized_svd(X, k, n_iter=3, transpose=False,
                                random_state=0)
    U2, s2, V2 = randomized_svd(X, k, n_iter=3, transpose=True,
                                random_state=0)
    U3, s3, V3 = randomized_svd(X, k, n_iter=3, transpose='auto',
                                random_state=0)
    U4, s4, V4 = linalg.svd(X, full_matrices=False)
    # All three approximations recover the top-k singular values.
    assert_almost_equal(s1, s4[:k], decimal=3)
    assert_almost_equal(s2, s4[:k], decimal=3)
    assert_almost_equal(s3, s4[:k], decimal=3)
    # Reconstructions agree with the exact rank-k reconstruction.
    assert_almost_equal(np.dot(U1, V1), np.dot(U4[:, :k], V4[:k, :]),
                        decimal=2)
    assert_almost_equal(np.dot(U2, V2), np.dot(U4[:, :k], V4[:k, :]),
                        decimal=2)
    # In this case 'auto' is equivalent to transpose.
    assert_almost_equal(s2, s3)
def test_randomized_svd_power_iteration_normalizer():
    # randomized_svd with power_iteration_normalized='none' diverges for
    # large number of power iterations on this dataset
    rng = np.random.RandomState(42)
    X = make_low_rank_matrix(100, 500, effective_rank=50, random_state=rng)
    # Coarse binary perturbation gives the spectrum a heavy tail.
    X += 3 * rng.randint(0, 2, size=X.shape)
    n_components = 50
    # Check that it diverges with many (non-normalized) power iterations
    U, s, V = randomized_svd(X, n_components, n_iter=2,
                             power_iteration_normalizer='none')
    A = X - U.dot(np.diag(s).dot(V))
    error_2 = linalg.norm(A, ord='fro')
    U, s, V = randomized_svd(X, n_components, n_iter=20,
                             power_iteration_normalizer='none')
    A = X - U.dot(np.diag(s).dot(V))
    error_20 = linalg.norm(A, ord='fro')
    # Without normalization the reconstruction error blows up between
    # n_iter=2 and n_iter=20.
    assert_greater(np.abs(error_2 - error_20), 100)
    # With LU/QR (or 'auto') normalization the error stays stable as the
    # number of power iterations grows.
    for normalizer in ['LU', 'QR', 'auto']:
        U, s, V = randomized_svd(X, n_components, n_iter=2,
                                 power_iteration_normalizer=normalizer,
                                 random_state=0)
        A = X - U.dot(np.diag(s).dot(V))
        error_2 = linalg.norm(A, ord='fro')
        for i in [5, 10, 50]:
            U, s, V = randomized_svd(X, n_components, n_iter=i,
                                     power_iteration_normalizer=normalizer,
                                     random_state=0)
            A = X - U.dot(np.diag(s).dot(V))
            error = linalg.norm(A, ord='fro')
            # Error at n_iter=i stays within 15 of the n_iter=2 baseline.
            assert_greater(15, np.abs(error_2 - error))
def test_svd_flip():
    # svd_flip must yield a valid SVD (i.e. reconstruct the input) under
    # both sign conventions, on a matrix and on its transpose.
    rs = np.random.RandomState(1999)
    X = rs.randn(20, 10)

    # v-based sign convention still reconstructs X.
    U, S, V = linalg.svd(X, full_matrices=False)
    U1, V1 = svd_flip(U, V, u_based_decision=False)
    assert_almost_equal(np.dot(U1 * S, V1), X, decimal=6)

    # u-based sign convention on the transposed matrix.
    XT = X.T
    U, S, V = linalg.svd(XT, full_matrices=False)
    U2, V2 = svd_flip(U, V, u_based_decision=True)
    assert_almost_equal(np.dot(U2 * S, V2), XT, decimal=6)

    # Both flip conventions are equivalent under reconstruction.
    for u_based in (True, False):
        U_f, V_f = svd_flip(U, V, u_based_decision=u_based)
        assert_almost_equal(np.dot(U_f * S, V_f), XT, decimal=6)
def test_randomized_svd_sign_flip():
    # With flip_sign=True the factorization of a fixed diagonal matrix must
    # be independent of the random seed.
    a = np.array([[2.0, 0.0], [0.0, 1.0]])
    u_ref, _, v_ref = randomized_svd(a, 2, flip_sign=True, random_state=41)
    identity = np.eye(2)
    for seed in range(10):
        u, s, v = randomized_svd(a, 2, flip_sign=True, random_state=seed)
        assert_almost_equal(u_ref, u)
        assert_almost_equal(v_ref, v)
        # u * s * v is still an exact factorization with orthonormal factors.
        assert_almost_equal(np.dot(u * s, v), a)
        assert_almost_equal(np.dot(u.T, u), identity)
        assert_almost_equal(np.dot(v.T, v), identity)
def test_randomized_svd_sign_flip_with_transpose():
    # Check if the randomized_svd sign flipping is always done based on u
    # irrespective of transpose.
    # See https://github.com/scikit-learn/scikit-learn/issues/5608
    # for more details.
    def max_loading_is_positive(u, v):
        """
        returns bool tuple indicating if the values maximising np.abs
        are positive across all rows for u and across all columns for v.
        """
        u_based = (np.abs(u).max(axis=0) == u.max(axis=0)).all()
        v_based = (np.abs(v).max(axis=1) == v.max(axis=1)).all()
        return u_based, v_based
    mat = np.arange(10 * 8).reshape(10, -1)
    # Without transpose: flipping must be decided by u, not v.
    u_flipped, _, v_flipped = randomized_svd(mat, 3, flip_sign=True)
    u_based, v_based = max_loading_is_positive(u_flipped, v_flipped)
    assert_true(u_based)
    assert_false(v_based)
    # With transpose: same u-based behavior must hold.
    u_flipped_with_transpose, _, v_flipped_with_transpose = randomized_svd(
        mat, 3, flip_sign=True, transpose=True)
    u_based, v_based = max_loading_is_positive(
        u_flipped_with_transpose, v_flipped_with_transpose)
    assert_true(u_based)
    assert_false(v_based)
def test_cartesian():
    # Check if cartesian product delivers the right results
    axes = (np.array([1, 2, 3]), np.array([4, 5]), np.array([6, 7]))
    # Expected full cartesian product in row-major (last axis fastest) order.
    true_out = np.array([[1, 4, 6],
                         [1, 4, 7],
                         [1, 5, 6],
                         [1, 5, 7],
                         [2, 4, 6],
                         [2, 4, 7],
                         [2, 5, 6],
                         [2, 5, 7],
                         [3, 4, 6],
                         [3, 4, 7],
                         [3, 5, 6],
                         [3, 5, 7]])
    out = cartesian(axes)
    assert_array_equal(true_out, out)
    # check single axis
    x = np.arange(3)
    assert_array_equal(x[:, np.newaxis], cartesian((x,)))
def test_logistic_sigmoid():
    # log_logistic must agree with the naive formula where that formula is
    # numerically stable, and stay finite for extreme inputs.
    def reference_log_logistic(t):
        return np.log(1 / (1 + np.exp(-t)))

    grid = np.linspace(-2, 2, 50)
    assert_array_almost_equal(log_logistic(grid),
                              reference_log_logistic(grid))

    # At +/-100 the naive formula over/underflows; log_logistic must not.
    assert_array_almost_equal(log_logistic(np.array([-100., 100.])),
                              [-100, 0])
def test_fast_dot():
    # Check fast dot blas wrapper function
    if fast_dot is np.dot:
        # No BLAS-backed wrapper on this build; nothing to test.
        return
    rng = np.random.RandomState(42)
    A = rng.random_sample([2, 10])
    B = rng.random_sample([2, 10])
    # Detect whether scipy exposes a usable gemm implementation.
    try:
        linalg.get_blas_funcs(['gemm'])[0]
        has_blas = True
    except (AttributeError, ValueError):
        has_blas = False
    if has_blas:
        # Test _fast_dot for invalid input.
        # Maltyped data: mismatched or non-float dtypes must be rejected.
        for dt1, dt2 in [['f8', 'f4'], ['i4', 'i4']]:
            assert_raises(ValueError, _fast_dot, A.astype(dt1),
                          B.astype(dt2).T)
        # Malformed data.
        # ndim == 0
        E = np.empty(0)
        assert_raises(ValueError, _fast_dot, E, E)
        # ndim == 1
        assert_raises(ValueError, _fast_dot, A, A[0])
        # ndim > 2
        assert_raises(ValueError, _fast_dot, A.T, np.array([A, A]))
        # min(shape) == 1
        assert_raises(ValueError, _fast_dot, A, A[0, :][None, :])
        # test for matrix mismatch error
        assert_raises(ValueError, _fast_dot, A, A)
    # Test cov-like use case + dtypes: fast_dot must agree with np.dot.
    for dtype in ['f8', 'f4']:
        A = A.astype(dtype)
        B = B.astype(dtype)
        # col < row
        C = np.dot(A.T, A)
        C_ = fast_dot(A.T, A)
        assert_almost_equal(C, C_, decimal=5)
        C = np.dot(A.T, B)
        C_ = fast_dot(A.T, B)
        assert_almost_equal(C, C_, decimal=5)
        C = np.dot(A, B.T)
        C_ = fast_dot(A, B.T)
        assert_almost_equal(C, C_, decimal=5)
    # Test square matrix * rectangular use case.
    A = rng.random_sample([2, 2])
    for dtype in ['f8', 'f4']:
        A = A.astype(dtype)
        B = B.astype(dtype)
        C = np.dot(A, B)
        C_ = fast_dot(A, B)
        assert_almost_equal(C, C_, decimal=5)
        C = np.dot(A.T, B)
        C_ = fast_dot(A.T, B)
        assert_almost_equal(C, C_, decimal=5)
    if has_blas:
        # Non-finite inputs must be rejected, not silently propagated.
        for x in [np.array([[d] * 10] * 2) for d in [np.inf, np.nan]]:
            assert_raises(ValueError, _fast_dot, x, x.T)
def test_incremental_variance_update_formulas():
    # Test Youngs and Cramer incremental variance formulas.
    # Doggie data from http://www.mathsisfun.com/data/standard-deviation.html
    A = np.array([[600, 470, 170, 430, 300],
                  [600, 470, 170, 430, 300],
                  [600, 470, 170, 430, 300],
                  [600, 470, 170, 430, 300]]).T
    # Split the data in two batches and fold the second batch into the
    # statistics computed on the first.
    idx = 2
    X1 = A[:idx, :]
    X2 = A[idx:, :]
    old_means = X1.mean(axis=0)
    old_variances = X1.var(axis=0)
    old_sample_count = X1.shape[0]
    final_means, final_variances, final_count = \
        _incremental_mean_and_var(X2, old_means, old_variances,
                                  old_sample_count)
    # Incremental statistics must match the one-shot batch statistics on A.
    assert_almost_equal(final_means, A.mean(axis=0), 6)
    assert_almost_equal(final_variances, A.var(axis=0), 6)
    assert_almost_equal(final_count, A.shape[0])
@skip_if_32bit
def test_incremental_variance_numerical_stability():
    # Test Youngs and Cramer incremental variance formulas.
    def np_var(A):
        return A.var(axis=0)
    # Naive one pass variance computation - not numerically stable
    # https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance
    def one_pass_var(X):
        n = X.shape[0]
        exp_x2 = (X ** 2).sum(axis=0) / n
        expx_2 = (X.sum(axis=0) / n) ** 2
        return exp_x2 - expx_2
    # Two-pass algorithm, stable.
    # We use it as a benchmark. It is not an online algorithm
    # https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Two-pass_algorithm
    def two_pass_var(X):
        mean = X.mean(axis=0)
        Y = X.copy()
        return np.mean((Y - mean)**2, axis=0)
    # Naive online implementation
    # https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Online_algorithm
    # This works only for chunks for size 1
    def naive_mean_variance_update(x, last_mean, last_variance,
                                   last_sample_count):
        updated_sample_count = (last_sample_count + 1)
        samples_ratio = last_sample_count / float(updated_sample_count)
        updated_mean = x / updated_sample_count + last_mean * samples_ratio
        updated_variance = last_variance * samples_ratio + \
            (x - last_mean) * (x - updated_mean) / updated_sample_count
        return updated_mean, updated_variance, updated_sample_count
    # We want to show a case when one_pass_var has error > 1e-3 while
    # _batch_mean_variance_update has less.
    tol = 200
    n_features = 2
    n_samples = 10000
    # Two constant blocks with hugely different magnitudes make the naive
    # formulas lose precision through catastrophic cancellation.
    x1 = np.array(1e8, dtype=np.float64)
    x2 = np.log(1e-5, dtype=np.float64)
    A0 = x1 * np.ones((n_samples // 2, n_features), dtype=np.float64)
    A1 = x2 * np.ones((n_samples // 2, n_features), dtype=np.float64)
    A = np.vstack((A0, A1))
    # Older versions of numpy have different precision
    # In some old version, np.var is not stable
    if np.abs(np_var(A) - two_pass_var(A)).max() < 1e-6:
        stable_var = np_var
    else:
        stable_var = two_pass_var
    # Naive one pass var: >tol (=1063)
    assert_greater(np.abs(stable_var(A) - one_pass_var(A)).max(), tol)
    # Starting point for online algorithms: after A0
    # Naive implementation: >tol (436)
    mean, var, n = A0[0, :], np.zeros(n_features), n_samples // 2
    for i in range(A1.shape[0]):
        mean, var, n = \
            naive_mean_variance_update(A1[i, :], mean, var, n)
    assert_equal(n, A.shape[0])
    # the mean is also slightly unstable
    assert_greater(np.abs(A.mean(axis=0) - mean).max(), 1e-6)
    assert_greater(np.abs(stable_var(A) - var).max(), tol)
    # Robust implementation: <tol (177)
    mean, var, n = A0[0, :], np.zeros(n_features), n_samples // 2
    for i in range(A1.shape[0]):
        mean, var, n = \
            _incremental_mean_and_var(A1[i, :].reshape((1, A1.shape[1])),
                                      mean, var, n)
    assert_equal(n, A.shape[0])
    assert_array_almost_equal(A.mean(axis=0), mean)
    assert_greater(tol, np.abs(stable_var(A) - var).max())
def test_incremental_variance_ddof():
    # Test that degrees of freedom parameter for calculations are correct.
    rng = np.random.RandomState(1999)
    X = rng.randn(50, 10)
    n_samples, n_features = X.shape
    # Feed X in batches of different sizes and compare the running
    # statistics against a fresh batch computation over X[:j].
    for batch_size in [11, 20, 37]:
        steps = np.arange(0, X.shape[0], batch_size)
        if steps[-1] != X.shape[0]:
            steps = np.hstack([steps, n_samples])
        for i, j in zip(steps[:-1], steps[1:]):
            batch = X[i:j, :]
            if i == 0:
                # First batch bootstraps the running statistics directly.
                incremental_means = batch.mean(axis=0)
                incremental_variances = batch.var(axis=0)
                # Assign this twice so that the test logic is consistent
                incremental_count = batch.shape[0]
                sample_count = batch.shape[0]
            else:
                result = _incremental_mean_and_var(
                    batch, incremental_means, incremental_variances,
                    sample_count)
                (incremental_means, incremental_variances,
                 incremental_count) = result
                sample_count += batch.shape[0]
            calculated_means = np.mean(X[:j], axis=0)
            calculated_variances = np.var(X[:j], axis=0)
            assert_almost_equal(incremental_means, calculated_means, 6)
            assert_almost_equal(incremental_variances,
                                calculated_variances, 6)
            assert_equal(incremental_count, sample_count)
def test_vector_sign_flip():
    # Testing that sign flip is working & largest value has positive sign
    data = np.random.RandomState(36).randn(5, 5)
    max_abs_rows = np.argmax(np.abs(data), axis=1)
    data_flipped = _deterministic_vector_sign_flip(data)
    max_rows = np.argmax(data_flipped, axis=1)
    # After flipping, the per-row max coincides with the per-row abs-max.
    assert_array_equal(max_abs_rows, max_rows)
    # Flipping only multiplies each row by +/-1, so the original data is
    # recovered by re-applying the signs.
    signs = np.sign(data[range(data.shape[0]), max_abs_rows])
    assert_array_equal(data, data_flipped * signs[:, np.newaxis])
def test_softmax():
    # softmax rows must equal exp(x) normalized by the per-row sum.
    rng = np.random.RandomState(0)
    X = rng.randn(3, 5)
    expected = np.exp(X)
    expected = expected / expected.sum(axis=1).reshape((-1, 1))
    assert_array_almost_equal(softmax(X), expected)
def test_stable_cumsum():
    if np_version < (1, 9):
        raise SkipTest("Sum is as unstable as cumsum for numpy < 1.9")
    # Basic agreement with np.cumsum on exact integer input.
    assert_array_equal(stable_cumsum([1, 2, 3]), np.cumsum([1, 2, 3]))
    # With zero tolerances the accumulated float error must trigger the
    # instability warning.
    r = np.random.RandomState(0).rand(100000)
    assert_warns(RuntimeWarning, stable_cumsum, r, rtol=0, atol=0)
    # test axis parameter
    A = np.random.RandomState(36).randint(1000, size=(5, 5, 5))
    assert_array_equal(stable_cumsum(A, axis=0), np.cumsum(A, axis=0))
    assert_array_equal(stable_cumsum(A, axis=1), np.cumsum(A, axis=1))
    assert_array_equal(stable_cumsum(A, axis=2), np.cumsum(A, axis=2))
|
fabiking/plugin.video.fadimesa | refs/heads/master | _ytplist.py | 173 | import urllib
import urllib2,json
import xbmcvfs
import requests,time
import os,xbmc,xbmcaddon,xbmcgui,re
# Kodi addon handle and the per-addon profile directory used as cache root.
addon = xbmcaddon.Addon('plugin.video.live.streamspro')
profile = xbmc.translatePath(addon.getAddonInfo('profile').decode('utf-8'))
cacheDir = os.path.join(profile, 'cachedir')
# Marker file: its mtime records when the cache dir was last (re)created.
clean_cache=os.path.join(cacheDir,'cleancacheafter1month')
# Default HTTP headers used by all outgoing requests in this module.
headers=dict({'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; rv:32.0) Gecko/20100101 Firefox/32.0'})
# Create the cache dir on first run (skipped for network special paths,
# which os.mkdir cannot handle).
if not cacheDir.startswith(('smb://', 'nfs://', 'upnp://', 'ftp://')) and not os.path.isdir(cacheDir):
    os.mkdir(cacheDir)
# Wipe the whole cache once the marker is older than ~30 days; otherwise
# (first run, or right after a wipe) re-create the marker file.
if xbmcvfs.exists(clean_cache) and (time.time()-os.path.getmtime(clean_cache) > 60*60*24*30):
    print 'time of creation of ff',str(time.time()-os.path.getmtime(clean_cache))
    import shutil
    shutil.rmtree(cacheDir)
else:
    with open(clean_cache,'w') as f:
        f.write('')
def YoUTube(page_data,youtube=None,duration=None,max_page=20,nosave=None):
pDialog = xbmcgui.DialogProgress()
pDialog.create('Updating list', 'Downloading ...')
base_yt_url ='http://gdata.youtube.com/feeds/api'
if 'search' in page_data:
youtube = youtube.replace(' ','+')#Lana Del Rey
build_url= base_yt_url + '/videos?q=%s&max-results=50&v=2&alt=json&orderby=published&start-index=%s'
if addon.getSetting('searchlongvideos') == 'true': #duration: #medium or long
build_url = base_yt_url + '/videos?q=%s&max-results=20&v=2&alt=json&duration=long&start-index=%s'
else:
build_url = 'http://www.youtube.com/watch?v=%s' %page_data
count = 1
allurls ={}
for i in range(1,max_page):
url = build_url %(youtube,str(count))
#print url
try:
content = cache(url,int(addon.getSetting("Youtube")))
print len(content)
jcontent = json.loads(content)
entry = jcontent['feed']['entry']
except Exception:
break
for myUrl in entry:
count += 1
allitem = 'item' + str(count)
item = {}
item['title']= removeNonAscii(myUrl['title']['$t']).encode('utf-8')
item['date']= myUrl['published']['$t'].encode('utf-8')
try:
item['desc']= removeNonAscii(myUrl['media$group']['media$description']['$t']).encode('utf-8')
except Exception:
desc = 'UNAVAIABLE'
link = myUrl['link'][0]['href'].encode('utf-8','ignore')
item['url']= re_me(link,utubeid)
allurls[allitem] = item
print len(allurls)
if nosave:
return allurls
pDialog.close()
def re_me(data, re_patten):
    """Return group(1) of the first case-insensitive match of re_patten in
    data, or '' when the pattern does not match.
    """
    m = re.search(re_patten, data, re.I)
    # Idiom fix: identity comparison with None; the original also carried a
    # dead 'match = '' ' pre-assignment that was always overwritten.
    if m is not None:
        return m.group(1)
    return ''
def notification(header="", message="", sleep=3000):
    """ Will display a notification dialog with the specified header and message,
    in addition you can set the length of time it displays in milliseconds and a icon image.
    """
    command = "XBMC.Notification(%s,%s,%i)" % ( header, message, sleep )
    xbmc.executebuiltin(command)
def removeNonAscii(s):
    """Return s with every non-ASCII (ord >= 128) character removed."""
    return "".join(ch for ch in s if ord(ch) < 128)
def makeRequest(url,referer=None,post=None,body={}):
    """Fetch url with the module-level default headers and return the body.

    referer   -- optional Referer header merged into the shared headers dict
    post/body -- accepted for call-site compatibility but unused (requests
                 are sent without a payload, matching prior behavior)

    BUG FIX: the original did `headers.update=({...})`, which replaced the
    dict's update method with a tuple instead of calling it, and only
    issued the request inside the else-branch -- so any call that passed a
    referer raised NameError on 'data'. Now the Referer is merged and the
    request is always performed.
    """
    if referer:
        headers.update({'Referer': referer})
    req = urllib2.Request(url,None,headers)
    response = urllib2.urlopen(req)
    data = response.read()
    response.close()
    return data
# from AddonScriptorde X:\plugin.video.my_music_tv\default.py
def cache(url, duration=0):
    """Return the body of url, served from a file cache when fresh.

    duration is the cache lifetime in days; 0 disables reading the cache
    (the URL is always re-fetched and the cache file rewritten).
    """
    # Cache file name: the URL itself with filesystem-unsafe chars stripped.
    cacheFile = os.path.join(cacheDir, (''.join(c for c in unicode(url, 'utf-8') if c not in '/\\:?"*|<>')).strip())
    if os.path.exists(cacheFile) and duration!=0 and (time.time()-os.path.getmtime(cacheFile) < 60*60*24*duration):
        # Fresh cache hit: return the stored payload.
        fh = xbmcvfs.File(cacheFile, 'r')
        content = fh.read()
        fh.close()
        return content
    else:
        # Miss or stale entry: fetch and overwrite the cache file.
        content = makeRequest(url)
        fh = xbmcvfs.File(cacheFile, 'w')
        fh.write(content)
        fh.close()
        return content
|
SublimeText-Markdown/TableEditor | refs/heads/master | table_plugin.py | 2 | # table_plugin.py - sublime plugins for pretty print text table
# Copyright (C) 2012 Free Software Foundation, Inc.
# Author: Valery Kocubinsky
# Package: SublimeTableEditor
# Homepage: https://github.com/vkocubinsky/SublimeTableEditor
# This file is part of SublimeTableEditor.
# SublimeTableEditor is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# SublimeTableEditor is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with SublimeTableEditor. If not, see <http://www.gnu.org/licenses/>.
import sublime
import sublime_plugin
import re
try:
from . import table_lib as tlib
from . import table_base as tbase
except ValueError:
import table_lib as tlib
import table_base as tbase
class TableContext:
    """Snapshot of the table surrounding the current selection.

    Finds the table's first/last buffer rows, extracts and parses its text,
    and maps the cursor's (row, col) into table coordinates: row_num (row
    within the table), visual_field_num (on-screen field index) and
    field_num (internal field index after colspan resolution).
    """
    def __init__(self, view, sel, syntax):
        self.view = view
        (sel_row, sel_col) = self.view.rowcol(sel.begin())
        self.syntax = syntax
        self.first_table_row = self._get_first_table_row(sel_row, sel_col)
        self.last_table_row = self._get_last_table_row(sel_row, sel_col)
        self.table_text = self._get_table_text(self.first_table_row, self.last_table_row)
        self.visual_field_num = self._visual_field_num(sel_row, sel_col)
        self.row_num = sel_row - self.first_table_row
        self.table_pos = tbase.TablePos(self.row_num, self.visual_field_num)
        self.table = self.syntax.table_parser.parse_text(self.table_text)
        self.table_driver = self.syntax.table_driver
        self.field_num = self.table_driver.visual_to_internal_index(self.table, self.table_pos).field_num
    def _get_table_text(self, first_table_row, last_table_row):
        # Whole table text: from the start of its first line to the end of
        # its last line.
        begin_point = self.view.line(self.view.text_point(first_table_row, 0)
                                     ).begin()
        end_point = self.view.line(self.view.text_point(last_table_row, 0)
                                   ).end()
        return self.view.substr(sublime.Region(begin_point, end_point))
    def _get_last_table_row(self, sel_row, sel_col):
        # Walk down from the selection until a non-table line (or EOF).
        row = sel_row
        last_table_row = sel_row
        last_line = self.view.rowcol(self.view.size())[0]
        while (row <= last_line and self._is_table_row(row)):
            last_table_row = row
            row = row + 1
        return last_table_row
    def _get_first_table_row(self, sel_row, sel_col):
        # Walk up from the selection until a non-table line (or buffer top).
        row = sel_row
        first_table_row = sel_row
        while (row >= 0 and self._is_table_row(row)):
            first_table_row = row
            row = row - 1
        return first_table_row
    def _is_table_row(self, row):
        # True when the buffer line at `row` parses as a table row.
        text = self._get_text(row)
        return self.syntax.table_parser.is_table_row(text)
    def _visual_field_num(self, sel_row, sel_col):
        # Visual field index that contains buffer column sel_col.
        line_text = self._get_text(sel_row)
        line = self.syntax.line_parser.parse(line_text)
        return line.field_num(sel_col)
    def _get_text(self, row):
        # Full text of the buffer line at `row`.
        point = self.view.text_point(row, 0)
        region = self.view.line(point)
        text = self.view.substr(region)
        return text
class AbstractTableCommand(sublime_plugin.TextCommand):
    """Base class for all table commands.

    Subclasses implement run_operation(ctx) (or override run_one_sel); this
    class handles syntax detection, table re-rendering back into the view,
    and selection placement.
    """
    def detect_syntax(self):
        """Build the table syntax object from view settings, falling back
        to auto-detection from the buffer's Sublime syntax file."""
        if self.view.settings().has("table_editor_syntax"):
            syntax_name = self.view.settings().get("table_editor_syntax")
        else:
            syntax_name = self.auto_detect_syntax_name()
        table_configuration = tbase.TableConfiguration()
        # "table_editor_border_style" wins over the legacy
        # "table_editor_style" setting.
        border_style = (self.view.settings().get("table_editor_border_style", None)
                        or self.view.settings().get("table_editor_style", None))
        if border_style == "emacs":
            table_configuration.hline_out_border = '|'
            table_configuration.hline_in_border = '+'
        elif border_style == "grid":
            table_configuration.hline_out_border = '+'
            table_configuration.hline_in_border = '+'
        elif border_style == "simple":
            table_configuration.hline_out_border = '|'
            table_configuration.hline_in_border = '|'
        if self.view.settings().has("table_editor_custom_column_alignment"):
            table_configuration.custom_column_alignment = self.view.settings().get("table_editor_custom_column_alignment")
        if self.view.settings().has("table_editor_keep_space_left"):
            table_configuration.keep_space_left = self.view.settings().get("table_editor_keep_space_left")
        if self.view.settings().has("table_editor_align_number_right"):
            table_configuration.align_number_right = self.view.settings().get("table_editor_align_number_right")
        if self.view.settings().has("table_editor_detect_header"):
            table_configuration.detect_header = self.view.settings().get("table_editor_detect_header")
        if self.view.settings().has("table_editor_intelligent_formatting"):
            table_configuration.intelligent_formatting = self.view.settings().get("table_editor_intelligent_formatting")
        syntax = tlib.create_syntax(syntax_name, table_configuration)
        return syntax
    def auto_detect_syntax_name(self):
        """Map the buffer's tmLanguage file to a table syntax name."""
        view_syntax = self.view.settings().get('syntax')
        if (view_syntax == 'Packages/Markdown/MultiMarkdown.tmLanguage' or
                view_syntax == 'Packages/Markdown/Markdown.tmLanguage'):
            return "MultiMarkdown"
        elif view_syntax == 'Packages/Textile/Textile.tmLanguage':
            return "Textile"
        elif (view_syntax == 'Packages/RestructuredText/reStructuredText.tmLanguage'):
            return "reStructuredText"
        else:
            return "Simple"
    def merge(self, edit, ctx):
        """Write the re-rendered table back into the buffer, replacing only
        the lines that actually changed, and inserting/erasing lines when
        the rendered table grew or shrank."""
        table = ctx.table
        new_lines = table.render_lines()
        first_table_row = ctx.first_table_row
        last_table_row = ctx.last_table_row
        rows = range(first_table_row, last_table_row + 1)
        for row, new_text in zip(rows, new_lines):
            region = self.view.line(self.view.text_point(row, 0))
            old_text = self.view.substr(region)
            if old_text != new_text:
                self.view.replace(edit, region, new_text)
        #case 1: some lines inserted
        if len(rows) < len(new_lines):
            row = last_table_row
            for new_text in new_lines[len(rows):]:
                end_point = self.view.line(self.view.text_point(row, 0)).end()
                self.view.insert(edit, end_point, "\n" + new_text)
                row = row + 1
        #case 2: some lines deleted
        elif len(rows) > len(new_lines):
            for row in rows[len(new_lines):]:
                region = self.view.line(self.view.text_point(row, 0))
                self.view.erase(edit, region)
    def create_context(self, sel):
        # Parse the table around this selection with the detected syntax.
        return TableContext(self.view, sel, self.detect_syntax())
    def run(self, edit):
        """Apply the command to every selection, then restore/extend the
        selection set from the per-selection results."""
        new_sels = []
        for sel in self.view.sel():
            new_sel = self.run_one_sel(edit, sel)
            new_sels.append(new_sel)
        self.view.sel().clear()
        for sel in new_sels:
            self.view.sel().add(sel)
            self.view.show(sel, False)
    def run_one_sel(self, edit, sel):
        """Run the subclass operation on one selection, merge the result
        into the buffer and report status; on failure the cursor stays at
        its original table position."""
        ctx = self.create_context(sel)
        try:
            msg, table_pos = self.run_operation(ctx)
            self.merge(edit, ctx)
            sublime.status_message("Table Editor: {0}".format(msg))
            return self.table_pos_sel(ctx, table_pos)
        except tbase.TableException as err:
            sublime.status_message("Table Editor: {0}".format(err))
            return self.table_pos_sel(ctx, ctx.table_pos)
    def visual_field_sel(self, ctx, row_num, visual_field_num):
        # Cursor at the start of the given visual field (or table start for
        # an empty table).
        if ctx.table.empty():
            pt = self.view.text_point(ctx.first_table_row, 0)
        else:
            pos = tbase.TablePos(row_num, visual_field_num)
            col = ctx.table_driver.get_cursor(ctx.table, pos)
            pt = self.view.text_point(ctx.first_table_row + row_num, col)
        return sublime.Region(pt, pt)
    def table_pos_sel(self, ctx, table_pos):
        # Selection for a TablePos expressed in visual coordinates.
        return self.visual_field_sel(ctx, table_pos.row_num,
                                     table_pos.field_num)
    def field_sel(self, ctx, row_num, field_num):
        # Selection for an internal field index (converted to visual first).
        if ctx.table.empty():
            visual_field_num = 0
        else:
            pos = tbase.TablePos(row_num, field_num)
            visual_field_num = ctx.table_driver.internal_to_visual_index(ctx.table, pos).field_num
        return self.visual_field_sel(ctx, row_num, visual_field_num)
class TableEditorAlignCommand(AbstractTableCommand):
    """
    Key: ctrl+shift+a
    Re-align the table in place, keeping the current field; the cursor is
    placed at the beginning of that field.
    """
    def run_operation(self, ctx):
        driver = ctx.table_driver
        return driver.editor_align(ctx.table, ctx.table_pos)
class TableEditorNextField(AbstractTableCommand):
    """
    Key: tab
    Re-align the table and advance to the next field, appending a new row
    when the cursor was in the last field of the last row.
    """
    def run_operation(self, ctx):
        driver = ctx.table_driver
        return driver.editor_next_field(ctx.table, ctx.table_pos)
class TableEditorPreviousField(AbstractTableCommand):
    """
    Key: shift+tab
    Re-align the table and step back to the previous field.
    """
    def run_operation(self, ctx):
        driver = ctx.table_driver
        return driver.editor_previous_field(ctx.table, ctx.table_pos)
class TableEditorNextRow(AbstractTableCommand):
    """
    Key: enter
    Re-align the table and move down one row, creating it when necessary.
    At the very beginning or end of a line, enter still inserts a newline.
    """
    def run_operation(self, ctx):
        driver = ctx.table_driver
        return driver.editor_next_row(ctx.table, ctx.table_pos)
class TableEditorMoveColumnLeft(AbstractTableCommand):
    """
    Key: alt+left
    Swap the current column with its left neighbor.
    """
    def run_operation(self, ctx):
        driver = ctx.table_driver
        return driver.editor_move_column_left(ctx.table, ctx.table_pos)
class TableEditorMoveColumnRight(AbstractTableCommand):
    """
    Key: alt+right
    Swap the current column with its right neighbor.
    """
    def run_operation(self, ctx):
        driver = ctx.table_driver
        return driver.editor_move_column_right(ctx.table, ctx.table_pos)
class TableEditorDeleteColumn(AbstractTableCommand):
    """
    Key: alt+shift+left
    Remove the column under the cursor.
    """
    def run_operation(self, ctx):
        driver = ctx.table_driver
        return driver.editor_delete_column(ctx.table, ctx.table_pos)
class TableEditorInsertColumn(AbstractTableCommand):
    """
    Keys: alt+shift+right
    Insert a fresh column to the left of the cursor position.
    """
    def run_operation(self, ctx):
        driver = ctx.table_driver
        return driver.editor_insert_column(ctx.table, ctx.table_pos)
class TableEditorKillRow(AbstractTableCommand):
    """
    Key : alt+shift+up
    Remove the row under the cursor.
    """
    def run_operation(self, ctx):
        driver = ctx.table_driver
        return driver.editor_kill_row(ctx.table, ctx.table_pos)
class TableEditorInsertRow(AbstractTableCommand):
    """
    Key: alt+shift+down
    Insert a fresh row above the current one.
    """
    def run_operation(self, ctx):
        driver = ctx.table_driver
        return driver.editor_insert_row(ctx.table, ctx.table_pos)
class TableEditorMoveRowUp(AbstractTableCommand):
    """
    Key: alt+up
    Swap the current row with the one above it.
    """
    def run_operation(self, ctx):
        driver = ctx.table_driver
        return driver.editor_move_row_up(ctx.table, ctx.table_pos)
class TableEditorMoveRowDown(AbstractTableCommand):
    """
    Key: alt+down
    Swap the current row with the one below it.
    """
    def run_operation(self, ctx):
        driver = ctx.table_driver
        return driver.editor_move_row_down(ctx.table, ctx.table_pos)
class TableEditorInsertSingleHline(AbstractTableCommand):
    """
    Key: ctrl+k,-
    Insert a single horizontal separator line below the current row.
    """
    def run_operation(self, ctx):
        driver = ctx.table_driver
        return driver.editor_insert_single_hline(ctx.table, ctx.table_pos)
class TableEditorInsertDoubleHline(AbstractTableCommand):
    """
    Key: ctrl+k,=
    Insert a double horizontal separator line below the current row.
    """
    def run_operation(self, ctx):
        driver = ctx.table_driver
        return driver.editor_insert_double_hline(ctx.table, ctx.table_pos)
class TableEditorHlineAndMove(AbstractTableCommand):
    """
    Key: ctrl+k, enter
    Insert a horizontal line below the current row and put the cursor into
    the row beneath that line.
    """
    def run_operation(self, ctx):
        driver = ctx.table_driver
        return driver.editor_insert_hline_and_move(ctx.table, ctx.table_pos)
class TableEditorSplitColumnDown(AbstractTableCommand):
    """
    Key: alt+enter
    Split rest of cell down from current cursor position,
    insert new line bellow if current row is last row in the table
    or if next line is hline
    """
    def remove_rest_line(self, edit, sel):
        # Cut the buffer text between the cursor and the next '|' separator
        # and return it stripped, so it can be prepended to the cell below.
        end_region = self.view.find("\|",
                                    sel.begin())
        rest_region = sublime.Region(sel.begin(), end_region.begin())
        rest_data = self.view.substr(rest_region)
        self.view.replace(edit, rest_region, "")
        return rest_data.strip()
    def run_one_sel(self, edit, sel):
        ctx = self.create_context(sel)
        field_num = ctx.field_num
        row_num = ctx.row_num
        # Splitting a separator row makes no sense: bail out with a message.
        if (ctx.table[row_num].is_separator() or
                ctx.table[row_num].is_header_separator()):
            sublime.status_message("Table Editor: Split column is not "
                                   "permitted for separator or header "
                                   "separator line")
            return self.table_pos_sel(ctx, ctx.table_pos)
        if row_num + 1 < len(ctx.table):
            # The row below must actually have this field and it must not
            # be a colspan placeholder.
            if len(ctx.table[row_num + 1]) - 1 < field_num:
                sublime.status_message("Table Editor: Split column is not "
                                       "permitted for short line")
                return self.table_pos_sel(ctx, ctx.table_pos)
            elif ctx.table[row_num + 1][field_num].pseudo():
                sublime.status_message("Table Editor: Split column is not "
                                       "permitted to colspan column")
                return self.table_pos_sel(ctx, ctx.table_pos)
        (sel_row, sel_col) = self.view.rowcol(sel.begin())
        # Mutate the buffer first (cut the cell tail), then re-parse the
        # table from the modified buffer.
        rest_data = self.remove_rest_line(edit, sel)
        ctx = self.create_context(sel)
        field_num = ctx.field_num
        row_num = ctx.row_num
        # Need a fresh row when at the table end or just above an hline.
        if row_num + 1 == len(ctx.table) or ctx.table[row_num + 1].is_separator():
            ctx.table.insert_empty_row(row_num + 1)
        row_num = row_num + 1
        # Prepend the cut text to the target cell's existing content.
        ctx.table[row_num][field_num].data = rest_data + " " + ctx.table[row_num][field_num].data.strip()
        ctx.table.pack()
        self.merge(edit, ctx)
        sublime.status_message("Table Editor: Column splitted down")
        return self.field_sel(ctx, row_num, field_num)
class TableEditorJoinLines(AbstractTableCommand):
    """
    Key: ctrl+j
    Merge the current row with the next one, provided the next row is not
    a horizontal line.
    """
    def run_operation(self, ctx):
        driver = ctx.table_driver
        return driver.editor_join_lines(ctx.table, ctx.table_pos)
class TableEditorCsvToTable(AbstractTableCommand):
    """
    Command: table_csv_to_table
    Key: ctrl+k, |
    Convert selected CSV region into table
    """
    def run_one_sel(self, edit, sel):
        # Empty selection: nothing to convert, keep the selection as-is.
        if sel.empty():
            return sel
        else:
            syntax = self.detect_syntax()
            text = self.view.substr(sel)
            table = syntax.table_driver.parse_csv(text)
            self.view.replace(edit, sel, table.render())
            # Place the cursor into the first cell of the new table.
            first_row = self.view.rowcol(sel.begin())[0]
            pt = self.view.text_point(first_row, syntax.table_driver.get_cursor(table, tbase.TablePos(0, 0)))
            sublime.status_message("Table Editor: Table created from CSV")
            return sublime.Region(pt, pt)
class TableEditorDisableForCurrentView(sublime_plugin.TextCommand):
    """Switch Table Editor off for this view by clearing the view-local
    setting named by `prop`."""
    def run(self, args, prop):
        settings = self.view.settings()
        settings.set(prop, False)
class TableEditorEnableForCurrentView(sublime_plugin.TextCommand):
    """Switch Table Editor on for this view by setting the view-local
    setting named by `prop`."""
    def run(self, args, prop):
        settings = self.view.settings()
        settings.set(prop, True)
class TableEditorDisableForCurrentSyntax(sublime_plugin.TextCommand):
    """Remove the enable_table_editor flag from the settings file that
    belongs to the current buffer's syntax."""
    def run(self, edit):
        syntax = self.view.settings().get('syntax')
        if syntax is None:
            return
        m = re.search("([^/]+)[.]tmLanguage$", syntax)
        if m:
            base_name = m.group(1) + ".sublime-settings"
            settings = sublime.load_settings(base_name)
            settings.erase("enable_table_editor")
            sublime.save_settings(base_name)
class TableEditorEnableForCurrentSyntax(sublime_plugin.TextCommand):
    """Set the enable_table_editor flag in the settings file that belongs
    to the current buffer's syntax."""
    def run(self, edit):
        syntax = self.view.settings().get('syntax')
        if syntax is None:
            return
        m = re.search("([^/]+)[.]tmLanguage$", syntax)
        if m:
            base_name = m.group(1) + ".sublime-settings"
            settings = sublime.load_settings(base_name)
            settings.set("enable_table_editor", True)
            sublime.save_settings(base_name)
class TableEditorSetSyntax(sublime_plugin.TextCommand):
    """Force a specific table syntax for this view and enable the editor."""
    def run(self, edit, syntax):
        settings = self.view.settings()
        settings.set("enable_table_editor", True)
        settings.set("table_editor_syntax", syntax)
        sublime.status_message("Table Editor: set syntax to '{0}'"
                               .format(syntax))
|
yencarnacion/jaikuengine | refs/heads/master | .google_appengine/lib/django-1.5/django/contrib/localflavor/py/py_department.py | 109 | # -*- coding: utf-8 -*-
# http://www.statoids.com/upy.html
from __future__ import unicode_literals
# Paraguay's departments plus the capital district (Asunción), keyed by the
# two-letter codes listed at http://www.statoids.com/upy.html, sorted by name.
DEPARTMENT_CHOICES = (
    ('AG', 'Alto Paraguay'),
    ('AA', 'Alto Paraná'),
    ('AM', 'Amambay'),
    ('AS', 'Asunción'),
    ('BQ', 'Boquerón'),
    ('CG', 'Caaguazú'),
    ('CZ', 'Caazapá'),
    ('CY', 'Canindeyú'),
    ('CE', 'Central'),
    ('CN', 'Concepción'),
    ('CR', 'Cordillera'),
    ('GU', 'Guairá'),
    ('IT', 'Itapúa'),
    ('MI', 'Misiones'),
    ('NE', 'Ñeembucú'),
    ('PG', 'Paraguarí'),
    ('PH', 'Pdte. Hayes'),
    ('SP', 'San Pedro'),
)
# Same departments and codes, but labelled with their official roman-numeral
# ordering and sorted by that ordinal.
DEPARTMENT_ROMAN_CHOICES = (
    ('CN', 'I Concepción'),
    ('SP', 'II San Pedro'),
    ('CR', 'III Cordillera'),
    ('GU', 'IV Guairá'),
    ('CG', 'V Caaguazú'),
    ('CZ', 'VI Caazapá'),
    ('IT', 'VII Itapúa'),
    ('MI', 'VIII Misiones'),
    ('PG', 'IX Paraguarí'),
    ('AA', 'X Alto Paraná'),
    ('CE', 'XI Central'),
    ('NE', 'XII Ñeembucú'),
    ('AM', 'XIII Amambay'),
    ('CY', 'XIV Canindeyú'),
    ('PH', 'XV Pdte. Hayes'),
    ('AG', 'XVI Alto Paraguay'),
    ('BQ', 'XVII Boquerón'),
    ('AS', 'XVIII Asunción'),
)
|
dzsii/lic | refs/heads/master | src/LicUndoActions.py | 5 | """
Lic - Instruction Book Creation software
Copyright (C) 2010 Remi Gagne
This file (LicUndoActions.py) is part of Lic.
Lic is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Lic is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see http://www.gnu.org/licenses/
"""
from LicCommonImports import *
def resetGLItem(self, templateItem):
    """Re-render a template item's pixmap and re-derive dependent layouts.

    Grafted onto QUndoCommand as a shared helper. Depending on the item's
    class, also recomputes CSI dimensions, PLI part dimensions/layouts, or
    submodel preview images across the whole instruction set.
    """
    instructions = templateItem.getPage().instructions
    templateItem.resetPixmap()
    templateItem.getPage().resetCallout()
    templateItem.getPage().initLayout()
    if templateItem.itemClassName == "CSI":
        # initCSIDimensions is a generator; drain it for its side effects.
        for unused in instructions.initCSIDimensions(True):
            pass  # Don't care about yielded items here
    elif templateItem.itemClassName == "PLI":
        for unused in instructions.initPartDimensions(True):
            pass  # Don't care about yielded items here
        instructions.mainModel.initAllPLILayouts()
    elif templateItem.itemClassName == "SubmodelPreview":
        instructions.mainModel.initSubmodelImages()  # TODO: Template rotate Submodel Image is broken for nested submodels (viper.lic)
NextCommandID = 122
def getNewCommandID():
global NextCommandID
NextCommandID += 1
return NextCommandID
# Graft shared behavior onto QUndoCommand for every command in this module:
# id() reports the per-class _id, and both undo() and redo() funnel into the
# single doAction(redo) method that each command class defines.
QUndoCommand.id = lambda self: self._id
QUndoCommand.undo = lambda self: self.doAction(False)
QUndoCommand.redo = lambda self: self.doAction(True)
QUndoCommand.resetGLItem = resetGLItem
class MoveCommand(QUndoCommand):
    """Undoable move of one or more page objects.

    Each entry is stored as (item, position before move, position after
    move), captured at construction time.
    """
    _id = getNewCommandID()

    def __init__(self, itemList):
        QUndoCommand.__init__(self, "move Page Object")
        # Snapshot (item, before, after) for every moved item.
        self.itemList = [(i, i.oldPos, i.pos()) for i in itemList]

    def doAction(self, redo):
        for target, before, after in self.itemList:
            target.setPos(after if redo else before)
            # Decorations must follow the item they belong to.
            if hasattr(target, "resetArrow"):
                target.resetArrow()
            parent = target.parentItem()
            if hasattr(parent, "resetRect"):
                parent.resetRect()
class ResizeCommand(QUndoCommand):
    """Undoable resize of a single item."""
    _id = getNewCommandID()

    def __init__(self, item, oldRect, newRect):
        QUndoCommand.__init__(self, "resize Item")
        self.item = item
        self.oldRect = oldRect
        self.newRect = newRect

    def doAction(self, redo):
        targetRect = self.newRect if redo else self.oldRect
        # Items that know how to lay themselves out get the richer call.
        if hasattr(self.item, 'initLayout'):
            self.item.initLayout(targetRect)
        else:
            self.item.setRect(targetRect)
class LayoutItemCommand(QUndoCommand):  # TODO: Should be able to undo Step Layouts (for Create Callout, etc)
    """Undoable auto-layout: redo re-runs layout, undo restores the snapshot."""
    _id = getNewCommandID()

    def __init__(self, target, originalLayout):
        QUndoCommand.__init__(self, "auto-layout")
        self.target = target
        self.originalLayout = originalLayout

    def doAction(self, redo):
        if not redo:
            # Roll back to the layout captured before the auto-layout ran.
            self.target.revertToLayout(self.originalLayout)
            return
        self.target.initLayout()
class CalloutArrowMoveCommand(QUndoCommand):
    """Undoable move of a callout arrow's end point."""
    _id = getNewCommandID()

    def __init__(self, arrow, oldPoint, newPoint):
        QUndoCommand.__init__(self, "move Callout Arrow")
        self.arrow = arrow
        self.oldPoint = oldPoint
        self.newPoint = newPoint

    # Need to invalidate scene because we don't actually move a part here, so scene doesn't redraw
    def doAction(self, redo):
        if redo:
            self.arrow.point = self.newPoint
        else:
            self.arrow.point = self.oldPoint
        self.arrow.parentItem().internalPoints = []
        self.arrow.update()
class SetTextCommand(QUndoCommand):
    """Undoable edit of a label's text."""
    _id = getNewCommandID()
    def __init__(self, label, oldText, newText):
        QUndoCommand.__init__(self, "Change Label Text")
        self.label, self.oldText, self.newText = label, oldText, newText
    def doAction(self, redo):
        text = self.newText if redo else self.oldText
        self.label.setText(text)
        # Rebind data() so tree/model views report the current text; the
        # closure captures this call's local 'text', and each undo/redo
        # replaces the lambda, so late binding is not an issue here.
        self.label.data = lambda index: "Label: " + text
class CalloutBorderFitCommand(QUndoCommand):
    """Undoable change of a callout's border-fit mode."""
    _id = getNewCommandID()

    def __init__(self, callout, oldBorder, newBorder):
        QUndoCommand.__init__(self, "Callout Border fit")
        self.callout = callout
        self.oldBorder = oldBorder
        self.newBorder = newBorder

    def doAction(self, redo):
        fit = self.newBorder if redo else self.oldBorder
        self.callout.setBorderFit(fit)
        self.callout.update()
class SetDefaultDiameterCommand(QUndoCommand):
    """Undoable change of a template circle's default diameter.

    Applies the diameter to the template circle, then propagates it to every
    same-class circle item on every page; optionally re-runs page layout.
    """
    _id = getNewCommandID()
    def __init__(self, circle, oldDiameter, newDiameter, doLayout):
        QUndoCommand.__init__(self, "circle Diameter")
        self.circle, self.oldDiameter, self.newDiameter, self.doLayout = circle, oldDiameter, newDiameter, doLayout
    def doAction(self, redo):
        # redo applies the new diameter; undo restores the old one.
        diameter = self.newDiameter if redo else self.oldDiameter
        template = self.circle.getPage()
        self.circle.setDiameter(diameter)
        self.circle.update()
        if self.doLayout:
            template.initLayout()
        # Propagate to every matching circle item across all pages.
        for page in template.instructions.getPageList():
            for child in page.getAllChildItems():
                if self.circle.itemClassName == child.itemClassName:
                    child.setDiameter(diameter)
                    child.update()
                    if self.doLayout:
                        child.getPage().initLayout()
class DisplacePartCommand(QUndoCommand):
    """Undoable change of a part's displacement vector."""
    _id = getNewCommandID()

    def __init__(self, part, oldDisp, newDisp):
        QUndoCommand.__init__(self, "Displace Part")
        self.part = part
        self.oldDisp = oldDisp
        self.newDisp = newDisp

    def doAction(self, redo):
        chosen = self.newDisp if redo else self.oldDisp
        # Copy into a fresh list so later edits can't alias the stored value.
        self.part.displacement = list(chosen)
        self.part.getCSI().resetPixmap()
class BeginEndDisplacementCommand(QUndoCommand):
    """Undoable begin (or, with end=True, removal) of a part displacement."""
    _id = getNewCommandID()
    def __init__(self, part, direction, end = False):
        if end:
            QUndoCommand.__init__(self, "Remove Part displacement")
            # 'Remove' is the exact inverse of 'begin': swapping the bound
            # undo/redo methods on this instance reuses doAction unchanged.
            self.undo, self.redo = self.redo, self.undo
        else:
            QUndoCommand.__init__(self, "Begin Part displacement")
        self.part, self.direction = part, direction
    def doAction(self, redo):
        part = self.part
        part.scene().emit(SIGNAL("layoutAboutToBeChanged()"))
        part.addNewDisplacement(self.direction) if redo else part.removeDisplacement()
        part.scene().emit(SIGNAL("layoutChanged()"))
        part.getCSI().resetPixmap()
        if part.originalPart:  # Part is in Callout - resize Callout
            part.getStep().resetRect()
class ResizePageCommand(QUndoCommand):
    """Undoable global page-size / resolution change, with optional rescale.

    Overrides undo/redo directly (instead of doAction) because the two
    directions carry different scale factors.
    """
    _id = getNewCommandID()

    def __init__(self, template, oldPageSize, newPageSize, oldResolution, newResolution, doRescale):
        QUndoCommand.__init__(self, "Page Resize")
        self.template = template
        self.oldPageSize = oldPageSize
        self.newPageSize = newPageSize
        self.oldResolution = oldResolution
        self.newResolution = newResolution
        self.doRescale = doRescale
        # Undo returns content to its original size; redo scales by the
        # ratio of new to old page width.
        self.oldScale = 1.0
        self.newScale = float(newPageSize.width()) / float(oldPageSize.width())

    def undo(self):
        self.template.setGlobalPageSize(self.oldPageSize, self.oldResolution, self.doRescale, self.oldScale)

    def redo(self):
        self.template.setGlobalPageSize(self.newPageSize, self.newResolution, self.doRescale, self.newScale)
class MoveStepToPageAtRowCommand(QUndoCommand):
    """Undoable move of a step to a specific row on a specific page."""
    _id = getNewCommandID()

    def __init__(self, page, step, row):
        QUndoCommand.__init__(self, "move Step to Page")
        self.step = step
        self.page = page
        self.row = row
        # Remember where the step came from so undo can put it back.
        self.oldRow = step.row()
        self.oldPage = step.getPage()

    def doAction(self, redo):
        if redo:
            self.page.insertStepAtRow(self.step, self.row)
        else:
            self.oldPage.insertStepAtRow(self.step, self.oldRow)
class MoveStepToPageCommand(QUndoCommand):
    """
    stepSet stores a list of (step, oldPage, newPage) tuples:
    stepSet = [(step1, oldPage1, newPage1), (step2, oldPage2, newPage2)]
    """
    _id = getNewCommandID()
    def __init__(self, stepSet):
        QUndoCommand.__init__(self, "move Step to Page")
        self.stepSet = stepSet
    def doAction(self, redo):
        # Bracket the whole batch with layout-change signals on the scene.
        self.stepSet[0][0].scene().emit(SIGNAL("layoutAboutToBeChanged()"))
        for step, oldPage, newPage in self.stepSet:
            step.moveToPage(newPage if redo else oldPage)
            if step.csi.containsSubmodel():
                # Moving a submodel step can change submodel page ordering.
                model = newPage.instructions.mainModel
                model.reOrderSubmodelPages()
                model.syncPageNumbers()
            # Both affected pages need a fresh layout.
            newPage.initLayout()
            oldPage.initLayout()
        self.stepSet[0][0].scene().emit(SIGNAL("layoutChanged()"))
class SwapStepsCommand(QUndoCommand):
    """Undoable swap of two steps; a swap is its own inverse, so redo and
    undo perform the identical operation."""
    _id = getNewCommandID()
    def __init__(self, step1, step2):
        QUndoCommand.__init__(self, "Swap Steps")
        self.step1, self.step2 = step1, step2
    def doAction(self, redo):
        s1, s2 = self.step1, self.step2
        p1, p2 = s1.parentItem(), s2.parentItem()
        p1.scene().emit(SIGNAL("layoutAboutToBeChanged()"))
        if not s1.isInCallout():
            # Swap tree-model rows too — skipped for callout steps,
            # presumably because they are not in the parents' child rows
            # (TODO confirm).
            i1, i2 = s1.row(), s2.row()
            p1.children[i1], p2.children[i2] = p2.children[i2], p1.children[i1]
        # Swap list positions, step numbers, and mark both CSIs for re-render.
        i1, i2 = p1.steps.index(s1), p2.steps.index(s2)
        p1.steps[i1], p2.steps[i2] = p2.steps[i2], p1.steps[i1]
        s1.number, s2.number = s2.number, s1.number
        s1.csi.isDirty, s2.csi.isDirty = True, True
        if p1 != p2:
            # Steps crossed pages: reparent each to the other's page.
            s1.setParentItem(p2)
            s2.setParentItem(p1)
        if s1.csi.containsSubmodel() or s2.csi.containsSubmodel():
            model = p1.instructions.mainModel
            model.reOrderSubmodelPages()
            model.syncPageNumbers()
        p1.initLayout()
        if p1 != p2:
            p2.initLayout()
        p1.scene().emit(SIGNAL("layoutChanged()"))
class AddRemovePartCommand(QUndoCommand):
    """Undoable add (addPart=True) or delete (addPart=False) of a part.

    One doAction serves both: redo of an add and undo of a delete are the
    same operation, and vice versa.
    """
    _id = getNewCommandID()
    def __init__(self, part, step, addPart):
        QUndoCommand.__init__(self, "%s Part" % ("add" if addPart else "delete"))
        self.part, self.step, self.addPart = part, step, addPart
    def doAction(self, redo):
        step = self.step
        page = step.getPage()
        submodel = page.submodel
        step.scene().clearSelection()
        step.scene().emit(SIGNAL("layoutAboutToBeChanged()"))
        # True when this direction means "the part should exist".
        if (redo and self.addPart) or (not redo and not self.addPart):
            step.addPart(self.part)
            submodel.parts.append(self.part)
        else:
            # Detach from the scene graph before removal so Qt doesn't delete it.
            self.part.setParentItem(None)
            step.removePart(self.part)
            submodel.parts.remove(self.part)
        step.scene().emit(SIGNAL("layoutChanged()"))
        page.instructions.updateMainModel()
        page.updateSubmodel()
        step.csi.isDirty = True
        page.initLayout()
class AddRemoveArrowCommand(QUndoCommand):
    """Undoable add or delete of a displacement arrow on a part."""
    _id = getNewCommandID()
    def __init__(self, part, arrow, index, addArrow):
        QUndoCommand.__init__(self, "%s Arrow" % ("add" if addArrow else "delete"))
        self.part, self.arrow, self.index, self.addArrow = part, arrow, index, addArrow
    def doAction(self, redo):
        self.part.scene().emit(SIGNAL("layoutAboutToBeChanged()"))
        if (redo and self.addArrow) or (not redo and not self.addArrow):
            self.arrow.setParentItem(self.part)
            # Re-insert at the original index so arrow ordering survives undo/redo.
            self.part.arrows.insert(self.index, self.arrow)
        else:
            self.part.scene().removeItem(self.arrow)
            self.part.arrows.remove(self.arrow)
            self.arrow.setParentItem(None)
        # Arrows render into the CSI, so it must re-render either way.
        self.part.getCSI().isDirty = True
        self.part.scene().emit(SIGNAL("layoutChanged()"))
class AddRemoveLabelCommand(QUndoCommand):
    """Undoable add or delete of a free-floating label on a page."""
    _id = getNewCommandID()
    def __init__(self, page, label, index, addLabel):
        QUndoCommand.__init__(self, "%s Label" % ("add" if addLabel else "delete"))
        self.page, self.label, self.index, self.addLabel = page, label, index, addLabel
    def doAction(self, redo):
        self.page.scene().emit(SIGNAL("layoutAboutToBeChanged()"))
        if (redo and self.addLabel) or (not redo and not self.addLabel):
            self.label.setParentItem(self.page)
            # Restore at the original index so label ordering is stable.
            self.page.labels.insert(self.index, self.label)
        else:
            self.page.scene().removeItem(self.label)
            self.page.labels.remove(self.label)
            self.label.setParentItem(None)
class ShowHideSubmodelsInPLICommand(QUndoCommand):
    """Undoable global toggle of whether submodel parts appear in PLIs."""
    _id = getNewCommandID()
    def __init__(self, templatePLI, show):
        QUndoCommand.__init__(self, "%s Submodels in PLI" % ("show" if show else "remove"))
        self.templatePLI, self.show = templatePLI, show
    def doAction(self, redo):
        self.templatePLI.scene().emit(SIGNAL("layoutAboutToBeChanged()"))
        # redo applies the requested state; undo applies its opposite.
        show = (redo and self.show) or (not redo and not self.show)
        # Class-level flag: affects every PLI instance, not just the template's.
        self.templatePLI.__class__.includeSubmodels = show
        for page in self.templatePLI.getPage().instructions.getPageList():
            for step in page.steps:
                for part in [p for p in step.csi.getPartList() if p.isSubmodel]:
                    if show:
                        step.enablePLI()
                        if not part.isInPLI:
                            step.pli.addPart(part)
                    else:
                        if part.isInPLI:
                            step.pli.removePart(part)
                        # A PLI left with no parts is hidden entirely.
                        if step.pli.isEmpty():
                            step.disablePLI()
                    part.isInPLI = show
                    step.initLayout()
        self.templatePLI.scene().emit(SIGNAL("layoutChanged()"))
class ShowHideStepSeparatorCommand(QUndoCommand):
    """Undoable global toggle of step-separator visibility."""
    _id = getNewCommandID()

    def __init__(self, template, show):
        QUndoCommand.__init__(self, "%s Step Separators" % ("show" if show else "hide"))
        self.template, self.show = template, show

    def doAction(self, redo):
        # redo applies the requested state; undo applies its inverse.
        visible = self.show if redo else not self.show
        self.template.__class__.separatorsVisible = visible
        for separator in self.template.separators:
            separator.enabled = visible
        for page in self.template.instructions.getPageList():
            page.showHideSeparators(visible)
class AddRemoveRotateIconCommand(QUndoCommand):
    """Undoable add or delete of a step's rotation icon."""
    _id = getNewCommandID()
    def __init__(self, step, addIcon):
        QUndoCommand.__init__(self, "%s Rotation Icon" % ("add" if addIcon else "delete"))
        self.step, self.addIcon = step, addIcon
    def doAction(self, redo):
        self.step.scene().emit(SIGNAL("layoutAboutToBeChanged()"))
        if (redo and self.addIcon) or (not redo and not self.addIcon):
            self.step.addRotateIcon()
            self.step.positionRotateIcon()
        else:
            self.step.removeRotateIcon()
        self.step.scene().emit(SIGNAL("layoutChanged()"))
class AddRemoveStepCommand(QUndoCommand):
    """Undoable add or delete of a step.

    Snapshots the parent's layout at construction so that removing the step
    can revert to the exact pre-insertion layout instead of re-laying out.
    """
    _id = getNewCommandID()
    def __init__(self, step, addStep):
        QUndoCommand.__init__(self, "%s Step" % ("add" if addStep else "delete"))
        self.step, self.addStep = step, addStep
        self.parent = step.parentItem()
        self.originalLayout = self.parent.getCurrentLayout()
    def doAction(self, redo):
        parent = self.parent
        parent.scene().clearSelection()
        if (redo and self.addStep) or (not redo and not self.addStep):
            parent.scene().emit(SIGNAL("layoutAboutToBeChanged()"))
            parent.insertStep(self.step)
            parent.scene().emit(SIGNAL("layoutChanged()"))
            # Leave the freshly added step selected for the user.
            self.step.setSelected(True)
            parent.initLayout()
        else:
            self.step.setSelected(False)
            parent.scene().emit(SIGNAL("layoutAboutToBeChanged()"))
            parent.removeStep(self.step)
            parent.scene().emit(SIGNAL("layoutChanged()"))
            parent.revertToLayout(self.originalLayout)
class AddRemoveCalloutCommand(QUndoCommand):
    """Undoable add or delete of a callout on its parent step."""
    _id = getNewCommandID()
    def __init__(self, callout, addCallout):
        QUndoCommand.__init__(self, "%s Callout" % ("add" if addCallout else "delete"))
        self.callout, self.addCallout = callout, addCallout
        self.parent = callout.parentItem()
    def doAction(self, redo):
        parent = self.parent
        if (redo and self.addCallout) or (not redo and not self.addCallout):
            parent.scene().emit(SIGNAL("layoutAboutToBeChanged()"))
            parent.addCallout(self.callout)
            parent.scene().emit(SIGNAL("layoutChanged()"))
        else:
            self.callout.setSelected(False)
            parent.scene().emit(SIGNAL("layoutAboutToBeChanged()"))
            parent.removeCallout(self.callout)
            parent.scene().emit(SIGNAL("layoutChanged()"))
            # Re-layout only after removal; adding leaves layout to the caller.
            parent.initLayout()
class AddRemovePageCommand(QUndoCommand):
    """Undoable add or delete of a whole page."""
    _id = getNewCommandID()
    def __init__(self, scene, page, addPage):
        QUndoCommand.__init__(self, "%s Page" % ("add" if addPage else "delete"))
        self.scene, self.page, self.addPage = scene, page, addPage
    def doAction(self, redo):
        page = self.page
        self.scene.emit(SIGNAL("layoutAboutToBeChanged()"))
        if (redo and self.addPage) or (not redo and not self.addPage):
            page.submodel.addPage(page)
            number = page.number
        else:
            page.submodel.deletePage(page)
            # After a delete, select the page that preceded the removed one.
            number = page.number - 1
        self.scene.emit(SIGNAL("layoutChanged()"))
        self.scene.selectPage(number)
class AddRemoveTitlePageCommand(QUndoCommand):
    """Undoable add or delete of the instruction book's title page."""
    _id = getNewCommandID()
    def __init__(self, scene, page, addPage):
        QUndoCommand.__init__(self, "%s Title Page" % ("add" if addPage else "delete"))
        self.scene, self.page, self.addPage = scene, page, addPage
    def doAction(self, redo):
        page = self.page
        model = page.submodel
        self.scene.emit(SIGNAL("layoutAboutToBeChanged()"))
        if (redo and self.addPage) or (not redo and not self.addPage):
            model._hasTitlePage = True
            model.titlePage = page
            # The page may already be on the scene from a previous redo.
            if page.scene() is None:
                self.scene.addItem(page)
            # Shift every other page's number and tree row down by one.
            model.updatePageNumbers(1, 1)
            model.incrementRows(1)
        else:
            model._hasTitlePage = False
            model.titlePage = None
            self.scene.removeItem(page)
            model.updatePageNumbers(1, -1)
            model.incrementRows(-1)
        self.scene.emit(SIGNAL("layoutChanged()"))
        self.scene.selectPage(1)
class AddRemoveGuideCommand(QUndoCommand):
    """Undoable add or removal of an alignment guide on the scene."""
    _id = getNewCommandID()

    def __init__(self, scene, guide, addGuide):
        QUndoCommand.__init__(self, "%s Guide" % ("add" if addGuide else "remove"))
        self.scene, self.guide, self.addGuide = scene, guide, addGuide

    def doAction(self, redo):
        # Adding-on-redo and removing-on-undo share a branch, and vice versa.
        adding = self.addGuide if redo else not self.addGuide
        if adding:
            self.scene.guides.append(self.guide)
            self.scene.addItem(self.guide)
        else:
            self.scene.removeItem(self.guide)
            self.scene.guides.remove(self.guide)
class AddRemoveAnnotationCommand(QUndoCommand):
    """Undoable add or removal of a page annotation."""
    _id = getNewCommandID()
    def __init__(self, page, annotation, addAnnotation):
        QUndoCommand.__init__(self, "%s Annotation" % ("add" if addAnnotation else "remove"))
        self.page, self.annotation, self.addAnnotation = page, annotation, addAnnotation
    def doAction(self, redo):
        page, item = self.page, self.annotation
        page.scene().emit(SIGNAL("layoutAboutToBeChanged()"))
        if (redo and self.addAnnotation) or (not redo and not self.addAnnotation):
            item.setParentItem(page)
            # Track in both the annotation list and the page's child rows.
            page.annotations.append(item)
            page.addChild(len(page.children), item)
        else:
            page.scene().removeItem(item)
            page.annotations.remove(item)
            page.children.remove(item)
        page.scene().emit(SIGNAL("layoutChanged()"))
class AddRemovePartToPLICommand(QUndoCommand):
    """Undoable add/remove of a single part to/from its step's PLI."""
    _id = getNewCommandID()
    def __init__(self, part, addPart):
        QUndoCommand.__init__(self, "%s Part %s PLI" % (("add", "to") if addPart else ("remove", "from")))
        self.part, self.addPart = part, addPart
    def doAction(self, redo):
        part, step = self.part, self.part.getStep()
        pli = step.pli
        part.scene().emit(SIGNAL("layoutAboutToBeChanged()"))
        if (redo and self.addPart) or (not redo and not self.addPart):
            # Make sure the PLI exists/shows before adding to it.
            step.enablePLI()
            pli.addPart(part)
            part.isInPLI = True
        else:
            pli.removePart(part)
            # An emptied PLI is hidden entirely.
            if pli.isEmpty():
                step.disablePLI()
            part.isInPLI = False
        step.initLayout()
        part.scene().emit(SIGNAL("layoutChanged()"))
class MovePartsToStepCommand(QUndoCommand):
    """Undoable move of a list of parts into a target step.

    Records each part's originating step so undo can return every part to
    where it came from.
    """
    _id = getNewCommandID()
    def __init__(self, partList, newStep):
        QUndoCommand.__init__(self, "move Part to Step")
        self.newStep = newStep
        self.partListStepPairs = [(p, p.getStep()) for p in partList]
    def doAction(self, redo):
        step = self.newStep
        step.scene().clearSelection()
        step.scene().emit(SIGNAL("layoutAboutToBeChanged()"))
        redoSubmodelOrder = False
        # Track every step number touched so they can all be refreshed below.
        stepsToReset = set([step.number])
        for part, oldStep in self.partListStepPairs:
            # Displacement arrows are pseudo-parts; they never move steps.
            if part.filename == 'arrow':
                continue
            startStep = oldStep if redo else step
            endStep = step if redo else oldStep
            part.setParentItem(None)  # Temporarily set part's parent, so it doesn't get deleted by Qt
            startStep.removePart(part)
            endStep.addPart(part)
            if part.isSubmodel:
                redoSubmodelOrder = True
            stepsToReset.add(oldStep.number)
        if redoSubmodelOrder:
            # Submodels changed steps: their page ordering may need fixing.
            mainModel = step.getPage().instructions.mainModel
            mainModel.reOrderSubmodelPages()
            mainModel.syncPageNumbers()
        step.scene().emit(SIGNAL("layoutChanged()"))
        # Need to refresh each step between the lowest and highest numbers
        if step.isInCallout():
            step.parentItem().resetStepSet(min(stepsToReset), max(stepsToReset))
        else:
            step.getPage().submodel.resetStepSet(min(stepsToReset), max(stepsToReset))
class AddPartsToCalloutCommand(QUndoCommand):
    """Undoable addition of a list of parts to a callout."""
    _id = getNewCommandID()

    def __init__(self, callout, partList):
        QUndoCommand.__init__(self, "Add Parts to Callout")
        self.callout, self.partList = callout, partList

    def doAction(self, redo):
        scene = self.callout.scene()
        scene.emit(SIGNAL("layoutAboutToBeChanged()"))
        # redo adds each part; undo removes it again.
        action = self.callout.addPart if redo else self.callout.removePart
        for part in self.partList:
            action(part)
        scene.emit(SIGNAL("layoutChanged()"))
        self.callout.steps[-1].csi.resetPixmap()
        self.callout.initLayout()
class RemovePartsFromCalloutCommand(QUndoCommand):
    """Undoable removal of a list of parts from a callout.

    Stores each part's callout step so undo can put parts back in place.
    """
    _id = getNewCommandID()
    def __init__(self, callout, partList):
        QUndoCommand.__init__(self, "Remove Parts from Callout")
        self.callout = callout
        self.partStepList = [(part, part.calloutPart.getStep()) for part in partList]
    def doAction(self, redo):
        self.callout.scene().emit(SIGNAL("layoutAboutToBeChanged()"))
        for part, step in self.partStepList:
            if redo:
                self.callout.removePart(part)
            else:
                # Restore the part into the exact step it was removed from.
                self.callout.addPart(part, step)
        self.callout.scene().emit(SIGNAL("layoutChanged()"))
        # Every callout step may have changed; re-render them all.
        for step in self.callout.steps:
            step.csi.resetPixmap()
        self.callout.initLayout()
class MergeCalloutsCommand(QUndoCommand):
    """Undoable merge (or split) of a list of callouts into a main callout."""
    _id = getNewCommandID()
    def __init__(self, mainCallout, calloutList, mergeCallouts):
        QUndoCommand.__init__(self, "%s Callouts" % ("Merge" if mergeCallouts else "Split"))
        self.mainCallout, self.mergeCallouts = mainCallout, mergeCallouts
        # Store the original {callout: merged callouts} configuration
        self.calloutConfig = dict([(callout, tuple(callout.mergedCallouts)) for callout in calloutList])
        self.originalMergedCallouts = tuple(self.mainCallout.mergedCallouts)
        self.parent = mainCallout.parentItem()
    def doAction(self, redo):
        parent = self.parent
        parent.scene().emit(SIGNAL("layoutAboutToBeChanged()"))
        if (redo and self.mergeCallouts) or (not redo and not self.mergeCallouts):
            # Fold each callout into the main one and take it off the step.
            for callout in self.calloutConfig.keys():
                parent.removeCallout(callout)
                self.mainCallout.mergeCallout(callout)
        else:
            # Restore each callout to the step along with its own merge list.
            for callout, mergeList in self.calloutConfig.items():
                parent.addCallout(callout)
                self.mainCallout.removeMergedCallout(callout)
                callout.setMergedCallouts(list(mergeList))
            self.mainCallout.setMergedCallouts(list(self.originalMergedCallouts))
        parent.initLayout()
        parent.scene().emit(SIGNAL("layoutChanged()"))
class SwitchToNextCalloutBase(QUndoCommand):
    """Undoable rotation of which callout acts as the base of a merged set.

    redo promotes the first merged callout to base; undo promotes the last,
    reversing the rotation. Note self.callout is rebound to the new base at
    the end of each doAction.
    """
    _id = getNewCommandID()
    def __init__(self, callout, doSwitch):
        QUndoCommand.__init__(self, "Switch to next Callout base")
        self.callout, self.doSwitch = callout, doSwitch
        self.parent = callout.parentItem()
    def doAction(self, redo):
        parent = self.parent
        parent.scene().emit(SIGNAL("layoutAboutToBeChanged()"))
        if (redo and self.doSwitch) or (not redo and not self.doSwitch):
            # Promote the first merged callout; old base goes to the end.
            newCallout = self.callout.mergedCallouts.pop(0)
            parent.addCallout(newCallout)
            newCallout.mergeCallout(self.callout, append = True)
        else:
            # Promote the last merged callout; old base goes to the front.
            newCallout = self.callout.mergedCallouts.pop()
            parent.addCallout(newCallout)
            newCallout.mergeCallout(self.callout)
        parent.removeCallout(self.callout)
        self.callout.mergedCallouts = []
        self.callout = newCallout
        parent.initLayout()
        parent.scene().emit(SIGNAL("layoutChanged()"))
class ChangeAnnotationPixmap(QUndoCommand):
    """Undoable swap of an annotation's image file."""
    _id = getNewCommandID()

    def __init__(self, annotation, oldFilename, newFilename):
        QUndoCommand.__init__(self, "change Annotation picture")
        self.annotation = annotation
        self.oldFilename = oldFilename
        self.newFilename = newFilename

    def doAction(self, redo):
        if redo:
            source = self.newFilename
        else:
            source = self.oldFilename
        self.annotation.setPixmap(QPixmap(source))
class ToggleAnnotationOrderCommand(QUndoCommand):
    """Undoable move of an annotation to the foreground or background."""
    _id = getNewCommandID()

    def __init__(self, annotation, moveForward):
        QUndoCommand.__init__(self, "move Annotation to %s" % ("Foreground" if moveForward else "Background"))
        self.annotation, self.moveForward = annotation, moveForward

    def doAction(self, redo):
        # Undo moves in the opposite direction of the stored flag.
        forward = self.moveForward if redo else not self.moveForward
        self.annotation.changeOrder(forward)
class ToggleStepNumbersCommand(QUndoCommand):
    """Undoable show/hide of the step numbers inside a callout."""
    _id = getNewCommandID()

    def __init__(self, callout, enableNumbers):
        QUndoCommand.__init__(self, "%s Step Numbers" % ("show" if enableNumbers else "hide"))
        self.callout, self.enableNumbers = callout, enableNumbers

    def doAction(self, redo):
        scene = self.callout.scene()
        scene.emit(SIGNAL("layoutAboutToBeChanged()"))
        # redo applies the requested state; undo applies its inverse.
        if self.enableNumbers if redo else not self.enableNumbers:
            self.callout.enableStepNumbers()
        else:
            self.callout.disableStepNumbers()
        scene.emit(SIGNAL("layoutChanged()"))
        self.callout.initLayout()
class ToggleCalloutQtyCommand(QUndoCommand):
    """Undoable show/hide of a callout's quantity label."""
    _id = getNewCommandID()
    def __init__(self, callout, enableQty):
        QUndoCommand.__init__(self, "%s Callout Quantity" % ("Show" if enableQty else "Hide"))
        self.callout, self.enableQty = callout, enableQty
    def doAction(self, redo):
        self.callout.scene().emit(SIGNAL("layoutAboutToBeChanged()"))
        # redo applies the requested state; undo applies its opposite.
        if (redo and self.enableQty) or (not redo and not self.enableQty):
            self.callout.setMergedQuantity()
        else:
            self.callout.removeQuantityLabel()
        self.callout.scene().emit(SIGNAL("layoutChanged()"))
        self.callout.initLayout()
class AdjustArrowLength(QUndoCommand):
    """Undoable change of a displacement arrow's length."""
    _id = getNewCommandID()

    def __init__(self, arrow, oldLength, newLength):
        QUndoCommand.__init__(self, "change arrow length")
        self.arrow = arrow
        self.oldLength = oldLength
        self.newLength = newLength

    def doAction(self, redo):
        if redo:
            self.arrow.setLength(self.newLength)
        else:
            self.arrow.setLength(self.oldLength)
        # The arrow renders into its CSI, so the CSI must re-render.
        self.arrow.getCSI().resetPixmap()
class AdjustArrowRotation(QUndoCommand):
    """Undoable change of a displacement arrow's axis rotation."""
    _id = getNewCommandID()

    def __init__(self, arrow, oldRotation, newRotation):
        QUndoCommand.__init__(self, "change arrow rotation")
        self.arrow = arrow
        self.oldRotation = oldRotation
        self.newRotation = newRotation

    def doAction(self, redo):
        if redo:
            self.arrow.axisRotation = self.newRotation
        else:
            self.arrow.axisRotation = self.oldRotation
        self.arrow.getCSI().resetPixmap()
class SetFontCommand(QUndoCommand):
    """Undoable font change across a list of labels.

    Captures each label's current font at construction time so undo can
    restore the exact previous font per label.
    """
    _id = getNewCommandID()

    def __init__(self, labelList, newFont):
        QUndoCommand.__init__(self, "Set Font")
        self.newFont = newFont
        # Materialize the pairs: a bare zip() is a one-shot iterator under
        # Python 3, which would make every undo/redo after the first pass a
        # silent no-op. list() is harmless under Python 2 and fixes Python 3.
        self.labelList = list(zip(labelList, [i.font() for i in labelList]))

    def doAction(self, redo):
        for label, oldFont in self.labelList:
            label.setFont(self.newFont if redo else oldFont)
class ScaleItemCommand(QUndoCommand):
    """Undoable change of an item's scale factor."""
    _id = getNewCommandID()

    def __init__(self, target, oldScale, newScale):
        QUndoCommand.__init__(self, "Item Scale")
        self.target = target
        self.oldScale = oldScale
        self.newScale = newScale

    def doAction(self, redo):
        self.target.scaling = self.newScale if redo else self.oldScale
        # Re-render at the new scale, then let the page reflow around it.
        self.target.resetPixmap()
        self.target.getPage().initLayout()
class RotateItemCommand(QUndoCommand):
    """Undoable change of an item's rotation."""
    _id = getNewCommandID()

    def __init__(self, target, oldRotation, newRotation):
        QUndoCommand.__init__(self, "Item rotation")
        self.target = target
        self.oldRotation = oldRotation
        self.newRotation = newRotation

    def doAction(self, redo):
        chosen = self.newRotation if redo else self.oldRotation
        # Store a fresh copy so later edits can't alias the saved rotation.
        self.target.rotation = list(chosen)
        self.target.resetPixmap()
        self.target.getPage().initLayout()
class ScaleDefaultItemCommand(QUndoCommand):
    """Undoable change of a template item's default scale."""
    _id = getNewCommandID()

    def __init__(self, templateItem, oldScale, newScale):
        QUndoCommand.__init__(self, "Change default %s Scale" % templateItem.itemClassName)
        self.templateItem = templateItem
        self.oldScale, self.newScale = oldScale, newScale

    def doAction(self, redo):
        chosen = self.newScale if redo else self.oldScale
        self.templateItem.changeDefaultScale(chosen)
        # Regenerate the GL-rendered item and its dependent layouts.
        self.resetGLItem(self.templateItem)
        self.templateItem.update()  # Need this to force full redraw
class RotateDefaultItemCommand(QUndoCommand):
    """Undoable change of a template item's default rotation."""
    _id = getNewCommandID()

    def __init__(self, templateItem, oldRotation, newRotation):
        QUndoCommand.__init__(self, "Change default %s rotation" % templateItem.itemClassName)
        self.templateItem = templateItem
        self.oldRotation, self.newRotation = oldRotation, newRotation

    def doAction(self, redo):
        chosen = self.newRotation if redo else self.oldRotation
        # Pass a fresh copy so the stored rotation can't be mutated later.
        self.templateItem.changeDefaultRotation(list(chosen))
        self.resetGLItem(self.templateItem)
class SetPageNumberPosCommand(QUndoCommand):
    """Undoable change of the page-number position, applied book-wide."""
    _id = getNewCommandID()

    def __init__(self, template, oldPos, newPos):
        QUndoCommand.__init__(self, "change Page Number position")
        self.template = template
        self.oldPos = oldPos
        self.newPos = newPos

    def doAction(self, redo):
        position = self.newPos if redo else self.oldPos
        self.template.setNumberItemPos(position)
        # Every existing page follows the template's number placement.
        for page in self.template.instructions.getPageList():
            page.resetPageNumberPosition()
class SetPageBackgroundColorCommand(QUndoCommand):
    """Undoable change of the page background color in template settings."""
    _id = getNewCommandID()

    def __init__(self, template, oldColor, newColor):
        QUndoCommand.__init__(self, "change Page background")
        self.template = template
        self.oldColor = oldColor
        self.newColor = newColor

    def doAction(self, redo):
        settings = self.template.instructions.templateSettings
        settings.Page.backgroundColor = self.newColor if redo else self.oldColor
class SetPageBackgroundBrushCommand(QUndoCommand):
    """Undoable change of the page background brush in template settings."""
    _id = getNewCommandID()

    def __init__(self, template, oldBrush, newBrush):
        QUndoCommand.__init__(self, "change Page background")
        self.template = template
        self.oldBrush = oldBrush
        self.newBrush = newBrush

    def doAction(self, redo):
        settings = self.template.instructions.templateSettings
        settings.Page.brush = self.newBrush if redo else self.oldBrush
class SetPenCommand(QUndoCommand):
    """Undoable pen (border) change on an item.

    penSetter names the setter method to invoke (default "setPen"), so this
    command also serves items whose pen setter has a different name.
    """
    _id = getNewCommandID()

    def __init__(self, target, oldPen, newPen = None, penSetter = "setPen"):
        QUndoCommand.__init__(self, "change Border")
        self.target, self.oldPen, self.penSetter = target, oldPen, penSetter
        # Compare against None explicitly: a falsy-but-valid pen value must
        # not be silently replaced by the target's current pen.
        self.newPen = newPen if newPen is not None else target.pen()

    def doAction(self, redo):
        pen = self.newPen if redo else self.oldPen
        # getattr is the idiomatic way to look up the configured setter by
        # name (and, unlike __getattribute__, respects __getattr__ fallbacks).
        getattr(self.target, self.penSetter)(pen)
class SetBrushCommand(QUndoCommand):
    """Undoable brush (fill) change on an item.

    The undo-stack text is configurable so callers can label the action
    (default "change Fill").
    """
    _id = getNewCommandID()

    def __init__(self, target, oldBrush, newBrush = None, text = "change Fill"):
        QUndoCommand.__init__(self, text)
        self.target, self.oldBrush = target, oldBrush
        # Compare against None explicitly: a falsy-but-valid brush must not
        # be mistaken for "not supplied" and replaced by the current brush.
        self.newBrush = newBrush if newBrush is not None else target.brush()

    def doAction(self, redo):
        brush = self.newBrush if redo else self.oldBrush
        self.target.setBrush(brush)
class SetItemFontsCommand(QUndoCommand):
    """Undoable font change for a whole category of items.

    target selects the category ('Page', 'Step', 'PLIItem',
    'GraphicsCircleLabelItem', 'Submodel Quantity', 'Callout Step',
    'Callout Quantity'); the font is applied to the template's exemplar
    first, then to every matching item on every page.
    """
    _id = getNewCommandID()
    def __init__(self, template, oldFont, newFont, target):
        QUndoCommand.__init__(self, "change " + target + " font")
        self.template, self.oldFont, self.newFont, self.target = template, oldFont, newFont, target
    def doAction(self, redo):
        font = self.newFont if redo else self.oldFont
        if self.target == 'Page':
            self.template.numberItem.setFont(font)
            for page in self.template.instructions.getPageList():
                page.numberItem.setFont(font)
        elif self.target == 'Step':
            self.template.steps[0].numberItem.setFont(font)
            for page in self.template.instructions.getPageList():
                for step in page.steps:
                    step.numberItem.setFont(font)
        elif self.target == 'PLIItem':
            for item in self.template.steps[0].pli.pliItems:
                item.numberItem.setFont(font)
            # Matched by itemClassName, since PLI items are nested children.
            for page in self.template.instructions.getPageList():
                for child in page.getAllChildItems():
                    if self.target == child.itemClassName:
                        child.numberItem.setFont(font)
        elif self.target == 'GraphicsCircleLabelItem':
            for item in self.template.steps[0].pli.pliItems:
                if item.lengthIndicator:
                    item.lengthIndicator.setFont(font)
            for page in self.template.instructions.getPageList():
                for child in page.getAllChildItems():
                    if self.target == child.itemClassName:
                        child.setFont(font)
        elif self.target == 'Submodel Quantity':
            self.template.submodelItem.numberItem.setFont(font)
            for page in self.template.instructions.getPageList():
                if page.submodelItem and page.submodelItem.hasQuantity():
                    page.submodelItem.numberItem.setFont(font)
        elif self.target == 'Callout Step':
            self.template.steps[0].callouts[0].steps[0].numberItem.setFont(font)
            for page in self.template.instructions.getPageList():
                for step in page.steps:
                    for callout in step.callouts:
                        # NOTE(review): this inner loop reuses the name 'step',
                        # shadowing the page step above. Harmless here because
                        # the outer loop reassigns 'step' each iteration, but
                        # renaming the inner variable would be clearer.
                        for step in callout.steps:
                            if step.numberItem is not None:
                                step.numberItem.setFont(font)
        elif self.target == 'Callout Quantity':
            self.template.steps[0].callouts[0].qtyLabel.setFont(font)
            for page in self.template.instructions.getPageList():
                for step in page.steps:
                    for callout in step.callouts:
                        if callout.qtyLabel is not None:
                            callout.qtyLabel.setFont(font)
class TogglePLIs(QUndoCommand):
    """Undoable global enable/disable of PLIs across the instruction book."""
    _id = getNewCommandID()

    def __init__(self, template, enablePLIs):
        QUndoCommand.__init__(self, "%s PLIs" % ("Enable" if enablePLIs else "Remove"))
        self.template, self.enablePLIs = template, enablePLIs

    def doAction(self, redo):
        scene = self.template.scene()
        scene.emit(SIGNAL("layoutAboutToBeChanged()"))
        # redo applies the requested state; undo applies its inverse.
        if self.enablePLIs if redo else not self.enablePLIs:
            self.template.steps[0].enablePLI()
            self.template.instructions.mainModel.showHidePLIs(True, True)
        else:
            self.template.steps[0].disablePLI()
            self.template.instructions.mainModel.showHidePLIs(False, True)
        scene.emit(SIGNAL("layoutChanged()"))
        self.template.initLayout()
class ToggleCSIPartHighlightCommand(QUndoCommand):
    """Undoable toggle of new-part highlighting on CSIs."""
    _id = getNewCommandID()
    def __init__(self, state, target, templateCSI):
        QUndoCommand.__init__(self, "Highlight Parts")
        self.state, self.target, self.templateCSI = state, target, templateCSI
    def doAction(self, redo):
        # NOTE(review): on undo this always clears the flag (redo is False)
        # rather than restoring a previously-True state — presumably the
        # feature defaults to off; confirm against callers.
        self.target.highlightNewParts = redo and self.state
        self.templateCSI.isDirty = True
        # Every CSI renders differently with highlighting on, so dirty all.
        self.templateCSI.getPage().instructions.setAllCSIDirty()
        self.templateCSI.scene().update()
class ChangePartColorCommand(QUndoCommand):
    """Undoable change of a part's color, kept in sync with its PLI entry."""
    _id = getNewCommandID()
    def __init__(self, part, oldColor, newColor):
        QUndoCommand.__init__(self, "Change Part color")
        self.part, self.oldColor, self.newColor = part, oldColor, newColor
    def doAction(self, redo):
        self.part.scene().emit(SIGNAL("layoutAboutToBeChanged()"))
        # Swap roles on undo so 'newColor' is always the color being applied.
        oldColor, newColor = (self.oldColor, self.newColor) if redo else (self.newColor, self.oldColor)
        self.part.changeColor(newColor)
        # The PLI tracks parts per color, so it must migrate the part too.
        if self.part.getStep().pli:
            self.part.getStep().pli.changePartColor(self.part, oldColor, newColor)
        page = self.part.getPage()
        page.instructions.updateMainModel()
        page.updateSubmodel()
        self.part.scene().emit(SIGNAL("layoutChanged()"))
class ChangeAbstractPartCommand(QUndoCommand):
    """Undoable swap of the LDraw file a part instance refers to."""
    _id = getNewCommandID()
    def __init__(self, part, newFilename):
        QUndoCommand.__init__(self, "Change Part")
        self.part, self.newFilename = part, newFilename
        # Remember the current filename so undo can restore it.
        self.oldFilename = self.part.filename
    def doAction(self, redo):
        scene = self.part.scene()
        scene.emit(SIGNAL("layoutAboutToBeChanged()"))
        scene.clearSelection()
        self.part.changeAbstractPart(self.newFilename if redo else self.oldFilename)
        page = self.part.getPage()
        page.instructions.updateMainModel()
        page.updateSubmodel()
        scene.emit(SIGNAL("layoutChanged()"))
        page.initLayout()
        scene.update()
class ChangePartPosRotCommand(QUndoCommand):
    """Undoable command that moves / rotates a single Part."""

    _id = getNewCommandID()

    def __init__(self, part, oldPos, newPos, oldRot, newRot):
        QUndoCommand.__init__(self, "Change Part position")
        self.part = part
        self.oldPos, self.newPos = oldPos, newPos
        self.oldRot, self.newRot = oldRot, newRot

    def doAction(self, redo):
        part = self.part
        part.scene().emit(SIGNAL("layoutAboutToBeChanged()"))
        if redo:
            part.changePosRot(self.newPos, self.newRot)
        else:
            part.changePosRot(self.oldPos, self.oldRot)
        part.scene().emit(SIGNAL("layoutChanged()"))
        page = part.getPage()
        # False: position-only change does not require a full model rebuild.
        page.instructions.updateMainModel(False)
        page.updateSubmodel()
class SubmodelToCalloutCommand(QUndoCommand):
    """Undoable command that flattens a Submodel into a Callout.

    redo() inlines every part of the submodel directly into the step where
    the submodel was used and builds a Callout showing the assembly;
    undo() reverses the conversion.  The heavy lifting is order-sensitive:
    parts must be detached before the submodel is removed from the model.
    """

    _id = getNewCommandID()

    def __init__(self, submodel):
        QUndoCommand.__init__(self, "Submodel To Callout")
        self.submodel = submodel
        # Model that owns the submodel, needed by both redo and undo.
        self.parentModel = submodel._parent

    def redo(self):
        # Convert a Submodel into a Callout
        self.targetStep = self.parentModel.findSubmodelStep(self.submodel)
        instructions = self.targetStep.getPage().instructions
        targetModel = self.submodel._parent
        scene = self.targetStep.scene()
        scene.clearSelection()
        scene.emit(SIGNAL("layoutAboutToBeChanged()"))
        self.targetCallout = self.targetStep.addBlankCalloutSignal(False, False)
        # Find each instance of this submodel on the target page
        self.submodelInstanceList = []
        self.addedParts = []
        for part in self.targetStep.csi.getPartList():
            if part.abstractPart == self.submodel:
                part.setParentItem(None)  # Temporarily set part's parent, so it doesn't get deleted by Qt
                self.targetStep.removePart(part)
                targetModel.parts.remove(part)
                self.submodelInstanceList.append(part)
        # Build the callout from the first submodel instance only; further
        # instances just have their parts inlined into the target step.
        calloutDone = False
        for submodelPart in self.submodelInstanceList:
            for page in self.submodel.pages:
                for step in page.steps:
                    for part in step.csi.getPartList():
                        newPart = part.duplicate()
                        originalMatrix = newPart.matrix
                        # Transform submodel-local coordinates into the parent model's frame.
                        newPart.matrix = LicHelpers.multiplyMatrices(newPart.matrix, submodelPart.matrix)
                        self.addedParts.append(newPart)
                        targetModel.parts.append(newPart)
                        self.targetStep.addPart(newPart)
                        if not calloutDone:
                            # Callout keeps the untransformed (submodel-local) matrix.
                            calloutPart = newPart.duplicate()
                            calloutPart.matrix = list(originalMatrix)
                            self.targetCallout.addPart(calloutPart)
                    if step != page.steps[-1] and not calloutDone:
                        self.targetCallout.addBlankStep(False)
            calloutDone = True
        if len(self.submodelInstanceList) > 1:
            self.targetCallout.setQuantity(len(self.submodelInstanceList))
        for step in self.targetCallout.steps:
            step.csi.resetPixmap()
        self.targetStep.initLayout()
        self.targetCallout.initLayout()
        # Finally drop the now-inlined submodel from the model and part dictionary.
        self.parentModel.removeSubmodel(self.submodel)
        instructions.partDictionary.pop(self.submodel.filename)
        scene.emit(SIGNAL("layoutChanged()"))
        instructions.updateMainModel()
        scene.selectPage(self.targetStep.parentItem().number)
        self.targetCallout.setSelected(True)
        scene.emit(SIGNAL("sceneClick"))

    def undo(self):
        # Convert a Callout into a Submodel
        # For now, assume this really is an undo, and we have a fully defined self.submodel, targetStep and targetCallout
        scene = self.targetStep.scene()
        scene.clearSelection()
        scene.emit(SIGNAL("layoutAboutToBeChanged()"))
        for part in self.addedParts:
            self.targetStep.removePart(part)
        for submodel in self.submodelInstanceList:
            self.targetStep.addPart(submodel)
        self.parentModel.addSubmodel(self.submodel)
        self.targetStep.removeCallout(self.targetCallout)
        self.targetStep.initLayout()
        scene.emit(SIGNAL("layoutChanged()"))
        scene.selectPage(self.submodel.pages[0].number)
        self.submodel.pages[0].setSelected(True)
        scene.emit(SIGNAL("sceneClick"))
class CalloutToSubmodelCommand(SubmodelToCalloutCommand):
    """Undoable command that promotes a Callout into a standalone Submodel.

    Inherits from SubmodelToCalloutCommand to reuse its attributes
    (targetStep, parentModel, submodel), but overrides both redo and undo.
    """

    _id = getNewCommandID()

    def __init__(self, callout):
        QUndoCommand.__init__(self, "Callout To Submodel")
        self.callout = callout

    def redo(self):
        callout = self.callout
        scene = callout.scene()
        scene.clearSelection()
        scene.emit(SIGNAL("layoutAboutToBeChanged()"))
        partList = callout.getOriginalPartList()
        self.targetStep = callout.parentItem()
        self.parentModel = callout.getPage().parent()
        # Move the callout's parts into a brand new submodel.
        submodel = callout.createBlankSubmodel()
        submodel.appendBlankPage()
        for part in partList:
            submodel.parts.append(part)
            submodel.pages[0].steps[0].addPart(part)
            self.targetStep.removePart(part)
        submodel.addInitialPagesAndSteps()
        submodel.mergeInitialPages()
        if submodel.glDispID == LicGLHelpers.UNINIT_GL_DISPID:
            submodel.createGLDisplayList()
        # submodel.resetPixmap(callout.getContext(), True)
        # Represent the submodel in the target step with one placeholder part.
        self.newPart = submodel.createBlankPart()
        self.newPart.abstractPart = submodel
        self.targetStep.addPart(self.newPart)
        self.parentModel.addSubmodel(submodel)
        self.targetStep.removeCallout(callout)
        self.targetStep.initLayout()
        self.submodel = submodel
        scene.emit(SIGNAL("layoutChanged()"))
        scene.selectPage(submodel.pages[0].number)
        submodel.pages[0].setSelected(True)
        scene.emit(SIGNAL("sceneClick"))

    def undo(self):
        scene = self.targetStep.scene()
        scene.clearSelection()
        scene.emit(SIGNAL("layoutAboutToBeChanged()"))
        # Remove the placeholder part and dissolve the submodel back into the step.
        self.targetStep.removePart(self.newPart)
        self.parentModel.removeSubmodel(self.submodel)
        for part in self.submodel.parts:
            self.targetStep.addPart(part)
        self.targetStep.addCallout(self.callout)
        self.targetStep.initLayout()
        self.callout.initLayout()
        scene.emit(SIGNAL("layoutChanged()"))
        scene.selectPage(self.targetStep.parentItem().number)
        self.callout.setSelected(True)
        scene.emit(SIGNAL("sceneClick"))
class SubmodelToFromSubAssembly(QUndoCommand):
    """Undoable command converting a Submodel to a Sub Assembly and back."""

    _id = getNewCommandID()

    def __init__(self, submodel, submodelToAssembly):
        if submodelToAssembly:
            text = "Submodel to Sub Assembly"
        else:
            text = "Sub Assembly to Submodel"
        QUndoCommand.__init__(self, text)
        self.submodel, self.submodelToAssembly = submodel, submodelToAssembly

    def doAction(self, redo):
        submodel = self.submodel
        submodel.isSubAssembly = not submodel.isSubAssembly
        # Convert forward on redo, backward on undo.
        toAssembly = self.submodelToAssembly if redo else not self.submodelToAssembly
        submodel.showHidePLIs(not toAssembly)
        submodelItem = submodel.pages[0].submodelItem
        if toAssembly:
            submodelItem.convertToSubAssembly()
        else:
            submodelItem.convertToSubmodel()
class ClonePageStepsFromSubmodel(QUndoCommand):
    """Undoable command that copies a submodel's page/step layout onto another.

    redo() rebuilds the destination submodel with the same number of pages
    and steps as the target, then distributes the destination's parts across
    those steps by matching them (by filename, color and position) against
    the parts in the corresponding target steps.  undo() restores the
    destination's original pages and part placement from snapshots taken
    in __init__.
    """

    _id = getNewCommandID()

    def __init__(self, targetSubmodel, destinationSubmodel):
        QUndoCommand.__init__(self, "clone Submodel Pages and Steps")
        self.target, self.destination = targetSubmodel, destinationSubmodel
        # Snapshot the destination's current pages (with row & number)
        # so undo can restore them exactly.
        self.originalPageList = []
        for page in self.destination.pages:
            self.originalPageList.append((page, page._row, page._number))
        # Snapshot which page/step each destination part currently lives in.
        self.partPageStepList = []
        for part in self.destination.parts:
            pageNumber, stepNumber = part.getCSI().getPageStepNumberPair()
            self.partPageStepList.append((part, pageNumber, stepNumber))

    def redo(self):
        dest = self.destination
        scene = dest.instructions.scene
        scene.emit(SIGNAL("layoutAboutToBeChanged()"))
        # Remove all Pages and Steps from destination submodel
        for page in list(dest.pages):
            dest.deletePage(page)
        # Now have totally empty dest, and submodel with lots of pages & steps
        # Add the right number of blank pages and steps
        for page in self.target.pages:
            dest.appendBlankPage()
            dest.pages[-1].layout.orientation = page.layout.orientation
            for step in page.steps[1:]:  # skip first Step because appendBlankPage() adds one Step automatically
                dest.pages[-1].addBlankStep()
        currentStep = dest.pages[0].steps[0]
        nextStep = currentStep.getNextStep()
        # Copy all parts in dest submodel to its first CSI
        for part in dest.parts:
            currentStep.addPart(part)
        for page in self.target.pages:
            for step in page.steps:
                if step is self.target.pages[-1].steps[-1]:
                    break  # At last step: done
                # Remove all parts in submodel's current Step from the list of parts to be moved to next step
                partList = currentStep.csi.getPartList()
                for part in step.csi.getPartList():
                    # Best positional match wins; max() ranks by getPositionMatch score.
                    matchList = [(p.getPositionMatch(part), p) for p in partList if p.color == part.color and p.filename == part.filename]
                    if matchList:
                        partList.remove(max(matchList)[1])
                    else:  # Try finding a match by ignoring color
                        matchList = [(p.getPositionMatch(part), p) for p in partList if p.filename == part.filename]
                        if matchList:
                            partList.remove(max(matchList)[1])  # no match list means submodel has part not in dest, which we ignore utterly, which is fine
                for part in partList:  # Move all parts to the next step
                    part.setParentItem(nextStep)
                    currentStep.removePart(part)
                    nextStep.addPart(part)
                if currentStep.isEmpty():  # Check if any part are left
                    currentStep.parentItem().removeStep(currentStep)
                currentStep = nextStep
                nextStep = nextStep.getNextStep()
        if self.target.pages[0].submodelItem:
            dest.pages[0].addSubmodelImage()
        # Force every CSI to re-render and re-layout each page.
        for page in dest.pages:
            for step in page.steps:
                step.csi.isDirty = True
            page.initLayout()
        dest.instructions.mainModel.syncPageNumbers()
        scene.emit(SIGNAL("layoutChanged()"))

    def undo(self):
        dest = self.destination
        scene = dest.instructions.scene
        scene.emit(SIGNAL("layoutAboutToBeChanged()"))
        for part in dest.parts:
            part.setParentItem(None)  # About to delete the pages these parts live on, so change parents so Qt doesn't delete them
        for page in list(dest.pages):
            dest.deletePage(page)
        # Restore the original pages, then put each part back in its original step.
        for page, row, number in self.originalPageList:
            page._row, page.number = row, number
            dest.addPage(page)
        for part, pageNumber, stepNumber in self.partPageStepList:
            page = dest.getPage(pageNumber)
            csi = page.getStepByNumber(stepNumber).csi
            csi.addPart(part)
        scene.fullItemSelectionUpdate(dest.pages[0])
        scene.emit(SIGNAL("layoutChanged()"))
class ChangeLightingCommand(QUndoCommand):
    """Undoable command that applies new 3D lighting parameters to the scene."""

    _id = getNewCommandID()

    def __init__(self, scene, oldValues):
        QUndoCommand.__init__(self, "Change 3D Lighting")
        self.scene, self.oldValues = scene, oldValues
        # Snapshot the current GL light settings as the 'new' state.
        self.newValues = LicGLHelpers.getLightParameters()

    def doAction(self, redo):
        if redo:
            LicGLHelpers.setLightParameters(*self.newValues)
        else:
            LicGLHelpers.setLightParameters(*self.oldValues)
        page = self.scene.currentPage
        if page:
            page.update()
|
2013Commons/hue | refs/heads/master | desktop/core/ext-py/Django-1.4.5/tests/modeltests/m2m_through/models.py | 43 | from datetime import datetime
from django.db import models
# M2M described on one of the models
class Person(models.Model):
    """Target model for the various through-table M2M relations below."""
    name = models.CharField(max_length=128)

    class Meta:
        # Deterministic ordering so test assertions on querysets are stable.
        ordering = ('name',)

    def __unicode__(self):
        return self.name
class Group(models.Model):
    """Model holding several M2M relations to Person, each via an explicit
    intermediary ('through') model, exercising different through-model shapes."""
    name = models.CharField(max_length=128)
    members = models.ManyToManyField(Person, through='Membership')
    custom_members = models.ManyToManyField(Person, through='CustomMembership', related_name="custom")
    nodefaultsnonulls = models.ManyToManyField(Person, through='TestNoDefaultsOrNulls', related_name="testnodefaultsnonulls")

    class Meta:
        # Deterministic ordering for queryset comparisons in tests.
        ordering = ('name',)

    def __unicode__(self):
        return self.name
class Membership(models.Model):
    """Plain intermediary model for Group.members, with extra data fields."""
    person = models.ForeignKey(Person)
    group = models.ForeignKey(Group)
    # Callable default: evaluated per-row at save time, not at import time.
    date_joined = models.DateTimeField(default=datetime.now)
    invite_reason = models.CharField(max_length=64, null=True)

    class Meta:
        ordering = ('date_joined', 'invite_reason', 'group')

    def __unicode__(self):
        return "%s is a member of %s" % (self.person.name, self.group.name)
class CustomMembership(models.Model):
    """Intermediary model with non-default column name, related_name,
    db_table and an FK to another through model (weird_fk)."""
    person = models.ForeignKey(Person, db_column="custom_person_column", related_name="custom_person_related_name")
    group = models.ForeignKey(Group)
    weird_fk = models.ForeignKey(Membership, null=True)
    date_joined = models.DateTimeField(default=datetime.now)

    def __unicode__(self):
        return "%s is a member of %s" % (self.person.name, self.group.name)

    class Meta:
        # Exercise a custom table name on a through model.
        db_table = "test_table"
class TestNoDefaultsOrNulls(models.Model):
    """Through model whose extra field has no default and disallows NULL,
    so implicit add()/create() through the M2M must fail."""
    person = models.ForeignKey(Person)
    group = models.ForeignKey(Group)
    nodefaultnonull = models.CharField(max_length=5)
class PersonSelfRefM2M(models.Model):
    """Model with a non-symmetrical self-referential M2M through Friendship."""
    name = models.CharField(max_length=5)
    friends = models.ManyToManyField('self', through="Friendship", symmetrical=False)

    def __unicode__(self):
        return self.name
class Friendship(models.Model):
    """Through model for the self-referential PersonSelfRefM2M.friends relation."""
    first = models.ForeignKey(PersonSelfRefM2M, related_name="rel_from_set")
    second = models.ForeignKey(PersonSelfRefM2M, related_name="rel_to_set")
    date_friended = models.DateTimeField()
|
paultcochrane/bokeh | refs/heads/master | examples/plotting/file/image_rgba.py | 45 | from __future__ import division
import numpy as np
from bokeh.plotting import figure, show, output_file
# Build an N x N RGBA image by viewing a uint32 buffer as 4 uint8 channels:
# red ramps down the rows, blue ramps across the columns, green and alpha fixed.
N = 20
img = np.empty((N, N), dtype=np.uint32)
view = img.view(dtype=np.uint8).reshape((N, N, 4))
ramp = np.array([int(k / N * 255) for k in range(N)], dtype=np.uint8)
view[:, :, 0] = ramp[:, np.newaxis]   # red varies by row
view[:, :, 1] = 158                   # constant green
view[:, :, 2] = ramp[np.newaxis, :]   # blue varies by column
view[:, :, 3] = 255                   # fully opaque
# Render the RGBA array with bokeh into a standalone HTML file.
output_file("image_rgba.html", title="image_rgba.py example")
# Data ranges map the 20x20 image onto a 10x10 plot area starting at the origin.
p = figure(x_range=[0,10], y_range=[0,10])
p.image_rgba(image=[img], x=[0], y=[0], dw=[10], dh=[10])
show(p) # open a browser
|
partofthething/home-assistant | refs/heads/dev | homeassistant/components/joaoapps_join/__init__.py | 2 | """Support for Joaoapps Join services."""
import logging
from pyjoin import (
get_devices,
ring_device,
send_file,
send_notification,
send_sms,
send_url,
set_wallpaper,
)
import voluptuous as vol
from homeassistant.const import CONF_API_KEY, CONF_DEVICE_ID, CONF_NAME
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DOMAIN = "joaoapps_join"
CONF_DEVICE_IDS = "device_ids"
CONF_DEVICE_NAMES = "device_names"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
cv.ensure_list,
[
{
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(CONF_DEVICE_ID): cv.string,
vol.Optional(CONF_DEVICE_IDS): cv.string,
vol.Optional(CONF_DEVICE_NAMES): cv.string,
vol.Optional(CONF_NAME): cv.string,
}
],
)
},
extra=vol.ALLOW_EXTRA,
)
def register_device(hass, api_key, name, device_id, device_ids, device_names):
    """Register services for each join device listed.

    Creates one set of joaoapps_join services (ring, set_wallpaper,
    send_sms, send_file, send_url, send_tasker), each prefixed with
    *name*, that target the device(s) identified by device_id,
    device_ids and/or device_names.
    """
    # Every pyjoin call shares the same credentials / device targeting,
    # so build the keyword set once instead of repeating it six times.
    device_kwargs = {
        "api_key": api_key,
        "device_id": device_id,
        "device_ids": device_ids,
        "device_names": device_names,
    }

    def ring_service(service):
        """Service to ring devices."""
        ring_device(**device_kwargs)

    def set_wallpaper_service(service):
        """Service to set wallpaper on devices."""
        set_wallpaper(url=service.data.get("url"), **device_kwargs)

    def send_file_service(service):
        """Service to send files to devices."""
        send_file(url=service.data.get("url"), **device_kwargs)

    def send_url_service(service):
        """Service to open url on devices."""
        send_url(url=service.data.get("url"), **device_kwargs)

    def send_tasker_service(service):
        """Service to send a Tasker command to devices."""
        # Fixed: docstring previously duplicated send_url_service's.
        send_notification(text=service.data.get("command"), **device_kwargs)

    def send_sms_service(service):
        """Service to send sms from devices."""
        send_sms(
            sms_number=service.data.get("number"),
            sms_text=service.data.get("message"),
            **device_kwargs,
        )

    hass.services.register(DOMAIN, f"{name}ring", ring_service)
    hass.services.register(DOMAIN, f"{name}set_wallpaper", set_wallpaper_service)
    hass.services.register(DOMAIN, f"{name}send_sms", send_sms_service)
    hass.services.register(DOMAIN, f"{name}send_file", send_file_service)
    hass.services.register(DOMAIN, f"{name}send_url", send_url_service)
    hass.services.register(DOMAIN, f"{name}send_tasker", send_tasker_service)
def setup(hass, config):
    """Set up the Join services for every configured device entry."""
    for device in config[DOMAIN]:
        api_key = device.get(CONF_API_KEY)
        targets = (
            device.get(CONF_DEVICE_ID),
            device.get(CONF_DEVICE_IDS),
            device.get(CONF_DEVICE_NAMES),
        )
        raw_name = device.get(CONF_NAME)
        # Service name prefix, e.g. "My Phone" -> "my_phone_" -> my_phone_ring
        prefix = f"{raw_name.lower().replace(' ', '_')}_" if raw_name else ""
        # Abort the whole setup if the API key cannot list any devices.
        if api_key and not get_devices(api_key):
            _LOGGER.error("Error connecting to Join, check API key")
            return False
        if all(target is None for target in targets):
            _LOGGER.error(
                "No device was provided. Please specify device_id"
                ", device_ids, or device_names"
            )
            return False
        register_device(hass, api_key, prefix, *targets)
    return True
|
balloob/home-assistant | refs/heads/dev | homeassistant/components/homekit/aidmanager.py | 12 | """
Manage allocation of accessory ID's.
HomeKit needs to allocate unique numbers to each accessory. These need to
be stable between reboots and upgrades.
Using a hash function to generate them means collisions. It also means you
can't change the hash without causing breakages for HA users.
This module generates and stores them in a HA storage.
"""
import random
from fnvhash import fnv1a_32
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_registry import RegistryEntry
from homeassistant.helpers.storage import Store
from .util import get_aid_storage_filename_for_entry_id
AID_MANAGER_STORAGE_VERSION = 1
AID_MANAGER_SAVE_DELAY = 2
ALLOCATIONS_KEY = "allocations"
UNIQUE_IDS_KEY = "unique_ids"
INVALID_AIDS = (0, 1)
AID_MIN = 2
AID_MAX = 18446744073709551615
def get_system_unique_id(entity: "RegistryEntry"):
    """Build the system wide unique id: '<platform>.<domain>.<unique_id>'."""
    parts = (entity.platform, entity.domain, entity.unique_id)
    return "{}.{}.{}".format(*parts)
def _generate_aids(unique_id: str, entity_id: str) -> "Iterator[int]":
    """Yield candidate accessory aids for an entity, best first.

    Order of preference: the fnv1a_32 hash of the unique id (when one
    exists), then the fnv1a_32 hash of the entity id, and finally a few
    random aids in the valid range.

    Fixed: the return annotation was ``-> int`` but this is a generator
    function, so the correct annotation is an iterator of ints.
    """
    if unique_id:
        # Use fnv1a_32 of the unique id as
        # fnv1a_32 has less collisions than
        # adler32
        yield fnv1a_32(unique_id.encode("utf-8"))

    # If there is no unique id we use
    # fnv1a_32 as it is unlikely to collide
    yield fnv1a_32(entity_id.encode("utf-8"))

    # If called again resort to random allocations.
    # Given the size of the range its unlikely we'll encounter duplicates
    # But try a few times regardless
    for _ in range(5):
        yield random.randrange(AID_MIN, AID_MAX)
class AccessoryAidStorage:
    """
    Holds a map of entity ID to HomeKit ID.

    Will generate new ID's, ensure they are unique and store them to make sure they
    persist over reboots.
    """

    def __init__(self, hass: HomeAssistant, entry: ConfigEntry):
        """Create a new entity map store."""
        self.hass = hass
        # storage_key (unique_id or entity_id) -> allocated aid
        self.allocations = {}
        # Set of every aid handed out, for O(1) collision checks.
        self.allocated_aids = set()
        self._entry = entry
        # Both set lazily in async_initialize().
        self.store = None
        self._entity_registry = None

    async def async_initialize(self):
        """Load the latest AID data."""
        self._entity_registry = (
            await self.hass.helpers.entity_registry.async_get_registry()
        )
        aidstore = get_aid_storage_filename_for_entry_id(self._entry)
        self.store = Store(self.hass, AID_MANAGER_STORAGE_VERSION, aidstore)
        raw_storage = await self.store.async_load()
        if not raw_storage:
            # There is no data about aid allocations yet
            return
        self.allocations = raw_storage.get(ALLOCATIONS_KEY, {})
        self.allocated_aids = set(self.allocations.values())

    def get_or_allocate_aid_for_entity_id(self, entity_id: str):
        """Generate a stable aid for an entity id."""
        entity = self._entity_registry.async_get(entity_id)
        if not entity:
            # Entity not in the registry: key the allocation by entity_id only.
            return self._get_or_allocate_aid(None, entity_id)
        sys_unique_id = get_system_unique_id(entity)
        return self._get_or_allocate_aid(sys_unique_id, entity_id)

    def _get_or_allocate_aid(self, unique_id: str, entity_id: str):
        """Allocate (and return) a new aid for an accessory."""
        if unique_id and unique_id in self.allocations:
            return self.allocations[unique_id]
        if entity_id in self.allocations:
            return self.allocations[entity_id]
        # Try hash-based candidates first, then random fallbacks.
        for aid in _generate_aids(unique_id, entity_id):
            if aid in INVALID_AIDS:
                continue
            if aid not in self.allocated_aids:
                # Prefer the unique_id over the entitiy_id
                storage_key = unique_id or entity_id
                self.allocations[storage_key] = aid
                self.allocated_aids.add(aid)
                self.async_schedule_save()
                return aid
        raise ValueError(
            f"Unable to generate unique aid allocation for {entity_id} [{unique_id}]"
        )

    def delete_aid(self, storage_key: str):
        """Delete an aid allocation."""
        if storage_key not in self.allocations:
            return
        aid = self.allocations.pop(storage_key)
        self.allocated_aids.discard(aid)
        self.async_schedule_save()

    @callback
    def async_schedule_save(self):
        """Schedule saving the entity map cache."""
        self.store.async_delay_save(self._data_to_save, AID_MANAGER_SAVE_DELAY)

    async def async_save(self):
        """Save the entity map cache."""
        return await self.store.async_save(self._data_to_save())

    @callback
    def _data_to_save(self):
        """Return data of entity map to store in a file."""
        return {ALLOCATIONS_KEY: self.allocations}
|
dgary50/eovsa | refs/heads/master | test_svn/udb_util.py | 1 | '''Reads and averages UDB, IDB files, hacked from
read_idb_.py. Replaces dump_tsys_ext.py. New version feb 2 2017, This
creates UDB files with the same variables as in IDB files (xsampler,
ysampler) rather than (xtyys, ytsys)'''
#jmm, 2017-02-03
#
# dg, 2017-08-30 -- Changes to allow these routines to work on IDB or UDB
# reprocessing via calls to readXdata() followed by udb_write()
# dg, 2021-06-01 -- It seems the IDB files occasionally have time glitches where
# the time jumps back, I think due to resetting the network every
# 15 minutes. Now readXdata() just skips such records.
#needed for file creation
import time, os
import aipy
from astropy.io import fits
from util import Time
import numpy as np
#data is a masked array
import numpy.ma as ma
#pcapture2 gives us a baseline array
import pcapture2 as p
#eovsa_lst gives LST if it isn't present in the file
import eovsa_lst as el
#copy is used for filter option in idb_read
import copy
#to strip non-printable characters from antenna list
def strip_non_printable(string_in):
    '''Return string_in with everything outside the printable ASCII
    range (codes 32-126) removed, so the result is safe to hand to the
    FITS writing routines.'''
    kept = [c for c in string_in if 31 < ord(c) < 127]
    return ''.join(kept)
#End strip_non_printable
def avXdata(x, nsec=60):
    '''Averages UDB data over nsec seconds.

    Input x is the dict returned by readXdata; times ('time') are Julian
    dates.  Returns a dict with the same keys, visibilities averaged and
    power/uvw/delay/time arrays rebinned onto nsec-second intervals, plus
    an 'nsamples' array giving the number of 1-s samples per bin.
    '''
    # The input should be output from read_udb.readXdata
    one_day = 24.0*3600.0
    t = x['time']
    ntimes = len(t)
    # Median sample spacing converted from days to whole seconds.
    dt = t[1::]-t[0:ntimes-1]
    dtsec = int(round(np.median(dt)*one_day))
    if dtsec >= nsec:
        print 'avXdata: Averaging time is too short, returning'
        return x
    #endif
    #time in seconds from the start
    #need a loop for some python reason
    tsec = t-t[0]
    for j in range(ntimes):
        tsec[j] = int(round(tsec[j]*one_day))
    #endfor
    #we'll create time bin edges here
    dtsec_all = int(np.max(tsec))
    # NOTE(review): integer division here (Python 2) -- intended for whole bins.
    nnew = 2+dtsec_all/nsec
    tsec_new = np.arange(0, nnew*nsec, nsec)
    #one less time than the bin edges
    ntnew = len(tsec_new)-1
    #define the output
    xx_shape = np.shape(x['x'])
    nf = xx_shape[0]
    nblc = xx_shape[1]
    npol = xx_shape[2]
    ntimes1 = xx_shape[3]
    #nf = np.size(x['x'][:,0,0,0])
    #nblc = np.size(x['x'][0,:,0,0])
    #npol = np.size(x['x'][0,0,:,0])
    # Masked output visibility array, (freq, baseline+auto, pol, time).
    outx0 = np.zeros((nf, nblc, npol, ntnew),dtype=np.complex64)
    omask = np.zeros((nf, nblc, npol, ntnew),dtype=np.int32)
    outx = ma.masked_array(outx0, mask = omask)
    nantsnf3 = np.size(x['px'][:,0])
    outpx = np.zeros((nantsnf3, ntnew), dtype=np.float32)
    outpy = np.zeros((nantsnf3, ntnew), dtype=np.float32)
    uvwarray = np.zeros((3, nblc, ntnew), dtype=np.float)
    lstarray = np.zeros(ntnew, dtype=np.float)
    nants = np.size(x['delay'][:,0])
    delayarray = np.zeros((nants, ntnew), dtype=np.float)
    utarray = np.zeros(ntnew, dtype=np.float)
    nsjarray = np.zeros(ntnew, dtype=np.int32)
    #step through and average, jc is the time bin that we're working
    #on, njc is the number of shorter intervals inside of this interval
    # NOTE(review): jc and njc appear unused below -- kept as-is.
    jc = 0
    njc = 0
    for j in range(ntnew):
        # ssj collects sample indices falling in bin [tj, tj1).
        tj = tsec_new[j]
        tj1 = tsec_new[j+1]
        ssj = []
        ssj0 = []
        for i in range(ntimes):
            if tsec[i] == tj:
                ssj0.append(i)
            if tsec[i] >= tj and tsec[i] < tj1:
                ssj.append(i)
            #endif
        #endfor
        # print j, ssj0
        # print ssj
        if len(ssj) > 0:
            nsj = float(len(ssj))
            nsjarray[j] = nsj
            # Visibilities are averaged; x and y powers are summed per bin.
            xxj = x['x'][:,:,:,ssj]
            outx[:,:,:,j]=ma.average(xxj, axis=3)
            pxj = x['px'][:,ssj]
            outpx[:,j]=np.sum(pxj, axis=1)
            pyj = x['py'][:,ssj]
            outpy[:,j]=np.sum(pyj, axis=1)
        #endif
    #endfor
    #time arrays, delays and uvw are interpolated: to interval center times
    tsec_mid = (tsec_new[1::]+tsec_new[0:ntnew])*0.5
    lstarray = np.interp(tsec_mid, tsec, x['lst'])
    utarray = np.interp(tsec_mid, tsec, x['ut'])
    for k in range(nants):
        delayk = x['delay'][k,:]
        delayarray[k, :] = np.interp(tsec_mid, tsec, delayk)
    #endfor
    for k in range(3):
        for i in range(nblc):
            uvwki = x['uvw'][k,i,:]
            uvwarray[k,i,:] = np.interp(tsec_mid, tsec, uvwki)
        #endfor
    #endfor
    # Bin-center times back in Julian date units.
    tnew = t[0]+tsec_mid/one_day
    out = {'x':outx,'uvw':uvwarray,'time':tnew,'px':outpx,'py':outpy,'i0':x['i0'],
           'j0':x['j0'],'lst':lstarray,'pol':x['pol'],'delay':delayarray,'ut':utarray,
           'file0':x['file0'], 'nsamples':nsjarray,'fghz':x['fghz']}
    return out
#END of avXdata
def udbfile_write(y, ufile_in, ufilename):
    '''Read in a UDB dataset average in time and write out the file. Y is
    the output from avXdata or readXdata, ufile_in is the input
    filename (needed for source, scan, etc...), ufilename is the
    output filename.

    Returns ufilename on success, [] on bad input.
    '''
    if len(y) == 0:
        print 'udbfile_write: No data input'
        return []
    #endif
    if len(ufile_in) == 0:
        print 'udbfile_write: No file input'
        return []
    #endif
    if len(ufilename) == 0:
        print 'udbfile_write: No output file'
        return []
    #endif
    # Ready to output
    # Open the file and use that to replicate the NRV
    # (non-record-variable) variables
    uv = aipy.miriad.UV(ufile_in)
    src = uv['source']
    scanid = uv['scanid']
    nants = uv['nants']
    # The assumption here is that all the variables are going to be
    # there since it's already been processed
    uvout = aipy.miriad.UV(ufilename, 'new')
    uvout.add_var('name', 'a')
    uvout['name'] = strip_non_printable(ufilename)
    # Copy string-valued header variables, sanitized for FITS safety.
    nrv_varlist_string = ['telescop', 'project', 'operator', 'version',
                          'source', 'scanid', 'proj', 'antlist', 'obstype']
    for j in range(len(nrv_varlist_string)):
        uvout.add_var(nrv_varlist_string[j], 'a')
        uvout[nrv_varlist_string[j]] = strip_non_printable(uv[nrv_varlist_string[j]])
    #endfor
    # Copy integer header variables.
    nrv_varlist_int = ['nants', 'npol']
    for j in range(len(nrv_varlist_int)):
        uvout.add_var(nrv_varlist_int[j], 'i')
        uvout[nrv_varlist_int[j]] = uv[nrv_varlist_int[j]]
    #endfor
    # Copy single-precision real header variables.
    nrv_varlist_rl = ['vsource', 'veldop', 'epoch']
    for j in range(len(nrv_varlist_rl)):
        uvout.add_var(nrv_varlist_rl[j], 'r')
        uvout[nrv_varlist_rl[j]] = uv[nrv_varlist_rl[j]]
    #endfor
    # Copy double-precision real header variables.
    nrv_varlist_rl8 = ['freq', 'restfreq', 'antpos', 'ra', 'dec', 'obsra', 'obsdec']
    for j in range(len(nrv_varlist_rl8)):
        uvout.add_var(nrv_varlist_rl8[j], 'd')
        uvout[nrv_varlist_rl8[j]] = uv[nrv_varlist_rl8[j]]
    #endfor
    #sfreq, sdf, nchan, nspect don't change
    sfreq_in = uv['sfreq']
    na = len(sfreq_in)
    #add these vars
    uvout.add_var('nspect', 'i')
    uvout['nspect'] = uv['nspect']
    uvout.add_var('sfreq', 'd')
    uvout['sfreq'] = sfreq_in
    uvout.add_var('sdf', 'd')
    uvout['sdf'] = uv['sdf']
    #spectral windows
    # One channel per spectral window, numbered 1..na.
    nschan = np.ones(na, dtype=np.int32)
    ischan = np.arange(na, dtype=np.int32)+1
    uvout.add_var('nschan', 'i')
    uvout['nschan'] = nschan
    uvout.add_var('ischan', 'i')
    uvout['ischan'] = ischan
    #add a variable for ntimes
    uvout.add_var('ntimes', 'i')
    ntimes = len(y['time'])
    uvout['ntimes'] = ntimes
    # If input has nsamples key for the number of 1-s samples in each interval,
    # add a variable for that, and one for integration time
    if 'nsamples' in y.keys():
        #Add a variable for delta_time in seconds
        dtsec = np.float(np.max(y['nsamples'])) # This will typically be 60.0
        uvout.add_var('inttime', 'r')
        uvout['inttime'] = dtsec
        uvout.add_var('nsamples', 'i')
        # Array of number of 1-s samples in each interval
        uvout['nsamples'] = y['nsamples']
    #define the record variables here
    uvout.add_var('ut', 'd')
    uvout.add_var('lst', 'd')
    uvout.add_var('xsampler', 'r')
    uvout.add_var('ysampler', 'r')
    uvout.add_var('delay', 'd')
    uvout.add_var('pol', 'i')
    #Need version info here
    # NOTE(review): 'version' is assigned but never written -- confirm intent.
    version = "3.0"
    #Loop through times and add the other variables
    yy_shape = np.shape(y['x'])
    nf = yy_shape[0]
    nblc = yy_shape[1]
    npol = yy_shape[2]
    ntimes1 = yy_shape[3]
    for j in range(ntimes):
        tj = y['time'][j]
        utj = y['ut'][j]
        lstj = y['lst'][j]
        #odd things happen to xsampler, ysampler
        # Copy element-by-element to force a contiguous float32 buffer.
        pxj = np.zeros(3*nants*nf, dtype = np.float32)
        pyj = np.zeros(3*nants*nf, dtype = np.float32)
        for k in range(3*nants*nf):
            pxj[k] = y['px'][k,j]
            pyj[k] = y['py'][k,j]
        #endfor
        dj = y['delay'][:,j]
        # Miriad record variables must be rewritten before each data write;
        # ut/lst are re-set before each sampler/pol group on purpose.
        #xsampler
        uvout['ut'] = utj
        uvout['lst'] = lstj
        uvout['xsampler'] = pxj
        #ysampler
        uvout['ut'] = utj
        uvout['lst'] = lstj
        uvout['ysampler'] = pyj
        #delays
        uvout['delay'] = dj
        #for each polarization:
        for k in range(npol):
            uvout['pol'] = y['pol'][k]
            uvout['ut'] = utj
            uvout['lst'] = lstj
            #for each baseline
            for i in range(nblc):
                uvwij = y['uvw'][:,i,j]
                i0i = y['i0'][i]
                j0i = y['j0'][i]
                #this may work
                preamble = uvwij, tj, (i0i, j0i)
                data = y['x'][:,i,k,j]
                uvout.write(preamble, data)
            #endfor (baseline)
        #endfor (polarization)
    #endfor (time)
    del(uv) #done
    return ufilename
#End of udbfile_write
def readXdata(filename, filter=False):
    '''This routine reads the data from a single IDB or UDB file.
    Optional Keywords: filter boolean--if True, returns only
    non-zero frequencies if False (default), returns all
    frequencies. This differs from Dale's version in that it
    includes all correlations, drops the tp_only option, and the
    outputs that are not in the UDB files.

    Returns a dict of visibilities, powers, uvw, times, delays etc.,
    or [] when the file is unreadable or contains no good times.
    '''
    # Open uv file for reading
    uv = aipy.miriad.UV(filename)
    # Read all to get a time array
    utcount = 0
    ut = 0.0
    # skip out bad Miriad files
    try:
        # First pass: count the number of distinct time stamps.
        for preamble, data in uv.all():
            # Look for time change
            if preamble[1] != ut:
                ut = preamble[1]
                utcount = utcount+1
            #endif
        #endfor
        uv.rewind()
    except:
        print "UDB_UTIL.READXDATA: Bad File: "+filename
        return []
    #endexcept
    nf_orig = len(uv['sfreq'])
    good_idx = np.arange(nf_orig)
    if filter:
        good_idx = []
        # Read a bunch of records to get number of good frequencies,
        # i.e. those with at least some non-zero data. Read 20
        # records for baseline 1-2, XX pol
        uv.select('antennae',0,2,include=True)
        uv.select('polarization',-5,-5,include=True)
        for i in range(20):
            preamble, data = uv.read()
            idx, = data.nonzero()
            # Keep the largest set of non-zero channels seen so far.
            if len(idx) > len(good_idx):
                good_idx = copy.copy(idx)
        uv.select('clear',0,0)
        uv.rewind()
    #endif
    #set up outputs
    nf = len(good_idx)
    print 'NF: ', nf
    freq = uv['sfreq'][good_idx]
    npol = uv['npol']
    # Miriad linear polarization codes: XX, YY, XY, YX.
    polarr = np.array([-5, -6, -7, -8])
    nants = uv['nants']
    if 'nsamples' in uv.vartable:
        nsamples = uv['nsamples']
    else:
        nsamples = None
    # nbl cross-correlation baselines plus nants autocorrelations.
    nbl = nants*(nants-1)/2
    nblc = nbl+nants
    # all-correlations, add a mask for the output vis array
    outx0 = np.zeros((nf, nblc, npol, utcount),dtype=np.complex64)
    omask = np.zeros((nf, nblc, npol, utcount),dtype=np.int32)
    outx = ma.masked_array(outx0, mask = omask)
    i0array = np.zeros((nblc, utcount), dtype = np.int32)
    j0array = np.zeros((nblc, utcount), dtype = np.int32)
    outpx = np.zeros((3*nf*nants, utcount), dtype=np.float)
    outpy = np.zeros((3*nf*nants, utcount), dtype=np.float)
    uvwarray = np.zeros((3, nblc, utcount), dtype=np.float)
    delayarray = np.zeros((nants, utcount), dtype=np.float)
    #lists for time arrays
    utarray = []
    timearray = []
    lstarray = []
    # l indexes the current output time slot; -1 means none started yet.
    l = -1
    tprev = 0
    tsav = 0
    # Use antennalist if available
    if 'antlist' in uv.vartable:
        ants = strip_non_printable(uv['antlist'])
        antlist = map(int, ants.split())
    else:
        antlist = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]
    #endelse
    #keep autocorrelations in the array
    bl2ord = p.bl_list()
    for ij in range(len(antlist)):
        bl2ord[ij, ij] = nbl+ij
    #endfor
    # Second pass: fill the output arrays record by record.
    for preamble, data in uv.all():
        uvw, t, (i0,j0) = preamble
        i = antlist.index(i0+1)
        j = antlist.index(j0+1)
        if i > j:
            # Reverse order of indices
            j = antlist.index(i0+1)
            i = antlist.index(j0+1)
        #endif
        # Assumes uv['pol'] is one of -5, -6, -7, -8
        k = -5 - uv['pol']
        if filter:
            if len(data.nonzero()[0]) == nf and uv['ut'] > 0:
                if t != tprev:
                    # New time
                    if t < tprev:
                        # Some kind of glitch, so skip this record
                        continue
                    l += 1
                    if l == utcount:
                        break
                    #endif
                    tprev = t
                    timearray.append(t)
                    utarray.append(uv['ut'])
                    try:
                        lstarray.append(uv['lst'])
                    except:
                        # lst missing from this file; filled in later from times.
                        pass
                    #endexcept
                    # Keep only the good channels of the power data.
                    xdata0 = uv['xsampler'].reshape(nf_orig,nants,3)
                    xdata = xdata0[good_idx, :, :]
                    outpx[:,l] = xdata.reshape(nf*nants*3)
                    ydata0 = uv['ysampler'].reshape(nf_orig,nants,3)
                    ydata = ydata0[good_idx, :, :]
                    outpy[:,l] = ydata.reshape(nf*nants*3)
                    delayarray[:,l] = uv['delay']
                #endif
                outx[:, bl2ord[i,j],k,l] = data[data.nonzero()]
                # uvw and antenna numbers only need storing once per baseline,
                # on the last polarization (k == 3, i.e. YX).
                if k == 3:
                    uvwarray[:, bl2ord[i,j],l] = uvw
                    i0array[bl2ord[i,j],l] = i0
                    j0array[bl2ord[i,j],l] = j0
                #endif
            #endif, for len(data.nonzero()) == nf and uv['ut'] > 0
        else:
            if uv['ut'] > 0:
                if t != tprev:
                    # New time
                    if t < tprev:
                        # Some kind of glitch, so skip this record
                        continue
                    l += 1
                    if l == utcount:
                        break
                    #endif
                    tprev = t
                    timearray.append(t)
                    utarray.append(uv['ut'])
                    try:
                        lstarray.append(uv['lst'])
                    except:
                        pass
                    #endexcept
                    xdata = uv['xsampler']
                    outpx[:,l] = xdata
                    ydata = uv['ysampler']
                    outpy[:,l] = ydata
                    delayarray[:,l] = uv['delay']
                #endif
                outx[:,bl2ord[i,j],k,l] = data
                if k == 3:
                    uvwarray[:,bl2ord[i,j],l] = uvw
                    i0array[bl2ord[i,j],l] = i0
                    j0array[bl2ord[i,j],l] = j0
                #endif
            #endif (uv['ut'] > 0)
        #endelse (not filter)
    #endfor
    # Truncate in case of early end of data, return if there is no good data
    nt = len(timearray)
    if nt == 0:
        out = []
    else:
        outpx = outpx[:,:nt]
        outpy = outpy[:,:nt]
        outx = outx[:,:,:,:nt]
        uvwarray = uvwarray[:, :, :nt]
        delayarray = delayarray[:, :nt]
        if len(lstarray) != 0:
            pass
        else:
            # No lst in file: compute it from the Julian date times.
            tarray = Time(timearray,format='jd')
            for t in tarray:
                lstarray.append(el.eovsa_lst(t))
            #endfor
        #endelse
        #i0 and j0 should always be the same
        i0array = i0array[:,0]
        j0array = j0array[:,0]
        #timearray, lstarray and utarray are lists
        out = {'x':outx,'uvw':uvwarray,'time':np.array(timearray),'px':outpx,'py':outpy,
               'i0':i0array,'j0':j0array,'lst':np.array(lstarray),'pol':polarr,
               'delay':delayarray,'ut':np.array(utarray),'file0':filename,'fghz':freq}
        if nsamples is None:
            pass
        else:
            out.update({'nsamples':nsamples})
        #endelse
    return out
#end of readXdata
def concatXdata(x0, x):
    '''Concatenate two readXdata output dictionaries along the time axis.

    Time-dependent arrays ('x', 'uvw', 'px', 'py', 'delay', 'time',
    'lst', 'ut') from x and x0 are concatenated; per-dataset metadata
    ('i0', 'j0', 'pol', 'file0', 'fghz') is carried over from x0.

    If the two inputs disagree on the number of frequencies, x is
    returned unchanged: typically that mismatch means the first dataset
    has crappy data (at least that was true in Jan 2017, jmm), so keep
    x and ditch x0.
    '''
    nf0 = np.shape(x0['x'])[0]
    nf = np.shape(x['x'])[0]
    if nf != nf0:
        print('Frequency mismatch -- throwing out the first Xdata')
        return x
    #endif
    # vis array is masked; ma.concatenate preserves the mask
    outx = ma.concatenate((x0['x'], x['x']), axis=3)
    # uvw array, time is the last axis here too
    uvwarray = np.concatenate((x0['uvw'], x['uvw']), axis=2)
    # baseline index arrays are time-independent -- keep x0's copies
    i0array = x0['i0']
    j0array = x0['j0']
    # polarizations
    polarr = x0['pol']
    # power (sampler) arrays
    outpx = np.concatenate((x0['px'], x['px']), axis=1)
    outpy = np.concatenate((x0['py'], x['py']), axis=1)
    # delays
    delayarray = np.concatenate((x0['delay'], x['delay']), axis=1)
    # time-like 1-d arrays
    timearray = np.concatenate((x0['time'], x['time']))
    lstarray = np.concatenate((x0['lst'], x['lst']))
    utarray = np.concatenate((x0['ut'], x['ut']))
    # done
    out = {'x':outx,'uvw':uvwarray,'time':timearray,'px':outpx,'py':outpy,
           'i0':i0array,'j0':j0array,'lst':lstarray,'pol':polarr,'delay':delayarray,
           'ut':utarray,'file0':x0['file0'],'fghz':x0['fghz']}
    return out
#end of concatXdata
def valid_miriad_dataset(filelist0):
    '''Returns True or False for valid or invalid Miriad datasets,
    checks for existence of the directory, and then for flags, header,
    vartable, and visdata. Also returns names of valid datasets, and
    invalid ones.

    Parameters
    ----------
    filelist0 : str or list of str
        Dataset directory name(s); a bare string is treated as a
        one-element list.

    Returns
    -------
    tuple or bool
        (otp, ok_filelist, bad_filelist), where otp is a list of
        True/False flags parallel to the input, and ok/bad hold the
        valid/invalid dataset names. NOTE: returns a bare False (not a
        tuple) for empty input, so callers that unpack the result must
        check for empty input first (udbfile_create does).
    '''
    if len(filelist0) == 0:
        print('valid_miriad_file: No files input')
        return False
    #endif
    # need a list input, otherwise all sorts of things are messed up
    if not isinstance(filelist0, list):
        filelist = [filelist0]
    else:
        filelist = filelist0
    #endelse
    otp = []
    ok_filelist = []
    bad_filelist = []
    # a valid Miriad dataset is a directory holding these four files
    required = ('flags', 'header', 'vartable', 'visdata')
    for filename in filelist:
        tempvar = (os.path.isdir(filename) and
                   all(os.path.isfile(filename + '/' + r) for r in required))
        otp.append(tempvar)
        if tempvar:
            ok_filelist.append(filename)
        else:
            bad_filelist.append(filename)
    #endfor
    return otp, ok_filelist, bad_filelist
#End of valid_miriad_dataset
def udbfile_create(filelist, ufilename, nsec=60):
    '''Given a list of IDB filenames, create the appropriate UDB file, by
    averaging over energy bands, but keep 1 second time resolution.

    Reads each valid Miriad dataset in filelist with readXdata(),
    concatenates the results in time with concatXdata(), time-averages
    with avXdata(nsec=nsec) and writes the output via udbfile_write().
    Returns udbfile_write()'s result on success, ufilename if no good
    data was read, or [] when the input list is empty or holds no
    valid datasets.'''
    print 'UDBFILE_CREATE: UFILENAME: ', ufilename
    if len(filelist) == 0:
        print 'udbfile_create: No files input'
        return []
    #endif
    # Be sure that files exist, and has all of the appropriate elements
    filelist_test, ok_filelist, bad_filelist = valid_miriad_dataset(filelist)
    if len(ok_filelist) == 0:
        print 'udbfile_create: No valid files input'
        return []
    #endif
    #For each file, read in the data, then concatenate and average
    ufile_out = []
    fc = 0  # number of files actually read
    for filename in ok_filelist:
        xj = readXdata(filename)
        fc = fc+1
        if fc == 1:
            # first file starts the accumulator
            x = xj
            print x['x'].shape
        else:
            # later files are appended along the time axis
            x = concatXdata(x, xj)
            print x['x'].shape
        #endelse
        #Now do the time average
    #endfor
    if fc == 0:
        print 'UDB_UTIL: No good data?'
        return ufilename
    #endif
    #average data here
    y = avXdata(x,nsec=nsec)
    print y['x'].shape
    #Now write the file
    ufile_out = udbfile_write(y, ok_filelist[0], ufilename)
    print 'UDBFILE_CREATE: UFILE_OUT: ', ufile_out
    return ufile_out
#End of udbfile_create
def xpx_comp(x):
    ''' Compares autocorrelations with Power calculations.

    For each of the 16 antennas, divides the X and Y autocorrelation
    spectra (baseline slots 120..135 in x['x']) by the corresponding
    sampler power, scaled by M and the correlator factor xfactor.

    Parameters
    ----------
    x : dict
        A readXdata-style output; uses keys 'time', 'x' (array with
        shape (nf, nblc, npol, nt)), 'px' and 'py' (flat sampler
        arrays of shape (nf*16*3, nt)).

    Returns
    -------
    dict
        {'xcfrac': ..., 'ycfrac': ...}, each an (nf, 16, nt) float
        array. NOTE: if x['x'] is complex, only the real part lands in
        the float output (same as the original behavior).
    '''
    xfactor = 64  # correlator scale factor; could be 16??
    nt = len(x['time'])
    nf = x['x'].shape[0]
    # Use reshape instead of assigning .shape so the caller's arrays
    # are not silently reshaped in place.
    px = x['px'].reshape(nf, 16, 3, nt)
    py = x['py'].reshape(nf, 16, 3, nt)
    # Normalization from antenna 0, sampler channel 2, first time --
    # TODO confirm the semantics of the 1792.0 constant.
    M = px[:, 0, 2, 0]/1792.0
    # np.float was removed in NumPy 1.24; the builtin float is equivalent.
    xcfrac = np.zeros((nf, 16, nt), dtype=float)
    ycfrac = np.zeros((nf, 16, nt), dtype=float)
    i = 0   # X polarization index
    i1 = 1  # Y polarization index
    for kk in range(16):
        # autocorrelation slot for antenna kk (autos start at baseline 120)
        k = kk+120
        for n in range(nt):
            xcfrac[:, kk, n] = x['x'][:, k, i, n]*M[:]/(xfactor*px[:, kk, 0, n])
            ycfrac[:, kk, n] = x['x'][:, k, i1, n]*M[:]/(xfactor*py[:, kk, 0, n])
        #endfor n
    #endfor kk
    out = {'xcfrac': xcfrac, 'ycfrac': ycfrac}
    return out
#End of xpx_comp
|
QISKit/qiskit-sdk-py | refs/heads/master | qiskit/assembler/assemble_schedules.py | 1 | # -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2019.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Assemble function for converting a list of circuits into a qobj"""
from qiskit.exceptions import QiskitError
from qiskit.pulse.commands import (PulseInstruction, AcquireInstruction,
DelayInstruction, SamplePulse)
from qiskit.qobj import (PulseQobj, QobjExperimentHeader,
PulseQobjInstruction, PulseQobjExperimentConfig,
PulseQobjExperiment, PulseQobjConfig, PulseLibraryItem)
from qiskit.qobj.converters import InstructionToQobjConverter, LoConfigConverter
def assemble_schedules(schedules, qobj_id, qobj_header, run_config):
    """Assembles a list of schedules into a qobj which can be run on the backend.
    Args:
        schedules (list[Schedule]): schedules to assemble
        qobj_id (int): identifier for the generated qobj
        qobj_header (QobjHeader): header to pass to the results
        run_config (RunConfig): configuration of the runtime environment
    Returns:
        PulseQobj: the Qobj to be run on the backends
    Raises:
        QiskitError: when invalid schedules or configs are provided
    """
    # A custom instruction converter class may be supplied on run_config;
    # otherwise fall back to the default converter.
    if hasattr(run_config, 'instruction_converter'):
        instruction_converter = run_config.instruction_converter
    else:
        instruction_converter = InstructionToQobjConverter
    qobj_config = run_config.to_dict()
    qubit_lo_freq = qobj_config.get('qubit_lo_freq', None)
    if qubit_lo_freq is None:
        raise QiskitError('qubit_lo_freq must be supplied.')
    meas_lo_freq = qobj_config.get('meas_lo_freq', None)
    if meas_lo_freq is None:
        raise QiskitError('meas_lo_freq must be supplied.')
    # These keys are popped so they do not end up in the serialized qobj
    # config; they only parameterize the converters/validation below.
    qubit_lo_range = qobj_config.pop('qubit_lo_range', None)
    meas_lo_range = qobj_config.pop('meas_lo_range', None)
    meas_map = qobj_config.pop('meas_map', None)
    instruction_converter = instruction_converter(PulseQobjInstruction, **qobj_config)
    lo_converter = LoConfigConverter(PulseQobjExperimentConfig,
                                     qubit_lo_range=qubit_lo_range,
                                     meas_lo_range=meas_lo_range,
                                     **qobj_config)
    memory_slot_size = 0
    # Pack everything into the Qobj
    qobj_schedules = []
    user_pulselib = {}
    for idx, schedule in enumerate(schedules):
        # instructions
        max_memory_slot = 0
        qobj_instructions = []
        # Instructions are returned as tuple of shifted time and instruction
        for shift, instruction in schedule.instructions:
            # TODO: support conditional gate
            if isinstance(instruction, DelayInstruction):
                # delay instructions are ignored as timing is explicit within qobj
                continue
            elif isinstance(instruction, PulseInstruction):
                name = instruction.command.name
                # A name collision with different samples gets a unique,
                # hash-suffixed name so both pulses land in the library.
                if name in user_pulselib and instruction.command != user_pulselib[name]:
                    name = "{0}-{1:x}".format(name, hash(instruction.command.samples.tostring()))
                    instruction = PulseInstruction(
                        command=SamplePulse(name=name, samples=instruction.command.samples),
                        name=instruction.name,
                        channel=instruction.channels[0])
                # add samples to pulse library
                user_pulselib[name] = instruction.command
            elif isinstance(instruction, AcquireInstruction):
                max_memory_slot = max(max_memory_slot,
                                      *[slot.index for slot in instruction.mem_slots])
                if meas_map:
                    # verify all acquires satisfy meas_map
                    _validate_meas_map(instruction, meas_map)
            converted_instruction = instruction_converter(shift, instruction)
            qobj_instructions.append(converted_instruction)
        # memory slot size is memory slot index + 1 because index starts from zero
        exp_memory_slot_size = max_memory_slot + 1
        memory_slot_size = max(memory_slot_size, exp_memory_slot_size)
        # experiment header
        # TODO: add other experimental header items (see circuit assembler)
        qobj_experiment_header = QobjExperimentHeader(
            memory_slots=exp_memory_slot_size,
            name=schedule.name or 'Experiment-%d' % idx
        )
        qobj_schedules.append({
            'header': qobj_experiment_header,
            'instructions': qobj_instructions
        })
    # set number of memoryslots
    qobj_config['memory_slots'] = memory_slot_size
    # setup pulse_library
    qobj_config['pulse_library'] = [PulseLibraryItem(name=pulse.name, samples=pulse.samples)
                                    for pulse in user_pulselib.values()]
    # create qobj experiment field
    experiments = []
    schedule_los = qobj_config.pop('schedule_los', [])
    if len(schedule_los) == 1:
        # A single LO dict applies globally to every experiment.
        lo_dict = schedule_los[0]
        # update global config
        q_los = lo_converter.get_qubit_los(lo_dict)
        if q_los:
            qobj_config['qubit_lo_freq'] = q_los
        m_los = lo_converter.get_meas_los(lo_dict)
        if m_los:
            qobj_config['meas_lo_freq'] = m_los
    if schedule_los:
        # multiple frequency setups
        if len(qobj_schedules) == 1:
            # frequency sweep
            for lo_dict in schedule_los:
                experiments.append(PulseQobjExperiment(
                    instructions=qobj_schedules[0]['instructions'],
                    header=qobj_schedules[0]['header'],
                    config=lo_converter(lo_dict)
                ))
        elif len(qobj_schedules) == len(schedule_los):
            # n:n setup
            for lo_dict, schedule in zip(schedule_los, qobj_schedules):
                experiments.append(PulseQobjExperiment(
                    instructions=schedule['instructions'],
                    header=schedule['header'],
                    config=lo_converter(lo_dict)
                ))
        else:
            raise QiskitError('Invalid LO setting is specified. '
                              'The LO should be configured for each schedule, or '
                              'single setup for all schedules (unique), or '
                              'multiple setups for a single schedule (frequency sweep),'
                              'or no LO configured at all.')
    else:
        # unique frequency setup
        for schedule in qobj_schedules:
            experiments.append(PulseQobjExperiment(
                instructions=schedule['instructions'],
                header=schedule['header'],
            ))
    qobj_config = PulseQobjConfig(**qobj_config)
    return PulseQobj(qobj_id=qobj_id,
                     config=qobj_config,
                     experiments=experiments,
                     header=qobj_header)
def _validate_meas_map(acquire, meas_map):
"""Validate all qubits tied in meas_map are to be acquired."""
meas_map_set = [set(m) for m in meas_map]
# Verify that each qubit is listed once in measurement map
measured_qubits = {acq_ch.index for acq_ch in acquire.acquires}
tied_qubits = set()
for meas_qubit in measured_qubits:
for map_inst in meas_map_set:
if meas_qubit in map_inst:
tied_qubits |= map_inst
if measured_qubits != tied_qubits:
raise QiskitError('Qubits to be acquired: {0} do not satisfy required qubits '
'in measurement map: {1}'.format(measured_qubits, tied_qubits))
return True
|
pdellaert/ansible | refs/heads/devel | lib/ansible/modules/cloud/amazon/ec2_metric_alarm.py | 11 | #!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = """
module: ec2_metric_alarm
short_description: "Create/update or delete AWS Cloudwatch 'metric alarms'"
description:
- Can create or delete AWS metric alarms.
- Metrics you wish to alarm on must already exist.
version_added: "1.6"
author: "Zacharie Eakin (@Zeekin)"
options:
state:
description:
- register or deregister the alarm
required: true
choices: ['present', 'absent']
name:
description:
- Unique name for the alarm
required: true
metric:
description:
- Name of the monitored metric (e.g. CPUUtilization)
- Metric must already exist
required: false
namespace:
description:
- Name of the appropriate namespace ('AWS/EC2', 'System/Linux', etc.), which determines the category it will appear under in cloudwatch
required: false
statistic:
description:
- Operation applied to the metric
- Works in conjunction with period and evaluation_periods to determine the comparison value
required: false
choices: ['SampleCount','Average','Sum','Minimum','Maximum']
comparison:
description:
- Determines how the threshold value is compared
required: false
choices: ['<=','<','>','>=']
threshold:
description:
- Sets the min/max bound for triggering the alarm
required: false
period:
description:
- The time (in seconds) between metric evaluations
required: false
evaluation_periods:
description:
- The number of times in which the metric is evaluated before final calculation
required: false
unit:
description:
- The threshold's unit of measurement
required: false
choices:
- 'Seconds'
- 'Microseconds'
- 'Milliseconds'
- 'Bytes'
- 'Kilobytes'
- 'Megabytes'
- 'Gigabytes'
- 'Terabytes'
- 'Bits'
- 'Kilobits'
- 'Megabits'
- 'Gigabits'
- 'Terabits'
- 'Percent'
- 'Count'
- 'Bytes/Second'
- 'Kilobytes/Second'
- 'Megabytes/Second'
- 'Gigabytes/Second'
- 'Terabytes/Second'
- 'Bits/Second'
- 'Kilobits/Second'
- 'Megabits/Second'
- 'Gigabits/Second'
- 'Terabits/Second'
- 'Count/Second'
- 'None'
description:
description:
- A longer description of the alarm
required: false
dimensions:
description:
- Describes to what the alarm is applied
required: false
alarm_actions:
description:
- A list of the names action(s) taken when the alarm is in the 'alarm' status, denoted as Amazon Resource Name(s)
required: false
insufficient_data_actions:
description:
- A list of the names of action(s) to take when the alarm is in the 'insufficient_data' status
required: false
ok_actions:
description:
- A list of the names of action(s) to take when the alarm is in the 'ok' status, denoted as Amazon Resource Name(s)
required: false
extends_documentation_fragment:
- aws
- ec2
"""
EXAMPLES = '''
- name: create alarm
ec2_metric_alarm:
state: present
region: ap-southeast-2
name: "cpu-low"
metric: "CPUUtilization"
namespace: "AWS/EC2"
statistic: Average
comparison: "<="
threshold: 5.0
period: 300
evaluation_periods: 3
unit: "Percent"
description: "This will alarm when a bamboo slave's cpu usage average is lower than 5% for 15 minutes "
dimensions: {'InstanceId':'i-XXX'}
alarm_actions: ["action1","action2"]
- name: Create an alarm to recover a failed instance
ec2_metric_alarm:
state: present
region: us-west-1
name: "recover-instance"
metric: "StatusCheckFailed_System"
namespace: "AWS/EC2"
statistic: "Minimum"
comparison: ">="
threshold: 1.0
period: 60
evaluation_periods: 2
unit: "Count"
description: "This will recover an instance when it fails"
dimensions: {"InstanceId":'i-XXX'}
alarm_actions: ["arn:aws:automate:us-west-1:ec2:recover"]
'''
try:
import boto.ec2.cloudwatch
from boto.ec2.cloudwatch import MetricAlarm
from boto.exception import BotoServerError, NoAuthHandlerFound
except ImportError:
pass # Taken care of by ec2.HAS_BOTO
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import (AnsibleAWSError, HAS_BOTO, connect_to_aws, ec2_argument_spec,
get_aws_connection_info)
def create_metric_alarm(connection, module):
    """Create a new CloudWatch metric alarm, or update an existing one.

    connection is a boto CloudWatch connection; module is the
    AnsibleModule whose params describe the desired alarm. Terminates
    via module.exit_json (reporting the resulting alarm state) or
    module.fail_json on boto errors; does not return a value.
    """
    name = module.params.get('name')
    metric = module.params.get('metric')
    namespace = module.params.get('namespace')
    statistic = module.params.get('statistic')
    comparison = module.params.get('comparison')
    threshold = module.params.get('threshold')
    period = module.params.get('period')
    evaluation_periods = module.params.get('evaluation_periods')
    unit = module.params.get('unit')
    description = module.params.get('description')
    dimensions = module.params.get('dimensions')
    alarm_actions = module.params.get('alarm_actions')
    insufficient_data_actions = module.params.get('insufficient_data_actions')
    ok_actions = module.params.get('ok_actions')
    alarms = None
    try:
        alarms = connection.describe_alarms(alarm_names=[name])
    except BotoServerError as e:
        module.fail_json(msg="Failed to describe alarm %s: %s" % (name, str(e)), exception=traceback.format_exc())
    if not alarms:
        # No alarm of that name exists yet: create it from scratch.
        alm = MetricAlarm(
            name=name,
            metric=metric,
            namespace=namespace,
            statistic=statistic,
            comparison=comparison,
            threshold=threshold,
            period=period,
            evaluation_periods=evaluation_periods,
            unit=unit,
            description=description,
            dimensions=dimensions,
            alarm_actions=alarm_actions,
            insufficient_data_actions=insufficient_data_actions,
            ok_actions=ok_actions
        )
        try:
            connection.create_alarm(alm)
            changed = True
            # Re-read so exit_json below reports the server-side state.
            alarms = connection.describe_alarms(alarm_names=[name])
        except BotoServerError as e:
            module.fail_json(msg="Failed to create alarm %s: %s" % (name, str(e)), exception=traceback.format_exc())
    else:
        # Alarm exists: copy over any scalar attribute that differs.
        alarm = alarms[0]
        changed = False
        for attr in ('comparison', 'metric', 'namespace', 'statistic', 'threshold', 'period', 'evaluation_periods', 'unit', 'description'):
            if getattr(alarm, attr) != module.params.get(attr):
                changed = True
                setattr(alarm, attr, module.params.get(attr))
        # this is to deal with a current bug where you cannot assign '<=>' to the comparator when modifying an existing alarm
        comparison = alarm.comparison
        comparisons = {'<=': 'LessThanOrEqualToThreshold', '<': 'LessThanThreshold', '>=': 'GreaterThanOrEqualToThreshold', '>': 'GreaterThanThreshold'}
        alarm.comparison = comparisons[comparison]
        # Dimensions: boto returns list values, so normalize the module's
        # scalar values to one-element lists before comparing.
        dim1 = module.params.get('dimensions')
        dim2 = alarm.dimensions
        for keys in dim1:
            if not isinstance(dim1[keys], list):
                dim1[keys] = [dim1[keys]]
            if keys not in dim2 or dim1[keys] != dim2[keys]:
                changed = True
                setattr(alarm, 'dimensions', dim1)
        for attr in ('alarm_actions', 'insufficient_data_actions', 'ok_actions'):
            action = module.params.get(attr) or []
            # Boto and/or ansible may provide same elements in lists but in different order.
            # Compare on sets since they do not need any order.
            if set(getattr(alarm, attr)) != set(action):
                changed = True
                setattr(alarm, attr, module.params.get(attr))
        try:
            if changed:
                # boto's create_alarm doubles as the update call.
                connection.create_alarm(alarm)
        except BotoServerError as e:
            module.fail_json(msg=str(e))
    result = alarms[0]
    module.exit_json(changed=changed, name=result.name,
                     actions_enabled=result.actions_enabled,
                     alarm_actions=result.alarm_actions,
                     alarm_arn=result.alarm_arn,
                     comparison=result.comparison,
                     description=result.description,
                     dimensions=result.dimensions,
                     evaluation_periods=result.evaluation_periods,
                     insufficient_data_actions=result.insufficient_data_actions,
                     last_updated=result.last_updated,
                     metric=result.metric,
                     namespace=result.namespace,
                     ok_actions=result.ok_actions,
                     period=result.period,
                     state_reason=result.state_reason,
                     state_value=result.state_value,
                     statistic=result.statistic,
                     threshold=result.threshold,
                     unit=result.unit)
def delete_metric_alarm(connection, module):
    """Delete the CloudWatch alarm named by the module's 'name' param.

    Reports changed=True via module.exit_json when an alarm was
    deleted, changed=False when no such alarm exists, and calls
    module.fail_json on boto errors.
    """
    alarm_name = module.params.get('name')
    existing = None
    try:
        existing = connection.describe_alarms(alarm_names=[alarm_name])
    except BotoServerError as e:
        module.fail_json(msg="Failed to describe alarm %s: %s" % (alarm_name, str(e)), exception=traceback.format_exc())
    if not existing:
        # Nothing to delete -- the alarm is already gone.
        module.exit_json(changed=False)
    else:
        try:
            connection.delete_alarms([alarm_name])
            module.exit_json(changed=True)
        except BotoServerError as e:
            module.fail_json(msg=str(e))
def main():
    """Ansible entry point: parse module args, connect to CloudWatch in
    the requested region, then create/update or delete the alarm
    according to the 'state' parameter."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            name=dict(required=True, type='str'),
            metric=dict(type='str'),
            namespace=dict(type='str'),
            statistic=dict(type='str', choices=['SampleCount', 'Average', 'Sum', 'Minimum', 'Maximum']),
            comparison=dict(type='str', choices=['<=', '<', '>', '>=']),
            threshold=dict(type='float'),
            period=dict(type='int'),
            unit=dict(type='str', choices=['Seconds', 'Microseconds', 'Milliseconds', 'Bytes', 'Kilobytes', 'Megabytes', 'Gigabytes', 'Terabytes',
                                           'Bits', 'Kilobits', 'Megabits', 'Gigabits', 'Terabits', 'Percent', 'Count', 'Bytes/Second', 'Kilobytes/Second',
                                           'Megabytes/Second', 'Gigabytes/Second', 'Terabytes/Second', 'Bits/Second', 'Kilobits/Second', 'Megabits/Second',
                                           'Gigabits/Second', 'Terabits/Second', 'Count/Second', 'None']),
            evaluation_periods=dict(type='int'),
            description=dict(type='str'),
            dimensions=dict(type='dict', default={}),
            alarm_actions=dict(type='list'),
            insufficient_data_actions=dict(type='list'),
            ok_actions=dict(type='list'),
            state=dict(default='present', choices=['present', 'absent']),
        )
    )
    module = AnsibleModule(argument_spec=argument_spec)
    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')
    state = module.params.get('state')
    region, ec2_url, aws_connect_params = get_aws_connection_info(module)
    if region:
        try:
            connection = connect_to_aws(boto.ec2.cloudwatch, region, **aws_connect_params)
        except (NoAuthHandlerFound, AnsibleAWSError) as e:
            module.fail_json(msg=str(e))
    else:
        module.fail_json(msg="region must be specified")
    # Dispatch on the requested state; both helpers exit the module.
    if state == 'present':
        create_metric_alarm(connection, module)
    elif state == 'absent':
        delete_metric_alarm(connection, module)
if __name__ == '__main__':
main()
|
litchfield/django | refs/heads/master | tests/admin_widgets/tests.py | 1 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import gettext
import os
from datetime import datetime, timedelta
from importlib import import_module
from unittest import skipIf
from django import forms
from django.conf import settings
from django.contrib import admin
from django.contrib.admin import widgets
from django.contrib.admin.tests import AdminSeleniumWebDriverTestCase
from django.contrib.auth.models import User
from django.core.files.storage import default_storage
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.urlresolvers import reverse
from django.db.models import CharField, DateField
from django.test import SimpleTestCase, TestCase, override_settings
from django.utils import six, translation
from . import models
from .widgetadmin import site as widget_admin_site
try:
import pytz
except ImportError:
pytz = None
class TestDataMixin(object):
    """Class-level fixtures shared by the admin widget tests: a
    superuser ('super'), a second staff user ('testser'), and one Car
    owned by each so owner-filtered formfield choices can be tested."""
    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            pk=100, username='super', first_name='Super', last_name='User', email='super@example.com',
            password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=True,
            is_staff=True, last_login=datetime(2007, 5, 30, 13, 20, 10),
            date_joined=datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u2 = User.objects.create(
            pk=101, username='testser', first_name='Add', last_name='User', email='auser@example.com',
            password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=False,
            is_staff=True, last_login=datetime(2007, 5, 30, 13, 20, 10),
            date_joined=datetime(2007, 5, 30, 13, 20, 10)
        )
        # One car per user (note the pre-hashed sha1 passwords above match
        # the SHA1PasswordHasher enabled via override_settings on the tests).
        models.Car.objects.create(id=1, owner=cls.u1, make='Volkswagon', model='Passat')
        models.Car.objects.create(id=2, owner=cls.u2, make='BMW', model='M3')
class SeleniumDataMixin(object):
    """Per-test fixture for Selenium-based tests: creates the 'super'
    admin user in setUp (rather than once per class)."""
    def setUp(self):
        self.u1 = User.objects.create(
            pk=100, username='super', first_name='Super', last_name='User', email='super@example.com',
            password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=True,
            is_staff=True, last_login=datetime(2007, 5, 30, 13, 20, 10),
            date_joined=datetime(2007, 5, 30, 13, 20, 10)
        )
class AdminFormfieldForDBFieldTests(SimpleTestCase):
    """
    Tests for correct behavior of ModelAdmin.formfield_for_dbfield
    """
    def assertFormfield(self, model, fieldname, widgetclass, **admin_overrides):
        """
        Helper to call formfield_for_dbfield for a given model and field name
        and verify that the returned formfield is appropriate.
        """
        # Override any settings on the model admin
        class MyModelAdmin(admin.ModelAdmin):
            pass
        for k in admin_overrides:
            setattr(MyModelAdmin, k, admin_overrides[k])
        # Construct the admin, and ask it for a formfield
        ma = MyModelAdmin(model, admin.site)
        ff = ma.formfield_for_dbfield(model._meta.get_field(fieldname), request=None)
        # "unwrap" the widget wrapper, if needed
        if isinstance(ff.widget, widgets.RelatedFieldWidgetWrapper):
            widget = ff.widget.widget
        else:
            widget = ff.widget
        # Check that we got a field of the right type
        self.assertTrue(
            isinstance(widget, widgetclass),
            "Wrong widget for %s.%s: expected %s, got %s" % (
                model.__class__.__name__,
                fieldname,
                widgetclass,
                type(widget),
            )
        )
        # Return the formfield so that other tests can continue
        return ff
    # Each basic field type should map to its dedicated admin widget.
    def test_DateField(self):
        self.assertFormfield(models.Event, 'start_date', widgets.AdminDateWidget)
    def test_DateTimeField(self):
        self.assertFormfield(models.Member, 'birthdate', widgets.AdminSplitDateTime)
    def test_TimeField(self):
        self.assertFormfield(models.Event, 'start_time', widgets.AdminTimeWidget)
    def test_TextField(self):
        self.assertFormfield(models.Event, 'description', widgets.AdminTextareaWidget)
    def test_URLField(self):
        self.assertFormfield(models.Event, 'link', widgets.AdminURLFieldWidget)
    def test_IntegerField(self):
        self.assertFormfield(models.Event, 'min_age', widgets.AdminIntegerFieldWidget)
    def test_CharField(self):
        self.assertFormfield(models.Member, 'name', widgets.AdminTextInputWidget)
    def test_EmailField(self):
        self.assertFormfield(models.Member, 'email', widgets.AdminEmailInputWidget)
    def test_FileField(self):
        self.assertFormfield(models.Album, 'cover_art', widgets.AdminFileWidget)
    # Relation fields: the chosen widget depends on raw_id_fields /
    # radio_fields / filter_vertical set on the ModelAdmin.
    def test_ForeignKey(self):
        self.assertFormfield(models.Event, 'main_band', forms.Select)
    def test_raw_id_ForeignKey(self):
        self.assertFormfield(models.Event, 'main_band', widgets.ForeignKeyRawIdWidget,
                             raw_id_fields=['main_band'])
    def test_radio_fields_ForeignKey(self):
        ff = self.assertFormfield(models.Event, 'main_band', widgets.AdminRadioSelect,
                                  radio_fields={'main_band': admin.VERTICAL})
        self.assertEqual(ff.empty_label, None)
    def test_many_to_many(self):
        self.assertFormfield(models.Band, 'members', forms.SelectMultiple)
    def test_raw_id_many_to_many(self):
        self.assertFormfield(models.Band, 'members', widgets.ManyToManyRawIdWidget,
                             raw_id_fields=['members'])
    def test_filtered_many_to_many(self):
        self.assertFormfield(models.Band, 'members', widgets.FilteredSelectMultiple,
                             filter_vertical=['members'])
    def test_formfield_overrides(self):
        self.assertFormfield(models.Event, 'start_date', forms.TextInput,
                             formfield_overrides={DateField: {'widget': forms.TextInput}})
    def test_formfield_overrides_widget_instances(self):
        """
        Test that widget instances in formfield_overrides are not shared between
        different fields. (#19423)
        """
        class BandAdmin(admin.ModelAdmin):
            formfield_overrides = {
                CharField: {'widget': forms.TextInput(attrs={'size': '10'})}
            }
        ma = BandAdmin(models.Band, admin.site)
        f1 = ma.formfield_for_dbfield(models.Band._meta.get_field('name'), request=None)
        f2 = ma.formfield_for_dbfield(models.Band._meta.get_field('style'), request=None)
        # Widgets must be distinct copies: each keeps its own maxlength,
        # while both inherit the overridden size attr.
        self.assertNotEqual(f1.widget, f2.widget)
        self.assertEqual(f1.widget.attrs['maxlength'], '100')
        self.assertEqual(f2.widget.attrs['maxlength'], '20')
        self.assertEqual(f2.widget.attrs['size'], '10')
    def test_field_with_choices(self):
        self.assertFormfield(models.Member, 'gender', forms.Select)
    def test_choices_with_radio_fields(self):
        self.assertFormfield(models.Member, 'gender', widgets.AdminRadioSelect,
                             radio_fields={'gender': admin.VERTICAL})
    def test_inheritance(self):
        self.assertFormfield(models.Album, 'backside_art', widgets.AdminFileWidget)
    def test_m2m_widgets(self):
        """m2m fields help text as it applies to admin app (#9321)."""
        class AdvisorAdmin(admin.ModelAdmin):
            filter_vertical = ['companies']
        self.assertFormfield(models.Advisor, 'companies', widgets.FilteredSelectMultiple,
                             filter_vertical=['companies'])
        ma = AdvisorAdmin(models.Advisor, admin.site)
        f = ma.formfield_for_dbfield(models.Advisor._meta.get_field('companies'), request=None)
        self.assertEqual(six.text_type(f.help_text), 'Hold down "Control", or "Command" on a Mac, to select more than one.')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF='admin_widgets.urls')
class AdminFormfieldForDBFieldWithRequestTests(TestDataMixin, TestCase):
    """formfield_for_dbfield behavior that depends on the request user."""
    def test_filter_choices_by_request_user(self):
        """
        Ensure the user can only see their own cars in the foreign key dropdown.
        """
        self.client.login(username="super", password="secret")
        response = self.client.get(reverse('admin:admin_widgets_cartire_add'))
        # super owns the Passat (not the BMW) in the TestDataMixin fixtures.
        self.assertNotContains(response, "BMW M3")
        self.assertContains(response, "Volkswagon Passat")
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF='admin_widgets.urls')
class AdminForeignKeyWidgetChangeList(TestDataMixin, TestCase):
    """Foreign key rendering on the admin changelist page."""
    def setUp(self):
        self.client.login(username="super", password="secret")
    def test_changelist_ForeignKey(self):
        # The car changelist should contain a link to add the FK target (user).
        response = self.client.get(reverse('admin:admin_widgets_car_changelist'))
        self.assertContains(response, '/auth/user/add/')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF='admin_widgets.urls')
class AdminForeignKeyRawIdWidget(TestDataMixin, TestCase):
    """Raw-id foreign key inputs with bad target pks, plus unit checks
    for widgets.url_params_from_lookup_dict."""
    def setUp(self):
        self.client.login(username="super", password="secret")
    def test_nonexistent_target_id(self):
        band = models.Band.objects.create(name='Bogey Blues')
        pk = band.pk
        band.delete()
        post_data = {
            "main_band": '%s' % pk,
        }
        # Try posting with a non-existent pk in a raw id field: this
        # should result in an error message, not a server exception.
        response = self.client.post(reverse('admin:admin_widgets_event_add'), post_data)
        self.assertContains(response,
            'Select a valid choice. That choice is not one of the available choices.')
    def test_invalid_target_id(self):
        for test_str in ('Iñtërnâtiônàlizætiøn', "1234'", -1234):
            # This should result in an error message, not a server exception.
            response = self.client.post(reverse('admin:admin_widgets_event_add'),
                {"main_band": test_str})
            self.assertContains(response,
                'Select a valid choice. That choice is not one of the available choices.')
    def test_url_params_from_lookup_dict_any_iterable(self):
        # Tuples and lists in __in lookups must serialize identically.
        lookup1 = widgets.url_params_from_lookup_dict({'color__in': ('red', 'blue')})
        lookup2 = widgets.url_params_from_lookup_dict({'color__in': ['red', 'blue']})
        self.assertEqual(lookup1, {'color__in': 'red,blue'})
        self.assertEqual(lookup1, lookup2)
    def test_url_params_from_lookup_dict_callable(self):
        # A callable lookup value is called and serialized like its result.
        def my_callable():
            return 'works'
        lookup1 = widgets.url_params_from_lookup_dict({'myfield': my_callable})
        lookup2 = widgets.url_params_from_lookup_dict({'myfield': my_callable()})
        self.assertEqual(lookup1, lookup2)
class FilteredSelectMultipleWidgetTest(SimpleTestCase):
    """Rendering of FilteredSelectMultiple in horizontal and stacked modes."""
    def _check_render(self, stacked, css_class, stacked_flag):
        # Backslash in verbose_name to ensure it is JavaScript escaped.
        widget = widgets.FilteredSelectMultiple('test\\', stacked)
        expected = (
            '<select multiple="multiple" name="test" class="%s">\n</select>'
            '<script type="text/javascript">addEvent(window, "load", function(e) '
            '{SelectFilter.init("id_test", "test\\u005C", %d); });</script>\n'
            % (css_class, stacked_flag)
        )
        self.assertHTMLEqual(widget.render('test', 'test'), expected)
    def test_render(self):
        self._check_render(False, 'selectfilter', 0)
    def test_stacked_render(self):
        self._check_render(True, 'selectfilterstacked', 1)
class AdminDateWidgetTest(SimpleTestCase):
    def test_attrs(self):
        """
        Ensure that user-supplied attrs are used.
        Refs #12073.
        """
        sample = datetime(2007, 12, 1, 9, 30)
        # Default rendering: vDateField class, size 10.
        widget = widgets.AdminDateWidget()
        self.assertHTMLEqual(
            widget.render('test', sample),
            '<input value="2007-12-01" type="text" class="vDateField" name="test" size="10" />',
        )
        # Caller-supplied attrs replace the defaults.
        widget = widgets.AdminDateWidget(attrs={'size': 20, 'class': 'myDateField'})
        self.assertHTMLEqual(
            widget.render('test', sample),
            '<input value="2007-12-01" type="text" class="myDateField" name="test" size="20" />',
        )
class AdminTimeWidgetTest(SimpleTestCase):
    def test_attrs(self):
        """
        Ensure that user-supplied attrs are used.
        Refs #12073.
        """
        sample = datetime(2007, 12, 1, 9, 30)
        # Default rendering: vTimeField class, size 8.
        widget = widgets.AdminTimeWidget()
        self.assertHTMLEqual(
            widget.render('test', sample),
            '<input value="09:30:00" type="text" class="vTimeField" name="test" size="8" />',
        )
        # Caller-supplied attrs replace the defaults.
        widget = widgets.AdminTimeWidget(attrs={'size': 20, 'class': 'myTimeField'})
        self.assertHTMLEqual(
            widget.render('test', sample),
            '<input value="09:30:00" type="text" class="myTimeField" name="test" size="20" />',
        )
class AdminSplitDateTimeWidgetTest(SimpleTestCase):
    def test_render(self):
        # The split widget renders separate date and time inputs inside a
        # <p class="datetime"> wrapper.
        w = widgets.AdminSplitDateTime()
        self.assertHTMLEqual(
            w.render('test', datetime(2007, 12, 1, 9, 30)),
            '<p class="datetime">Date: <input value="2007-12-01" type="text" class="vDateField" name="test_0" size="10" /><br />Time: <input value="09:30:00" type="text" class="vTimeField" name="test_1" size="8" /></p>',
        )

    def test_localization(self):
        # With USE_L10N on and the de-at locale active, both the labels
        # ("Datum"/"Zeit") and the date value format are localized.
        w = widgets.AdminSplitDateTime()
        with self.settings(USE_L10N=True), translation.override('de-at'):
            w.is_localized = True
            self.assertHTMLEqual(
                w.render('test', datetime(2007, 12, 1, 9, 30)),
                '<p class="datetime">Datum: <input value="01.12.2007" type="text" class="vDateField" name="test_0" size="10" /><br />Zeit: <input value="09:30:00" type="text" class="vTimeField" name="test_1" size="8" /></p>',
            )
class AdminURLWidgetTest(SimpleTestCase):
    def test_render(self):
        # An empty value renders just the input; a URL value additionally
        # renders a "Currently:" link and a "Change:" input.
        w = widgets.AdminURLFieldWidget()
        self.assertHTMLEqual(
            w.render('test', ''),
            '<input class="vURLField" name="test" type="url" />'
        )
        self.assertHTMLEqual(
            w.render('test', 'http://example.com'),
            '<p class="url">Currently:<a href="http://example.com">http://example.com</a><br />Change:<input class="vURLField" name="test" type="url" value="http://example.com" /></p>'
        )

    def test_render_idn(self):
        # An internationalized domain appears punycode-encoded in the link's
        # href but unchanged in the visible text and the input value.
        w = widgets.AdminURLFieldWidget()
        self.assertHTMLEqual(
            w.render('test', 'http://example-äüö.com'),
            '<p class="url">Currently: <a href="http://xn--example--7za4pnc.com">http://example-äüö.com</a><br />Change:<input class="vURLField" name="test" type="url" value="http://example-äüö.com" /></p>'
        )

    def test_render_quoting(self):
        # WARNING: Don't use assertHTMLEqual in that testcase!
        # assertHTMLEqual will get rid of some escapes which are tested here!
        w = widgets.AdminURLFieldWidget()
        # Dangerous characters must be percent-encoded in the href while the
        # displayed text keeps the original (HTML-escaped by the template).
        self.assertEqual(
            w.render('test', 'http://example.com/<sometag>some text</sometag>'),
            '<p class="url">Currently: <a href="http://example.com/%3Csometag%3Esome%20text%3C/sometag%3E">http://example.com/&lt;sometag&gt;some text&lt;/sometag&gt;</a><br />Change: <input class="vURLField" name="test" type="url" value="http://example.com/&lt;sometag&gt;some text&lt;/sometag&gt;" /></p>'
        )
        self.assertEqual(
            w.render('test', 'http://example-äüö.com/<sometag>some text</sometag>'),
            '<p class="url">Currently: <a href="http://xn--example--7za4pnc.com/%3Csometag%3Esome%20text%3C/sometag%3E">http://example-äüö.com/&lt;sometag&gt;some text&lt;/sometag&gt;</a><br />Change: <input class="vURLField" name="test" type="url" value="http://example-äüö.com/&lt;sometag&gt;some text&lt;/sometag&gt;" /></p>'
        )
        self.assertEqual(
            w.render('test', 'http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"'),
            '<p class="url">Currently: <a href="http://www.example.com/%C3%A4%22%3E%3Cscript%3Ealert(%22XSS!%22)%3C/script%3E%22">http://www.example.com/%C3%A4&quot;&gt;&lt;script&gt;alert(&quot;XSS!&quot;)&lt;/script&gt;&quot;</a><br />Change: <input class="vURLField" name="test" type="url" value="http://www.example.com/%C3%A4&quot;&gt;&lt;script&gt;alert(&quot;XSS!&quot;)&lt;/script&gt;&quot;" /></p>'
        )
@override_settings(
    PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
    ROOT_URLCONF='admin_widgets.urls',
)
class AdminFileWidgetTests(TestDataMixin, TestCase):
    """Rendering of AdminFileWidget, both editable and read-only."""

    @classmethod
    def setUpTestData(cls):
        super(AdminFileWidgetTests, cls).setUpTestData()
        band = models.Band.objects.create(name='Linkin Park')
        # The cover_art path deliberately contains a backslash.
        cls.album = band.album_set.create(
            name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg'
        )

    def test_render(self):
        w = widgets.AdminFileWidget()
        # NOTE: the expected text contains a literal backslash
        # ("albums\hybrid_theory.jpg"), so the fragments holding it are raw
        # strings; a non-raw '\h' is an invalid escape sequence and raises a
        # SyntaxWarning/DeprecationWarning on modern Pythons.
        self.assertHTMLEqual(
            w.render('test', self.album.cover_art),
            '<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/'
            r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a> '
            '<span class="clearable-file-input">'
            '<input type="checkbox" name="test-clear" id="test-clear_id" /> '
            '<label for="test-clear_id">Clear</label></span><br />'
            'Change: <input type="file" name="test" /></p>' % {
                'STORAGE_URL': default_storage.url(''),
            },
        )
        # A freshly uploaded (not yet saved) file renders as a bare input.
        self.assertHTMLEqual(
            w.render('test', SimpleUploadedFile('test', b'content')),
            '<input type="file" name="test" />',
        )

    def test_readonly_fields(self):
        """
        File widgets should render as a link when they're marked "read only."
        """
        self.client.login(username="super", password="secret")
        response = self.client.get(reverse('admin:admin_widgets_album_change', args=(self.album.id,)))
        # Raw string again for the literal backslash in the link text.
        self.assertContains(
            response,
            '<p><a href="%(STORAGE_URL)salbums/hybrid_theory.jpg">'
            r'albums\hybrid_theory.jpg</a></p>' % {'STORAGE_URL': default_storage.url('')},
            html=True,
        )
        self.assertNotContains(
            response,
            '<input type="file" name="cover_art" id="id_cover_art" />',
            html=True,
        )
        # On the add form there is no current file, so only an empty <p>.
        response = self.client.get(reverse('admin:admin_widgets_album_add'))
        self.assertContains(
            response,
            '<p></p>',
            html=True,
        )
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class ForeignKeyRawIdWidgetTest(TestCase):
    def test_render(self):
        band = models.Band.objects.create(name='Linkin Park')
        band.album_set.create(
            name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg'
        )
        rel = models.Album._meta.get_field('band').remote_field
        w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
        # The widget renders the pk input, a lookup link, and the related
        # object's string representation.
        self.assertHTMLEqual(
            w.render('test', band.pk, attrs={}), (
                '<input type="text" name="test" value="%(bandpk)s" class="vForeignKeyRawIdAdminField" />'
                '<a href="/admin_widgets/band/?_to_field=id" class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
                '&nbsp;<strong>Linkin Park</strong>'
            ) % {'bandpk': band.pk}
        )

    def test_relations_to_non_primary_key(self):
        # Check that ForeignKeyRawIdWidget works with fields which aren't
        # related to the model's primary key.
        apple = models.Inventory.objects.create(barcode=86, name='Apple')
        models.Inventory.objects.create(barcode=22, name='Pear')
        core = models.Inventory.objects.create(
            barcode=87, name='Core', parent=apple
        )
        rel = models.Inventory._meta.get_field('parent').remote_field
        w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
        # The input holds the target field value (barcode), and the lookup
        # URL carries _to_field accordingly.
        self.assertHTMLEqual(
            w.render('test', core.parent_id, attrs={}), (
                '<input type="text" name="test" value="86" class="vForeignKeyRawIdAdminField" />'
                '<a href="/admin_widgets/inventory/?_to_field=barcode" class="related-lookup" id="lookup_id_test" title="Lookup">'
                '</a>&nbsp;<strong>Apple</strong>'
            )
        )

    def test_fk_related_model_not_in_admin(self):
        # FK to a model not registered with admin site. Raw ID widget should
        # have no magnifying glass link. See #16542
        big_honeycomb = models.Honeycomb.objects.create(location='Old tree')
        big_honeycomb.bee_set.create()
        rel = models.Bee._meta.get_field('honeycomb').remote_field
        w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
        self.assertHTMLEqual(
            w.render('honeycomb_widget', big_honeycomb.pk, attrs={}),
            '<input type="text" name="honeycomb_widget" value="%(hcombpk)s" />&nbsp;<strong>Honeycomb object</strong>' % {'hcombpk': big_honeycomb.pk}
        )

    def test_fk_to_self_model_not_in_admin(self):
        # FK to self, not registered with admin site. Raw ID widget should have
        # no magnifying glass link. See #16542
        subject1 = models.Individual.objects.create(name='Subject #1')
        models.Individual.objects.create(name='Child', parent=subject1)
        rel = models.Individual._meta.get_field('parent').remote_field
        w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
        self.assertHTMLEqual(
            w.render('individual_widget', subject1.pk, attrs={}),
            '<input type="text" name="individual_widget" value="%(subj1pk)s" />&nbsp;<strong>Individual object</strong>' % {'subj1pk': subject1.pk}
        )

    def test_proper_manager_for_label_lookup(self):
        # see #9258
        rel = models.Inventory._meta.get_field('parent').remote_field
        w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
        # "Hidden" objects are excluded by the default manager, but the label
        # lookup must still find them so the widget can display the name.
        hidden = models.Inventory.objects.create(
            barcode=93, name='Hidden', hidden=True
        )
        child_of_hidden = models.Inventory.objects.create(
            barcode=94, name='Child of hidden', parent=hidden
        )
        self.assertHTMLEqual(
            w.render('test', child_of_hidden.parent_id, attrs={}), (
                '<input type="text" name="test" value="93" class="vForeignKeyRawIdAdminField" />'
                '<a href="/admin_widgets/inventory/?_to_field=barcode" class="related-lookup" id="lookup_id_test" title="Lookup">'
                '</a>&nbsp;<strong>Hidden</strong>'
            )
        )
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class ManyToManyRawIdWidgetTest(TestCase):
    def test_render(self):
        band = models.Band.objects.create(name='Linkin Park')
        m1 = models.Member.objects.create(name='Chester')
        m2 = models.Member.objects.create(name='Mike')
        band.members.add(m1, m2)
        rel = models.Band._meta.get_field('members').remote_field
        w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site)
        # Multiple selected pks render as a comma-separated value plus the
        # magnifying-glass lookup link.
        self.assertHTMLEqual(
            w.render('test', [m1.pk, m2.pk], attrs={}), (
                '<input type="text" name="test" value="%(m1pk)s,%(m2pk)s" class="vManyToManyRawIdAdminField" />'
                '<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
            ) % dict(m1pk=m1.pk, m2pk=m2.pk)
        )
        # A single pk renders without a comma.
        self.assertHTMLEqual(
            w.render('test', [m1.pk]), (
                '<input type="text" name="test" value="%(m1pk)s" class="vManyToManyRawIdAdminField">'
                '<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
            ) % dict(m1pk=m1.pk)
        )

    def test_m2m_related_model_not_in_admin(self):
        # M2M relationship with model not registered with admin site. Raw ID
        # widget should have no magnifying glass link. See #16542
        consultor1 = models.Advisor.objects.create(name='Rockstar Techie')
        c1 = models.Company.objects.create(name='Doodle')
        c2 = models.Company.objects.create(name='Pear')
        consultor1.companies.add(c1, c2)
        rel = models.Advisor._meta.get_field('companies').remote_field
        w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site)
        self.assertHTMLEqual(
            w.render('company_widget1', [c1.pk, c2.pk], attrs={}),
            '<input type="text" name="company_widget1" value="%(c1pk)s,%(c2pk)s" />' % {'c1pk': c1.pk, 'c2pk': c2.pk}
        )
        self.assertHTMLEqual(
            w.render('company_widget2', [c1.pk]),
            '<input type="text" name="company_widget2" value="%(c1pk)s" />' % {'c1pk': c1.pk}
        )
class RelatedFieldWidgetWrapperTests(SimpleTestCase):
    """Behavior of the can_add/can_change/can_delete flags on the wrapper."""

    def test_no_can_add_related(self):
        rel = models.Individual._meta.get_field('parent').remote_field
        # Wrapping this widget used to fail with a NameError; also check the
        # add-related flag defaults to off.
        wrapped = widgets.RelatedFieldWidgetWrapper(
            widgets.AdminRadioSelect(), rel, widget_admin_site)
        self.assertFalse(wrapped.can_add_related)

    def test_select_multiple_widget_cant_change_delete_related(self):
        rel = models.Individual._meta.get_field('parent').remote_field
        wrapped = widgets.RelatedFieldWidgetWrapper(
            forms.SelectMultiple(), rel, widget_admin_site,
            can_add_related=True,
            can_change_related=True,
            can_delete_related=True,
        )
        # For a multi-select widget, only the "add" link survives wrapping.
        self.assertTrue(wrapped.can_add_related)
        self.assertFalse(wrapped.can_change_related)
        self.assertFalse(wrapped.can_delete_related)

    def test_on_delete_cascade_rel_cant_delete_related(self):
        rel = models.Individual._meta.get_field('soulmate').remote_field
        wrapped = widgets.RelatedFieldWidgetWrapper(
            forms.Select(), rel, widget_admin_site,
            can_add_related=True,
            can_change_related=True,
            can_delete_related=True,
        )
        # This relation keeps add and change, but delete is disallowed.
        self.assertTrue(wrapped.can_add_related)
        self.assertTrue(wrapped.can_change_related)
        self.assertFalse(wrapped.can_delete_related)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF='admin_widgets.urls')
class DateTimePickerSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
    # Browser-driven tests for the admin date and time picker widgets.
    available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
    webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'

    def test_show_hide_date_time_picker_widgets(self):
        """
        Ensure that pressing the ESC key closes the date and time picker
        widgets.
        Refs #17064.
        """
        from selenium.webdriver.common.keys import Keys
        self.admin_login(username='super', password='secret', login_url='/')
        # Open a page that has a date and time picker widgets
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('admin:admin_widgets_member_add')))
        # First, with the date picker widget ---------------------------------
        # Check that the date picker is hidden
        self.assertEqual(
            self.get_css_value('#calendarbox0', 'display'), 'none')
        # Click the calendar icon
        self.selenium.find_element_by_id('calendarlink0').click()
        # Check that the date picker is visible
        self.assertEqual(
            self.get_css_value('#calendarbox0', 'display'), 'block')
        # Press the ESC key
        self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE])
        # Check that the date picker is hidden again
        self.assertEqual(
            self.get_css_value('#calendarbox0', 'display'), 'none')
        # Then, with the time picker widget ----------------------------------
        # Check that the time picker is hidden
        self.assertEqual(
            self.get_css_value('#clockbox0', 'display'), 'none')
        # Click the time icon
        self.selenium.find_element_by_id('clocklink0').click()
        # Check that the time picker is visible
        self.assertEqual(
            self.get_css_value('#clockbox0', 'display'), 'block')
        # The quick-pick list offers the standard shortcut times.
        self.assertEqual(
            [
                x.text for x in
                self.selenium.find_elements_by_xpath("//ul[@class='timelist']/li/a")
            ],
            ['Now', 'Midnight', '6 a.m.', 'Noon', '6 p.m.']
        )
        # Press the ESC key
        self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE])
        # Check that the time picker is hidden again
        self.assertEqual(
            self.get_css_value('#clockbox0', 'display'), 'none')

    def test_calendar_nonday_class(self):
        """
        Ensure cells that are not days of the month have the `nonday` CSS class.
        Refs #4574.
        """
        self.admin_login(username='super', password='secret', login_url='/')
        # Open a page that has a date and time picker widgets
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('admin:admin_widgets_member_add')))
        # fill in the birth date.
        self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01')
        # Click the calendar icon
        self.selenium.find_element_by_id('calendarlink0').click()
        # get all the tds within the calendar
        calendar0 = self.selenium.find_element_by_id('calendarin0')
        tds = calendar0.find_elements_by_tag_name('td')
        # make sure the first and last 6 cells have class nonday
        for td in tds[:6] + tds[-6:]:
            self.assertEqual(td.get_attribute('class'), 'nonday')

    def test_calendar_selected_class(self):
        """
        Ensure cell for the day in the input has the `selected` CSS class.
        Refs #4574.
        """
        self.admin_login(username='super', password='secret', login_url='/')
        # Open a page that has a date and time picker widgets
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('admin:admin_widgets_member_add')))
        # fill in the birth date.
        self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01')
        # Click the calendar icon
        self.selenium.find_element_by_id('calendarlink0').click()
        # get all the tds within the calendar
        calendar0 = self.selenium.find_element_by_id('calendarin0')
        tds = calendar0.find_elements_by_tag_name('td')
        # verify the selected cell (the entered date, 2013-06-01, lands in
        # the cell at index 6 for this calendar layout)
        selected = tds[6]
        self.assertEqual(selected.get_attribute('class'), 'selected')
        self.assertEqual(selected.text, '1')

    def test_calendar_no_selected_class(self):
        """
        Ensure no cells are given the selected class when the field is empty.
        Refs #4574.
        """
        self.admin_login(username='super', password='secret', login_url='/')
        # Open a page that has a date and time picker widgets
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('admin:admin_widgets_member_add')))
        # Click the calendar icon
        self.selenium.find_element_by_id('calendarlink0').click()
        # get all the tds within the calendar
        calendar0 = self.selenium.find_element_by_id('calendarin0')
        tds = calendar0.find_elements_by_tag_name('td')
        # verify there are no cells with the selected class
        selected = [td for td in tds if td.get_attribute('class') == 'selected']
        self.assertEqual(len(selected), 0)

    def test_calendar_show_date_from_input(self):
        """
        Ensure that the calendar show the date from the input field for every
        locale supported by django.
        """
        self.admin_login(username='super', password='secret', login_url='/')
        # Enter test data
        member = models.Member.objects.create(name='Bob', birthdate=datetime(1984, 5, 15), gender='M')
        # Get month names translations for every locales
        month_string = 'January February March April May June July August September October November December'
        path = os.path.join(os.path.dirname(import_module('django.contrib.admin').__file__), 'locale')
        for language_code, language_name in settings.LANGUAGES:
            try:
                # Load the 'djangojs' JavaScript catalog for this language;
                # languages without one are skipped.
                catalog = gettext.translation('djangojs', path, [language_code])
            except IOError:
                continue
            if month_string in catalog._catalog:
                month_names = catalog._catalog[month_string]
            else:
                month_names = month_string
            # Get the expected caption
            may_translation = month_names.split(' ')[4]
            expected_caption = '{0:s} {1:d}'.format(may_translation.upper(), 1984)
            # Test with every locale
            with override_settings(LANGUAGE_CODE=language_code, USE_L10N=True):
                # Open a page that has a date picker widget
                self.selenium.get('{}{}'.format(self.live_server_url,
                    reverse('admin:admin_widgets_member_change', args=(member.pk,))))
                # Click on the calendar icon
                self.selenium.find_element_by_id('calendarlink0').click()
                # Get the calendar caption
                calendar0 = self.selenium.find_element_by_id('calendarin0')
                caption = calendar0.find_element_by_tag_name('caption')
                # Make sure that the right month and year are displayed
                self.assertEqual(caption.text, expected_caption)
# Re-run the date/time picker tests with the Chrome webdriver.
class DateTimePickerSeleniumChromeTests(DateTimePickerSeleniumFirefoxTests):
    webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
# Re-run the date/time picker tests with the Internet Explorer webdriver.
class DateTimePickerSeleniumIETests(DateTimePickerSeleniumFirefoxTests):
    webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@skipIf(pytz is None, "this test requires pytz")
@override_settings(TIME_ZONE='Asia/Singapore')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF='admin_widgets.urls')
class DateTimePickerShortcutsSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
    # Tests the "today"/"now" picker shortcuts across a server/browser
    # time zone mismatch.
    available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
    webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'

    def test_date_time_picker_shortcuts(self):
        """
        Ensure that date/time/datetime picker shortcuts work in the current time zone.
        Refs #20663.
        This test case is fairly tricky, it relies on selenium still running the browser
        in the default time zone "America/Chicago" despite `override_settings` changing
        the time zone to "Asia/Singapore".
        """
        self.admin_login(username='super', password='secret', login_url='/')
        error_margin = timedelta(seconds=10)
        # If we are neighbouring a DST, we add an hour of error margin.
        tz = pytz.timezone('America/Chicago')
        utc_now = datetime.now(pytz.utc)
        tz_yesterday = (utc_now - timedelta(days=1)).astimezone(tz).tzname()
        tz_tomorrow = (utc_now + timedelta(days=1)).astimezone(tz).tzname()
        if tz_yesterday != tz_tomorrow:
            error_margin += timedelta(hours=1)
        now = datetime.now()
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('admin:admin_widgets_member_add')))
        self.selenium.find_element_by_id('id_name').send_keys('test')
        # Click on the "today" and "now" shortcuts.
        shortcuts = self.selenium.find_elements_by_css_selector(
            '.field-birthdate .datetimeshortcuts')
        for shortcut in shortcuts:
            shortcut.find_element_by_tag_name('a').click()
        # Check that there is a time zone mismatch warning.
        # Warning: This would effectively fail if the TIME_ZONE defined in the
        # settings has the same UTC offset as "Asia/Singapore" because the
        # mismatch warning would be rightfully missing from the page.
        self.selenium.find_elements_by_css_selector(
            '.field-birthdate .timezonewarning')
        # Submit the form.
        self.selenium.find_element_by_tag_name('form').submit()
        self.wait_page_loaded()
        # Make sure that "now" in javascript is within 10 seconds
        # from "now" on the server side.
        member = models.Member.objects.get(name='test')
        self.assertGreater(member.birthdate, now - error_margin)
        self.assertLess(member.birthdate, now + error_margin)
# Re-run the picker-shortcut tests with the Chrome webdriver.
class DateTimePickerShortcutsSeleniumChromeTests(DateTimePickerShortcutsSeleniumFirefoxTests):
    webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
# Re-run the picker-shortcut tests with the Internet Explorer webdriver.
class DateTimePickerShortcutsSeleniumIETests(DateTimePickerShortcutsSeleniumFirefoxTests):
    webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF='admin_widgets.urls')
class HorizontalVerticalFilterSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
    # Browser-driven tests for the filter_horizontal / filter_vertical
    # many-to-many widgets on the School admin form.
    available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
    webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'

    def setUp(self):
        super(HorizontalVerticalFilterSeleniumFirefoxTests, self).setUp()
        # Eight students and one school shared by the tests below.
        self.lisa = models.Student.objects.create(name='Lisa')
        self.john = models.Student.objects.create(name='John')
        self.bob = models.Student.objects.create(name='Bob')
        self.peter = models.Student.objects.create(name='Peter')
        self.jenny = models.Student.objects.create(name='Jenny')
        self.jason = models.Student.objects.create(name='Jason')
        self.cliff = models.Student.objects.create(name='Cliff')
        self.arthur = models.Student.objects.create(name='Arthur')
        self.school = models.School.objects.create(name='School of Awesome')

    def assertActiveButtons(self, mode, field_name, choose, remove,
                            choose_all=None, remove_all=None):
        # Assert the active state of the choose/remove buttons; the "all"
        # buttons are only checked in horizontal mode (they don't exist in
        # vertical mode).
        choose_link = '#id_%s_add_link' % field_name
        choose_all_link = '#id_%s_add_all_link' % field_name
        remove_link = '#id_%s_remove_link' % field_name
        remove_all_link = '#id_%s_remove_all_link' % field_name
        self.assertEqual(self.has_css_class(choose_link, 'active'), choose)
        self.assertEqual(self.has_css_class(remove_link, 'active'), remove)
        if mode == 'horizontal':
            self.assertEqual(self.has_css_class(choose_all_link, 'active'), choose_all)
            self.assertEqual(self.has_css_class(remove_all_link, 'active'), remove_all)

    def execute_basic_operations(self, mode, field_name):
        # Shared choose/remove scenario, driven in either 'horizontal' or
        # 'vertical' mode against the given m2m field. Assumes the change
        # form is already loaded with Lisa and Peter pre-selected.
        from_box = '#id_%s_from' % field_name
        to_box = '#id_%s_to' % field_name
        choose_link = 'id_%s_add_link' % field_name
        choose_all_link = 'id_%s_add_all_link' % field_name
        remove_link = 'id_%s_remove_link' % field_name
        remove_all_link = 'id_%s_remove_all_link' % field_name
        # Initial positions ---------------------------------------------------
        self.assertSelectOptions(from_box,
                                 [str(self.arthur.id), str(self.bob.id),
                                  str(self.cliff.id), str(self.jason.id),
                                  str(self.jenny.id), str(self.john.id)])
        self.assertSelectOptions(to_box,
                                 [str(self.lisa.id), str(self.peter.id)])
        self.assertActiveButtons(mode, field_name, False, False, True, True)
        # Click 'Choose all' --------------------------------------------------
        if mode == 'horizontal':
            self.selenium.find_element_by_id(choose_all_link).click()
        elif mode == 'vertical':
            # There 's no 'Choose all' button in vertical mode, so individually
            # select all options and click 'Choose'.
            for option in self.selenium.find_elements_by_css_selector(from_box + ' > option'):
                option.click()
            self.selenium.find_element_by_id(choose_link).click()
        self.assertSelectOptions(from_box, [])
        self.assertSelectOptions(to_box,
                                 [str(self.lisa.id), str(self.peter.id),
                                  str(self.arthur.id), str(self.bob.id),
                                  str(self.cliff.id), str(self.jason.id),
                                  str(self.jenny.id), str(self.john.id)])
        self.assertActiveButtons(mode, field_name, False, False, False, True)
        # Click 'Remove all' --------------------------------------------------
        if mode == 'horizontal':
            self.selenium.find_element_by_id(remove_all_link).click()
        elif mode == 'vertical':
            # There 's no 'Remove all' button in vertical mode, so individually
            # select all options and click 'Remove'.
            for option in self.selenium.find_elements_by_css_selector(to_box + ' > option'):
                option.click()
            self.selenium.find_element_by_id(remove_link).click()
        self.assertSelectOptions(from_box,
                                 [str(self.lisa.id), str(self.peter.id),
                                  str(self.arthur.id), str(self.bob.id),
                                  str(self.cliff.id), str(self.jason.id),
                                  str(self.jenny.id), str(self.john.id)])
        self.assertSelectOptions(to_box, [])
        self.assertActiveButtons(mode, field_name, False, False, True, False)
        # Choose some options ------------------------------------------------
        from_lisa_select_option = self.get_select_option(from_box, str(self.lisa.id))
        # Check the title attribute is there for tool tips: ticket #20821
        self.assertEqual(from_lisa_select_option.get_attribute('title'), from_lisa_select_option.get_attribute('text'))
        from_lisa_select_option.click()
        self.get_select_option(from_box, str(self.jason.id)).click()
        self.get_select_option(from_box, str(self.bob.id)).click()
        self.get_select_option(from_box, str(self.john.id)).click()
        self.assertActiveButtons(mode, field_name, True, False, True, False)
        self.selenium.find_element_by_id(choose_link).click()
        self.assertActiveButtons(mode, field_name, False, False, True, True)
        self.assertSelectOptions(from_box,
                                 [str(self.peter.id), str(self.arthur.id),
                                  str(self.cliff.id), str(self.jenny.id)])
        self.assertSelectOptions(to_box,
                                 [str(self.lisa.id), str(self.bob.id),
                                  str(self.jason.id), str(self.john.id)])
        # Check the tooltip is still there after moving: ticket #20821
        to_lisa_select_option = self.get_select_option(to_box, str(self.lisa.id))
        self.assertEqual(to_lisa_select_option.get_attribute('title'), to_lisa_select_option.get_attribute('text'))
        # Remove some options -------------------------------------------------
        self.get_select_option(to_box, str(self.lisa.id)).click()
        self.get_select_option(to_box, str(self.bob.id)).click()
        self.assertActiveButtons(mode, field_name, False, True, True, True)
        self.selenium.find_element_by_id(remove_link).click()
        self.assertActiveButtons(mode, field_name, False, False, True, True)
        self.assertSelectOptions(from_box,
                                 [str(self.peter.id), str(self.arthur.id),
                                  str(self.cliff.id), str(self.jenny.id),
                                  str(self.lisa.id), str(self.bob.id)])
        self.assertSelectOptions(to_box,
                                 [str(self.jason.id), str(self.john.id)])
        # Choose some more options --------------------------------------------
        self.get_select_option(from_box, str(self.arthur.id)).click()
        self.get_select_option(from_box, str(self.cliff.id)).click()
        self.selenium.find_element_by_id(choose_link).click()
        self.assertSelectOptions(from_box,
                                 [str(self.peter.id), str(self.jenny.id),
                                  str(self.lisa.id), str(self.bob.id)])
        self.assertSelectOptions(to_box,
                                 [str(self.jason.id), str(self.john.id),
                                  str(self.arthur.id), str(self.cliff.id)])

    def test_basic(self):
        self.school.students = [self.lisa, self.peter]
        self.school.alumni = [self.lisa, self.peter]
        self.school.save()
        self.admin_login(username='super', password='secret', login_url='/')
        self.selenium.get('%s%s' % (
            self.live_server_url, reverse('admin:admin_widgets_school_change', args=(self.school.id,))))
        self.wait_page_loaded()
        # Run the shared scenario once per widget flavor.
        self.execute_basic_operations('vertical', 'students')
        self.execute_basic_operations('horizontal', 'alumni')
        # Save and check that everything is properly stored in the database ---
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.wait_page_loaded()
        self.school = models.School.objects.get(id=self.school.id)  # Reload from database
        self.assertEqual(list(self.school.students.all()),
                         [self.arthur, self.cliff, self.jason, self.john])
        self.assertEqual(list(self.school.alumni.all()),
                         [self.arthur, self.cliff, self.jason, self.john])

    def test_filter(self):
        """
        Ensure that typing in the search box filters out options displayed in
        the 'from' box.
        """
        from selenium.webdriver.common.keys import Keys
        self.school.students = [self.lisa, self.peter]
        self.school.alumni = [self.lisa, self.peter]
        self.school.save()
        self.admin_login(username='super', password='secret', login_url='/')
        self.selenium.get(
            '%s%s' % (self.live_server_url, reverse('admin:admin_widgets_school_change', args=(self.school.id,))))
        for field_name in ['students', 'alumni']:
            from_box = '#id_%s_from' % field_name
            to_box = '#id_%s_to' % field_name
            choose_link = '#id_%s_add_link' % field_name
            remove_link = '#id_%s_remove_link' % field_name
            input = self.selenium.find_element_by_css_selector('#id_%s_input' % field_name)
            # Initial values
            self.assertSelectOptions(from_box,
                                     [str(self.arthur.id), str(self.bob.id),
                                      str(self.cliff.id), str(self.jason.id),
                                      str(self.jenny.id), str(self.john.id)])
            # Typing in some characters filters out non-matching options
            input.send_keys('a')
            self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
            input.send_keys('R')
            self.assertSelectOptions(from_box, [str(self.arthur.id)])
            # Clearing the text box makes the other options reappear
            input.send_keys([Keys.BACK_SPACE])
            self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
            input.send_keys([Keys.BACK_SPACE])
            self.assertSelectOptions(from_box,
                                     [str(self.arthur.id), str(self.bob.id),
                                      str(self.cliff.id), str(self.jason.id),
                                      str(self.jenny.id), str(self.john.id)])
            # -----------------------------------------------------------------
            # Check that choosing a filtered option sends it properly to the
            # 'to' box.
            input.send_keys('a')
            self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
            self.get_select_option(from_box, str(self.jason.id)).click()
            self.selenium.find_element_by_css_selector(choose_link).click()
            self.assertSelectOptions(from_box, [str(self.arthur.id)])
            self.assertSelectOptions(to_box,
                                     [str(self.lisa.id), str(self.peter.id),
                                      str(self.jason.id)])
            self.get_select_option(to_box, str(self.lisa.id)).click()
            self.selenium.find_element_by_css_selector(remove_link).click()
            self.assertSelectOptions(from_box,
                                     [str(self.arthur.id), str(self.lisa.id)])
            self.assertSelectOptions(to_box,
                                     [str(self.peter.id), str(self.jason.id)])
            input.send_keys([Keys.BACK_SPACE])  # Clear text box
            self.assertSelectOptions(from_box,
                                     [str(self.arthur.id), str(self.bob.id),
                                      str(self.cliff.id), str(self.jenny.id),
                                      str(self.john.id), str(self.lisa.id)])
            self.assertSelectOptions(to_box,
                                     [str(self.peter.id), str(self.jason.id)])
            # -----------------------------------------------------------------
            # Check that pressing enter on a filtered option sends it properly
            # to the 'to' box.
            self.get_select_option(to_box, str(self.jason.id)).click()
            self.selenium.find_element_by_css_selector(remove_link).click()
            input.send_keys('ja')
            self.assertSelectOptions(from_box, [str(self.jason.id)])
            input.send_keys([Keys.ENTER])
            self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)])
            input.send_keys([Keys.BACK_SPACE, Keys.BACK_SPACE])
        # Save and check that everything is properly stored in the database ---
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.wait_page_loaded()
        self.school = models.School.objects.get(id=self.school.id)  # Reload from database
        self.assertEqual(list(self.school.students.all()),
                         [self.jason, self.peter])
        self.assertEqual(list(self.school.alumni.all()),
                         [self.jason, self.peter])
class HorizontalVerticalFilterSeleniumChromeTests(HorizontalVerticalFilterSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class HorizontalVerticalFilterSeleniumIETests(HorizontalVerticalFilterSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class AdminRawIdWidgetSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def setUp(self):
super(AdminRawIdWidgetSeleniumFirefoxTests, self).setUp()
models.Band.objects.create(id=42, name='Bogey Blues')
models.Band.objects.create(id=98, name='Green Potatoes')
def test_ForeignKey(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(
'%s%s' % (self.live_server_url, reverse('admin:admin_widgets_event_add')))
main_window = self.selenium.current_window_handle
# No value has been selected yet
self.assertEqual(
self.selenium.find_element_by_id('id_main_band').get_attribute('value'),
'')
# Open the popup window and click on a band
self.selenium.find_element_by_id('lookup_id_main_band').click()
self.selenium.switch_to.window('id_main_band')
self.wait_page_loaded()
link = self.selenium.find_element_by_link_text('Bogey Blues')
self.assertIn('/band/42/', link.get_attribute('href'))
link.click()
# The field now contains the selected band's id
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_main_band', '42')
# Reopen the popup window and click on another band
self.selenium.find_element_by_id('lookup_id_main_band').click()
self.selenium.switch_to.window('id_main_band')
self.wait_page_loaded()
link = self.selenium.find_element_by_link_text('Green Potatoes')
self.assertIn('/band/98/', link.get_attribute('href'))
link.click()
# The field now contains the other selected band's id
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_main_band', '98')
def test_many_to_many(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(
'%s%s' % (self.live_server_url, reverse('admin:admin_widgets_event_add')))
main_window = self.selenium.current_window_handle
# No value has been selected yet
self.assertEqual(
self.selenium.find_element_by_id('id_supporting_bands').get_attribute('value'),
'')
# Open the popup window and click on a band
self.selenium.find_element_by_id('lookup_id_supporting_bands').click()
self.selenium.switch_to.window('id_supporting_bands')
self.wait_page_loaded()
link = self.selenium.find_element_by_link_text('Bogey Blues')
self.assertIn('/band/42/', link.get_attribute('href'))
link.click()
# The field now contains the selected band's id
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_supporting_bands', '42')
# Reopen the popup window and click on another band
self.selenium.find_element_by_id('lookup_id_supporting_bands').click()
self.selenium.switch_to.window('id_supporting_bands')
self.wait_page_loaded()
link = self.selenium.find_element_by_link_text('Green Potatoes')
self.assertIn('/band/98/', link.get_attribute('href'))
link.click()
# The field now contains the two selected bands' ids
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_supporting_bands', '42,98')
class AdminRawIdWidgetSeleniumChromeTests(AdminRawIdWidgetSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class AdminRawIdWidgetSeleniumIETests(AdminRawIdWidgetSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='admin_widgets.urls')
class RelatedFieldWidgetSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase):
available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def test_ForeignKey_using_to_field(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get('%s%s' % (
self.live_server_url,
reverse('admin:admin_widgets_profile_add')))
main_window = self.selenium.current_window_handle
# Click the Add User button to add new
self.selenium.find_element_by_id('add_id_user').click()
self.selenium.switch_to.window('id_user')
self.wait_for('#id_password')
password_field = self.selenium.find_element_by_id('id_password')
password_field.send_keys('password')
username_field = self.selenium.find_element_by_id('id_username')
username_value = 'newuser'
username_field.send_keys(username_value)
save_button_css_selector = '.submit-row > input[type=submit]'
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.selenium.switch_to.window(main_window)
# The field now contains the new user
self.wait_for('#id_user option[value="newuser"]')
# Click the Change User button to change it
self.selenium.find_element_by_id('change_id_user').click()
self.selenium.switch_to_window('id_user')
self.wait_page_loaded()
username_field = self.selenium.find_element_by_id('id_username')
username_value = 'changednewuser'
username_field.clear()
username_field.send_keys(username_value)
save_button_css_selector = '.submit-row > input[type=submit]'
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.selenium.switch_to_window(main_window)
# Wait up to 2 seconds for the new option to show up after clicking save in the popup.
self.selenium.implicitly_wait(2)
self.selenium.find_element_by_css_selector('#id_user option[value=changednewuser]')
self.selenium.implicitly_wait(0)
# Go ahead and submit the form to make sure it works
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.wait_for_text('li.success', 'The profile "changednewuser" was added successfully.')
profiles = models.Profile.objects.all()
self.assertEqual(len(profiles), 1)
self.assertEqual(profiles[0].user.username, username_value)
class RelatedFieldWidgetSeleniumChromeTests(RelatedFieldWidgetSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class RelatedFieldWidgetSeleniumIETests(RelatedFieldWidgetSeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
|
sofianehaddad/ot-svn | refs/heads/master | python/test/t_WhiteNoise_std.py | 2 | #! /usr/bin/env python
from openturns import *
TESTPREAMBLE()
RandomGenerator.SetSeed(0)
try:
# Tmin , time step and step number for TimeGrid
Tmin = 0.0
deltaT = 1.0
N = 11
timeGrid = RegularGrid(Tmin, deltaT, N)
# Fixing the distribution for the WhiteNoise
dist = Distribution(Uniform())
print "dist = ", dist
# Building a process from a White Noise
process = Process(WhiteNoise(dist))
# print the process
process.setTimeGrid(timeGrid)
print "process = ", process
# Get a realization of the process
timeSerie = process.getRealization()
print "timeSerie = ", timeSerie
except:
import sys
print "t_WhiteNoise_std.py", sys.exc_type, sys.exc_value
|
saadatqadri/django-oscar | refs/heads/master | src/oscar/apps/checkout/session.py | 3 | from decimal import Decimal as D
from django.contrib import messages
from django import http
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from oscar.core import prices
from oscar.core.loading import get_model, get_class
from . import exceptions
Repository = get_class('shipping.repository', 'Repository')
OrderTotalCalculator = get_class(
'checkout.calculators', 'OrderTotalCalculator')
CheckoutSessionData = get_class(
'checkout.utils', 'CheckoutSessionData')
ShippingAddress = get_model('order', 'ShippingAddress')
BillingAddress = get_model('order', 'BillingAddress')
UserAddress = get_model('address', 'UserAddress')
class CheckoutSessionMixin(object):
"""
Mixin to provide common functionality shared between checkout views.
All checkout views subclass this mixin. It ensures that all relevant
checkout information is available in the template context.
"""
# A pre-condition is a condition that MUST be met in order for a view
# to be available. If it isn't then the customer should be redirected
# to a view *earlier* in the chain.
# pre_conditions is a list of method names that get executed before the
# normal flow of the view. Each method should check some condition has been
# met. If not, then an exception is raised that indicates the URL the
# customer will be redirected to.
pre_conditions = None
# A *skip* condition is a condition that MUST NOT be met in order for a
# view to be available. If the condition is met, this means the view MUST
# be skipped and the customer should be redirected to a view *later* in
# the chain.
# Skip conditions work similar to pre-conditions, and get evaluated after
# pre-conditions have been evaluated.
skip_conditions = None
def dispatch(self, request, *args, **kwargs):
# Assign the checkout session manager so it's available in all checkout
# views.
self.checkout_session = CheckoutSessionData(request)
# Enforce any pre-conditions for the view.
try:
self.check_pre_conditions(request)
except exceptions.FailedPreCondition as e:
for message in e.messages:
messages.warning(request, message)
return http.HttpResponseRedirect(e.url)
# Check if this view should be skipped
try:
self.check_skip_conditions(request)
except exceptions.PassedSkipCondition as e:
return http.HttpResponseRedirect(e.url)
return super(CheckoutSessionMixin, self).dispatch(
request, *args, **kwargs)
def check_pre_conditions(self, request):
pre_conditions = self.get_pre_conditions(request)
for method_name in pre_conditions:
if not hasattr(self, method_name):
raise ImproperlyConfigured(
"There is no method '%s' to call as a pre-condition" % (
method_name))
getattr(self, method_name)(request)
def get_pre_conditions(self, request):
"""
Return the pre-condition method names to run for this view
"""
if self.pre_conditions is None:
return []
return self.pre_conditions
def check_skip_conditions(self, request):
skip_conditions = self.get_skip_conditions(request)
for method_name in skip_conditions:
if not hasattr(self, method_name):
raise ImproperlyConfigured(
"There is no method '%s' to call as a skip-condition" % (
method_name))
getattr(self, method_name)(request)
def get_skip_conditions(self, request):
"""
Return the skip-condition method names to run for this view
"""
if self.skip_conditions is None:
return []
return self.skip_conditions
# Re-usable pre-condition validators
def check_basket_is_not_empty(self, request):
if request.basket.is_empty:
raise exceptions.FailedPreCondition(
url=reverse('basket:summary'),
message=_(
"You need to add some items to your basket to checkout")
)
def check_basket_is_valid(self, request):
"""
Check that the basket is permitted to be submitted as an order. That
is, all the basket lines are available to buy - nothing has gone out of
stock since it was added to the basket.
"""
messages = []
strategy = request.strategy
for line in request.basket.all_lines():
result = strategy.fetch_for_line(line)
is_permitted, reason = result.availability.is_purchase_permitted(
line.quantity)
if not is_permitted:
# Create a more meaningful message to show on the basket page
msg = _(
"'%(title)s' is no longer available to buy (%(reason)s). "
"Please adjust your basket to continue"
) % {
'title': line.product.get_title(),
'reason': reason}
messages.append(msg)
if messages:
raise exceptions.FailedPreCondition(
url=reverse('basket:summary'),
messages=messages
)
def check_user_email_is_captured(self, request):
if not request.user.is_authenticated() \
and not self.checkout_session.get_guest_email():
raise exceptions.FailedPreCondition(
url=reverse('checkout:index'),
message=_(
"Please either sign in or enter your email address")
)
def check_shipping_data_is_captured(self, request):
if not request.basket.is_shipping_required():
# Even without shipping being required, we still need to check that
# a shipping method code has been set.
if not self.checkout_session.is_shipping_method_set(
self.request.basket):
raise exceptions.FailedPreCondition(
url=reverse('checkout:shipping-method'),
)
return
# Basket requires shipping: check address and method are captured and
# valid.
self.check_a_valid_shipping_address_is_captured()
self.check_a_valid_shipping_method_is_captured()
def check_a_valid_shipping_address_is_captured(self):
# Check that shipping address has been completed
if not self.checkout_session.is_shipping_address_set():
raise exceptions.FailedPreCondition(
url=reverse('checkout:shipping-address'),
message=_("Please choose a shipping address")
)
# Check that the previously chosen shipping address is still valid
shipping_address = self.get_shipping_address(
basket=self.request.basket)
if not shipping_address:
raise exceptions.FailedPreCondition(
url=reverse('checkout:shipping-address'),
message=_("Your previously chosen shipping address is "
"no longer valid. Please choose another one")
)
def check_a_valid_shipping_method_is_captured(self):
# Check that shipping method has been set
if not self.checkout_session.is_shipping_method_set(
self.request.basket):
raise exceptions.FailedPreCondition(
url=reverse('checkout:shipping-method'),
message=_("Please choose a shipping method")
)
# Check that a *valid* shipping method has been set
shipping_address = self.get_shipping_address(
basket=self.request.basket)
shipping_method = self.get_shipping_method(
basket=self.request.basket,
shipping_address=shipping_address)
if not shipping_method:
raise exceptions.FailedPreCondition(
url=reverse('checkout:shipping-method'),
message=_("Your previously chosen shipping method is "
"no longer valid. Please choose another one")
)
def check_payment_data_is_captured(self, request):
# We don't collect payment data by default so we don't have anything to
# validate here. If your shop requires forms to be submitted on the
# payment details page, then override this method to check that the
# relevant data is available. Often just enforcing that the preview
# view is only accessible from a POST request is sufficient.
pass
# Re-usable skip conditions
def skip_unless_basket_requires_shipping(self, request):
# Check to see that a shipping address is actually required. It may
# not be if the basket is purely downloads
if not request.basket.is_shipping_required():
raise exceptions.PassedSkipCondition(
url=reverse('checkout:shipping-method')
)
def skip_unless_payment_is_required(self, request):
# Check to see if payment is actually required for this order.
shipping_address = self.get_shipping_address(request.basket)
shipping_method = self.get_shipping_method(
request.basket, shipping_address)
if shipping_method:
shipping_charge = shipping_method.calculate(request.basket)
else:
# It's unusual to get here as a shipping method should be set by
# the time this skip-condition is called. In the absence of any
# other evidence, we assume the shipping charge is zero.
shipping_charge = prices.Price(excl_tax=D('0.00'), tax=D('0.00'))
total = self.get_order_totals(request.basket, shipping_charge)
if total.excl_tax == D('0.00'):
raise exceptions.PassedSkipCondition(
url=reverse('checkout:preview')
)
# Helpers
def get_context_data(self, **kwargs):
# Use the proposed submission as template context data. Flatten the
# order kwargs so they are easily available too.
ctx = self.build_submission(**kwargs)
ctx.update(kwargs)
ctx.update(ctx['order_kwargs'])
return ctx
def build_submission(self, **kwargs):
"""
Return a dict of data that contains everything required for an order
submission. This includes payment details (if any).
This can be the right place to perform tax lookups and apply them to
the basket.
"""
basket = kwargs.get('basket', self.request.basket)
shipping_address = self.get_shipping_address(basket)
shipping_method = self.get_shipping_method(
basket, shipping_address)
billing_address = self.get_billing_address(shipping_address)
if not shipping_method:
total = shipping_charge = None
else:
shipping_charge = shipping_method.calculate(basket)
total = self.get_order_totals(
basket, shipping_charge=shipping_charge)
submission = {
'user': self.request.user,
'basket': basket,
'shipping_address': shipping_address,
'shipping_method': shipping_method,
'shipping_charge': shipping_charge,
'billing_address': billing_address,
'order_total': total,
'order_kwargs': {},
'payment_kwargs': {}}
# If there is a billing address, add it to the payment kwargs as calls
# to payment gateways generally require the billing address. Note, that
# it normally makes sense to pass the form instance that captures the
# billing address information. That way, if payment fails, you can
# render bound forms in the template to make re-submission easier.
if billing_address:
submission['payment_kwargs']['billing_address'] = billing_address
# Allow overrides to be passed in
submission.update(kwargs)
# Set guest email after overrides as we need to update the order_kwargs
# entry.
if (not submission['user'].is_authenticated() and
'guest_email' not in submission['order_kwargs']):
email = self.checkout_session.get_guest_email()
submission['order_kwargs']['guest_email'] = email
return submission
def get_shipping_address(self, basket):
"""
Return the (unsaved) shipping address for this checkout session.
If the shipping address was entered manually, then we instantiate a
``ShippingAddress`` model with the appropriate form data (which is
saved in the session).
If the shipping address was selected from the user's address book,
then we convert the ``UserAddress`` to a ``ShippingAddress``.
The ``ShippingAddress`` instance is not saved as sometimes you need a
shipping address instance before the order is placed. For example, if
you are submitting fraud information as part of a payment request.
The ``OrderPlacementMixin.create_shipping_address`` method is
responsible for saving a shipping address when an order is placed.
"""
if not basket.is_shipping_required():
return None
addr_data = self.checkout_session.new_shipping_address_fields()
if addr_data:
# Load address data into a blank shipping address model
return ShippingAddress(**addr_data)
addr_id = self.checkout_session.shipping_user_address_id()
if addr_id:
try:
address = UserAddress._default_manager.get(pk=addr_id)
except UserAddress.DoesNotExist:
# An address was selected but now it has disappeared. This can
# happen if the customer flushes their address book midway
# through checkout. No idea why they would do this but it can
# happen. Checkouts are highly vulnerable to race conditions
# like this.
return None
else:
# Copy user address data into a blank shipping address instance
shipping_addr = ShippingAddress()
address.populate_alternative_model(shipping_addr)
return shipping_addr
def get_shipping_method(self, basket, shipping_address=None, **kwargs):
"""
Return the selected shipping method instance from this checkout session
The shipping address is passed as we need to check that the method
stored in the session is still valid for the shipping address.
"""
code = self.checkout_session.shipping_method_code(basket)
methods = Repository().get_shipping_methods(
basket=basket, user=self.request.user,
shipping_addr=shipping_address, request=self.request)
for method in methods:
if method.code == code:
return method
def get_billing_address(self, shipping_address):
"""
Return an unsaved instance of the billing address (if one exists)
This method only returns a billing address if the session has been used
to store billing address information. It's also possible to capture
billing address information as part of the payment details forms, which
never get stored in the session. In that circumstance, the billing
address can be set directly in the build_submission dict (see Oscar's
demo site for an example of this approach).
"""
if not self.checkout_session.is_billing_address_set():
return None
if self.checkout_session.is_billing_address_same_as_shipping():
if shipping_address:
address = BillingAddress()
shipping_address.populate_alternative_model(address)
return address
addr_data = self.checkout_session.new_billing_address_fields()
if addr_data:
# A new billing address has been entered - load address data into a
# blank billing address model.
return BillingAddress(**addr_data)
addr_id = self.checkout_session.billing_user_address_id()
if addr_id:
# An address from the user's address book has been selected as the
# billing address - load it and convert it into a billing address
# instance.
try:
user_address = UserAddress._default_manager.get(pk=addr_id)
except UserAddress.DoesNotExist:
# An address was selected but now it has disappeared. This can
# happen if the customer flushes their address book midway
# through checkout. No idea why they would do this but it can
# happen. Checkouts are highly vulnerable to race conditions
# like this.
return None
else:
# Copy user address data into a blank shipping address instance
billing_address = BillingAddress()
user_address.populate_alternative_model(billing_address)
return billing_address
def get_order_totals(self, basket, shipping_charge, **kwargs):
"""
Returns the total for the order with and without tax
"""
return OrderTotalCalculator(self.request).calculate(
basket, shipping_charge, **kwargs)
|
davidyezsetz/kuma | refs/heads/master | vendor/packages/pylint/test/regrtest_data/module_global.py | 6 | # pylint: disable-msg=W0603,W0601,W0604,E0602,W0104
"""was causing infinite recursion
"""
__revision__ = 1
global bar
bar.foo
|
Bysmyyr/chromium-crosswalk | refs/heads/master | third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/mod_pywebsocket/msgutil.py | 658 | # Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Message related utilities.
Note: request.connection.write/read are used in this module, even though
mod_python document says that they should be used only in connection
handlers. Unfortunately, we have no other options. For example,
request.write/read are not suitable because they don't allow direct raw
bytes writing/reading.
"""
import Queue
import threading
# Export Exception symbols from msgutil for backward compatibility
from mod_pywebsocket._stream_base import ConnectionTerminatedException
from mod_pywebsocket._stream_base import InvalidFrameException
from mod_pywebsocket._stream_base import BadOperationException
from mod_pywebsocket._stream_base import UnsupportedFrameException
# An API for handler to send/receive WebSocket messages.
def close_connection(request):
"""Close connection.
Args:
request: mod_python request.
"""
request.ws_stream.close_connection()
def send_message(request, payload_data, end=True, binary=False):
"""Send a message (or part of a message).
Args:
request: mod_python request.
payload_data: unicode text or str binary to send.
end: True to terminate a message.
False to send payload_data as part of a message that is to be
terminated by next or later send_message call with end=True.
binary: send payload_data as binary frame(s).
Raises:
BadOperationException: when server already terminated.
"""
request.ws_stream.send_message(payload_data, end, binary)
def receive_message(request):
"""Receive a WebSocket frame and return its payload as a text in
unicode or a binary in str.
Args:
request: mod_python request.
Raises:
InvalidFrameException: when client send invalid frame.
UnsupportedFrameException: when client send unsupported frame e.g. some
of reserved bit is set but no extension can
recognize it.
InvalidUTF8Exception: when client send a text frame containing any
invalid UTF-8 string.
ConnectionTerminatedException: when the connection is closed
unexpectedly.
BadOperationException: when client already terminated.
"""
return request.ws_stream.receive_message()
def send_ping(request, body=''):
request.ws_stream.send_ping(body)
class MessageReceiver(threading.Thread):
"""This class receives messages from the client.
This class provides three ways to receive messages: blocking,
non-blocking, and via callback. Callback has the highest precedence.
Note: This class should not be used with the standalone server for wss
because pyOpenSSL used by the server raises a fatal error if the socket
is accessed from multiple threads.
"""
def __init__(self, request, onmessage=None):
"""Construct an instance.
Args:
request: mod_python request.
onmessage: a function to be called when a message is received.
May be None. If not None, the function is called on
another thread. In that case, MessageReceiver.receive
and MessageReceiver.receive_nowait are useless
because they will never return any messages.
"""
threading.Thread.__init__(self)
self._request = request
self._queue = Queue.Queue()
self._onmessage = onmessage
self._stop_requested = False
self.setDaemon(True)
self.start()
def run(self):
try:
while not self._stop_requested:
message = receive_message(self._request)
if self._onmessage:
self._onmessage(message)
else:
self._queue.put(message)
finally:
close_connection(self._request)
def receive(self):
""" Receive a message from the channel, blocking.
Returns:
message as a unicode string.
"""
return self._queue.get()
def receive_nowait(self):
""" Receive a message from the channel, non-blocking.
Returns:
message as a unicode string if available. None otherwise.
"""
try:
message = self._queue.get_nowait()
except Queue.Empty:
message = None
return message
def stop(self):
"""Request to stop this instance.
The instance will be stopped after receiving the next message.
This method may not be very useful, but there is no clean way
in Python to forcefully stop a running thread.
"""
self._stop_requested = True
class MessageSender(threading.Thread):
"""This class sends messages to the client.
This class provides both synchronous and asynchronous ways to send
messages.
Note: This class should not be used with the standalone server for wss
because pyOpenSSL used by the server raises a fatal error if the socket
is accessed from multiple threads.
"""
def __init__(self, request):
"""Construct an instance.
Args:
request: mod_python request.
"""
threading.Thread.__init__(self)
self._request = request
self._queue = Queue.Queue()
self.setDaemon(True)
self.start()
def run(self):
while True:
message, condition = self._queue.get()
condition.acquire()
send_message(self._request, message)
condition.notify()
condition.release()
def send(self, message):
"""Send a message, blocking."""
condition = threading.Condition()
condition.acquire()
self._queue.put((message, condition))
condition.wait()
def send_nowait(self, message):
"""Send a message, non-blocking."""
self._queue.put((message, threading.Condition()))
# vi:sts=4 sw=4 et
|
jeremycline/pulp | refs/heads/master | playpen/deploy/run-integration-tests.py | 10 | #!/usr/bin/env python
import argparse
import sys
from fabric.api import get, run, settings
from utils import config_utils, setup_utils
# The nosetests command to run the integration tests
NOSETESTS_COMMAND = 'cd pulp-automation && nosetests -vs --with-xunit --nologcapture'
# Setup the CLI
description = 'Run integration tests using a deployed environment by deploy-environment.py'
parser = argparse.ArgumentParser(description=description)
parser.add_argument('--config', help='path to the configuration file produced by deploy-environment.py', required=True)
parser.add_argument('--tests-destination', help='the location to place the nosetests.xml file on completion')
args = parser.parse_args()
config = config_utils.load_config(args.config)
flattened_config = config_utils.flatten_structure(config)
tester_config = filter(lambda conf: conf[setup_utils.ROLE] == setup_utils.PULP_TESTER_ROLE, flattened_config)[0]
with settings(host_string=tester_config[setup_utils.HOST_STRING], key_file=tester_config[setup_utils.PRIVATE_KEY]):
test_result = run(NOSETESTS_COMMAND, warn_only=True)
get('pulp-automation/nosetests.xml', args.tests_destination or tester_config['tests_destination'])
sys.exit(test_result.return_code)
|
asadziach/tensorflow | refs/heads/pedestrian_detection_walabot_tf | tensorflow/contrib/tensor_forest/python/kernel_tests/best_splits_op_test.py | 80 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf.contrib.tensor_forest.ops.best_splits_op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow # pylint: disable=unused-import
from tensorflow.contrib.tensor_forest.python.ops import tensor_forest_ops
from tensorflow.python.framework import test_util
from tensorflow.python.platform import googletest
class BestSplitsClassificationTests(test_util.TensorFlowTestCase):
  """Tests for the best_splits op in classification mode (regression=False)."""

  def setUp(self):
    # Accumulator slots 3 and 5 have finished gathering statistics.
    self.finished = [3, 5]
    # node_map[i] is the accumulator index assigned to tree node i, or -1
    # when the node has no accumulator.
    self.node_map = [-1, -1, -1, 0, -1, 3, -1, -1, -1]
    # Per-accumulator, per-candidate-split class counts; presumably shaped
    # [num_accumulators, num_splits, 1 + num_classes] with the leading entry
    # a total count — TODO confirm against the op's kernel docs.
    self.candidate_counts = [[[153., 50., 60., 40., 3.],
                              [200., 70., 30., 70., 30.]],
                             [[0., 0., 0., 0., 0.], [0., 0., 0., 0., 0.]],
                             [[0., 0., 0., 0., 0.], [0., 0., 0., 0., 0.]],
                             [[40., 10., 10., 10., 10.],
                              [30., 10., 5., 5., 10.]]]
    # Per-accumulator totals over all examples seen by that accumulator.
    self.total_counts = [[400., 100., 100., 100., 100.],
                         [0., 0., 0., 0., 0.],
                         [0., 0., 0., 0., 0.],
                         [400., 100., 100., 100., 100.]]
    # Classification mode ignores the sum-of-squares inputs, so an empty
    # list is passed in their place.
    self.squares = []

  def testSimple(self):
    # Each finished accumulator should produce one best-split index.
    with self.test_session():
      split_indices = tensor_forest_ops.best_splits(
          self.finished,
          self.node_map,
          self.candidate_counts,
          self.squares,
          self.total_counts,
          self.squares,
          regression=False)
      self.assertAllEqual([0, 1], split_indices.eval())

  def testNoFinished(self):
    # With no finished accumulators the op returns an empty result.
    with self.test_session():
      split_indices = tensor_forest_ops.best_splits(
          [],
          self.node_map,
          self.candidate_counts,
          self.squares,
          self.total_counts,
          self.squares,
          regression=False)
      self.assertAllEqual([], split_indices.eval())

  def testBadInput(self):
    # Remove one accumulator row so split_sums and accumulator_sums report
    # different accumulator counts; the op must reject this.
    del self.total_counts[1]
    with self.test_session():
      with self.assertRaisesOpError(
          'Number of accumulators should be the same in split_sums '
          'and accumulator_sums.'):
        tensor_forest_ops.best_splits(
            self.finished,
            self.node_map,
            self.candidate_counts,
            self.squares,
            self.total_counts,
            self.squares,
            regression=False).eval()
class BestSplitsRegressionTests(test_util.TensorFlowTestCase):
  """Tests for the best_splits op in regression mode (regression=True)."""

  def setUp(self):
    # Accumulator slots 3 and 5 have finished gathering statistics.
    self.finished = [3, 5]
    # node_map[i] is the accumulator index assigned to tree node i, or -1
    # when the node has no accumulator.
    self.node_map = [-1, -1, -1, 0, -1, 3, -1, -1, -1]
    # Per-accumulator, per-candidate-split sums of the regression targets.
    self.candidate_sums = [[[5., 8., 8., 8.], [5., 10., 10., 10.]],
                           [[0., 0., 0., 0.], [0., 0., 0., 0.]],
                           [[0., 0., 0., 0.], [0., 0., 0., 0.]],
                           [[10., 10., 20., 10.], [10., 5., 5., 5.]]]
    # Matching per-split sums of squared targets (used by the op to score
    # splits in regression mode).
    self.candidate_squares = [[[5., 50., 50., 50.], [5., 50., 50., 50.]],
                              [[0., 0., 0., 0.], [0., 0., 0., 0.]],
                              [[0., 0., 0., 0.], [0., 0., 0., 0.]],
                              [[10., 40., 50., 60.], [10., 40., 40., 40.]]]
    # Per-accumulator totals of sums and squared sums over all examples.
    self.total_sums = [[15., 10., 10., 10.],
                       [0., 0., 0., 0.],
                       [0., 0., 0., 0.],
                       [20., 20., 20., 20.]]
    self.total_squares = [[15., 50., 50., 50.],
                          [0., 0., 0., 0.],
                          [0., 0., 0., 0.],
                          [20., 60., 60., 60.]]

  def testSimple(self):
    # Each finished accumulator should produce one best-split index.
    with self.test_session():
      split_indices = tensor_forest_ops.best_splits(
          self.finished,
          self.node_map,
          self.candidate_sums,
          self.candidate_squares,
          self.total_sums,
          self.total_squares,
          regression=True)
      self.assertAllEqual([1, 0], split_indices.eval())
if __name__ == '__main__':
googletest.main()
|
yoava333/servo | refs/heads/master | tests/wpt/web-platform-tests/tools/pywebsocket/src/mod_pywebsocket/_stream_hixie75.py | 681 | # Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""This file provides a class for parsing/building frames of the WebSocket
protocol version HyBi 00 and Hixie 75.
Specification:
- HyBi 00 http://tools.ietf.org/html/draft-ietf-hybi-thewebsocketprotocol-00
- Hixie 75 http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-75
"""
from mod_pywebsocket import common
from mod_pywebsocket._stream_base import BadOperationException
from mod_pywebsocket._stream_base import ConnectionTerminatedException
from mod_pywebsocket._stream_base import InvalidFrameException
from mod_pywebsocket._stream_base import StreamBase
from mod_pywebsocket._stream_base import UnsupportedFrameException
from mod_pywebsocket import util
class StreamHixie75(StreamBase):
    """A class for parsing/building frames of the WebSocket protocol version
    HyBi 00 and Hixie 75.
    """

    def __init__(self, request, enable_closing_handshake=False):
        """Construct an instance.

        Args:
            request: mod_python request.
            enable_closing_handshake: to let StreamHixie75 perform closing
                                      handshake as specified in HyBi 00, set
                                      this option to True.
        """
        StreamBase.__init__(self, request)

        self._logger = util.get_class_logger(self)

        self._enable_closing_handshake = enable_closing_handshake

        # Per-direction termination flags for the closing handshake, kept on
        # the request object so the request handler can observe them too.
        self._request.client_terminated = False
        self._request.server_terminated = False

    def send_message(self, message, end=True, binary=False):
        """Send message.

        Args:
            message: unicode string to send.
            binary: not used in hixie75.

        Raises:
            BadOperationException: when called on a server-terminated
                connection, or with end=False / binary=True, neither of
                which this protocol version supports.
        """
        if not end:
            raise BadOperationException(
                'StreamHixie75 doesn\'t support send_message with end=False')

        if binary:
            raise BadOperationException(
                'StreamHixie75 doesn\'t support send_message with binary=True')

        if self._request.server_terminated:
            raise BadOperationException(
                'Requested send_message after sending out a closing handshake')

        # Hixie 75 text frame: 0x00 <UTF-8 encoded payload> 0xFF.
        self._write(''.join(['\x00', message.encode('utf-8'), '\xff']))

    def _read_payload_length_hixie75(self):
        """Reads a length header in a Hixie75 version frame with length.

        The length is encoded base-128, most significant group first; the
        high bit of each byte marks continuation.

        Raises:
            ConnectionTerminatedException: when read returns empty string.
        """
        length = 0
        while True:
            b_str = self._read(1)
            b = ord(b_str)
            length = length * 128 + (b & 0x7f)
            if (b & 0x80) == 0:
                break
        return length

    def receive_message(self):
        """Receive a WebSocket frame and return its payload an unicode string.

        Returns:
            payload unicode string in a WebSocket frame, or None when a
            closing handshake frame terminates the connection.

        Raises:
            ConnectionTerminatedException: when read returns empty
                string.
            BadOperationException: when called on a client-terminated
                connection.
        """
        if self._request.client_terminated:
            raise BadOperationException(
                'Requested receive_message after receiving a closing '
                'handshake')

        while True:
            # Read 1 byte.
            # mp_conn.read will block if no bytes are available.
            # Timeout is controlled by TimeOut directive of Apache.
            frame_type_str = self.receive_bytes(1)
            frame_type = ord(frame_type_str)
            if (frame_type & 0x80) == 0x80:
                # High bit set: the payload length is specified in the frame.
                # Read and discard.
                length = self._read_payload_length_hixie75()
                if length > 0:
                    _ = self.receive_bytes(length)
                # 5.3 3. 12. if /type/ is 0xFF and /length/ is 0, then set the
                # /client terminated/ flag and abort these steps.
                if not self._enable_closing_handshake:
                    continue

                if frame_type == 0xFF and length == 0:
                    self._request.client_terminated = True

                    if self._request.server_terminated:
                        self._logger.debug(
                            'Received ack for server-initiated closing '
                            'handshake')
                        return None

                    self._logger.debug(
                        'Received client-initiated closing handshake')

                    self._send_closing_handshake()
                    self._logger.debug(
                        'Sent ack for client-initiated closing handshake')
                    return None
            else:
                # High bit clear: the payload is delimited with \xff.
                bytes = self._read_until('\xff')
                # The WebSocket protocol section 4.4 specifies that invalid
                # characters must be replaced with U+fffd REPLACEMENT
                # CHARACTER.
                message = bytes.decode('utf-8', 'replace')
                if frame_type == 0x00:
                    return message
                # Discard data of other types.

    def _send_closing_handshake(self):
        # Sends the server side of the HyBi 00 closing handshake and marks
        # this direction terminated.
        if not self._enable_closing_handshake:
            raise BadOperationException(
                'Closing handshake is not supported in Hixie 75 protocol')

        self._request.server_terminated = True

        # 5.3 the server may decide to terminate the WebSocket connection by
        # running through the following steps:
        # 1. send a 0xFF byte and a 0x00 byte to the client to indicate the
        # start of the closing handshake.
        self._write('\xff\x00')

    def close_connection(self, unused_code='', unused_reason=''):
        """Closes a WebSocket connection.

        Raises:
            ConnectionTerminatedException: when closing handshake was
                not successfull.
        """
        if self._request.server_terminated:
            self._logger.debug(
                'Requested close_connection but server is already terminated')
            return

        if not self._enable_closing_handshake:
            # Hixie 75 has no closing handshake; just mark ourselves closed.
            self._request.server_terminated = True
            self._logger.debug('Connection closed')
            return

        self._send_closing_handshake()
        self._logger.debug('Sent server-initiated closing handshake')

        # TODO(ukai): 2. wait until the /client terminated/ flag has been set,
        # or until a server-defined timeout expires.
        #
        # For now, we expect receiving closing handshake right after sending
        # out closing handshake, and if we couldn't receive non-handshake
        # frame, we take it as ConnectionTerminatedException.
        message = self.receive_message()
        if message is not None:
            raise ConnectionTerminatedException(
                'Didn\'t receive valid ack for closing handshake')
        # TODO: 3. close the WebSocket connection.
        # note: mod_python Connection (mp_conn) doesn't have close method.

    def send_ping(self, body):
        # Ping frames do not exist before HyBi; always an error here.
        raise BadOperationException(
            'StreamHixie75 doesn\'t support send_ping')
# vi:sts=4 sw=4 et
|
vmax-feihu/hue | refs/heads/master | apps/security/src/security/views.py | 26 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from desktop.lib.django_util import render
from libsentry.sentry_site import get_hive_sentry_provider, get_sentry_server_admin_groups
def hive(request):
    """Render the Hive/Sentry privileges page.

    The page's bootstrap state carries the requesting user, the configured
    Sentry provider, and whether the user belongs to a Sentry admin group.
    """
    user = request.user
    admin_groups = get_sentry_server_admin_groups()
    initial_state = {
        'user': user.username,
        'sentry_provider': get_hive_sentry_provider(),
        'is_sentry_admin': user.groups.filter(name__in=admin_groups).exists(),
    }
    context = {
        'initial': json.dumps(initial_state),
        'has_impersonation_perm': _has_impersonation_perm(user),
    }
    return render("hive.mako", request, context)
def hdfs(request):
    """Render the HDFS ACLs page for the requesting user."""
    user = request.user
    context = {
        'initial': json.dumps({'user': user.username}),
        'has_impersonation_perm': _has_impersonation_perm(user),
    }
    return render("hdfs.mako", request, context)
def _has_impersonation_perm(user):
return user.is_superuser or user.has_hue_permission(action="impersonate", app="security")
|
renyi533/tensorflow | refs/heads/master | tensorflow/python/ops/distributions/categorical.py | 17 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The Categorical distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops.distributions import distribution
from tensorflow.python.ops.distributions import kullback_leibler
from tensorflow.python.ops.distributions import util as distribution_util
from tensorflow.python.util import deprecation
from tensorflow.python.util.tf_export import tf_export
def _broadcast_cat_event_and_params(event, params, base_dtype):
  """Broadcasts the event or distribution parameters.

  Args:
    event: `Tensor` of category indices; integer dtypes pass through and
      floating dtypes are cast to int32. Any other dtype raises.
    params: `Tensor` of distribution parameters whose trailing dimension is
      the class dimension; leading dimensions are broadcast against `event`.
    base_dtype: the distribution's dtype, used only in the error message.

  Returns:
    A `(event, params)` pair broadcast so that `event.shape` equals
    `params.shape[:-1]`.

  Raises:
    TypeError: if `event` is neither integer nor floating dtype.
  """
  if event.dtype.is_integer:
    pass
  elif event.dtype.is_floating:
    # When `validate_args=True` we've already ensured int/float casting
    # is closed.
    event = math_ops.cast(event, dtype=dtypes.int32)
  else:
    raise TypeError("`value` should have integer `dtype` or "
                    "`self.dtype` ({})".format(base_dtype))
  # Only skip the dynamic broadcast when all shapes are statically known
  # and already agree.
  shape_known_statically = (
      params.shape.ndims is not None and
      params.shape[:-1].is_fully_defined() and
      event.shape.is_fully_defined())
  if not shape_known_statically or params.shape[:-1] != event.shape:
    # Multiply by ones to broadcast each tensor up to the joint shape.
    params *= array_ops.ones_like(event[..., array_ops.newaxis],
                                  dtype=params.dtype)
    params_shape = array_ops.shape(params)[:-1]
    event *= array_ops.ones(params_shape, dtype=event.dtype)
    if params.shape.ndims is not None:
      event.set_shape(tensor_shape.TensorShape(params.shape[:-1]))

  return event, params
@tf_export(v1=["distributions.Categorical"])
class Categorical(distribution.Distribution):
  """Categorical distribution.
  The Categorical distribution is parameterized by either probabilities or
  log-probabilities of a set of `K` classes. It is defined over the integers
  `{0, 1, ..., K}`.
  The Categorical distribution is closely related to the `OneHotCategorical` and
  `Multinomial` distributions. The Categorical distribution can be intuited as
  generating samples according to `argmax{ OneHotCategorical(probs) }` itself
  being identical to `argmax{ Multinomial(probs, total_count=1) }`.
  #### Mathematical Details
  The probability mass function (pmf) is,
  ```none
  pmf(k; pi) = prod_j pi_j**[k == j]
  ```
  #### Pitfalls
  The number of classes, `K`, must not exceed:
  - the largest integer representable by `self.dtype`, i.e.,
    `2**(mantissa_bits+1)` (IEEE 754),
  - the maximum `Tensor` index, i.e., `2**31-1`.
  In other words,
  ```python
  K <= min(2**31-1, {
      tf.float16: 2**11,
      tf.float32: 2**24,
      tf.float64: 2**53 }[param.dtype])
  ```
  Note: This condition is validated only when `self.validate_args = True`.
  #### Examples
  Creates a 3-class distribution with the 2nd class being most likely.
  ```python
  dist = Categorical(probs=[0.1, 0.5, 0.4])
  n = 1e4
  empirical_prob = tf.cast(
      tf.histogram_fixed_width(
        dist.sample(int(n)),
        [0., 2],
        nbins=3),
      dtype=tf.float32) / n
  # ==> array([ 0.1005,  0.5037,  0.3958], dtype=float32)
  ```
  Creates a 3-class distribution with the 2nd class being most likely.
  Parameterized by [logits](https://en.wikipedia.org/wiki/Logit) rather than
  probabilities.
  ```python
  dist = Categorical(logits=np.log([0.1, 0.5, 0.4])
  n = 1e4
  empirical_prob = tf.cast(
      tf.histogram_fixed_width(
        dist.sample(int(n)),
        [0., 2],
        nbins=3),
      dtype=tf.float32) / n
  # ==> array([0.1045,  0.5047, 0.3908], dtype=float32)
  ```
  Creates a 3-class distribution with the 3rd class being most likely.
  The distribution functions can be evaluated on counts.
  ```python
  # counts is a scalar.
  p = [0.1, 0.4, 0.5]
  dist = Categorical(probs=p)
  dist.prob(0)  # Shape []
  # p will be broadcast to [[0.1, 0.4, 0.5], [0.1, 0.4, 0.5]] to match counts.
  counts = [1, 0]
  dist.prob(counts)  # Shape [2]
  # p will be broadcast to shape [3, 5, 7, 3] to match counts.
  counts = [[...]] # Shape [5, 7, 3]
  dist.prob(counts)  # Shape [5, 7, 3]
  ```
  """

  @deprecation.deprecated(
      "2019-01-01",
      "The TensorFlow Distributions library has moved to "
      "TensorFlow Probability "
      "(https://github.com/tensorflow/probability). You "
      "should update all references to use `tfp.distributions` "
      "instead of `tf.distributions`.",
      warn_once=True)
  def __init__(
      self,
      logits=None,
      probs=None,
      dtype=dtypes.int32,
      validate_args=False,
      allow_nan_stats=True,
      name="Categorical"):
    """Initialize Categorical distributions using class log-probabilities.
    Args:
      logits: An N-D `Tensor`, `N >= 1`, representing the log probabilities
        of a set of Categorical distributions. The first `N - 1` dimensions
        index into a batch of independent distributions and the last dimension
        represents a vector of logits for each class. Only one of `logits` or
        `probs` should be passed in.
      probs: An N-D `Tensor`, `N >= 1`, representing the probabilities
        of a set of Categorical distributions. The first `N - 1` dimensions
        index into a batch of independent distributions and the last dimension
        represents a vector of probabilities for each class. Only one of
        `logits` or `probs` should be passed in.
      dtype: The type of the event samples (default: int32).
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
        (e.g., mean, mode, variance) use the value "`NaN`" to indicate the
        result is undefined. When `False`, an exception is raised if one or
        more of the statistic's batch members are undefined.
      name: Python `str` name prefixed to Ops created by this class.
    """
    parameters = dict(locals())
    with ops.name_scope(name, values=[logits, probs]) as name:
      # Exactly one of `logits`/`probs` is supplied; derive the other.
      self._logits, self._probs = distribution_util.get_logits_and_probs(
          logits=logits,
          probs=probs,
          validate_args=validate_args,
          multidimensional=True,
          name=name)
      if validate_args:
        self._logits = distribution_util.embed_check_categorical_event_shape(
            self._logits)
      # Compute batch rank, event size, and batch shape, statically when the
      # logits shape is known and dynamically otherwise.
      logits_shape_static = self._logits.get_shape().with_rank_at_least(1)
      if logits_shape_static.ndims is not None:
        self._batch_rank = ops.convert_to_tensor(
            logits_shape_static.ndims - 1,
            dtype=dtypes.int32,
            name="batch_rank")
      else:
        with ops.name_scope(name="batch_rank"):
          self._batch_rank = array_ops.rank(self._logits) - 1
      logits_shape = array_ops.shape(self._logits, name="logits_shape")
      if tensor_shape.dimension_value(logits_shape_static[-1]) is not None:
        self._event_size = ops.convert_to_tensor(
            logits_shape_static.dims[-1].value,
            dtype=dtypes.int32,
            name="event_size")
      else:
        with ops.name_scope(name="event_size"):
          self._event_size = logits_shape[self._batch_rank]
      if logits_shape_static[:-1].is_fully_defined():
        self._batch_shape_val = constant_op.constant(
            logits_shape_static[:-1].as_list(),
            dtype=dtypes.int32,
            name="batch_shape")
      else:
        with ops.name_scope(name="batch_shape"):
          self._batch_shape_val = logits_shape[:-1]
    super(Categorical, self).__init__(
        dtype=dtype,
        reparameterization_type=distribution.NOT_REPARAMETERIZED,
        validate_args=validate_args,
        allow_nan_stats=allow_nan_stats,
        parameters=parameters,
        graph_parents=[self._logits,
                       self._probs],
        name=name)

  @property
  def event_size(self):
    """Scalar `int32` tensor: the number of classes."""
    return self._event_size

  @property
  def logits(self):
    """Vector of coordinatewise logits."""
    return self._logits

  @property
  def probs(self):
    """Vector of coordinatewise probabilities."""
    return self._probs

  def _batch_shape_tensor(self):
    return array_ops.identity(self._batch_shape_val)

  def _batch_shape(self):
    # Static batch shape: everything but the trailing class dimension.
    return self.logits.get_shape()[:-1]

  def _event_shape_tensor(self):
    return constant_op.constant([], dtype=dtypes.int32)

  def _event_shape(self):
    # Events are scalar class indices.
    return tensor_shape.TensorShape([])

  def _sample_n(self, n, seed=None):
    # Flatten batch dimensions so `multinomial` sees a 2-D logits matrix.
    if self.logits.get_shape().ndims == 2:
      logits_2d = self.logits
    else:
      logits_2d = array_ops.reshape(self.logits, [-1, self.event_size])
    # Draw in int64 only when the target dtype is wider than 32 bits.
    sample_dtype = dtypes.int64 if self.dtype.size > 4 else dtypes.int32
    draws = random_ops.multinomial(
        logits_2d, n, seed=seed, output_dtype=sample_dtype)
    # Move the sample dimension to the front and restore the batch shape.
    draws = array_ops.reshape(
        array_ops.transpose(draws),
        array_ops.concat([[n], self.batch_shape_tensor()], 0))
    return math_ops.cast(draws, self.dtype)

  def _cdf(self, k):
    k = ops.convert_to_tensor(k, name="k")
    if self.validate_args:
      k = distribution_util.embed_check_integer_casting_closed(
          k, target_dtype=dtypes.int32)
    k, probs = _broadcast_cat_event_and_params(
        k, self.probs, base_dtype=self.dtype.base_dtype)

    # batch-flatten everything in order to use `sequence_mask()`.
    batch_flattened_probs = array_ops.reshape(probs,
                                              (-1, self._event_size))
    batch_flattened_k = array_ops.reshape(k, [-1])

    # CDF(k) = sum of class probabilities for classes strictly below k.
    to_sum_over = array_ops.where(
        array_ops.sequence_mask(batch_flattened_k, self._event_size),
        batch_flattened_probs,
        array_ops.zeros_like(batch_flattened_probs))
    batch_flattened_cdf = math_ops.reduce_sum(to_sum_over, axis=-1)
    # Reshape back to the shape of the argument.
    return array_ops.reshape(batch_flattened_cdf, array_ops.shape(k))

  def _log_prob(self, k):
    k = ops.convert_to_tensor(k, name="k")
    if self.validate_args:
      k = distribution_util.embed_check_integer_casting_closed(
          k, target_dtype=dtypes.int32)
    k, logits = _broadcast_cat_event_and_params(
        k, self.logits, base_dtype=self.dtype.base_dtype)

    # log pmf(k) is exactly minus the softmax cross entropy with label k.
    return -nn_ops.sparse_softmax_cross_entropy_with_logits(labels=k,
                                                            logits=logits)

  def _entropy(self):
    # H = -sum_j p_j log p_j, computed from logits for numerical stability.
    return -math_ops.reduce_sum(
        nn_ops.log_softmax(self.logits) * self.probs, axis=-1)

  def _mode(self):
    # The most probable class per batch member.
    ret = math_ops.argmax(self.logits, axis=self._batch_rank)
    ret = math_ops.cast(ret, self.dtype)
    ret.set_shape(self.batch_shape)
    return ret
@kullback_leibler.RegisterKL(Categorical, Categorical)
def _kl_categorical_categorical(a, b, name=None):
  """Calculate the batched KL divergence KL(a || b) with a and b Categorical.
  Args:
    a: instance of a Categorical distribution object.
    b: instance of a Categorical distribution object.
    name: (optional) Name to use for created operations.
      default is "kl_categorical_categorical".
  Returns:
    Batchwise KL(a || b)
  """
  with ops.name_scope(name, "kl_categorical_categorical",
                      values=[a.logits, b.logits]):
    # KL(a || b) = sum_j p_a(j) * (log p_a(j) - log p_b(j)); log_softmax on
    # the raw logits keeps the log-probabilities numerically stable.
    delta_log_probs1 = (nn_ops.log_softmax(a.logits) -
                        nn_ops.log_softmax(b.logits))
    return math_ops.reduce_sum(nn_ops.softmax(a.logits) * delta_log_probs1,
                               axis=-1)
|
crookedreyes/py4e-specialization | refs/heads/master | code3/giturl.py | 1 | import urllib.request, urllib.parse, urllib.error
import ssl
import gitsecrets
# Shared SSL context with certificate verification disabled so the course
# examples work even where local cert validation fails.
# NOTE(review): this turns off HTTPS server authentication — acceptable only
# for teaching code, never for production.
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
def urlopen(url):
    """Fetch *url* with API credentials appended as query parameters.

    Credentials come from gitsecrets.secrets(). Returns a tuple of the
    decoded response body and a dict of the response headers.
    """
    credentials = urllib.parse.urlencode(gitsecrets.secrets())
    # Append with '&' when the URL already carries a query string.
    separator = '&' if url.find('?') > 0 else '?'
    full_url = url + separator + credentials
    # print('Retrieving', full_url)
    request = urllib.request.Request(
        full_url,
        data=None,
        headers={'User-Agent': 'giturl.py from www.py4e.com/code3'},
    )
    response = urllib.request.urlopen(request, context=ctx)
    body = response.read().decode()
    header_map = dict(response.getheaders())
    return (body, header_map)
|
rhelmer/socorro-lib | refs/heads/master | socorro/unittest/external/postgresql/test_platforms.py | 11 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from nose.plugins.attrib import attr
from nose.tools import eq_
from socorro.external.postgresql.platforms import Platforms
from .unittestbase import PostgreSQLTestCase
#==============================================================================
@attr(integration='postgres')  # for nosetests
class IntegrationTestPlatforms(PostgreSQLTestCase):
    """Test socorro.external.postgresql.platforms.Platforms class. """

    #--------------------------------------------------------------------------
    def setUp(self):
        """Set up this test class by populating the os_names table with fake
        data. """
        super(IntegrationTestPlatforms, self).setUp()

        cursor = self.connection.cursor()

        # Insert data: three known platforms with their short codes.
        cursor.execute("""
            INSERT INTO os_names
            (os_name, os_short_name)
            VALUES
            (
                'Windows NT',
                'win'
            ),
            (
                'Mac OS X',
                'mac'
            ),
            (
                'Linux',
                'lin'
            );
        """)

        self.connection.commit()

    #--------------------------------------------------------------------------
    def tearDown(self):
        """Clean up the database, delete tables and functions. """
        cursor = self.connection.cursor()
        # CASCADE also clears rows in tables referencing os_names.
        cursor.execute("""
            TRUNCATE os_names CASCADE
        """)
        self.connection.commit()
        super(IntegrationTestPlatforms, self).tearDown()

    #--------------------------------------------------------------------------
    def test_get(self):
        # Platforms.get() should return every row inserted in setUp, mapped
        # to name/code keys, along with the total count.
        platforms = Platforms(config=self.config)

        res = platforms.get()
        res_expected = {
            "hits": [
                {
                    "name": "Windows NT",
                    "code": "win"
                },
                {
                    "name": "Mac OS X",
                    "code": "mac"
                },
                {
                    "name": "Linux",
                    "code": "lin"
                }
            ],
            "total": 3
        }

        eq_(res, res_expected)
|
gvb/odoo | refs/heads/8.0 | addons/hr_payroll/__init__.py | 433 | #-*- coding:utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# d$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_payroll
import report
import wizard
import res_config
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
kangkot/arangodb | refs/heads/devel | 3rdParty/V8-4.3.61/third_party/python_26/Lib/glob.py | 173 | """Filename globbing utility."""
import sys
import os
import re
import fnmatch
__all__ = ["glob", "iglob"]
def glob(pathname):
    """Return a list of paths matching a pathname pattern.

    The pattern may contain simple shell-style wildcards a la fnmatch.
    """
    matches = iglob(pathname)
    return list(matches)
def iglob(pathname):
    """Return an iterator which yields the paths matching a pathname pattern.

    The pattern may contain simple shell-style wildcards a la fnmatch.
    """
    if not has_magic(pathname):
        # No wildcards: yield the literal path only if it exists (lexists so
        # broken symlinks still match).
        if os.path.lexists(pathname):
            yield pathname
        return
    dirname, basename = os.path.split(pathname)
    if not dirname:
        # Bare basename pattern: match inside the current directory.
        for name in glob1(os.curdir, basename):
            yield name
        return
    # Recurse only when the directory part itself contains wildcards.
    if has_magic(dirname):
        dirs = iglob(dirname)
    else:
        dirs = [dirname]
    # glob1 handles wildcard basenames; glob0 handles literal ones.
    if has_magic(basename):
        glob_in_dir = glob1
    else:
        glob_in_dir = glob0
    for dirname in dirs:
        for name in glob_in_dir(dirname, basename):
            yield os.path.join(dirname, name)
# These 2 helper functions non-recursively glob inside a literal directory.
# They return a list of basenames. `glob1` accepts a pattern while `glob0`
# takes a literal basename (so it only has to check for its existence).
def glob1(dirname, pattern):
    """Non-recursively glob *pattern* inside the literal directory *dirname*.

    Returns a list of matching basenames; an unreadable or missing directory
    yields []. (Python 2 module: note the use of `unicode`.)
    """
    if not dirname:
        dirname = os.curdir
    # Decode the directory name so os.listdir returns unicode entries when
    # the pattern itself is unicode (Python 2 behaviour).
    if isinstance(pattern, unicode) and not isinstance(dirname, unicode):
        dirname = unicode(dirname, sys.getfilesystemencoding() or
                                   sys.getdefaultencoding())
    try:
        names = os.listdir(dirname)
    except os.error:
        return []
    # Hidden files are matched only by patterns starting with a dot.
    if pattern[0] != '.':
        names = filter(lambda x: x[0] != '.', names)
    return fnmatch.filter(names, pattern)
def glob0(dirname, basename):
    """Check a literal *basename* inside *dirname*.

    Returns [basename] when the entry exists, else []. An empty basename
    (from a trailing separator, e.g. 'q*x/') matches only directories.
    """
    if basename == '':
        # `os.path.split()` returns an empty basename for paths ending with a
        # directory separator. 'q*x/' should match only directories.
        found = os.path.isdir(dirname)
    else:
        found = os.path.lexists(os.path.join(dirname, basename))
    return [basename] if found else []
# Pattern matching any shell wildcard metacharacter: *, ?, or [.
magic_check = re.compile('[*?[]')


def has_magic(s):
    """Return True if *s* contains any shell wildcard characters."""
    return bool(magic_check.search(s))
|
openpeer/webrtc-gyp | refs/heads/master | test/rules/gyptest-all.py | 25 | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies simple rules when using an explicit build target of 'all'.
"""
import TestGyp
test = TestGyp.TestGyp()

# A target with rules but no action to drive them must fail at generation
# time (status=1; stderr contents are not checked).
test.run_gyp('no_action_with_rules_fails.gyp', chdir='src/noaction', status=1,
             stderr=None)

test.run_gyp('actions.gyp', chdir='src')

test.relocate('src', 'relocate/src')

# Build the 'all' target so every rule output gets generated.
test.build('actions.gyp', test.ALL, chdir='relocate/src')

expect = """\
Hello from program.c
Hello from function1.in
Hello from function2.in
"""

# The xcode generator builds products under each subdirectory; the other
# generators build from the top-level relocated directory.
if test.format == 'xcode':
  chdir = 'relocate/src/subdir1'
else:
  chdir = 'relocate/src'
test.run_built_executable('program', chdir=chdir, stdout=expect)

expect = """\
Hello from program.c
Hello from function3.in
"""

if test.format == 'xcode':
  chdir = 'relocate/src/subdir3'
else:
  chdir = 'relocate/src'
test.run_built_executable('program2', chdir=chdir, stdout=expect)

# Rule outputs that are plain generated files rather than executables.
test.must_match('relocate/src/subdir2/file1.out', 'Hello from file1.in\n')
test.must_match('relocate/src/subdir2/file2.out', 'Hello from file2.in\n')
test.must_match('relocate/src/subdir2/file1.out2', 'Hello from file1.in\n')
test.must_match('relocate/src/subdir2/file2.out2', 'Hello from file2.in\n')
test.must_match('relocate/src/subdir2/file1.out4', 'Hello from file1.in\n')
test.must_match('relocate/src/subdir2/file2.out4', 'Hello from file2.in\n')
test.must_match('relocate/src/subdir2/file1.copy', 'Hello from file1.in\n')

test.must_match('relocate/src/external/file1.external_rules.out',
                'Hello from file1.in\n')
test.must_match('relocate/src/external/file2.external_rules.out',
                'Hello from file2.in\n')

expect = """\
Hello from program.c
Got 41.
"""

if test.format == 'xcode':
  chdir = 'relocate/src/subdir4'
else:
  chdir = 'relocate/src'
test.run_built_executable('program4', chdir=chdir, stdout=expect)

test.pass_test()
|
livni/old-OK | refs/heads/master | src/knesset/laws/migrations/0005_add_votes_count_to_vote_data.py | 1 |
from south.db import db
from django.db import models
from knesset.laws.models import *
class Migration:
no_dry_run = True
    def forwards(self, orm):
        # Backfill the new votes_count denormalization: store on each Vote
        # the number of VoteAction rows that reference it.
        for v in orm.Vote.objects.all():
            v.votes_count = orm.VoteAction.objects.filter(vote=v).count()
            v.save()
def backwards(self, orm):
for v in orm.Vote.objects.all():
v.votes_count = None
v.save()
models = {
'laws.membervotingstatistics': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'voting_statistics'", 'unique': 'True', 'to': "orm['mks.Member']"})
},
'laws.partyvotingstatistics': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'party': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'voting_statistics'", 'unique': 'True', 'to': "orm['mks.Party']"})
},
'laws.vote': {
'against_party': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'controversy': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'full_text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'full_text_url': ('django.db.models.fields.URLField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'importance': ('django.db.models.fields.FloatField', [], {}),
'meeting_number': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'src_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'src_url': ('django.db.models.fields.URLField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'summary': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'time': ('django.db.models.fields.DateTimeField', [], {}),
'time_string': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'vote_number': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'votes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['mks.Member']", 'blank': 'True'}),
'votes_count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'laws.voteaction': {
'against_coalition': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'against_opposition': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'against_party': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mks.Member']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'vote': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['laws.Vote']"})
},
'mks.member': {
'current_party': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'members'", 'null': 'True', 'to': "orm['mks.Party']"}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_of_death': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'family_status': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'img_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'is_current': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'number_of_children': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'parties': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['mks.Party']"}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'place_of_birth': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'year_of_aliyah': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'mks.party': {
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_coalition': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'number_of_members': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'number_of_seats': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['laws']
|
YuriGural/erpnext | refs/heads/master | erpnext/accounts/doctype/shipping_rule/test_shipping_rule.py | 97 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
import unittest
from erpnext.accounts.doctype.shipping_rule.shipping_rule import FromGreaterThanToError, ManyBlankToValuesError, OverlappingConditionError
# Fixture documents for this doctype, loaded from its test records.
test_records = frappe.get_test_records('Shipping Rule')
class TestShippingRule(unittest.TestCase):
	"""Validation tests for Shipping Rule condition ranges."""

	def _rule_from_fixture(self):
		"""Return a fresh copy of the first Shipping Rule test record."""
		doc = frappe.copy_doc(test_records[0])
		doc.name = test_records[0].get('name')
		return doc

	def test_from_greater_than_to(self):
		# A condition whose from_value exceeds its to_value is rejected.
		rule = self._rule_from_fixture()
		rule.get("conditions")[0].from_value = 101
		self.assertRaises(FromGreaterThanToError, rule.insert)

	def test_many_zero_to_values(self):
		# Only one condition may leave its to_value blank (open-ended range).
		rule = self._rule_from_fixture()
		rule.get("conditions")[0].to_value = 0
		self.assertRaises(ManyBlankToValuesError, rule.insert)

	def test_overlapping_conditions(self):
		# Every way two ranges can intersect, contain one another, or
		# coincide must be rejected as overlapping.
		overlapping_pairs = (
			((50, 150), (0, 100)),
			((50, 150), (100, 200)),
			((50, 150), (75, 125)),
			((50, 150), (25, 175)),
			((50, 150), (50, 150)),
		)
		for range_a, range_b in overlapping_pairs:
			rule = self._rule_from_fixture()
			first = rule.get("conditions")[0]
			second = rule.get("conditions")[1]
			first.from_value, first.to_value = range_a
			second.from_value, second.to_value = range_b
			self.assertRaises(OverlappingConditionError, rule.insert)
|
kchodorow/tensorflow | refs/heads/master | tensorflow/contrib/learn/python/learn/learn_io/graph_io_test.py | 17 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for learn.io.graph_io."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import base64
import os
import random
import sys
import tempfile
# TODO: #6568 Remove this hack that makes dlopen() not crash.
if hasattr(sys, "getdlopenflags") and hasattr(sys, "setdlopenflags"):
import ctypes
sys.setdlopenflags(sys.getdlopenflags() | ctypes.RTLD_GLOBAL)
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.contrib.learn.python.learn.learn_io import graph_io
from tensorflow.contrib.learn.python.learn.learn_io.graph_io import _read_keyed_batch_examples_shared_queue
from tensorflow.python.client import session as session_lib
from tensorflow.python.framework import dtypes as dtypes_lib
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import io_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import parsing_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import gfile
from tensorflow.python.platform import test
from tensorflow.python.training import coordinator
from tensorflow.python.training import queue_runner_impl
from tensorflow.python.training import server_lib
# Pattern for which the mocked gfile.Glob returns _FILE_NAMES.
_VALID_FILE_PATTERN = "VALID"
_FILE_NAMES = [b"abc", b"def", b"ghi", b"jkl"]
# Pattern for which the mocked gfile.Glob returns no matches.
_INVALID_FILE_PATTERN = "INVALID"
class GraphIOTest(test.TestCase):
  """Tests for the graph_io batch-reading ops.

  gfile.Glob is monkey-patched so file-pattern expansion can be controlled
  without touching the filesystem; individual tests restore the real Glob
  when they need actual temp files.
  """

  def _mock_glob(self, pattern):
    """Fake gfile.Glob: _VALID_FILE_PATTERN -> fixed names, invalid -> []."""
    if _VALID_FILE_PATTERN == pattern:
      return _FILE_NAMES
    self.assertEqual(_INVALID_FILE_PATTERN, pattern)
    return []

  def setUp(self):
    super(GraphIOTest, self).setUp()
    # Fixed seed so randomized queues behave deterministically per test.
    random.seed(42)
    self._orig_glob = gfile.Glob
    gfile.Glob = self._mock_glob

  def tearDown(self):
    # Restore the real Glob patched out in setUp.
    gfile.Glob = self._orig_glob
    super(GraphIOTest, self).tearDown()

  def test_dequeue_batch_value_errors(self):
    """read_batch_examples rejects each invalid argument with ValueError."""
    default_batch_size = 17
    queue_capacity = 1234
    num_threads = 3
    name = "my_batch"

    # No matching files.
    self.assertRaisesRegexp(
        ValueError,
        "No files match",
        graph_io.read_batch_examples,
        _INVALID_FILE_PATTERN,
        default_batch_size,
        io_ops.TFRecordReader,
        False,
        num_epochs=None,
        queue_capacity=queue_capacity,
        num_threads=num_threads,
        name=name)

    # batch_size must be a positive int no larger than queue_capacity.
    self.assertRaisesRegexp(
        ValueError,
        "Invalid batch_size",
        graph_io.read_batch_examples,
        _VALID_FILE_PATTERN,
        None,
        io_ops.TFRecordReader,
        False,
        num_epochs=None,
        queue_capacity=queue_capacity,
        num_threads=num_threads,
        name=name)
    self.assertRaisesRegexp(
        ValueError,
        "Invalid batch_size",
        graph_io.read_batch_examples,
        _VALID_FILE_PATTERN,
        -1,
        io_ops.TFRecordReader,
        False,
        num_epochs=None,
        queue_capacity=queue_capacity,
        num_threads=num_threads,
        name=name)

    # queue_capacity and num_threads must be positive ints.
    self.assertRaisesRegexp(
        ValueError,
        "Invalid queue_capacity",
        graph_io.read_batch_examples,
        _VALID_FILE_PATTERN,
        default_batch_size,
        io_ops.TFRecordReader,
        False,
        num_epochs=None,
        queue_capacity=None,
        num_threads=num_threads,
        name=name)
    self.assertRaisesRegexp(
        ValueError,
        "Invalid num_threads",
        graph_io.read_batch_examples,
        _VALID_FILE_PATTERN,
        default_batch_size,
        io_ops.TFRecordReader,
        False,
        num_epochs=None,
        queue_capacity=queue_capacity,
        num_threads=None,
        name=name)
    self.assertRaisesRegexp(
        ValueError,
        "Invalid num_threads",
        graph_io.read_batch_examples,
        _VALID_FILE_PATTERN,
        default_batch_size,
        io_ops.TFRecordReader,
        False,
        num_epochs=None,
        queue_capacity=queue_capacity,
        num_threads=-1,
        name=name)

    # batch_size larger than queue_capacity is rejected.
    self.assertRaisesRegexp(
        ValueError,
        "Invalid batch_size",
        graph_io.read_batch_examples,
        _VALID_FILE_PATTERN,
        queue_capacity + 1,
        io_ops.TFRecordReader,
        False,
        num_epochs=None,
        queue_capacity=queue_capacity,
        num_threads=1,
        name=name)

    # num_epochs, if given, must be a non-negative int.
    self.assertRaisesRegexp(
        ValueError,
        "Invalid num_epochs",
        graph_io.read_batch_examples,
        _VALID_FILE_PATTERN,
        default_batch_size,
        io_ops.TFRecordReader,
        False,
        num_epochs=-1,
        queue_capacity=queue_capacity,
        num_threads=1,
        name=name)

    # read_batch_size must be positive.
    self.assertRaisesRegexp(
        ValueError,
        "Invalid read_batch_size",
        graph_io.read_batch_examples,
        _VALID_FILE_PATTERN,
        default_batch_size,
        io_ops.TFRecordReader,
        False,
        num_epochs=None,
        queue_capacity=queue_capacity,
        num_threads=1,
        read_batch_size=0,
        name=name)

  def test_batch_record_features(self):
    """read_batch_record_features wires the expected ops into the graph."""
    batch_size = 17
    queue_capacity = 1234
    name = "my_batch"
    shape = (0,)
    features = {
        "feature":
            parsing_ops.FixedLenFeature(
                shape=shape, dtype=dtypes_lib.float32)
    }

    with ops.Graph().as_default() as g, self.test_session(graph=g) as sess:
      features = graph_io.read_batch_record_features(
          _VALID_FILE_PATTERN,
          batch_size,
          features,
          randomize_input=False,
          queue_capacity=queue_capacity,
          reader_num_threads=2,
          name=name)
      self.assertTrue("feature" in features,
                      "'feature' missing from %s." % features.keys())
      feature = features["feature"]
      self.assertEqual("%s/fifo_queue_1_Dequeue:0" % name, feature.name)
      self.assertAllEqual((batch_size,) + shape, feature.get_shape().as_list())
      # Verify the expected op names/types were created in the graph.
      file_name_queue_name = "%s/file_name_queue" % name
      file_names_name = "%s/input" % file_name_queue_name
      example_queue_name = "%s/fifo_queue" % name
      parse_example_queue_name = "%s/fifo_queue" % name
      op_nodes = test_util.assert_ops_in_graph({
          file_names_name: "Const",
          file_name_queue_name: "FIFOQueueV2",
          "%s/read/TFRecordReaderV2" % name: "TFRecordReaderV2",
          example_queue_name: "FIFOQueueV2",
          parse_example_queue_name: "FIFOQueueV2",
          name: "QueueDequeueManyV2"
      }, g)
      self.assertAllEqual(_FILE_NAMES, sess.run(["%s:0" % file_names_name])[0])
      self.assertEqual(queue_capacity,
                       op_nodes[example_queue_name].attr["capacity"].i)

  def test_one_epoch(self):
    """num_epochs=1 adds an epoch-limit variable and a DequeueUpTo op."""
    batch_size = 17
    queue_capacity = 1234
    name = "my_batch"

    with ops.Graph().as_default() as g, self.test_session(graph=g) as sess:
      inputs = graph_io.read_batch_examples(
          _VALID_FILE_PATTERN,
          batch_size,
          reader=io_ops.TFRecordReader,
          randomize_input=True,
          num_epochs=1,
          queue_capacity=queue_capacity,
          name=name)
      # Final batch may be partial, so the static shape is unknown.
      self.assertAllEqual((None,), inputs.get_shape().as_list())
      self.assertEqual("%s:1" % name, inputs.name)
      file_name_queue_name = "%s/file_name_queue" % name
      file_name_queue_limit_name = ("%s/limit_epochs/epochs" %
                                    file_name_queue_name)
      file_names_name = "%s/input" % file_name_queue_name
      example_queue_name = "%s/random_shuffle_queue" % name
      op_nodes = test_util.assert_ops_in_graph({
          file_names_name: "Const",
          file_name_queue_name: "FIFOQueueV2",
          "%s/read/TFRecordReaderV2" % name: "TFRecordReaderV2",
          example_queue_name: "RandomShuffleQueueV2",
          name: "QueueDequeueUpToV2",
          file_name_queue_limit_name: "VariableV2"
      }, g)
      self.assertEqual(
          set(_FILE_NAMES), set(sess.run(["%s:0" % file_names_name])[0]))
      self.assertEqual(queue_capacity,
                       op_nodes[example_queue_name].attr["capacity"].i)

  def test_batch_randomized(self):
    """randomize_input uses a RandomShuffleQueue and full-batch dequeue."""
    batch_size = 17
    queue_capacity = 1234
    name = "my_batch"

    with ops.Graph().as_default() as g, self.test_session(graph=g) as sess:
      inputs = graph_io.read_batch_examples(
          _VALID_FILE_PATTERN,
          batch_size,
          reader=io_ops.TFRecordReader,
          randomize_input=True,
          queue_capacity=queue_capacity,
          name=name)
      self.assertAllEqual((batch_size,), inputs.get_shape().as_list())
      self.assertEqual("%s:1" % name, inputs.name)
      file_name_queue_name = "%s/file_name_queue" % name
      file_names_name = "%s/input" % file_name_queue_name
      example_queue_name = "%s/random_shuffle_queue" % name
      op_nodes = test_util.assert_ops_in_graph({
          file_names_name: "Const",
          file_name_queue_name: "FIFOQueueV2",
          "%s/read/TFRecordReaderV2" % name: "TFRecordReaderV2",
          example_queue_name: "RandomShuffleQueueV2",
          name: "QueueDequeueManyV2"
      }, g)
      self.assertEqual(
          set(_FILE_NAMES), set(sess.run(["%s:0" % file_names_name])[0]))
      self.assertEqual(queue_capacity,
                       op_nodes[example_queue_name].attr["capacity"].i)

  def _create_temp_file(self, lines):
    """Write `lines` to a new temp file and return its path."""
    tempdir = tempfile.mkdtemp()
    filename = os.path.join(tempdir, "temp_file")
    gfile.Open(filename, "w").write(lines)
    return filename

  def _create_sorted_temp_files(self, lines_list):
    """Write each string in `lines_list` to its own lexically-sorted file."""
    tempdir = tempfile.mkdtemp()
    filenames = []
    for i, lines in enumerate(lines_list):
      filename = os.path.join(tempdir, "temp_file%05d" % i)
      gfile.Open(filename, "w").write(lines)
      filenames.append(filename)
    return filenames

  def test_read_text_lines(self):
    """Reads a 3-line text file in order, then raises OutOfRange."""
    gfile.Glob = self._orig_glob  # use the real filesystem here
    filename = self._create_temp_file("ABC\nDEF\nGHK\n")

    batch_size = 1
    queue_capacity = 5
    name = "my_batch"

    with ops.Graph().as_default() as g, self.test_session(graph=g) as session:
      inputs = graph_io.read_batch_examples(
          filename,
          batch_size,
          reader=io_ops.TextLineReader,
          randomize_input=False,
          num_epochs=1,
          queue_capacity=queue_capacity,
          name=name)
      self.assertAllEqual((None,), inputs.get_shape().as_list())
      session.run(variables.local_variables_initializer())

      coord = coordinator.Coordinator()
      threads = queue_runner_impl.start_queue_runners(session, coord=coord)

      self.assertAllEqual(session.run(inputs), [b"ABC"])
      self.assertAllEqual(session.run(inputs), [b"DEF"])
      self.assertAllEqual(session.run(inputs), [b"GHK"])
      with self.assertRaises(errors.OutOfRangeError):
        session.run(inputs)

      coord.request_stop()
      coord.join(threads)

  def test_read_text_lines_large(self):
    """Reads ~50k JSON-encoded Example lines and checks none are lost."""
    gfile.Glob = self._orig_glob
    sequence_prefix = "abcdefghijklmnopqrstuvwxyz123456789"
    num_records = 49999
    lines = [
        "".join([sequence_prefix, str(l)]).encode("ascii")
        for l in xrange(num_records)
    ]
    # Each line is a JSON tf.Example with the record base64'd into a
    # bytes_list feature named "sequence".
    json_lines = [
        "".join([
            '{"features": { "feature": { "sequence": {',
            '"bytes_list": { "value": ["', base64.b64encode(l).decode("ascii"),
            '"]}}}}}\n'
        ]) for l in lines
    ]
    filename = self._create_temp_file("".join(json_lines))
    batch_size = 10000
    queue_capacity = 10000
    name = "my_large_batch"

    features = {"sequence": parsing_ops.FixedLenFeature([], dtypes_lib.string)}

    with ops.Graph().as_default() as g, self.test_session(graph=g) as session:
      keys, result = graph_io.read_keyed_batch_features(
          filename,
          batch_size,
          features,
          io_ops.TextLineReader,
          randomize_input=False,
          num_epochs=1,
          queue_capacity=queue_capacity,
          num_enqueue_threads=2,
          parse_fn=parsing_ops.decode_json_example,
          name=name)
      self.assertAllEqual((None,), keys.get_shape().as_list())
      self.assertEqual(1, len(result))
      self.assertAllEqual((None,), result["sequence"].get_shape().as_list())
      session.run(variables.local_variables_initializer())
      coord = coordinator.Coordinator()
      threads = queue_runner_impl.start_queue_runners(session, coord=coord)

      data = []
      try:
        while not coord.should_stop():
          data.append(session.run(result))
      except errors.OutOfRangeError:
        pass
      finally:
        coord.request_stop()

      coord.join(threads)

    parsed_records = [
        item for sublist in [d["sequence"] for d in data] for item in sublist
    ]
    # Check that the number of records matches expected and all records
    # are present.
    self.assertEqual(len(parsed_records), num_records)
    self.assertEqual(set(parsed_records), set(lines))

  def test_read_text_lines_multifile(self):
    """Reads two files sequentially in sorted-filename order."""
    gfile.Glob = self._orig_glob
    filenames = self._create_sorted_temp_files(["ABC\n", "DEF\nGHK\n"])

    batch_size = 1
    queue_capacity = 5
    name = "my_batch"

    with ops.Graph().as_default() as g, self.test_session(graph=g) as session:
      inputs = graph_io.read_batch_examples(
          filenames,
          batch_size,
          reader=io_ops.TextLineReader,
          randomize_input=False,
          num_epochs=1,
          queue_capacity=queue_capacity,
          name=name)
      self.assertAllEqual((None,), inputs.get_shape().as_list())
      session.run(variables.local_variables_initializer())

      coord = coordinator.Coordinator()
      threads = queue_runner_impl.start_queue_runners(session, coord=coord)

      self.assertEqual("%s:1" % name, inputs.name)
      file_name_queue_name = "%s/file_name_queue" % name
      file_names_name = "%s/input" % file_name_queue_name
      example_queue_name = "%s/fifo_queue" % name
      test_util.assert_ops_in_graph({
          file_names_name: "Const",
          file_name_queue_name: "FIFOQueueV2",
          "%s/read/TextLineReaderV2" % name: "TextLineReaderV2",
          example_queue_name: "FIFOQueueV2",
          name: "QueueDequeueUpToV2"
      }, g)

      self.assertAllEqual(session.run(inputs), [b"ABC"])
      self.assertAllEqual(session.run(inputs), [b"DEF"])
      self.assertAllEqual(session.run(inputs), [b"GHK"])
      with self.assertRaises(errors.OutOfRangeError):
        session.run(inputs)

      coord.request_stop()
      coord.join(threads)

  def test_read_text_lines_multifile_with_shared_queue(self):
    """Shared-queue variant reads the same two files in order."""
    gfile.Glob = self._orig_glob
    filenames = self._create_sorted_temp_files(["ABC\n", "DEF\nGHK\n"])

    batch_size = 1
    queue_capacity = 5
    name = "my_batch"

    with ops.Graph().as_default() as g, self.test_session(graph=g) as session:
      keys, inputs = _read_keyed_batch_examples_shared_queue(
          filenames,
          batch_size,
          reader=io_ops.TextLineReader,
          randomize_input=False,
          num_epochs=1,
          queue_capacity=queue_capacity,
          name=name)
      self.assertAllEqual((None,), keys.get_shape().as_list())
      self.assertAllEqual((None,), inputs.get_shape().as_list())
      # Shared queue uses a variable internally, so both initializers run.
      session.run([
          variables.local_variables_initializer(),
          variables.global_variables_initializer()
      ])

      coord = coordinator.Coordinator()
      threads = queue_runner_impl.start_queue_runners(session, coord=coord)

      self.assertEqual("%s:1" % name, inputs.name)
      example_queue_name = "%s/fifo_queue" % name
      worker_file_name_queue_name = "%s/file_name_queue/fifo_queue" % name
      test_util.assert_ops_in_graph({
          "%s/read/TextLineReaderV2" % name: "TextLineReaderV2",
          example_queue_name: "FIFOQueueV2",
          worker_file_name_queue_name: "FIFOQueueV2",
          name: "QueueDequeueUpToV2"
      }, g)

      self.assertAllEqual(session.run(inputs), [b"ABC"])
      self.assertAllEqual(session.run(inputs), [b"DEF"])
      self.assertAllEqual(session.run(inputs), [b"GHK"])
      with self.assertRaises(errors.OutOfRangeError):
        session.run(inputs)

      coord.request_stop()
      coord.join(threads)

  def _get_qr(self, name):
    """Return the QueueRunner registered under `name`, if any."""
    for qr in ops.get_collection(ops.GraphKeys.QUEUE_RUNNERS):
      if qr.name == name:
        return qr

  def _run_queue(self, name, session):
    """Manually run one round of enqueue ops for the named queue runner."""
    qr = self._get_qr(name)
    for op in qr.enqueue_ops:
      session.run(op)

  def test_multiple_workers_with_shared_queue(self):
    """Two graphs on one server share a file-name queue without duplication."""
    gfile.Glob = self._orig_glob
    filenames = self._create_sorted_temp_files([
        "ABC\n", "DEF\n", "GHI\n", "JKL\n", "MNO\n", "PQR\n", "STU\n", "VWX\n",
        "YZ\n"
    ])

    batch_size = 1
    queue_capacity = 5
    name = "my_batch"
    example_queue_name = "%s/fifo_queue" % name
    worker_file_name_queue_name = "%s/file_name_queue/fifo_queue" % name

    server = server_lib.Server.create_local_server()

    with ops.Graph().as_default() as g1, session_lib.Session(
        server.target, graph=g1) as session:
      keys, inputs = _read_keyed_batch_examples_shared_queue(
          filenames,
          batch_size,
          reader=io_ops.TextLineReader,
          randomize_input=False,
          num_epochs=1,
          queue_capacity=queue_capacity,
          name=name)
      self.assertAllEqual((None,), keys.get_shape().as_list())
      self.assertAllEqual((None,), inputs.get_shape().as_list())
      session.run([
          variables.local_variables_initializer(),
          variables.global_variables_initializer()
      ])

      # Run the two queues once manually.
      self._run_queue(worker_file_name_queue_name, session)
      self._run_queue(example_queue_name, session)
      self.assertAllEqual(session.run(inputs), [b"ABC"])

      # Run the worker and the example queue.
      self._run_queue(worker_file_name_queue_name, session)
      self._run_queue(example_queue_name, session)
      self.assertAllEqual(session.run(inputs), [b"DEF"])

    with ops.Graph().as_default() as g2, session_lib.Session(
        server.target, graph=g2) as session:
      keys, inputs = _read_keyed_batch_examples_shared_queue(
          filenames,
          batch_size,
          reader=io_ops.TextLineReader,
          randomize_input=False,
          num_epochs=1,
          queue_capacity=queue_capacity,
          name=name)
      self.assertAllEqual((None,), keys.get_shape().as_list())
      self.assertAllEqual((None,), inputs.get_shape().as_list())

      # Run the worker and the example queue.
      self._run_queue(worker_file_name_queue_name, session)
      self._run_queue(example_queue_name, session)
      # The second worker picks up where the first left off (third file).
      self.assertAllEqual(session.run(inputs), [b"GHI"])

    self.assertTrue(g1 is not g2)

  def test_batch_text_lines(self):
    """Batches of 3 over 5 lines: one full batch then a partial one."""
    gfile.Glob = self._orig_glob
    filename = self._create_temp_file("A\nB\nC\nD\nE\n")

    batch_size = 3
    queue_capacity = 10
    name = "my_batch"

    with ops.Graph().as_default() as g, self.test_session(graph=g) as session:
      inputs = graph_io.read_batch_examples(
          [filename],
          batch_size,
          reader=io_ops.TextLineReader,
          randomize_input=False,
          num_epochs=1,
          queue_capacity=queue_capacity,
          read_batch_size=10,
          name=name)
      self.assertAllEqual((None,), inputs.get_shape().as_list())
      session.run(variables.local_variables_initializer())

      coord = coordinator.Coordinator()
      threads = queue_runner_impl.start_queue_runners(session, coord=coord)

      self.assertAllEqual(session.run(inputs), [b"A", b"B", b"C"])
      self.assertAllEqual(session.run(inputs), [b"D", b"E"])
      with self.assertRaises(errors.OutOfRangeError):
        session.run(inputs)

      coord.request_stop()
      coord.join(threads)

  def test_keyed_read_text_lines(self):
    """Keys are "<filename>:<line number>" (1-based) per record."""
    gfile.Glob = self._orig_glob
    filename = self._create_temp_file("ABC\nDEF\nGHK\n")

    batch_size = 1
    queue_capacity = 5
    name = "my_batch"

    with ops.Graph().as_default() as g, self.test_session(graph=g) as session:
      keys, inputs = graph_io.read_keyed_batch_examples(
          filename,
          batch_size,
          reader=io_ops.TextLineReader,
          randomize_input=False,
          num_epochs=1,
          queue_capacity=queue_capacity,
          name=name)
      self.assertAllEqual((None,), keys.get_shape().as_list())
      self.assertAllEqual((None,), inputs.get_shape().as_list())
      session.run(variables.local_variables_initializer())

      coord = coordinator.Coordinator()
      threads = queue_runner_impl.start_queue_runners(session, coord=coord)

      self.assertAllEqual(
          session.run([keys, inputs]),
          [[filename.encode("utf-8") + b":1"], [b"ABC"]])
      self.assertAllEqual(
          session.run([keys, inputs]),
          [[filename.encode("utf-8") + b":2"], [b"DEF"]])
      self.assertAllEqual(
          session.run([keys, inputs]),
          [[filename.encode("utf-8") + b":3"], [b"GHK"]])
      with self.assertRaises(errors.OutOfRangeError):
        session.run(inputs)

      coord.request_stop()
      coord.join(threads)

  def test_keyed_parse_json(self):
    """parse_fn decodes each JSON Example line into an "age" feature."""
    gfile.Glob = self._orig_glob
    filename = self._create_temp_file(
        '{"features": {"feature": {"age": {"int64_list": {"value": [0]}}}}}\n'
        '{"features": {"feature": {"age": {"int64_list": {"value": [1]}}}}}\n'
        '{"features": {"feature": {"age": {"int64_list": {"value": [2]}}}}}\n')

    batch_size = 1
    queue_capacity = 5
    name = "my_batch"

    with ops.Graph().as_default() as g, self.test_session(graph=g) as session:
      dtypes = {"age": parsing_ops.FixedLenFeature([1], dtypes_lib.int64)}
      parse_fn = lambda example: parsing_ops.parse_single_example(  # pylint: disable=g-long-lambda
          parsing_ops.decode_json_example(example), dtypes)
      keys, inputs = graph_io.read_keyed_batch_examples(
          filename,
          batch_size,
          reader=io_ops.TextLineReader,
          randomize_input=False,
          num_epochs=1,
          queue_capacity=queue_capacity,
          parse_fn=parse_fn,
          name=name)
      self.assertAllEqual((None,), keys.get_shape().as_list())
      self.assertEqual(1, len(inputs))
      self.assertAllEqual((None, 1), inputs["age"].get_shape().as_list())
      session.run(variables.local_variables_initializer())

      coord = coordinator.Coordinator()
      threads = queue_runner_impl.start_queue_runners(session, coord=coord)

      key, age = session.run([keys, inputs["age"]])
      self.assertAllEqual(age, [[0]])
      self.assertAllEqual(key, [filename.encode("utf-8") + b":1"])
      key, age = session.run([keys, inputs["age"]])
      self.assertAllEqual(age, [[1]])
      self.assertAllEqual(key, [filename.encode("utf-8") + b":2"])
      key, age = session.run([keys, inputs["age"]])
      self.assertAllEqual(age, [[2]])
      self.assertAllEqual(key, [filename.encode("utf-8") + b":3"])
      with self.assertRaises(errors.OutOfRangeError):
        session.run(inputs)

      coord.request_stop()
      coord.join(threads)

  def test_keyed_features_filter(self):
    """filter_fn drops records whose "age" feature is >= 2."""
    gfile.Glob = self._orig_glob
    lines = [
        '{"features": {"feature": {"age": {"int64_list": {"value": [2]}}}}}',
        '{"features": {"feature": {"age": {"int64_list": {"value": [0]}}}}}',
        '{"features": {"feature": {"age": {"int64_list": {"value": [1]}}}}}',
        '{"features": {"feature": {"age": {"int64_list": {"value": [0]}}}}}',
        '{"features": {"feature": {"age": {"int64_list": {"value": [3]}}}}}',
        '{"features": {"feature": {"age": {"int64_list": {"value": [5]}}}}}'
    ]
    filename = self._create_temp_file("\n".join(lines))

    batch_size = 2
    queue_capacity = 4
    name = "my_batch"
    features = {"age": parsing_ops.FixedLenFeature([], dtypes_lib.int64)}

    def filter_fn(keys, examples_json):
      """Keep only examples whose age is less than 2."""
      del keys
      serialized = parsing_ops.decode_json_example(examples_json)
      examples = parsing_ops.parse_example(serialized, features)
      return math_ops.less(examples["age"], 2)

    with ops.Graph().as_default() as g, self.test_session(graph=g) as session:
      keys, inputs = graph_io._read_keyed_batch_examples_helper(
          filename,
          batch_size,
          reader=io_ops.TextLineReader,
          randomize_input=False,
          num_epochs=1,
          read_batch_size=batch_size,
          queue_capacity=queue_capacity,
          filter_fn=filter_fn,
          name=name)
      self.assertAllEqual((None,), keys.get_shape().as_list())
      self.assertAllEqual((None,), inputs.get_shape().as_list())
      session.run(variables.local_variables_initializer())

      coord = coordinator.Coordinator()
      threads = queue_runner_impl.start_queue_runners(session, coord=coord)

      # First batch of two filtered examples.
      out_keys, out_vals = session.run((keys, inputs))
      self.assertAllEqual(
          [filename.encode("utf-8") + b":2", filename.encode("utf-8") + b":3"],
          out_keys)
      self.assertAllEqual([lines[1].encode("utf-8"), lines[2].encode("utf-8")],
                          out_vals)

      # Second batch will only have one filtered example as that's the only
      # remaining example that satisfies the filtering criterion.
      out_keys, out_vals = session.run((keys, inputs))
      self.assertAllEqual([filename.encode("utf-8") + b":4"], out_keys)
      self.assertAllEqual([lines[3].encode("utf-8")], out_vals)

      # Exhausted input.
      with self.assertRaises(errors.OutOfRangeError):
        session.run((keys, inputs))

      coord.request_stop()
      coord.join(threads)
# Run the test suite when this file is executed directly.
if __name__ == "__main__":
  test.main()
|
yashrastogi16/python_koans | refs/heads/master | python3/koans/about_triangle_project2.py | 72 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
# You need to finish implementing triangle() in the file 'triangle.py'
from .triangle import *
class AboutTriangleProject2(Koan):
    # The first assignment did not talk about how to handle errors.
    # Let's handle that part now.

    def test_illegal_triangles_throw_exceptions(self):
        """Degenerate or impossible side lengths must raise TriangleError."""
        illegal_sides = (
            (0, 0, 0),   # all sides zero
            (3, 4, -5),  # negative side
            (1, 1, 3),   # violates the triangle inequality
            (2, 5, 2),   # violates the triangle inequality
        )
        for a, b, c in illegal_sides:
            with self.assertRaises(TriangleError):
                triangle(a, b, c)
|
gavin-feng/odoo | refs/heads/8.0 | addons/crm/wizard/crm_phonecall_to_phonecall.py | 337 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
import time
class crm_phonecall2phonecall(osv.osv_memory):
    """Wizard to schedule or log a follow-up phonecall from an existing one."""
    _name = 'crm.phonecall2phonecall'
    _description = 'Phonecall To Phonecall'

    # Wizard fields; defaults are pre-filled from the source phonecall in
    # default_get below.
    _columns = {
        'name' : fields.char('Call summary', required=True, select=1),
        'user_id' : fields.many2one('res.users',"Assign To"),
        'contact_name':fields.char('Contact'),
        'phone':fields.char('Phone'),
        # Category restricted to phonecall categories of the selected team
        # (or team-less categories).
        'categ_id': fields.many2one('crm.case.categ', 'Category', \
                        domain="['|',('section_id','=',False),('section_id','=',section_id),\
                        ('object_id.model', '=', 'crm.phonecall')]"),
        'date': fields.datetime('Date'),
        'section_id':fields.many2one('crm.case.section','Sales Team'),
        'action': fields.selection([('schedule','Schedule a call'), ('log','Log a call')], 'Action', required=True),
        'partner_id' : fields.many2one('res.partner', "Partner"),
        'note':fields.text('Note')
    }

    def action_cancel(self, cr, uid, ids, context=None):
        """
        Closes Phonecall to Phonecall form
        """
        return {'type':'ir.actions.act_window_close'}

    def action_schedule(self, cr, uid, ids, context=None):
        """Create the follow-up call(s) for the active phonecalls and open
        the view of the call created for the first active record."""
        value = {}
        if context is None:
            context = {}
        phonecall = self.pool.get('crm.phonecall')
        phonecall_ids = context and context.get('active_ids') or []
        for this in self.browse(cr, uid, ids, context=context):
            # schedule_another_phonecall returns a dict mapping the source
            # phonecall id to the newly created phonecall id.
            phocall_ids = phonecall.schedule_another_phonecall(cr, uid, phonecall_ids, this.date, this.name, \
                    this.user_id and this.user_id.id or False, \
                    this.section_id and this.section_id.id or False, \
                    this.categ_id and this.categ_id.id or False, \
                    action=this.action, context=context)

        return phonecall.redirect_phonecall_view(cr, uid, phocall_ids[phonecall_ids[0]], context=context)

    def default_get(self, cr, uid, fields, context=None):
        """
        This function gets default values
        """
        res = super(crm_phonecall2phonecall, self).default_get(cr, uid, fields, context=context)
        record_id = context and context.get('active_id', False) or False
        res.update({'action': 'schedule', 'date': time.strftime('%Y-%m-%d %H:%M:%S')})
        if record_id:
            # Copy most defaults from the source phonecall.
            phonecall = self.pool.get('crm.phonecall').browse(cr, uid, record_id, context=context)

            categ_id = False
            data_obj = self.pool.get('ir.model.data')
            try:
                # Default to the 'Outbound' phonecall category if installed.
                res_id = data_obj._get_id(cr, uid, 'crm', 'categ_phone2')
                categ_id = data_obj.browse(cr, uid, res_id, context=context).res_id
            except ValueError:
                pass

            if 'name' in fields:
                res.update({'name': phonecall.name})
            if 'user_id' in fields:
                res.update({'user_id': phonecall.user_id and phonecall.user_id.id or False})
            if 'date' in fields:
                res.update({'date': False})
            if 'section_id' in fields:
                res.update({'section_id': phonecall.section_id and phonecall.section_id.id or False})
            if 'categ_id' in fields:
                res.update({'categ_id': categ_id})
            if 'partner_id' in fields:
                res.update({'partner_id': phonecall.partner_id and phonecall.partner_id.id or False})
        return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
ChronoMonochrome/android_external_chromium_org | refs/heads/cm-11.0 | native_client_sdk/src/build_tools/sdk_tools/sdk_update_common.py | 168 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility functions for sdk_update.py and sdk_update_main.py."""
import errno
import logging
import os
import shutil
import subprocess
import sys
import time
class Error(Exception):
  """Base exception raised for any failure in the sdk_update module."""
def MakeDirs(directory):
  """Create |directory| (including parents) if it does not already exist.

  Safe against the check-then-create race: if another process creates the
  directory between our os.path.exists check and os.makedirs, the EEXIST
  error is swallowed instead of propagating.

  Args:
    directory: Path of the directory to create.
  """
  if not os.path.exists(directory):
    logging.info('Making directory %s' % (directory,))
    try:
      os.makedirs(directory)
    except OSError as e:
      # Only ignore the error if the directory now exists; re-raise real
      # failures (permissions, bad path component, ...).
      if e.errno != errno.EEXIST or not os.path.isdir(directory):
        raise
def RemoveDir(outdir):
  """Removes the given directory

  On Unix systems, this just runs shutil.rmtree, but on Windows, this doesn't
  work when the directory contains junctions (as does our SDK installer).
  Therefore, on Windows, it runs rmdir /S /Q as a shell command.  This always
  does the right thing on Windows. If the directory already didn't exist,
  RemoveDir will return successfully without taking any action.

  Args:
    outdir: The directory to delete

  Raises:
    Error - If this operation fails for any reason.
  """
  max_tries = 5
  last_exception = None
  # Retry with growing sleeps: on Windows, antivirus/indexers can hold
  # transient locks on files inside the tree.
  # (range, not xrange, so the function also runs under Python 3.)
  for num_tries in range(max_tries):
    try:
      shutil.rmtree(outdir)
      return
    except OSError as e:
      if not os.path.exists(outdir):
        # The directory can't be removed because it doesn't exist.
        return
      last_exception = e
      # On Windows this could be an issue with junctions, so try again with
      # rmdir.
      if sys.platform == 'win32':
        try:
          cmd = ['rmdir', '/S', '/Q', outdir]
          process = subprocess.Popen(cmd, stderr=subprocess.PIPE, shell=True)
          _, stderr = process.communicate()
          if process.returncode != 0:
            raise Error('\"%s\" failed with code %d. Output:\n  %s' % (
                ' '.join(cmd), process.returncode, stderr))
          return
        # Ignore failures, we'll just try again.
        # NOTE: Popen/communicate never raises CalledProcessError (only
        # check_call/check_output do); this handler is defensive only.
        except subprocess.CalledProcessError as e:
          # CalledProcessError has no error message, generate one.
          last_exception = Error('\"%s\" failed with code %d.' % (
              ' '.join(e.cmd), e.returncode))
        except Error as e:
          last_exception = e
    # Didn't work, sleep and try again.
    time.sleep(num_tries + 1)
  # Failed.
  raise Error('Unable to remove directory "%s"\n  %s' % (outdir,
                                                         last_exception))
def RenameDir(srcdir, destdir):
  """Renames srcdir to destdir. Removes destdir before doing the
     rename if it already exists.

  Retries a few times on Windows EACCES flakiness (files briefly locked by
  other processes); any other OSError propagates immediately.

  Raises:
    Error - if the rename still fails after all retries.
  """
  max_tries = 5
  num_tries = 0
  # range, not xrange, so the function also runs under Python 3; behaviour
  # is otherwise identical.
  for num_tries in range(max_tries):
    try:
      RemoveDir(destdir)
      shutil.move(srcdir, destdir)
      return
    except OSError as err:
      if err.errno != errno.EACCES:
        raise err
      # If we are here, we didn't exit due to raised exception, so we are
      # handling a Windows flaky access error.  Sleep one second and try
      # again.
      time.sleep(num_tries + 1)
  # end of loop -- could not RenameDir
  raise Error('Could not RenameDir %s => %s after %d tries.\n'
              'Please check that no shells or applications '
              'are accessing files in %s.'
              % (srcdir, destdir, num_tries + 1, destdir))
|
Tejal011089/digitales_erpnext | refs/heads/develop | erpnext/home/doctype/feed/feed.py | 37 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
import frappe.defaults
import frappe.permissions
from frappe.model.document import Document
class Feed(Document):
	"""Activity-feed record; adds no behaviour beyond the base Document."""
	pass
def on_doctype_update():
	"""Ensure tabFeed has a composite (doc_type, doc_name) index."""
	existing = frappe.db.sql("""show index from `tabFeed`
		where Key_name="feed_doctype_docname_index" """)
	if existing:
		return
	# Commit before the DDL so the implicit transaction is not left open.
	frappe.db.commit()
	frappe.db.sql("""alter table `tabFeed`
		add index feed_doctype_docname_index(doc_type, doc_name)""")
def get_permission_query_conditions(user):
	# Build the SQL WHERE fragment restricting Feed rows to documents the
	# user may read: doc_type must be a readable doctype, and for doctypes
	# with user permissions the specific (doc_type, doc_name) pair must be
	# in the allowed set. Returns "" when no filtering is needed.
	if not user: user = frappe.session.user

	if not frappe.permissions.apply_user_permissions("Feed", "read", user):
		# User permissions do not apply to Feed for this user: no filter.
		return ""

	user_permissions = frappe.defaults.get_user_permissions(user)
	can_read = frappe.get_user(user).get_can_read()

	# Doctypes readable without a per-document restriction (quoted for SQL).
	can_read_doctypes = ['"{}"'.format(doctype) for doctype in
		list(set(can_read) - set(user_permissions.keys()))]

	if not can_read_doctypes:
		return ""

	conditions = ["tabFeed.doc_type in ({})".format(", ".join(can_read_doctypes))]

	if user_permissions:
		# Allow specific documents as quoted "DocType|name" pairs matched
		# against concat_ws('|', doc_type, doc_name).
		can_read_docs = []
		for doctype, names in user_permissions.items():
			for n in names:
				can_read_docs.append('"{}|{}"'.format(doctype, n))

		if can_read_docs:
			conditions.append("concat_ws('|', tabFeed.doc_type, tabFeed.doc_name) in ({})".format(
				", ".join(can_read_docs)))

	return "(" + " or ".join(conditions) + ")"
def has_permission(doc, user):
	"""A Feed row is visible iff `user` can read the document it references."""
	target_type = doc.doc_type
	target_name = doc.doc_name
	return frappe.has_permission(target_type, "read", target_name, user=user)
|
ahmed-mahran/hue | refs/heads/master | desktop/core/ext-py/Django-1.6.10/tests/urlpatterns_reverse/urls.py | 40 | from __future__ import absolute_import
from django.conf.urls import patterns, url, include
from .views import empty_view, empty_view_partial, empty_view_wrapped, absolute_kwargs_view
# URLconf fixture for Django's reverse()/resolve() regression tests.
# Each pattern deliberately exercises one regex feature (optional groups,
# quantifiers, backreferences, character classes, escapes, inline flags,
# includes, ...). Do not "fix" the regexes: the tests depend on them as-is.
other_patterns = patterns('',
    url(r'non_path_include/$', empty_view, name='non_path_include'),
    url(r'nested_path/$', 'urlpatterns_reverse.views.nested_view'),
)

urlpatterns = patterns('',
    url(r'^places/(\d+)/$', empty_view, name='places'),
    url(r'^places?/$', empty_view, name="places?"),
    url(r'^places+/$', empty_view, name="places+"),
    url(r'^places*/$', empty_view, name="places*"),
    url(r'^(?:places/)?$', empty_view, name="places2?"),
    url(r'^(?:places/)+$', empty_view, name="places2+"),
    url(r'^(?:places/)*$', empty_view, name="places2*"),
    url(r'^places/(\d+|[a-z_]+)/', empty_view, name="places3"),
    url(r'^places/(?P<id>\d+)/$', empty_view, name="places4"),
    url(r'^people/(?P<name>\w+)/$', empty_view, name="people"),
    url(r'^people/(?:name/)', empty_view, name="people2"),
    url(r'^people/(?:name/(\w+)/)?', empty_view, name="people2a"),
    url(r'^people/(?P<name>\w+)-(?P=name)/$', empty_view, name="people_backref"),
    url(r'^optional/(?P<name>.*)/(?:.+/)?', empty_view, name="optional"),
    url(r'^hardcoded/$', empty_view, name="hardcoded"),
    url(r'^hardcoded/doc\.pdf$', empty_view, name="hardcoded2"),
    url(r'^people/(?P<state>\w\w)/(?P<name>\w+)/$', empty_view, name="people3"),
    url(r'^people/(?P<state>\w\w)/(?P<name>\d)/$', empty_view, name="people4"),
    url(r'^people/((?P<state>\w\w)/test)?/(\w+)/$', empty_view, name="people6"),
    url(r'^character_set/[abcdef0-9]/$', empty_view, name="range"),
    url(r'^character_set/[\w]/$', empty_view, name="range2"),
    url(r'^price/\$(\d+)/$', empty_view, name="price"),
    url(r'^price/[$](\d+)/$', empty_view, name="price2"),
    url(r'^price/[\$](\d+)/$', empty_view, name="price3"),
    url(r'^product/(?P<product>\w+)\+\(\$(?P<price>\d+(\.\d+)?)\)/$',
            empty_view, name="product"),
    url(r'^headlines/(?P<year>\d+)\.(?P<month>\d+)\.(?P<day>\d+)/$', empty_view,
            name="headlines"),
    url(r'^windows_path/(?P<drive_name>[A-Z]):\\(?P<path>.+)/$', empty_view,
            name="windows"),
    url(r'^special_chars/(?P<chars>.+)/$', empty_view, name="special"),
    url(r'^(?P<name>.+)/\d+/$', empty_view, name="mixed"),
    url(r'^repeats/a{1,2}/$', empty_view, name="repeats"),
    url(r'^repeats/a{2,4}/$', empty_view, name="repeats2"),
    url(r'^repeats/a{2}/$', empty_view, name="repeats3"),
    url(r'^(?i)CaseInsensitive/(\w+)', empty_view, name="insensitive"),
    url(r'^test/1/?', empty_view, name="test"),
    url(r'^(?i)test/2/?$', empty_view, name="test2"),
    url(r'^outer/(?P<outer>\d+)/',
            include('urlpatterns_reverse.included_urls')),
    url('', include('urlpatterns_reverse.extra_urls')),

    # This is non-reversible, but we shouldn't blow up when parsing it.
    url(r'^(?:foo|bar)(\w+)/$', empty_view, name="disjunction"),

    # Partials should be fine.
    url(r'^partial/', empty_view_partial, name="partial"),
    url(r'^partial_wrapped/', empty_view_wrapped, name="partial_wrapped"),

    # Regression views for #9038. See tests for more details
    url(r'arg_view/$', 'kwargs_view'),
    url(r'arg_view/(?P<arg1>\d+)/$', 'kwargs_view'),
    url(r'absolute_arg_view/(?P<arg1>\d+)/$', absolute_kwargs_view),
    url(r'absolute_arg_view/$', absolute_kwargs_view),

    # Tests for #13154. Mixed syntax to test both ways of defining URLs.
    url(r'defaults_view1/(?P<arg1>\d+)/', 'defaults_view', {'arg2': 1}, name='defaults'),
    (r'defaults_view2/(?P<arg1>\d+)/', 'defaults_view', {'arg2': 2}, 'defaults'),

    url('^includes/', include(other_patterns)),

    # Security tests
    url('(.+)/security/$', empty_view, name='security'),
)
|
ncnll/CameraConfig | refs/heads/master | captureUsbImageAndUpload.py | 1 | from threading import Timer
import pygame
import pygame.camera #https://www.pygame.org/docs/ref/camera.html
import pygame.image
from pygame.locals import *
from poster.encode import multipart_encode
from poster.streaminghttp import register_openers
import time
import threading
import os
import json
import sqlite3
import picamera
from datetime import datetime
#from subprocess import Popen, PIPE
#(stdout, stderr) = Popen(["cat","foo.txt"], stdout=PIPE).communicate()
import urllib2
pygame.init()
pygame.camera.init()
#Initiate the camera
###Get current milliseconds
# PEP 8 (E731): a named def instead of a lambda assigned to a name; same
# behaviour, but with a real name in tracebacks and a docstring.
def current_milli_time():
    """Return the current wall-clock time as an integer number of milliseconds."""
    return int(round(time.time() * 1000))
########Get cpu serial number
def getCpuSerial():
    """Read the Raspberry Pi CPU serial number from /proc/cpuinfo.

    Returns a 16-character string: the hardware serial when a "Serial" line
    is found, all zeros when the file is readable but has no such line, or
    an error marker when the file cannot be read.
    """
    # Bug fix: the original assigned this default to a misspelled name
    # ('cpuseiral'), so on a machine whose cpuinfo has no Serial line the
    # function raised NameError instead of returning the zero default.
    cpuserial = "0000000000000000"
    try:
        cpufile = open('/proc/cpuinfo', 'r')
        try:
            for line in cpufile:
                if line[0:6] == 'Serial':
                    cpuserial = line[10:26]
        finally:
            # Close even if iteration fails (original could leak the handle).
            cpufile.close()
    except IOError:
        # File missing/unreadable (e.g. not running on Linux).
        cpuserial = "Error00000000000"
    return cpuserial
#Set cpu serial to global
# Module-wide id of the on-board (CSI) camera; also sent as deviceCpuId with
# every upload request.
cpu_serial = getCpuSerial()
#Current cameraConfigDict
# Most recently loaded per-camera config row; rebound by inspectCameraConfig()
# before each capture and read by the isLocal/isRemoteTakePhotoOn helpers.
cameraConfigDict = None #getCameraConfigInfo(cpuSerial)
#Change row to dict
def dict_factory(cursor, row):
    """Convert one sqlite row tuple to {column_name: value} using the
    cursor's description metadata."""
    return {desc[0]: row[position]
            for position, desc in enumerate(cursor.description)}
def getSqliteDBConnection():
    """Open the camera upload-config sqlite database.

    Returns a sqlite3 connection, or None when the database file has never
    been created.
    """
    directory_path = '/home/pi/database/uploadconfig/'
    uploadConfig_file_path=directory_path+'uploadConfig.db'
    if os.path.exists(uploadConfig_file_path):
        return sqlite3.connect(uploadConfig_file_path)#/home/pi/database/test.db
    #if db is not exist
    return None
#Get config table info
def getCameraConfigInfo(serialNumber):
    """Return the CAMERACONFIG row for `serialNumber` as a dict, or None
    when the CAMERACONFIG table does not exist yet."""
    # NOTE(review): getSqliteDBConnection() can return None when the DB file
    # is missing, which would make the execute() below raise AttributeError;
    # likewise row1 can be None when the serial is unknown, making the
    # `print row1[0]` raise. Confirm callers guarantee both exist.
    connPre = getSqliteDBConnection()
    cPre=connPre.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='CAMERACONFIG'")
    row1 = cPre.fetchone()
    if row1 is None :
        return None
    else :
        print row1[0]
        cPre=connPre.execute("SELECT * FROM CAMERACONFIG where serialNumber='%s'" % (serialNumber,))
        row1 = cPre.fetchone()
        print row1[0]
        # Convert the positional row into a column-name -> value dict.
        rowDict = dict_factory(cPre, row1)
        return rowDict
#Get config param by serialNumber then send to server
def sendImageToLocalAndRemoteServer(serialNumber, uploadImageName):
    """Upload the captured image for camera `serialNumber` to the local
    server and, when the remote interval has elapsed, to the remote server.
    The image file is deleted afterwards regardless of upload success."""
    #Get config param
    configDict = getCameraConfigInfo(serialNumber)
    print configDict
    if configDict is None :
        return None
    #Send to local server
    # Installs poster's streaming HTTP handlers into urllib2 so the file can
    # be streamed instead of loaded into memory.
    register_openers()
    with open(uploadImageName, 'r') as f:
        datagen, headers = multipart_encode({"file":f,"cameraSerialId":configDict["serialNumber"], "deviceCpuId":cpu_serial,"index":"0", "viewIndex":"0", "cameraBoardUploadTime":str(current_milli_time())})
        #url should load from server, or changed by hand
        headers["User-agent"] = "Mozilla/5.0"
        print headers
        request = urllib2.Request("http://192.168.1.105:3000/upload/single", datagen, headers)
        try:
            response = urllib2.urlopen(request,timeout=30)
            print response.read()
            print "local is ok"
        except Exception, e:
            # Best-effort: a failed local upload is only logged.
            print e
        #Send to remote server
        is_remote_on = isRemoteTakePhotoOn()
        if is_remote_on:
            # NOTE(review): the first upload has already consumed file handle
            # f and it is reused here without seeking back to 0 -- confirm
            # the remote upload is not sent empty.
            datagen, headers = multipart_encode({"file":f,"cameraSerialId":configDict["serialNumber"],"deviceCpuId":cpu_serial,"index":"0", "viewIndex":"0", "cameraBoardUploadTime":str(current_milli_time())})
            print f
            #url should load from server, or changed by hand
            headers["User-agent"] = "Mozilla/5.0"
            print headers
            request = urllib2.Request("http://www.ncnll.com:3000/upload/single", datagen, headers)
            try:
                response = urllib2.urlopen(request,timeout=30)
                print response.read()
                print "remote is ok"
            except Exception, e:
                print e
    # The local copy is always discarded once the uploads were attempted.
    os.remove(uploadImageName)
#Check is take photo time is arrived
#Check is take photo is closed
def isLocalTakePhotoOn():
    """True when at least localShootInterval seconds have passed since the
    last local upload. Side effect: when True, latestLocalUploadTime is
    stamped to "now" in the CAMERACONFIG table."""
    # NOTE(review): relies on the module-global cameraConfigDict having been
    # set by inspectCameraConfig() before this is called.
    latestUploadTime=datetime.strptime(cameraConfigDict["latestLocalUploadTime"], "%Y-%m-%dT%H:%M:%S.%fZ")
    currentTime = datetime.now()
    totalDiff = (currentTime-latestUploadTime).total_seconds()
    uploadShootInterval = cameraConfigDict["localShootInterval"]
    print cameraConfigDict
    print '------------------------'
    print uploadShootInterval
    print totalDiff
    if uploadShootInterval<=totalDiff :
        # Interval elapsed: persist "now" as the new latest local upload time.
        connPre = getSqliteDBConnection()
        execsql = "UPDATE CAMERACONFIG SET latestLocalUploadTime='%s' where serialNumber='%s' " % (str(datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ")), str(cameraConfigDict["serialNumber"]),)
        connPre.execute(execsql)
        connPre.commit()
        return True
    else :
        return False
# (stale commented-out UPDATE template for all CAMERACONFIG columns removed)
#Check is take photo time is arrived
#Check is take photo is closed
def isRemoteTakePhotoOn():
    """True when at least uploadShootInterval seconds have passed since the
    last remote upload. Side effect: when True, latestUploadTime is stamped
    to "now" in the CAMERACONFIG table.

    NOTE(review): near-duplicate of isLocalTakePhotoOn (different columns
    only); a shared helper would remove the duplication."""
    latestUploadTime=datetime.strptime(cameraConfigDict["latestUploadTime"], "%Y-%m-%dT%H:%M:%S.%fZ")
    currentTime = datetime.now()
    totalDiff = (currentTime-latestUploadTime).total_seconds()
    uploadShootInterval = cameraConfigDict["uploadShootInterval"]
    if uploadShootInterval<=totalDiff :
        # Interval elapsed: persist "now" as the new latest remote upload time.
        connPre = getSqliteDBConnection()
        execsql = "UPDATE CAMERACONFIG SET latestUploadTime='%s' where serialNumber='%s' " % (str(datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ")), str(cameraConfigDict["serialNumber"]),)
        connPre.execute(execsql)
        connPre.commit()
        return True
    else :
        return False
# (stale commented-out UPDATE template for all CAMERACONFIG columns removed)
#capture onboard camera image and send to local and remote server
def captureCSIImageAndSendOut():
    """If the local shoot interval has elapsed, capture one 1920x1080 frame
    from the on-board (CSI) Pi camera and upload it."""
    isOn = isLocalTakePhotoOn()
    if isOn:
        try:
            camera = picamera.PiCamera()
            camera.resolution = (1920, 1080)
            # Camera warm-up time before capturing.
            time.sleep(2)
            # Image name: epoch milliseconds, e.g. "1510000000000.jpg".
            fileName = str(current_milli_time())+".jpg"
            camera.capture(fileName)
            # Close promptly so the next Timer tick can reopen the camera.
            camera.close()
            print "SHOOOOOTTTTTing CSI image"
            sendImageToLocalAndRemoteServer(cpu_serial, fileName)
        except Exception,e:
            # Best-effort capture loop: never let one failure kill the timer.
            print str(e)
#capture usb camera image and send to local and remote server
def captureUsbImageAndSendOut(usbSerial, usbIndex):
    """If the local shoot interval has elapsed, capture one 1920x1080 frame
    from the USB camera at /dev/video<usbIndex> (pygame) and upload it."""
    isOn = isLocalTakePhotoOn()
    if isOn:
        print "SHOOOOOTTTTTing USB image"
        # Image name: epoch milliseconds, e.g. "1510000000000.jpg".
        imageName = str(current_milli_time())+".jpg"
        #Capture image
        try:
            devicePath = "/dev/video"+usbIndex
            cam = pygame.camera.Camera(devicePath,(1920, 1080))
            cam.start()
            image=cam.get_image()
            pygame.image.save(image, imageName)
            cam.stop()
            sendImageToLocalAndRemoteServer(usbSerial, imageName)
        except Exception,e:
            # Best-effort capture loop: never let one failure kill the timer.
            print str(e)
    else:
        print "Usb send time is not arrived!--"+usbSerial
#Query camera config table to see whether should take a photo
def inspectCameraConfig():
    """One polling pass: reload config and shoot for the on-board CSI camera,
    then for every USB camera discovered under /home/pi/v4l/by-id/."""
    global cameraConfigDict
    #CSI camera: its serial id is the Pi's CPU serial.
    cameraConfigDict = getCameraConfigInfo(cpu_serial)
    captureCSIImageAndSendOut()
    # USB cameras: directory entries encode both the device serial and its
    # /dev/video index (name slicing below -- see udev symlink naming).
    stdout = os.listdir("/home/pi/v4l/by-id/")
    for line in stdout:
        # NOTE(review): serial assumed at chars 4..22 and the video index as
        # the last character of the symlink name -- confirm udev naming.
        usbserial=line[4:22]
        usbIndex=line[-1:]
        #check is device exists
        isVideoDeviceExists = os.path.exists("/dev/video"+usbIndex)
        if isVideoDeviceExists:
            print 'doing isVideoDeviceExists'
            # Rebind the global config to this USB camera before shooting.
            cameraConfigDict = getCameraConfigInfo(usbserial)
            captureUsbImageAndSendOut(usbserial, usbIndex)
#Iterate the config table periodically
###Interval function to update config
def inspectCameraConfigIntervalTimer():
    """Run one config/capture pass, then reschedule itself."""
    inspectCameraConfig()
    # Re-run every 5 seconds. (The original comment said 300 seconds, but the
    # Timer interval actually used below is 5.)
    Timer(5, inspectCameraConfigIntervalTimer).start()
#Application Start
inspectCameraConfigIntervalTimer()
ingokegel/intellij-community | refs/heads/master | python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_nonzero.py | 326 | """Fixer for __nonzero__ -> __bool__ methods."""
# Author: Collin Winter
# Local imports
from .. import fixer_base
from ..fixer_util import Name, syms
class FixNonzero(fixer_base.BaseFix):
    # 2to3 fixer: inside any class definition, rename a __nonzero__ method
    # to __bool__ (the Python 3 truth-testing hook).
    BM_compatible = True
    PATTERN = """
    classdef< 'class' any+ ':'
              suite< any*
                     funcdef< 'def' name='__nonzero__'
                              parameters< '(' NAME ')' > any+ >
                     any* > >
    """

    def transform(self, node, results):
        # 'name' captures the Name leaf holding '__nonzero__'; swap it for
        # '__bool__', preserving the original whitespace prefix.
        name = results["name"]
        new = Name(u"__bool__", prefix=name.prefix)
        name.replace(new)
|
willdavidc/piel | refs/heads/master | catkin_ws/src/piel/scripts/venv/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.py | 412 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2016 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from __future__ import unicode_literals
import base64
import codecs
import datetime
import distutils.util
from email import message_from_file
import hashlib
import imp
import json
import logging
import os
import posixpath
import re
import shutil
import sys
import tempfile
import zipfile
from . import __version__, DistlibException
from .compat import sysconfig, ZipFile, fsdecode, text_type, filter
from .database import InstalledDistribution
from .metadata import Metadata, METADATA_FILENAME
from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache,
cached_property, get_cache_base, read_exports, tempdir)
from .version import NormalizedVersion, UnsupportedVersionError
logger = logging.getLogger(__name__)
cache = None # created when needed
# Implementation tag prefix for wheel compatibility tags (PEP 425-style):
# pp=PyPy, jy=Jython, ip=IronPython, cp=CPython.
if hasattr(sys, 'pypy_version_info'):
    IMP_PREFIX = 'pp'
elif sys.platform.startswith('java'):
    IMP_PREFIX = 'jy'
elif sys.platform == 'cli':
    IMP_PREFIX = 'ip'
else:
    IMP_PREFIX = 'cp'

# Version suffix, e.g. '27' or '34'; fall back to sys.version_info when the
# sysconfig variable is unavailable.
VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')
if not VER_SUFFIX:   # pragma: no cover
    VER_SUFFIX = '%s%s' % sys.version_info[:2]
PYVER = 'py' + VER_SUFFIX
IMPVER = IMP_PREFIX + VER_SUFFIX

# Platform tag, normalised to tag syntax (no '-' or '.').
ARCH = distutils.util.get_platform().replace('-', '_').replace('.', '_')

ABI = sysconfig.get_config_var('SOABI')
if ABI and ABI.startswith('cpython-'):
    ABI = ABI.replace('cpython-', 'cp')
else:
    def _derive_abi():
        # Reconstruct the CPython ABI tag from build flags (debug, pymalloc,
        # wide-unicode) when SOABI is not exposed by this interpreter.
        parts = ['cp', VER_SUFFIX]
        if sysconfig.get_config_var('Py_DEBUG'):
            parts.append('d')
        if sysconfig.get_config_var('WITH_PYMALLOC'):
            parts.append('m')
        if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4:
            parts.append('u')
        return ''.join(parts)
    ABI = _derive_abi()
    del _derive_abi
# Full wheel filename: name-version[-build]-pytag-abitag-archtag.whl
# (each tag may be a '.'-separated list of alternatives).
FILENAME_RE = re.compile(r'''
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?
-(?P<py>\w+\d+(\.\w+\d+)*)
-(?P<bi>\w+)
-(?P<ar>\w+(\.\w+)*)
\.whl$
''', re.IGNORECASE | re.VERBOSE)

# Short form accepted by Wheel(): just name-version[-build].
NAME_VERSION_RE = re.compile(r'''
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?$
''', re.IGNORECASE | re.VERBOSE)

# Script shebang rewriting: match an existing shebang line, optionally with
# interpreter arguments, and the canonical replacement markers.
SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')
SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')
SHEBANG_PYTHON = b'#!python'
SHEBANG_PYTHONW = b'#!pythonw'

# Archive member names are always '/'-separated, whatever os.sep is.
if os.sep == '/':
    to_posix = lambda o: o
else:
    to_posix = lambda o: o.replace(os.sep, '/')
class Mounter(object):
    """Import hook that loads C extension modules shipped inside mounted
    wheels: maps fully-qualified module names to extracted shared-object
    paths and serves them via the (old-style) finder/loader protocol."""

    def __init__(self):
        # wheel path -> its extensions; module name -> extension file path.
        self.impure_wheels = {}
        self.libs = {}

    def add(self, pathname, extensions):
        # `extensions` is passed straight to dict.update, so it is a mapping
        # or an iterable of (name, path) pairs.
        self.impure_wheels[pathname] = extensions
        self.libs.update(extensions)

    def remove(self, pathname):
        extensions = self.impure_wheels.pop(pathname)
        # NOTE(review): iterating `extensions` with (k, v) unpacking implies
        # it is an iterable of pairs here, not a plain dict -- confirm what
        # callers pass to add().
        for k, v in extensions:
            if k in self.libs:
                del self.libs[k]

    def find_module(self, fullname, path=None):
        # PEP 302 finder: claim only modules we registered.
        if fullname in self.libs:
            result = self
        else:
            result = None
        return result

    def load_module(self, fullname):
        # PEP 302 loader: reuse sys.modules if already loaded, otherwise load
        # the registered shared object via imp.load_dynamic (legacy API).
        if fullname in sys.modules:
            result = sys.modules[fullname]
        else:
            if fullname not in self.libs:
                raise ImportError('unable to find extension for %s' % fullname)
            result = imp.load_dynamic(fullname, self.libs[fullname])
            result.__loader__ = self
            parts = fullname.rsplit('.', 1)
            if len(parts) > 1:
                result.__package__ = parts[0]
        return result

# Single process-wide hook instance shared by mount/unmount operations.
_hook = Mounter()
class Wheel(object):
"""
Class to build and install from Wheel files (PEP 427).
"""
wheel_version = (1, 1)
hash_kind = 'sha256'
    def __init__(self, filename=None, sign=False, verify=False):
        """
        Initialise an instance using a (valid) filename.

        `filename` may be None (placeholder wheel "dummy-0.1"), a
        "name-version[-build]" string (compatibility tags keep pure-Python
        defaults), or a full wheel filename, optionally with a directory part.
        """
        self.sign = sign
        self.should_verify = verify
        self.buildver = ''
        self.pyver = [PYVER]
        self.abi = ['none']
        self.arch = ['any']
        self.dirname = os.getcwd()
        if filename is None:
            self.name = 'dummy'
            self.version = '0.1'
            self._filename = self.filename
        else:
            m = NAME_VERSION_RE.match(filename)
            if m:
                # Short "name-version[-build]" form.
                info = m.groupdict('')
                self.name = info['nm']
                # Reinstate the local version separator
                self.version = info['vn'].replace('_', '-')
                self.buildver = info['bn']
                self._filename = self.filename
            else:
                # Full wheel filename: parse all compatibility tags from it.
                dirname, filename = os.path.split(filename)
                m = FILENAME_RE.match(filename)
                if not m:
                    raise DistlibException('Invalid name or '
                                           'filename: %r' % filename)
                if dirname:
                    self.dirname = os.path.abspath(dirname)
                self._filename = filename
                info = m.groupdict('')
                self.name = info['nm']
                self.version = info['vn']
                self.buildver = info['bn']
                self.pyver = info['py'].split('.')
                self.abi = info['bi'].split('.')
                self.arch = info['ar'].split('.')
    @property
    def filename(self):
        """
        Build and return a filename from the various components
        (name-version[-build]-pytag-abitag-archtag.whl, with multi-valued
        tags joined by '.').
        """
        if self.buildver:
            buildver = '-' + self.buildver
        else:
            buildver = ''
        pyver = '.'.join(self.pyver)
        abi = '.'.join(self.abi)
        arch = '.'.join(self.arch)
        # replace - with _ as a local version separator
        version = self.version.replace('-', '_')
        return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver,
                                         pyver, abi, arch)
@property
def exists(self):
path = os.path.join(self.dirname, self.filename)
return os.path.isfile(path)
@property
def tags(self):
for pyver in self.pyver:
for abi in self.abi:
for arch in self.arch:
yield pyver, abi, arch
    @cached_property
    def metadata(self):
        """Read the distribution metadata out of the wheel archive and return
        it as a Metadata instance. Raises ValueError when the expected
        metadata member is missing from the archive."""
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        wrapper = codecs.getreader('utf-8')
        with ZipFile(pathname, 'r') as zf:
            wheel_metadata = self.get_wheel_metadata(zf)
            wv = wheel_metadata['Wheel-Version'].split('.', 1)
            file_version = tuple([int(i) for i in wv])
            # Wheel-Version < 1.1 stored metadata in 'METADATA'; later
            # versions use distlib's metadata filename.
            if file_version < (1, 1):
                fn = 'METADATA'
            else:
                fn = METADATA_FILENAME
            try:
                metadata_filename = posixpath.join(info_dir, fn)
                with zf.open(metadata_filename) as bf:
                    wf = wrapper(bf)
                    result = Metadata(fileobj=wf)
            except KeyError:
                # zf.open raises KeyError for a missing archive member.
                raise ValueError('Invalid wheel, because %s is '
                                 'missing' % fn)
        return result
def get_wheel_metadata(self, zf):
name_ver = '%s-%s' % (self.name, self.version)
info_dir = '%s.dist-info' % name_ver
metadata_filename = posixpath.join(info_dir, 'WHEEL')
with zf.open(metadata_filename) as bf:
wf = codecs.getreader('utf-8')(bf)
message = message_from_file(wf)
return dict(message)
    @cached_property
    def info(self):
        """The wheel's WHEEL metadata (a dict), read once from the archive."""
        pathname = os.path.join(self.dirname, self.filename)
        with ZipFile(pathname, 'r') as zf:
            result = self.get_wheel_metadata(zf)
        return result
    def process_shebang(self, data):
        """Normalise the shebang of a script's bytes for wheel storage.

        An existing shebang is replaced by '#!python' (or '#!pythonw' when
        the original interpreter name contains 'pythonw'), keeping any
        interpreter arguments. When there is no shebang, '#!python' is
        prepended using the file's own line-ending style.
        """
        m = SHEBANG_RE.match(data)
        if m:
            end = m.end()
            shebang, data_after_shebang = data[:end], data[end:]
            # Preserve any arguments after the interpreter
            if b'pythonw' in shebang.lower():
                shebang_python = SHEBANG_PYTHONW
            else:
                shebang_python = SHEBANG_PYTHON
            m = SHEBANG_DETAIL_RE.match(shebang)
            if m:
                args = b' ' + m.groups()[-1]
            else:
                args = b''
            shebang = shebang_python + args
            data = shebang + data_after_shebang
        else:
            # No shebang: detect the line terminator (\n, \r\n or \r) from
            # the first line break in the data and prepend '#!python'.
            cr = data.find(b'\r')
            lf = data.find(b'\n')
            if cr < 0 or cr > lf:
                term = b'\n'
            else:
                if data[cr:cr + 2] == b'\r\n':
                    term = b'\r\n'
                else:
                    term = b'\r'
            data = SHEBANG_PYTHON + term + data
        return data
    def get_hash(self, data, hash_kind=None):
        """Return (hash_kind, digest) for `data`, where the digest is
        urlsafe-base64 without trailing '=' padding (the RECORD format).
        `hash_kind` defaults to self.hash_kind; an unknown algorithm raises
        DistlibException."""
        if hash_kind is None:
            hash_kind = self.hash_kind
        try:
            hasher = getattr(hashlib, hash_kind)
        except AttributeError:
            raise DistlibException('Unsupported hash algorithm: %r' % hash_kind)
        result = hasher(data).digest()
        # PEP 427 RECORD entries strip the base64 '=' padding.
        result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii')
        return hash_kind, result
def write_record(self, records, record_path, base):
records = list(records) # make a copy for sorting
p = to_posix(os.path.relpath(record_path, base))
records.append((p, '', ''))
records.sort()
with CSVWriter(record_path) as writer:
for row in records:
writer.writerow(row)
def write_records(self, info, libdir, archive_paths):
records = []
distinfo, info_dir = info
hasher = getattr(hashlib, self.hash_kind)
for ap, p in archive_paths:
with open(p, 'rb') as f:
data = f.read()
digest = '%s=%s' % self.get_hash(data)
size = os.path.getsize(p)
records.append((ap, digest, size))
p = os.path.join(distinfo, 'RECORD')
self.write_record(records, p, libdir)
ap = to_posix(os.path.join(info_dir, 'RECORD'))
archive_paths.append((ap, p))
def build_zip(self, pathname, archive_paths):
with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf:
for ap, p in archive_paths:
logger.debug('Wrote %s to %s in wheel', p, ap)
zf.write(p, ap)
    def build(self, paths, tags=None, wheel_version=None):
        """
        Build a wheel from files in specified paths, and use any specified tags
        when determining the name of the wheel.

        `paths` maps location keys ('purelib' or 'platlib', plus optional
        'data'/'headers'/'scripts') to directories; exactly one of
        purelib/platlib is expected. Returns the path of the written wheel.
        """
        if tags is None:
            tags = {}

        # Pure wheels come from 'purelib' and get the generic tags;
        # platlib wheels get this interpreter's impl/ABI/arch defaults.
        libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0]
        if libkey == 'platlib':
            is_pure = 'false'
            default_pyver = [IMPVER]
            default_abi = [ABI]
            default_arch = [ARCH]
        else:
            is_pure = 'true'
            default_pyver = [PYVER]
            default_abi = ['none']
            default_arch = ['any']
        self.pyver = tags.get('pyver', default_pyver)
        self.abi = tags.get('abi', default_abi)
        self.arch = tags.get('arch', default_arch)

        libdir = paths[libkey]

        name_ver = '%s-%s' % (self.name, self.version)
        data_dir = '%s.data' % name_ver
        info_dir = '%s.dist-info' % name_ver

        archive_paths = []

        # First, stuff which is not in site-packages: archived under
        # <name>-<ver>.data/<key>/..., with script shebangs normalised.
        for key in ('data', 'headers', 'scripts'):
            if key not in paths:
                continue
            path = paths[key]
            if os.path.isdir(path):
                for root, dirs, files in os.walk(path):
                    for fn in files:
                        p = fsdecode(os.path.join(root, fn))
                        rp = os.path.relpath(p, path)
                        ap = to_posix(os.path.join(data_dir, key, rp))
                        archive_paths.append((ap, p))
                        if key == 'scripts' and not p.endswith('.exe'):
                            # Rewrite shebangs in-place (not for Windows
                            # launcher executables).
                            with open(p, 'rb') as f:
                                data = f.read()
                            data = self.process_shebang(data)
                            with open(p, 'wb') as f:
                                f.write(data)

        # Now, stuff which is in site-packages, other than the
        # distinfo stuff.
        path = libdir
        distinfo = None
        for root, dirs, files in os.walk(path):
            if root == path:
                # At the top level only, save distinfo for later
                # and skip it for now
                for i, dn in enumerate(dirs):
                    dn = fsdecode(dn)
                    if dn.endswith('.dist-info'):
                        distinfo = os.path.join(root, dn)
                        del dirs[i]
                        break
                assert distinfo, '.dist-info directory expected, not found'

            for fn in files:
                # comment out next suite to leave .pyc files in
                if fsdecode(fn).endswith(('.pyc', '.pyo')):
                    continue
                p = os.path.join(root, fn)
                rp = to_posix(os.path.relpath(p, path))
                archive_paths.append((rp, p))

        # Now distinfo. Assumed to be flat, i.e. os.listdir is enough.
        # (Generated members RECORD/INSTALLER/SHARED/WHEEL are excluded and
        # written fresh below.)
        files = os.listdir(distinfo)
        for fn in files:
            if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):
                p = fsdecode(os.path.join(distinfo, fn))
                ap = to_posix(os.path.join(info_dir, fn))
                archive_paths.append((ap, p))

        # Write the WHEEL metadata file with version, generator, purity and
        # one Tag line per compatibility triple.
        wheel_metadata = [
            'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),
            'Generator: distlib %s' % __version__,
            'Root-Is-Purelib: %s' % is_pure,
        ]
        for pyver, abi, arch in self.tags:
            wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))
        p = os.path.join(distinfo, 'WHEEL')
        with open(p, 'w') as f:
            f.write('\n'.join(wheel_metadata))
        ap = to_posix(os.path.join(info_dir, 'WHEEL'))
        archive_paths.append((ap, p))

        # Now, at last, RECORD.
        # Paths in here are archive paths - nothing else makes sense.
        self.write_records((distinfo, info_dir), libdir, archive_paths)
        # Now, ready to build the zip file
        pathname = os.path.join(self.dirname, self.filename)
        self.build_zip(pathname, archive_paths)
        return pathname
def install(self, paths, maker, **kwargs):
"""
Install a wheel to the specified paths. If kwarg ``warner`` is
specified, it should be a callable, which will be called with two
tuples indicating the wheel version of this software and the wheel
version in the file, if there is a discrepancy in the versions.
This can be used to issue any warnings to raise any exceptions.
If kwarg ``lib_only`` is True, only the purelib/platlib files are
installed, and the headers, scripts, data and dist-info metadata are
not written.
The return value is a :class:`InstalledDistribution` instance unless
``options.lib_only`` is True, in which case the return value is ``None``.
"""
dry_run = maker.dry_run
warner = kwargs.get('warner')
lib_only = kwargs.get('lib_only', False)
pathname = os.path.join(self.dirname, self.filename)
name_ver = '%s-%s' % (self.name, self.version)
data_dir = '%s.data' % name_ver
info_dir = '%s.dist-info' % name_ver
metadata_name = posixpath.join(info_dir, METADATA_FILENAME)
wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
record_name = posixpath.join(info_dir, 'RECORD')
wrapper = codecs.getreader('utf-8')
with ZipFile(pathname, 'r') as zf:
with zf.open(wheel_metadata_name) as bwf:
wf = wrapper(bwf)
message = message_from_file(wf)
wv = message['Wheel-Version'].split('.', 1)
file_version = tuple([int(i) for i in wv])
if (file_version != self.wheel_version) and warner:
warner(self.wheel_version, file_version)
if message['Root-Is-Purelib'] == 'true':
libdir = paths['purelib']
else:
libdir = paths['platlib']
records = {}
with zf.open(record_name) as bf:
with CSVReader(stream=bf) as reader:
for row in reader:
p = row[0]
records[p] = row
data_pfx = posixpath.join(data_dir, '')
info_pfx = posixpath.join(info_dir, '')
script_pfx = posixpath.join(data_dir, 'scripts', '')
# make a new instance rather than a copy of maker's,
# as we mutate it
fileop = FileOperator(dry_run=dry_run)
fileop.record = True # so we can rollback if needed
bc = not sys.dont_write_bytecode # Double negatives. Lovely!
outfiles = [] # for RECORD writing
# for script copying/shebang processing
workdir = tempfile.mkdtemp()
# set target dir later
# we default add_launchers to False, as the
# Python Launcher should be used instead
maker.source_dir = workdir
maker.target_dir = None
try:
for zinfo in zf.infolist():
arcname = zinfo.filename
if isinstance(arcname, text_type):
u_arcname = arcname
else:
u_arcname = arcname.decode('utf-8')
# The signature file won't be in RECORD,
# and we don't currently don't do anything with it
if u_arcname.endswith('/RECORD.jws'):
continue
row = records[u_arcname]
if row[2] and str(zinfo.file_size) != row[2]:
raise DistlibException('size mismatch for '
'%s' % u_arcname)
if row[1]:
kind, value = row[1].split('=', 1)
with zf.open(arcname) as bf:
data = bf.read()
_, digest = self.get_hash(data, kind)
if digest != value:
raise DistlibException('digest mismatch for '
'%s' % arcname)
if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
logger.debug('lib_only: skipping %s', u_arcname)
continue
is_script = (u_arcname.startswith(script_pfx)
and not u_arcname.endswith('.exe'))
if u_arcname.startswith(data_pfx):
_, where, rp = u_arcname.split('/', 2)
outfile = os.path.join(paths[where], convert_path(rp))
else:
# meant for site-packages.
if u_arcname in (wheel_metadata_name, record_name):
continue
outfile = os.path.join(libdir, convert_path(u_arcname))
if not is_script:
with zf.open(arcname) as bf:
fileop.copy_stream(bf, outfile)
outfiles.append(outfile)
# Double check the digest of the written file
if not dry_run and row[1]:
with open(outfile, 'rb') as bf:
data = bf.read()
_, newdigest = self.get_hash(data, kind)
if newdigest != digest:
raise DistlibException('digest mismatch '
'on write for '
'%s' % outfile)
if bc and outfile.endswith('.py'):
try:
pyc = fileop.byte_compile(outfile)
outfiles.append(pyc)
except Exception:
# Don't give up if byte-compilation fails,
# but log it and perhaps warn the user
logger.warning('Byte-compilation failed',
exc_info=True)
else:
fn = os.path.basename(convert_path(arcname))
workname = os.path.join(workdir, fn)
with zf.open(arcname) as bf:
fileop.copy_stream(bf, workname)
dn, fn = os.path.split(outfile)
maker.target_dir = dn
filenames = maker.make(fn)
fileop.set_executable_mode(filenames)
outfiles.extend(filenames)
if lib_only:
logger.debug('lib_only: returning None')
dist = None
else:
# Generate scripts
# Try to get pydist.json so we can see if there are
# any commands to generate. If this fails (e.g. because
# of a legacy wheel), log a warning but don't give up.
commands = None
file_version = self.info['Wheel-Version']
if file_version == '1.0':
# Use legacy info
ep = posixpath.join(info_dir, 'entry_points.txt')
try:
with zf.open(ep) as bwf:
epdata = read_exports(bwf)
commands = {}
for key in ('console', 'gui'):
k = '%s_scripts' % key
if k in epdata:
commands['wrap_%s' % key] = d = {}
for v in epdata[k].values():
s = '%s:%s' % (v.prefix, v.suffix)
if v.flags:
s += ' %s' % v.flags
d[v.name] = s
except Exception:
logger.warning('Unable to read legacy script '
'metadata, so cannot generate '
'scripts')
else:
try:
with zf.open(metadata_name) as bwf:
wf = wrapper(bwf)
commands = json.load(wf).get('extensions')
if commands:
commands = commands.get('python.commands')
except Exception:
logger.warning('Unable to read JSON metadata, so '
'cannot generate scripts')
if commands:
console_scripts = commands.get('wrap_console', {})
gui_scripts = commands.get('wrap_gui', {})
if console_scripts or gui_scripts:
script_dir = paths.get('scripts', '')
if not os.path.isdir(script_dir):
raise ValueError('Valid script path not '
'specified')
maker.target_dir = script_dir
for k, v in console_scripts.items():
script = '%s = %s' % (k, v)
filenames = maker.make(script)
fileop.set_executable_mode(filenames)
if gui_scripts:
options = {'gui': True }
for k, v in gui_scripts.items():
script = '%s = %s' % (k, v)
filenames = maker.make(script, options)
fileop.set_executable_mode(filenames)
p = os.path.join(libdir, info_dir)
dist = InstalledDistribution(p)
# Write SHARED
paths = dict(paths) # don't change passed in dict
del paths['purelib']
del paths['platlib']
paths['lib'] = libdir
p = dist.write_shared_locations(paths, dry_run)
if p:
outfiles.append(p)
# Write RECORD
dist.write_installed_files(outfiles, paths['prefix'],
dry_run)
return dist
except Exception: # pragma: no cover
logger.exception('installation failed.')
fileop.rollback()
raise
finally:
shutil.rmtree(workdir)
def _get_dylib_cache(self):
global cache
if cache is None:
# Use native string to avoid issues on 2.x: see Python #20140.
base = os.path.join(get_cache_base(), str('dylib-cache'),
sys.version[:3])
cache = Cache(base)
return cache
def _get_extensions(self):
pathname = os.path.join(self.dirname, self.filename)
name_ver = '%s-%s' % (self.name, self.version)
info_dir = '%s.dist-info' % name_ver
arcname = posixpath.join(info_dir, 'EXTENSIONS')
wrapper = codecs.getreader('utf-8')
result = []
with ZipFile(pathname, 'r') as zf:
try:
with zf.open(arcname) as bf:
wf = wrapper(bf)
extensions = json.load(wf)
cache = self._get_dylib_cache()
prefix = cache.prefix_to_dir(pathname)
cache_base = os.path.join(cache.base, prefix)
if not os.path.isdir(cache_base):
os.makedirs(cache_base)
for name, relpath in extensions.items():
dest = os.path.join(cache_base, convert_path(relpath))
if not os.path.exists(dest):
extract = True
else:
file_time = os.stat(dest).st_mtime
file_time = datetime.datetime.fromtimestamp(file_time)
info = zf.getinfo(relpath)
wheel_time = datetime.datetime(*info.date_time)
extract = wheel_time > file_time
if extract:
zf.extract(relpath, cache_base)
result.append((name, dest))
except KeyError:
pass
return result
def is_compatible(self):
"""
Determine if a wheel is compatible with the running system.
"""
return is_compatible(self)
def is_mountable(self):
"""
Determine if a wheel is asserted as mountable by its metadata.
"""
return True # for now - metadata details TBD
def mount(self, append=False):
pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
if not self.is_compatible():
msg = 'Wheel %s not compatible with this Python.' % pathname
raise DistlibException(msg)
if not self.is_mountable():
msg = 'Wheel %s is marked as not mountable.' % pathname
raise DistlibException(msg)
if pathname in sys.path:
logger.debug('%s already in path', pathname)
else:
if append:
sys.path.append(pathname)
else:
sys.path.insert(0, pathname)
extensions = self._get_extensions()
if extensions:
if _hook not in sys.meta_path:
sys.meta_path.append(_hook)
_hook.add(pathname, extensions)
def unmount(self):
pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
if pathname not in sys.path:
logger.debug('%s not in path', pathname)
else:
sys.path.remove(pathname)
if pathname in _hook.impure_wheels:
_hook.remove(pathname)
if not _hook.impure_wheels:
if _hook in sys.meta_path:
sys.meta_path.remove(_hook)
def verify(self):
pathname = os.path.join(self.dirname, self.filename)
name_ver = '%s-%s' % (self.name, self.version)
data_dir = '%s.data' % name_ver
info_dir = '%s.dist-info' % name_ver
metadata_name = posixpath.join(info_dir, METADATA_FILENAME)
wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
record_name = posixpath.join(info_dir, 'RECORD')
wrapper = codecs.getreader('utf-8')
with ZipFile(pathname, 'r') as zf:
with zf.open(wheel_metadata_name) as bwf:
wf = wrapper(bwf)
message = message_from_file(wf)
wv = message['Wheel-Version'].split('.', 1)
file_version = tuple([int(i) for i in wv])
# TODO version verification
records = {}
with zf.open(record_name) as bf:
with CSVReader(stream=bf) as reader:
for row in reader:
p = row[0]
records[p] = row
for zinfo in zf.infolist():
arcname = zinfo.filename
if isinstance(arcname, text_type):
u_arcname = arcname
else:
u_arcname = arcname.decode('utf-8')
if '..' in u_arcname:
raise DistlibException('invalid entry in '
'wheel: %r' % u_arcname)
# The signature file won't be in RECORD,
# and we don't currently don't do anything with it
if u_arcname.endswith('/RECORD.jws'):
continue
row = records[u_arcname]
if row[2] and str(zinfo.file_size) != row[2]:
raise DistlibException('size mismatch for '
'%s' % u_arcname)
if row[1]:
kind, value = row[1].split('=', 1)
with zf.open(arcname) as bf:
data = bf.read()
_, digest = self.get_hash(data, kind)
if digest != value:
raise DistlibException('digest mismatch for '
'%s' % arcname)
def update(self, modifier, dest_dir=None, **kwargs):
"""
Update the contents of a wheel in a generic way. The modifier should
be a callable which expects a dictionary argument: its keys are
archive-entry paths, and its values are absolute filesystem paths
where the contents the corresponding archive entries can be found. The
modifier is free to change the contents of the files pointed to, add
new entries and remove entries, before returning. This method will
extract the entire contents of the wheel to a temporary location, call
the modifier, and then use the passed (and possibly updated)
dictionary to write a new wheel. If ``dest_dir`` is specified, the new
wheel is written there -- otherwise, the original wheel is overwritten.
The modifier should return True if it updated the wheel, else False.
This method returns the same value the modifier returns.
"""
def get_version(path_map, info_dir):
version = path = None
key = '%s/%s' % (info_dir, METADATA_FILENAME)
if key not in path_map:
key = '%s/PKG-INFO' % info_dir
if key in path_map:
path = path_map[key]
version = Metadata(path=path).version
return version, path
def update_version(version, path):
updated = None
try:
v = NormalizedVersion(version)
i = version.find('-')
if i < 0:
updated = '%s+1' % version
else:
parts = [int(s) for s in version[i + 1:].split('.')]
parts[-1] += 1
updated = '%s+%s' % (version[:i],
'.'.join(str(i) for i in parts))
except UnsupportedVersionError:
logger.debug('Cannot update non-compliant (PEP-440) '
'version %r', version)
if updated:
md = Metadata(path=path)
md.version = updated
legacy = not path.endswith(METADATA_FILENAME)
md.write(path=path, legacy=legacy)
logger.debug('Version updated from %r to %r', version,
updated)
pathname = os.path.join(self.dirname, self.filename)
name_ver = '%s-%s' % (self.name, self.version)
info_dir = '%s.dist-info' % name_ver
record_name = posixpath.join(info_dir, 'RECORD')
with tempdir() as workdir:
with ZipFile(pathname, 'r') as zf:
path_map = {}
for zinfo in zf.infolist():
arcname = zinfo.filename
if isinstance(arcname, text_type):
u_arcname = arcname
else:
u_arcname = arcname.decode('utf-8')
if u_arcname == record_name:
continue
if '..' in u_arcname:
raise DistlibException('invalid entry in '
'wheel: %r' % u_arcname)
zf.extract(zinfo, workdir)
path = os.path.join(workdir, convert_path(u_arcname))
path_map[u_arcname] = path
# Remember the version.
original_version, _ = get_version(path_map, info_dir)
# Files extracted. Call the modifier.
modified = modifier(path_map, **kwargs)
if modified:
# Something changed - need to build a new wheel.
current_version, path = get_version(path_map, info_dir)
if current_version and (current_version == original_version):
# Add or update local version to signify changes.
update_version(current_version, path)
# Decide where the new wheel goes.
if dest_dir is None:
fd, newpath = tempfile.mkstemp(suffix='.whl',
prefix='wheel-update-',
dir=workdir)
os.close(fd)
else:
if not os.path.isdir(dest_dir):
raise DistlibException('Not a directory: %r' % dest_dir)
newpath = os.path.join(dest_dir, self.filename)
archive_paths = list(path_map.items())
distinfo = os.path.join(workdir, info_dir)
info = distinfo, info_dir
self.write_records(info, workdir, archive_paths)
self.build_zip(newpath, archive_paths)
if dest_dir is None:
shutil.copyfile(newpath, pathname)
return modified
def compatible_tags():
"""
Return (pyver, abi, arch) tuples compatible with this Python.
"""
versions = [VER_SUFFIX]
major = VER_SUFFIX[0]
for minor in range(sys.version_info[1] - 1, - 1, -1):
versions.append(''.join([major, str(minor)]))
abis = []
for suffix, _, _ in imp.get_suffixes():
if suffix.startswith('.abi'):
abis.append(suffix.split('.', 2)[1])
abis.sort()
if ABI != 'none':
abis.insert(0, ABI)
abis.append('none')
result = []
arches = [ARCH]
if sys.platform == 'darwin':
m = re.match('(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
if m:
name, major, minor, arch = m.groups()
minor = int(minor)
matches = [arch]
if arch in ('i386', 'ppc'):
matches.append('fat')
if arch in ('i386', 'ppc', 'x86_64'):
matches.append('fat3')
if arch in ('ppc64', 'x86_64'):
matches.append('fat64')
if arch in ('i386', 'x86_64'):
matches.append('intel')
if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):
matches.append('universal')
while minor >= 0:
for match in matches:
s = '%s_%s_%s_%s' % (name, major, minor, match)
if s != ARCH: # already there
arches.append(s)
minor -= 1
# Most specific - our Python version, ABI and arch
for abi in abis:
for arch in arches:
result.append((''.join((IMP_PREFIX, versions[0])), abi, arch))
# where no ABI / arch dependency, but IMP_PREFIX dependency
for i, version in enumerate(versions):
result.append((''.join((IMP_PREFIX, version)), 'none', 'any'))
if i == 0:
result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any'))
# no IMP_PREFIX, ABI or arch dependency
for i, version in enumerate(versions):
result.append((''.join(('py', version)), 'none', 'any'))
if i == 0:
result.append((''.join(('py', version[0])), 'none', 'any'))
return set(result)
COMPATIBLE_TAGS = compatible_tags()
del compatible_tags
def is_compatible(wheel, tags=None):
if not isinstance(wheel, Wheel):
wheel = Wheel(wheel) # assume it's a filename
result = False
if tags is None:
tags = COMPATIBLE_TAGS
for ver, abi, arch in tags:
if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch:
result = True
break
return result
|
Fusxfaranto/Robcxjo | refs/heads/master | plugins/raw.py | 1 | name = 'raw'
enabled = False
operator = True
def cmd(self, c, e, line, message_source, target):
self.connection.send(line[1])
|
holmes/intellij-community | refs/heads/master | python/testData/intentions/beforeTypeInDocstring3.py | 83 | def foo3(x, y, z, aa): #comment
i = xx + <caret>y + z
return i |
alanjw/GreenOpenERP-Win-X86 | refs/heads/7.0 | openerp/addons/multi_company/__init__.py | 886 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
tebeka/arrow | refs/heads/master | python/pyarrow/flight.py | 1 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from pyarrow._flight import (Action, # noqa
ActionType,
DescriptorType,
FlightClient,
FlightDescriptor,
FlightEndpoint,
FlightInfo,
FlightServerBase,
Location,
Ticket,
RecordBatchStream,
Result)
|
mikewiebe-ansible/ansible | refs/heads/devel | lib/ansible/parsing/quoting.py | 241 | # (c) 2014 James Cammarata, <jcammarata@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
def is_quoted(data):
return len(data) > 1 and data[0] == data[-1] and data[0] in ('"', "'") and data[-2] != '\\'
def unquote(data):
''' removes first and last quotes from a string, if the string starts and ends with the same quotes '''
if is_quoted(data):
return data[1:-1]
return data
|
openstack/neutron-classifier | refs/heads/master | neutron_classifier/cli/openstack_cli/classification_type.py | 1 | # Copyright (c) 2017 Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from osc_lib.command import command
from osc_lib import utils
object_path = "/classification_type"
resource = 'classification_type'
class ListClassificationType(command.Lister):
"""List the Classification Types available."""
def take_action(self, parsed_args):
data = self.app.client_manager.neutronclient.list(
collection='classification_type',
path=object_path, retrieve_all=True)
headers = ('Name', 'Definition')
columns = ('type', 'supported_parameters')
return (headers, (utils.get_dict_properties(
s, columns) for s in data['classification_type']))
|
Fireblend/chromium-crosswalk | refs/heads/master | tools/telemetry/telemetry/core/profile_types_unittest.py | 63 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from telemetry.core import profile_types
class ProfileTypesTest(unittest.TestCase):
def testGetProfileTypes(self):
types = profile_types.GetProfileTypes()
self.assertTrue('clean' in types)
self.assertTrue(len(types) > 0)
def testGetProfileDir(self):
self.assertFalse(profile_types.GetProfileDir('typical_user') is None)
|
eahneahn/free | refs/heads/master | djangoproject/gh_frespo_integration/tests/test_github_adapter.py | 1 | from django.test import TestCase
from gh_frespo_integration.utils import github_adapter
__author__ = 'tony'
def _assert_repo_contains(test, repos, name):
repo_names = []
found = False
for repo in repos:
if repo['name'] == name:
found = True
break
repo_names.append(repo['name'])
err = "repo %s not found in %s"%(name, repo_names)
test.assertTrue(found, err)
class GithubAdapterTest(TestCase):
def test_fetch_repos(self):
repos = github_adapter.fetch_repos("tonylampada")
self.assertTrue(len(repos) >= 6)
_assert_repo_contains(self, repos, "liquibase")
_assert_repo_contains(self, repos, "PituKontrol")
_assert_repo_contains(self, repos, "Svn-Hooks-In-Java")
_assert_repo_contains(self, repos, "www.freedomsponsors.org")
_assert_repo_contains(self, repos, "freedomsponsors-jira-plugin")
_assert_repo_contains(self, repos, "freedomsponsors.github.com")
|
CingHu/neutron-ustack | refs/heads/master | neutron/db/migration/alembic_migrations/versions/492a106273f8_brocade_ml2_mech_dri.py | 3 | # Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Brocade ML2 Mech. Driver
Revision ID: 492a106273f8
Revises: fcac4c42e2cc
Create Date: 2014-03-03 15:35:46.974523
"""
# revision identifiers, used by Alembic.
revision = '492a106273f8'
down_revision = 'fcac4c42e2cc'
# Change to ['*'] if this migration applies to all plugins
migration_for_plugins = [
'neutron.plugins.ml2.plugin.Ml2Plugin'
]
from alembic import op
import sqlalchemy as sa
from neutron.db import migration
def upgrade(active_plugins=None, options=None):
if not migration.should_run(active_plugins, migration_for_plugins):
return
op.create_table(
'ml2_brocadenetworks',
sa.Column('id', sa.String(length=36), nullable=False),
sa.Column('vlan', sa.String(length=10), nullable=True),
sa.Column('segment_id', sa.String(length=36), nullable=True),
sa.Column('network_type', sa.String(length=10), nullable=True),
sa.Column('tenant_id', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id'))
op.create_table(
'ml2_brocadeports',
sa.Column('id', sa.String(length=36), nullable=False),
sa.Column('network_id', sa.String(length=36), nullable=False),
sa.Column('admin_state_up', sa.Boolean()),
sa.Column('physical_interface', sa.String(length=36), nullable=True),
sa.Column('vlan_id', sa.String(length=36), nullable=True),
sa.Column('tenant_id', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id'))
def downgrade(active_plugins=None, options=None):
pass
|
teslaji/homebase | refs/heads/master | venv/HomeBase/lib/python3.5/site-packages/django/db/__init__.py | 34 | from django.core import signals
from django.db.utils import (
DEFAULT_DB_ALIAS, DJANGO_VERSION_PICKLE_KEY, ConnectionHandler,
ConnectionRouter, DatabaseError, DataError, Error, IntegrityError,
InterfaceError, InternalError, NotSupportedError, OperationalError,
ProgrammingError,
)
__all__ = [
'connection', 'connections', 'router', 'DatabaseError', 'IntegrityError',
'InternalError', 'ProgrammingError', 'DataError', 'NotSupportedError',
'Error', 'InterfaceError', 'OperationalError', 'DEFAULT_DB_ALIAS',
'DJANGO_VERSION_PICKLE_KEY',
]
connections = ConnectionHandler()
router = ConnectionRouter()
# DatabaseWrapper.__init__() takes a dictionary, not a settings module, so we
# manually create the dictionary from the settings, passing only the settings
# that the database backends care about.
# We load all these up for backwards compatibility, you should use
# connections['default'] instead.
class DefaultConnectionProxy(object):
"""
Proxy for accessing the default DatabaseWrapper object's attributes. If you
need to access the DatabaseWrapper object itself, use
connections[DEFAULT_DB_ALIAS] instead.
"""
def __getattr__(self, item):
return getattr(connections[DEFAULT_DB_ALIAS], item)
def __setattr__(self, name, value):
return setattr(connections[DEFAULT_DB_ALIAS], name, value)
def __delattr__(self, name):
return delattr(connections[DEFAULT_DB_ALIAS], name)
def __eq__(self, other):
return connections[DEFAULT_DB_ALIAS] == other
def __ne__(self, other):
return connections[DEFAULT_DB_ALIAS] != other
connection = DefaultConnectionProxy()
# Register an event to reset saved queries when a Django request is started.
def reset_queries(**kwargs):
for conn in connections.all():
conn.queries_log.clear()
signals.request_started.connect(reset_queries)
# Register an event to reset transaction state and close connections past
# their lifetime.
def close_old_connections(**kwargs):
for conn in connections.all():
conn.close_if_unusable_or_obsolete()
signals.request_started.connect(close_old_connections)
signals.request_finished.connect(close_old_connections)
|
MartinHjelmare/home-assistant | refs/heads/dev | homeassistant/components/miflora/__init__.py | 36 | """The miflora component."""
|
vipul-sharma20/oh-mainline | refs/heads/master | vendor/packages/south/south/management/commands/graphmigrations.py | 129 | """
Outputs a graphviz dot file of the dependencies.
"""
from __future__ import print_function
from optparse import make_option
import re
import textwrap
from django.core.management.base import BaseCommand
from django.core.management.color import no_style
from south.migration import Migrations, all_migrations
class Command(BaseCommand):
help = "Outputs a GraphViz dot file of all migration dependencies to stdout."
def handle(self, **options):
# Resolve dependencies
Migrations.calculate_dependencies()
colors = [ 'crimson', 'darkgreen', 'darkgoldenrod', 'navy',
'brown', 'darkorange', 'aquamarine' , 'blueviolet' ]
color_index = 0
wrapper = textwrap.TextWrapper(width=40)
print("digraph G {")
# Group each app in a subgraph
for migrations in all_migrations():
print(" subgraph %s {" % migrations.app_label())
print(" node [color=%s];" % colors[color_index])
for migration in migrations:
# Munge the label - text wrap and change _ to spaces
label = "%s - %s" % (
migration.app_label(), migration.name())
label = re.sub(r"_+", " ", label)
label= "\\n".join(wrapper.wrap(label))
print(' "%s.%s" [label="%s"];' % (
migration.app_label(), migration.name(), label))
print(" }")
color_index = (color_index + 1) % len(colors)
# For every migration, print its links.
for migrations in all_migrations():
for migration in migrations:
for other in migration.dependencies:
# Added weight tends to keep migrations from the same app
# in vertical alignment
attrs = "[weight=2.0]"
# But the more interesting edges are those between apps
if other.app_label() != migration.app_label():
attrs = "[style=bold]"
print(' "%s.%s" -> "%s.%s" %s;' % (
other.app_label(), other.name(),
migration.app_label(), migration.name(),
attrs
))
print("}");
|
teltek/edx-platform | refs/heads/master | openedx/core/djangoapps/course_groups/cohorts.py | 2 | """
This file contains the logic for cohorts, as exposed internally to the
forums, and to the cohort admin views.
"""
import logging
import random
from courseware import courses
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.db import IntegrityError, transaction
from django.db.models.signals import m2m_changed, post_save
from django.dispatch import receiver
from django.http import Http404
from django.utils.translation import ugettext as _
from eventtracking import tracker
from edx_django_utils.cache import RequestCache
from openedx.core.lib.cache_utils import request_cached
from student.models import get_user_by_username_or_email
from .models import (
CohortMembership,
CourseCohort,
CourseCohortsSettings,
CourseUserGroup,
CourseUserGroupPartitionGroup,
UnregisteredLearnerCohortAssignments
)
from .signals.signals import COHORT_MEMBERSHIP_UPDATED
log = logging.getLogger(__name__)
@receiver(post_save, sender=CourseUserGroup)
def _cohort_added(sender, **kwargs):
"""Emits a tracking log event each time a cohort is created"""
instance = kwargs["instance"]
if kwargs["created"] and instance.group_type == CourseUserGroup.COHORT:
tracker.emit(
"edx.cohort.created",
{"cohort_id": instance.id, "cohort_name": instance.name}
)
@receiver(m2m_changed, sender=CourseUserGroup.users.through)
def _cohort_membership_changed(sender, **kwargs):
"""Emits a tracking log event each time cohort membership is modified"""
def get_event_iter(user_id_iter, cohort_iter):
"""
Returns a dictionary containing a mashup of cohort and user information for the given lists
"""
return (
{"cohort_id": cohort.id, "cohort_name": cohort.name, "user_id": user_id}
for user_id in user_id_iter
for cohort in cohort_iter
)
action = kwargs["action"]
instance = kwargs["instance"]
pk_set = kwargs["pk_set"]
reverse = kwargs["reverse"]
if action == "post_add":
event_name = "edx.cohort.user_added"
elif action in ["post_remove", "pre_clear"]:
event_name = "edx.cohort.user_removed"
else:
return
if reverse:
user_id_iter = [instance.id]
if action == "pre_clear":
cohort_iter = instance.course_groups.filter(group_type=CourseUserGroup.COHORT)
else:
cohort_iter = CourseUserGroup.objects.filter(pk__in=pk_set, group_type=CourseUserGroup.COHORT)
else:
cohort_iter = [instance] if instance.group_type == CourseUserGroup.COHORT else []
if action == "pre_clear":
user_id_iter = (user.id for user in instance.users.all())
else:
user_id_iter = pk_set
for event in get_event_iter(user_id_iter, cohort_iter):
tracker.emit(event_name, event)
# A 'default cohort' is an auto-cohort that is automatically created for a course if no cohort with automatic
# assignment have been specified. It is intended to be used in a cohorted course for users who have yet to be assigned
# to a cohort, if the course staff have not explicitly created a cohort of type "RANDOM".
# Note that course staff have the ability to change the name of this cohort after creation via the cohort
# management UI in the instructor dashboard.
DEFAULT_COHORT_NAME = _("Default Group")
# tl;dr: global state is bad. capa reseeds the global random module every
# time a problem is loaded, and other code may tamper with it too, so this
# module keeps its own lazily-created generator.
_local_random = None
def local_random():
    """
    Return the module-local random number generator, creating it on first
    use.  Wrapped in a function so that importing this module never runs
    random.Random() as a side effect.
    """
    global _local_random
    rng = _local_random
    if rng is None:
        rng = _local_random = random.Random()
    return rng
def is_course_cohorted(course_key):
    """
    Return True if the course identified by ``course_key`` has cohorting
    enabled, False otherwise.
    Raises:
        Http404 if the course doesn't exist.
    """
    cohort_settings = _get_course_cohort_settings(course_key)
    return cohort_settings.is_cohorted
def get_course_cohort_id(course_key):
    """
    Return the integer id of the cohort settings row for the given course.
    Raises:
        Http404 if the course doesn't exist.
    """
    cohort_settings = _get_course_cohort_settings(course_key)
    return cohort_settings.id
def set_course_cohorted(course_key, cohorted):
    """
    Enable or disable cohorting for the given course.
    Arguments:
        course_key: CourseKey of the course to update.
        cohorted (bool): whether the course should be cohorted.
    Raises:
        ValueError if `cohorted` is not a boolean.
    """
    if not isinstance(cohorted, bool):
        raise ValueError("Cohorted must be a boolean")
    settings_row = _get_course_cohort_settings(course_key)
    settings_row.is_cohorted = cohorted
    settings_row.save()
def get_cohort_id(user, course_key, use_cached=False):
    """
    Return the id of the cohort ``user`` is assigned to in the given course,
    or None if the user has no cohort there.
    """
    cohort = get_cohort(user, course_key, use_cached=use_cached)
    if cohort is None:
        return None
    return cohort.id
COHORT_CACHE_NAMESPACE = u"cohorts.get_cohort"
def _cohort_cache_key(user_id, course_key):
"""
Returns the cache key for the given user_id and course_key.
"""
return u"{}.{}".format(user_id, course_key)
def bulk_cache_cohorts(course_key, users):
    """
    Pre-fetch the cohort assignments of ``users`` in the given course and
    store them in the request cache for later fast retrieval by get_cohort.
    """
    # Remove previously cached bulk data before populating again, to keep
    # memory usage low.
    RequestCache(COHORT_CACHE_NAMESPACE).clear()
    cache = RequestCache(COHORT_CACHE_NAMESPACE).data
    if is_course_cohorted(course_key):
        memberships = CohortMembership.objects.filter(
            user__in=users, course_id=course_key
        ).select_related('user')
        cohorts_by_user = {membership.user: membership for membership in memberships}
        for user, membership in cohorts_by_user.iteritems():
            cache[_cohort_cache_key(user.id, course_key)] = membership.course_user_group
    else:
        cohorts_by_user = {}
    # Users with no membership (or in a non-cohorted course) cache as None.
    for user in users:
        if user not in cohorts_by_user:
            cache[_cohort_cache_key(user.id, course_key)] = None
def get_cohort(user, course_key, assign=True, use_cached=False):
    """
    Returns the user's cohort for the specified course.
    The cohort for the user is cached for the duration of a request. Pass
    use_cached=True to use the cached value instead of fetching from the
    database.
    Arguments:
        user: a Django User object.
        course_key: CourseKey
        assign (bool): if False then we don't assign a group to user
        use_cached (bool): Whether to use the cached value or fetch from database.
    Returns:
        A CourseUserGroup object if the course is cohorted and the User has a
        cohort, else None.
    Raises:
        ValueError if the CourseKey doesn't exist.
    """
    cache = RequestCache(COHORT_CACHE_NAMESPACE).data
    cache_key = _cohort_cache_key(user.id, course_key)
    if use_cached and cache_key in cache:
        return cache[cache_key]
    # Not using (or not trusting) the cache: drop any stale entry first.
    cache.pop(cache_key, None)
    # First check whether the course is cohorted (users shouldn't be in a cohort
    # in non-cohorted courses, but settings can change after course starts)
    if not is_course_cohorted(course_key):
        return cache.setdefault(cache_key, None)
    # If course is cohorted, check if the user already has a cohort.
    try:
        membership = CohortMembership.objects.get(
            course_id=course_key,
            user_id=user.id,
        )
        return cache.setdefault(cache_key, membership.course_user_group)
    except CohortMembership.DoesNotExist:
        # Didn't find the group. If we do not want to assign, return here.
        if not assign:
            # Do not cache the cohort here, because in the next call assign
            # may be True, and we will have to assign the user a cohort.
            return None
    # Otherwise assign the user a cohort.
    try:
        # If learner has been pre-registered in a cohort, get that cohort. Otherwise assign to a random cohort.
        course_user_group = None
        # NOTE: for/else — the `else` runs only when the loop body never
        # breaks, i.e. when the user has no pre-registered assignment.
        for assignment in UnregisteredLearnerCohortAssignments.objects.filter(email=user.email, course_id=course_key):
            course_user_group = assignment.course_user_group
            assignment.delete()
            break
        else:
            course_user_group = get_random_cohort(course_key)
        add_user_to_cohort(course_user_group, user)
        return course_user_group
    except ValueError:
        # user already in cohort
        return course_user_group
    except IntegrityError as integrity_error:
        # An IntegrityError is raised when multiple workers attempt to
        # create the same row in one of the cohort model entries:
        # CourseCohort, CohortMembership.
        log.info(
            "HANDLING_INTEGRITY_ERROR: IntegrityError encountered for course '%s' and user '%s': %s",
            course_key, user.id, unicode(integrity_error)
        )
        # Retry: on the second pass the row created by the other worker is
        # found by the CohortMembership lookup above.
        return get_cohort(user, course_key, assign, use_cached)
def get_random_cohort(course_key):
    """
    Return a cohort usable for random assignment in the given course.
    If the course has several cohorts of type RANDOM, one is picked at
    random; if it has none, a default RANDOM cohort is created first.
    """
    course = courses.get_course(course_key)
    random_cohorts = get_course_cohorts(course, assignment_type=CourseCohort.RANDOM)
    if not random_cohorts:
        return CourseCohort.create(
            cohort_name=DEFAULT_COHORT_NAME,
            course_id=course_key,
            assignment_type=CourseCohort.RANDOM
        ).course_user_group
    return local_random().choice(random_cohorts)
def migrate_cohort_settings(course):
    """
    Migrate all the cohort settings associated with this course from modulestore to mysql.
    After that we will never touch modulestore for any cohort related settings.

    Returns the (possibly freshly created) CourseCohortsSettings row.
    """
    cohort_settings, created = CourseCohortsSettings.objects.get_or_create(
        course_id=course.id,
        defaults=_get_cohort_settings_from_modulestore(course)
    )
    # Add the new and update the existing cohorts
    if created:
        # Update the manual cohorts already present in CourseUserGroup
        # (anything not listed as an auto-cohort group in the modulestore).
        manual_cohorts = CourseUserGroup.objects.filter(
            course_id=course.id,
            group_type=CourseUserGroup.COHORT
        ).exclude(name__in=course.auto_cohort_groups)
        for cohort in manual_cohorts:
            CourseCohort.create(course_user_group=cohort)
        # Auto-cohort groups from the modulestore become RANDOM cohorts.
        for group_name in course.auto_cohort_groups:
            CourseCohort.create(cohort_name=group_name, course_id=course.id, assignment_type=CourseCohort.RANDOM)
    return cohort_settings
def get_course_cohorts(course, assignment_type=None):
    """
    Return all cohorts in ``course`` as a list of CourseUserGroup objects.
    Auto cohorts are included even when they contain no users.  The result is
    empty if there are no cohorts; no check is made that the course is
    actually cohorted.
    Arguments:
        course: the course for which cohorts should be returned
        assignment_type: if given (truthy), restrict the result to cohorts
            with that assignment type
    """
    # Make sure the cohort settings for this course have been migrated
    # out of the modulestore.
    migrate_cohort_settings(course)
    cohorts = CourseUserGroup.objects.filter(
        course_id=course.location.course_key,
        group_type=CourseUserGroup.COHORT
    )
    if assignment_type:
        cohorts = cohorts.filter(cohort__assignment_type=assignment_type)
    return list(cohorts)
def get_cohort_names(course):
    """Return a dict mapping cohort ids to cohort names for the given course."""
    return dict((cohort.id, cohort.name) for cohort in get_course_cohorts(course))
# Helpers for cohort management views
def get_cohort_by_name(course_key, name):
    """
    Return the CourseUserGroup for the cohort named ``name`` in the given
    course.  Raises CourseUserGroup.DoesNotExist if it isn't present.
    """
    return CourseUserGroup.objects.get(
        name=name,
        course_id=course_key,
        group_type=CourseUserGroup.COHORT
    )
def get_cohort_by_id(course_key, cohort_id):
    """
    Return the CourseUserGroup with id ``cohort_id``.  Raises
    CourseUserGroup.DoesNotExist if it isn't present.  The course_key is
    used for extra validation.
    """
    return CourseUserGroup.objects.get(
        id=cohort_id,
        course_id=course_key,
        group_type=CourseUserGroup.COHORT
    )
def add_cohort(course_key, name, assignment_type):
    """
    Create a new cohort in the given course and emit a tracking event.
    Raises ValueError if a cohort with the same name already exists, or if
    ``course_key`` does not refer to an existing course.
    """
    log.debug("Adding cohort %s to %s", name, course_key)
    if is_cohort_exists(course_key, name):
        raise ValueError(_("You cannot create two cohorts with the same name"))
    try:
        course = courses.get_course_by_id(course_key)
    except Http404:
        raise ValueError("Invalid course_key")
    new_cohort = CourseCohort.create(
        course_id=course.id,
        cohort_name=name,
        assignment_type=assignment_type
    ).course_user_group
    tracker.emit(
        "edx.cohort.creation_requested",
        {"cohort_name": new_cohort.name, "cohort_id": new_cohort.id}
    )
    return new_cohort
def is_cohort_exists(course_key, name):
    """Return True if a cohort named ``name`` already exists in the course."""
    return CourseUserGroup.objects.filter(
        name=name,
        course_id=course_key,
        group_type=CourseUserGroup.COHORT
    ).exists()
def remove_user_from_cohort(cohort, username_or_email):
    """
    Look up the given user, and if successful, remove them from the specified cohort.
    Arguments:
        cohort: CourseUserGroup
        username_or_email: string. Treated as email if it contains '@'
    Raises:
        User.DoesNotExist if can't find user.
        ValueError if the user is not a member of this cohort.
    """
    user = get_user_by_username_or_email(username_or_email)
    try:
        membership = CohortMembership.objects.get(course_user_group=cohort, user=user)
    except CohortMembership.DoesNotExist:
        raise ValueError("User {} was not present in cohort {}".format(username_or_email, cohort))
    course_key = membership.course_id
    membership.delete()
    COHORT_MEMBERSHIP_UPDATED.send(sender=None, user=user, course_key=course_key)
def add_user_to_cohort(cohort, username_or_email_or_user):
    """
    Look up the given user, and if successful, add them to the specified cohort.
    Arguments:
        cohort: CourseUserGroup
        username_or_email_or_user: user or string. Treated as email if has '@'
    Returns:
        User object (or None if the email address is preassigned),
        string (or None) indicating previous cohort,
        and whether the user is a preassigned user or not
    Raises:
        User.DoesNotExist if can't find user. However, if a valid email is provided for the user, it is stored
        in a database so that the user can be added to the cohort if they eventually enroll in the course.
        ValueError if user already present in this cohort.
        ValidationError if an invalid email address is entered.
        User.DoesNotExist if a user could not be found.
    """
    try:
        if hasattr(username_or_email_or_user, 'email'):
            # Already a User object; no lookup needed.
            user = username_or_email_or_user
        else:
            user = get_user_by_username_or_email(username_or_email_or_user)
        membership, previous_cohort = CohortMembership.assign(cohort, user)
        tracker.emit(
            "edx.cohort.user_add_requested",
            {
                "user_id": user.id,
                "cohort_id": cohort.id,
                "cohort_name": cohort.name,
                "previous_cohort_id": getattr(previous_cohort, 'id', None),
                "previous_cohort_name": getattr(previous_cohort, 'name', None),
            }
        )
        # Keep the request cache in sync so a following get_cohort(use_cached=True)
        # sees the new assignment.
        cache = RequestCache(COHORT_CACHE_NAMESPACE).data
        cache_key = _cohort_cache_key(user.id, membership.course_id)
        cache[cache_key] = membership.course_user_group
        COHORT_MEMBERSHIP_UPDATED.send(sender=None, user=user, course_key=membership.course_id)
        return user, getattr(previous_cohort, 'name', None), False
    except User.DoesNotExist as ex:
        # If username_or_email is an email address, store in database.
        try:
            validate_email(username_or_email_or_user)
            # Upsert the pre-assignment: update an existing row for this
            # email/course, otherwise create one.
            try:
                assignment = UnregisteredLearnerCohortAssignments.objects.get(
                    email=username_or_email_or_user, course_id=cohort.course_id
                )
                assignment.course_user_group = cohort
                assignment.save()
            except UnregisteredLearnerCohortAssignments.DoesNotExist:
                assignment = UnregisteredLearnerCohortAssignments.objects.create(
                    course_user_group=cohort, email=username_or_email_or_user, course_id=cohort.course_id
                )
            tracker.emit(
                "edx.cohort.email_address_preassigned",
                {
                    "user_email": assignment.email,
                    "cohort_id": cohort.id,
                    "cohort_name": cohort.name,
                }
            )
            return (None, None, True)
        except ValidationError as invalid:
            # Looked like an email (has '@') but failed validation: surface
            # the validation error; otherwise re-raise the original
            # User.DoesNotExist for the unknown username.
            if "@" in username_or_email_or_user:
                raise invalid
            else:
                raise ex
def get_group_info_for_cohort(cohort, use_cached=False):
    """
    Return (group_id, partition_id) for the partition group this cohort has
    been linked to, as a tuple of (int, int).  If the cohort is not linked
    to any group/partition, both values are None.
    The result is cached for the duration of a request; pass use_cached=True
    to reuse a previously cached value instead of hitting the database.
    """
    cache = RequestCache(u"cohorts.get_group_info_for_cohort").data
    cache_key = unicode(cohort.id)
    if use_cached and cache_key in cache:
        return cache[cache_key]
    cache.pop(cache_key, None)
    try:
        link = CourseUserGroupPartitionGroup.objects.get(course_user_group=cohort)
        group_info = (link.group_id, link.partition_id)
    except CourseUserGroupPartitionGroup.DoesNotExist:
        group_info = (None, None)
    return cache.setdefault(cache_key, group_info)
def set_assignment_type(user_group, assignment_type):
    """
    Change the assignment type of ``user_group``'s cohort.
    Raises ValueError when the change would leave the course without any
    cohort that students can automatically be assigned to.
    """
    course_cohort = user_group.cohort
    if is_last_random_cohort(user_group):
        if course_cohort.assignment_type != assignment_type:
            raise ValueError(_("There must be one cohort to which students can automatically be assigned."))
    course_cohort.assignment_type = assignment_type
    course_cohort.save()
def get_assignment_type(user_group):
    """Return the assignment type of ``user_group``'s cohort."""
    return user_group.cohort.assignment_type
def is_last_random_cohort(user_group):
    """
    Check if this cohort is the only random cohort in the course.
    """
    random_cohorts = CourseUserGroup.objects.filter(
        course_id=user_group.course_id,
        group_type=CourseUserGroup.COHORT,
        cohort__assignment_type=CourseCohort.RANDOM
    )
    # len() evaluates the queryset once; the [0] index then reads from the
    # queryset's result cache (per Django's QuerySet caching), so only one
    # query is issued here.
    return len(random_cohorts) == 1 and random_cohorts[0].name == user_group.name
@request_cached()
def _get_course_cohort_settings(course_key):
    """
    Return the CourseCohortsSettings row for ``course_key``, migrating the
    settings out of the modulestore on first access.
    NOTE that the only non-deprecated fields in CourseCohortsSettings are
    `course_id` and `is_cohorted`.  Other fields should only be used for
    migration purposes.
    Arguments:
        course_key: CourseKey
    Returns:
        A CourseCohortsSettings object.
    Raises:
        Http404 if course_key is invalid.
    """
    try:
        return CourseCohortsSettings.objects.get(course_id=course_key)
    except CourseCohortsSettings.DoesNotExist:
        course = courses.get_course_by_id(course_key)
        return migrate_cohort_settings(course)
def get_legacy_discussion_settings(course_key):
    """
    Return the legacy discussion-related cohort settings for a course as a
    dict with keys 'is_cohorted', 'cohorted_discussions' and
    'always_cohort_inline_discussions'.
    Falls back to the modulestore values when the course has no
    CourseCohortsSettings row yet.
    """
    try:
        course_cohort_settings = CourseCohortsSettings.objects.get(course_id=course_key)
        return {
            'is_cohorted': course_cohort_settings.is_cohorted,
            'cohorted_discussions': course_cohort_settings.cohorted_discussions,
            'always_cohort_inline_discussions': course_cohort_settings.always_cohort_inline_discussions
        }
    except CourseCohortsSettings.DoesNotExist:
        course = courses.get_course_by_id(course_key)
        return _get_cohort_settings_from_modulestore(course)
def _get_cohort_settings_from_modulestore(course):
    """Read the legacy cohort settings directly off the course object."""
    settings_dict = {
        'is_cohorted': course.is_cohorted,
        'cohorted_discussions': list(course.cohorted_discussions),
        'always_cohort_inline_discussions': course.always_cohort_inline_discussions,
    }
    return settings_dict
|
woylaski/notebook | refs/heads/master | graphic/kivy-master/kivy/input/providers/mtdev.py | 15 | '''
Native support for Multitouch devices on Linux, using libmtdev.
===============================================================
The Mtdev project is a part of the Ubuntu Maverick multitouch architecture.
You can read more on http://wiki.ubuntu.com/Multitouch
To configure MTDev, it's preferable to use probesysfs providers.
Check :py:class:`~kivy.input.providers.probesysfs` for more information.
Otherwise, add this to your configuration::
[input]
# devicename = hidinput,/dev/input/eventXX
acert230h = mtdev,/dev/input/event2
.. note::
You must have read access to the input event.
You can use a custom range for the X, Y and pressure values.
On some drivers, the range reported is invalid.
To fix that, you can add these options to the argument line:
* invert_x : 1 to invert X axis
* invert_y : 1 to invert Y axis
* min_position_x : X minimum
* max_position_x : X maximum
* min_position_y : Y minimum
* max_position_y : Y maximum
* min_pressure : pressure minimum
* max_pressure : pressure maximum
* min_touch_major : width shape minimum
* max_touch_major : width shape maximum
* min_touch_minor : width shape minimum
* max_touch_minor : height shape maximum
'''
__all__ = ('MTDMotionEventProvider', 'MTDMotionEvent')
import os
from kivy.input.motionevent import MotionEvent
from kivy.input.shape import ShapeRect
class MTDMotionEvent(MotionEvent):
    """Touch event produced by the mtdev provider."""

    def depack(self, args):
        # x/y (and size/pressure, when present) arrive already normalized to
        # the 0-1 range by MTDMotionEventProvider._thread_run.
        self.is_touch = True
        self.sx = args['x']
        self.sy = args['y']
        self.profile = ['pos']
        if 'size_w' in args and 'size_h' in args:
            self.shape = ShapeRect()
            self.shape.width = args['size_w']
            self.shape.height = args['size_h']
            self.profile.append('shape')
        if 'pressure' in args:
            self.pressure = args['pressure']
            self.profile.append('pressure')
        super(MTDMotionEvent, self).depack(args)

    def __str__(self):
        i, sx, sy, d = (self.id, self.sx, self.sy, self.device)
        return '<MTDMotionEvent id=%d pos=(%f, %f) device=%s>' % (i, sx, sy, d)
if 'KIVY_DOC' in os.environ:
# documentation hack
MTDMotionEventProvider = None
else:
import threading
import collections
from kivy.lib.mtdev import Device, \
MTDEV_TYPE_EV_ABS, MTDEV_CODE_SLOT, MTDEV_CODE_POSITION_X, \
MTDEV_CODE_POSITION_Y, MTDEV_CODE_PRESSURE, \
MTDEV_CODE_TOUCH_MAJOR, MTDEV_CODE_TOUCH_MINOR, \
MTDEV_CODE_TRACKING_ID, MTDEV_ABS_POSITION_X, \
MTDEV_ABS_POSITION_Y, MTDEV_ABS_TOUCH_MINOR, \
MTDEV_ABS_TOUCH_MAJOR
from kivy.input.provider import MotionEventProvider
from kivy.input.factory import MotionEventFactory
from kivy.logger import Logger
class MTDMotionEventProvider(MotionEventProvider):
options = ('min_position_x', 'max_position_x',
'min_position_y', 'max_position_y',
'min_pressure', 'max_pressure',
'min_touch_major', 'max_touch_major',
'min_touch_minor', 'min_touch_major',
'invert_x', 'invert_y')
def __init__(self, device, args):
super(MTDMotionEventProvider, self).__init__(device, args)
self._device = None
self.input_fn = None
self.default_ranges = dict()
# split arguments
args = args.split(',')
if not args:
Logger.error('MTD: No filename pass to MTD configuration')
Logger.error('MTD: Use /dev/input/event0 for example')
return None
# read filename
self.input_fn = args[0]
Logger.info('MTD: Read event from <%s>' % self.input_fn)
# read parameters
for arg in args[1:]:
if arg == '':
continue
arg = arg.split('=')
# ensure it's a key = value
if len(arg) != 2:
err = 'MTD: Bad parameter %s: Not in key=value format' %\
arg
Logger.error()
continue
# ensure the key exist
key, value = arg
if key not in MTDMotionEventProvider.options:
Logger.error('MTD: unknown %s option' % key)
continue
# ensure the value
try:
self.default_ranges[key] = int(value)
except ValueError:
err = 'MTD: invalid value %s for option %s' % (key, value)
Logger.error(err)
continue
# all good!
Logger.info('MTD: Set custom %s to %d' % (key, int(value)))
def start(self):
if self.input_fn is None:
return
self.uid = 0
self.queue = collections.deque()
self.thread = threading.Thread(
target=self._thread_run,
kwargs=dict(
queue=self.queue,
input_fn=self.input_fn,
device=self.device,
default_ranges=self.default_ranges))
self.thread.daemon = True
self.thread.start()
def _thread_run(self, **kwargs):
input_fn = kwargs.get('input_fn')
queue = kwargs.get('queue')
device = kwargs.get('device')
drs = kwargs.get('default_ranges').get
touches = {}
touches_sent = []
point = {}
l_points = {}
def process(points):
for args in points:
# this can happen if we have a touch going on already at the
# start of the app
if 'id' not in args:
continue
tid = args['id']
try:
touch = touches[tid]
except KeyError:
touch = MTDMotionEvent(device, tid, args)
touches[touch.id] = touch
touch.move(args)
action = 'update'
if tid not in touches_sent:
action = 'begin'
touches_sent.append(tid)
if 'delete' in args:
action = 'end'
del args['delete']
del touches[touch.id]
touches_sent.remove(tid)
touch.update_time_end()
queue.append((action, touch))
def normalize(value, vmin, vmax):
return (value - vmin) / float(vmax - vmin)
# open mtdev device
_fn = input_fn
_slot = 0
_device = Device(_fn)
_changes = set()
# prepare some vars to get limit of some component
ab = _device.get_abs(MTDEV_ABS_POSITION_X)
range_min_position_x = drs('min_position_x', ab.minimum)
range_max_position_x = drs('max_position_x', ab.maximum)
Logger.info('MTD: <%s> range position X is %d - %d' %
(_fn, range_min_position_x, range_max_position_x))
ab = _device.get_abs(MTDEV_ABS_POSITION_Y)
range_min_position_y = drs('min_position_y', ab.minimum)
range_max_position_y = drs('max_position_y', ab.maximum)
Logger.info('MTD: <%s> range position Y is %d - %d' %
(_fn, range_min_position_y, range_max_position_y))
ab = _device.get_abs(MTDEV_ABS_TOUCH_MAJOR)
range_min_major = drs('min_touch_major', ab.minimum)
range_max_major = drs('max_touch_major', ab.maximum)
Logger.info('MTD: <%s> range touch major is %d - %d' %
(_fn, range_min_major, range_max_major))
ab = _device.get_abs(MTDEV_ABS_TOUCH_MINOR)
range_min_minor = drs('min_touch_minor', ab.minimum)
range_max_minor = drs('max_touch_minor', ab.maximum)
Logger.info('MTD: <%s> range touch minor is %d - %d' %
(_fn, range_min_minor, range_max_minor))
range_min_pressure = drs('min_pressure', 0)
range_max_pressure = drs('max_pressure', 255)
Logger.info('MTD: <%s> range pressure is %d - %d' %
(_fn, range_min_pressure, range_max_pressure))
invert_x = int(bool(drs('invert_x', 0)))
invert_y = int(bool(drs('invert_y', 0)))
Logger.info('MTD: <%s> axes invertion: X is %d, Y is %d' %
(_fn, invert_x, invert_y))
while _device:
# idle as much as we can.
while _device.idle(1000):
continue
# got data, read all without redoing idle
while True:
data = _device.get()
if data is None:
break
# set the working slot
if data.type == MTDEV_TYPE_EV_ABS and \
data.code == MTDEV_CODE_SLOT:
_slot = data.value
continue
# fill the slot
if not _slot in l_points:
l_points[_slot] = dict()
point = l_points[_slot]
ev_value = data.value
ev_code = data.code
if ev_code == MTDEV_CODE_POSITION_X:
val = normalize(ev_value,
range_min_position_x,
range_max_position_x)
if invert_x:
val = 1. - val
point['x'] = val
elif ev_code == MTDEV_CODE_POSITION_Y:
val = 1. - normalize(ev_value,
range_min_position_y,
range_max_position_y)
if invert_y:
val = 1. - val
point['y'] = val
elif ev_code == MTDEV_CODE_PRESSURE:
point['pressure'] = normalize(ev_value,
range_min_pressure,
range_max_pressure)
elif ev_code == MTDEV_CODE_TOUCH_MAJOR:
point['size_w'] = normalize(ev_value,
range_min_major,
range_max_major)
elif ev_code == MTDEV_CODE_TOUCH_MINOR:
point['size_h'] = normalize(ev_value,
range_min_minor,
range_max_minor)
elif ev_code == MTDEV_CODE_TRACKING_ID:
if ev_value == -1:
point['delete'] = True
# force process of changes here, as the slot can be
# reused.
_changes.add(_slot)
process([l_points[x] for x in _changes])
_changes.clear()
continue
else:
point['id'] = ev_value
else:
# unrecognized command, ignore.
continue
_changes.add(_slot)
# push all changes
if _changes:
process([l_points[x] for x in _changes])
_changes.clear()
def update(self, dispatch_fn):
# dispatch all event from threads
try:
while True:
event_type, touch = self.queue.popleft()
dispatch_fn(event_type, touch)
except:
pass
MotionEventFactory.register('mtdev', MTDMotionEventProvider)
|
trueblue2704/AskMeAnything | refs/heads/master | lib/python2.7/site-packages/itsdangerous.py | 626 | # -*- coding: utf-8 -*-
"""
itsdangerous
~~~~~~~~~~~~
A module that implements various functions to deal with untrusted
sources. Mainly useful for web applications.
:copyright: (c) 2014 by Armin Ronacher and the Django Software Foundation.
:license: BSD, see LICENSE for more details.
"""
import sys
import hmac
import zlib
import time
import base64
import hashlib
import operator
from datetime import datetime
# Python 2/3 compatibility shims used throughout the module.
PY2 = sys.version_info[0] == 2
if PY2:
    from itertools import izip
    text_type = unicode
    int_to_byte = chr
    number_types = (int, long, float)
else:
    from functools import reduce
    izip = zip
    text_type = str
    # Serialize a small int (0-255) to a single big-endian byte.
    int_to_byte = operator.methodcaller('to_bytes', 1, 'big')
    number_types = (int, float)
try:
import simplejson as json
except ImportError:
import json
class _CompactJSON(object):
    """JSON serializer that emits no whitespace between tokens."""

    def loads(self, payload):
        """Deserialize a JSON payload to a Python object."""
        return json.loads(payload)

    def dumps(self, obj):
        """Serialize *obj* to JSON without spaces after ',' or ':'."""
        return json.dumps(obj, separators=(',', ':'))


compact_json = _CompactJSON()
# 2011/01/01 in UTC
EPOCH = 1293840000


def want_bytes(s, encoding='utf-8', errors='strict'):
    """Coerce text to bytes using *encoding*; pass bytes through unchanged."""
    if isinstance(s, text_type):
        return s.encode(encoding, errors)
    return s
def is_text_serializer(serializer):
    """Checks whether a serializer generates text or binary, by probing
    what type it produces for an empty dict."""
    return isinstance(serializer.dumps({}), text_type)
# Starting with 3.3 the standard library has a c-implementation for
# constant time string compares.
_builtin_constant_time_compare = getattr(hmac, 'compare_digest', None)
def constant_time_compare(val1, val2):
    """Returns True if the two strings are equal, False otherwise.
    The time taken is independent of the number of characters that match. Do
    not use this function for anything else than comparison with known
    length targets.
    This is should be implemented in C in order to get it completely right.
    """
    if _builtin_constant_time_compare is not None:
        return _builtin_constant_time_compare(val1, val2)
    # Pure-Python fallback: always iterate over a full-length operand so the
    # loop count does not leak where the first mismatch occurs.
    len_eq = len(val1) == len(val2)
    if len_eq:
        result = 0
        left = val1
    else:
        result = 1
        left = val2
    for x, y in izip(bytearray(left), bytearray(val2)):
        result |= x ^ y
    return result == 0
class BadData(Exception):
    """Raised if bad data of any sort was encountered. This is the
    base for all exceptions that itsdangerous is currently using.
    .. versionadded:: 0.15
    """
    # Mirrors the message passed to the constructor.
    message = None
    def __init__(self, message):
        Exception.__init__(self, message)
        self.message = message
    def __str__(self):
        return text_type(self.message)
    # On Python 2 __str__ must return encoded bytes; the text version is
    # exposed as __unicode__ instead.
    if PY2:
        __unicode__ = __str__
        def __str__(self):
            return self.__unicode__().encode('utf-8')
class BadPayload(BadData):
    """This error is raised in situations when payload is loaded without
    checking the signature first and an exception happened as a result of
    that. The original exception that caused that will be stored on the
    exception as :attr:`original_error`.
    This can also happen with a :class:`JSONWebSignatureSerializer` that
    is subclassed and uses a different serializer for the payload than
    the expected one.
    .. versionadded:: 0.15
    """
    def __init__(self, message, original_error=None):
        BadData.__init__(self, message)
        #: If available, the error that indicates why the payload
        #: was not valid. This might be `None`.
        self.original_error = original_error
class BadSignature(BadData):
    """This error is raised if a signature does not match. As of
    itsdangerous 0.14 there are helpful attributes on the exception
    instances. You can also catch down the baseclass :exc:`BadData`.
    """
    def __init__(self, message, payload=None):
        BadData.__init__(self, message)
        #: The payload that failed the signature test. In some
        #: situations you might still want to inspect this, even if
        #: you know it was tampered with.
        #:
        #: .. versionadded:: 0.14
        self.payload = payload
class BadTimeSignature(BadSignature):
    """Raised for time based signatures that fail. This is a subclass
    of :class:`BadSignature` so you can catch those down as well.
    """
    def __init__(self, message, payload=None, date_signed=None):
        BadSignature.__init__(self, message, payload)
        #: If the signature expired this exposes the date of when the
        #: signature was created. This can be helpful in order to
        #: tell the user how long a link has been gone stale.
        #:
        #: .. versionadded:: 0.14
        self.date_signed = date_signed
class BadHeader(BadSignature):
    """Raised if a signed header is invalid in some form. This only
    happens for serializers that have a header that goes with the
    signature.
    .. versionadded:: 0.24
    """
    def __init__(self, message, payload=None, header=None,
                 original_error=None):
        BadSignature.__init__(self, message, payload)
        #: If the header is actually available but just malformed it
        #: might be stored here.
        self.header = header
        #: If available, the error that indicates why the payload
        #: was not valid. This might be `None`.
        self.original_error = original_error
class SignatureExpired(BadTimeSignature):
    """Signature timestamp is older than required max_age. This is a
    subclass of :exc:`BadTimeSignature` so you can use the baseclass for
    catching the error.
    """
    # No extra behavior; the class only exists as a distinct catchable type.
def base64_encode(string):
    """URL-safe base64 encode *string* (bytes, or text which is first
    encoded to bytes) and strip the '=' padding.
    The resulting bytestring is safe for putting into URLs.
    """
    return base64.urlsafe_b64encode(want_bytes(string)).strip(b'=')
def base64_decode(string):
    """URL-safe base64 decode *string* (bytes or text), restoring any '='
    padding that base64_encode stripped.  The result is a bytestring.
    """
    string = want_bytes(string, encoding='ascii', errors='ignore')
    padding = b'=' * (-len(string) % 4)
    return base64.urlsafe_b64decode(string + padding)
def int_to_bytes(num):
    """Serialize a non-negative integer to big-endian bytes (b'' for 0)."""
    assert num >= 0
    pieces = []
    while num:
        pieces.append(int_to_byte(num & 0xff))
        num >>= 8
    pieces.reverse()
    return b''.join(pieces)
def bytes_to_int(bytestr):
    """Deserialize big-endian bytes to a non-negative integer (0 for b'')."""
    result = 0
    for byte in bytearray(bytestr):
        result = (result << 8) | byte
    return result
class SigningAlgorithm(object):
    """Subclasses of `SigningAlgorithm` have to implement `get_signature` to
    provide signature generation functionality.
    """
    def get_signature(self, key, value):
        """Returns the signature for the given key and value"""
        raise NotImplementedError()
    def verify_signature(self, key, value, sig):
        """Verifies the given signature matches the expected signature"""
        # Constant-time comparison avoids leaking match length via timing.
        return constant_time_compare(sig, self.get_signature(key, value))
class NoneAlgorithm(SigningAlgorithm):
    """This class provides a algorithm that does not perform any signing and
    returns an empty signature.
    """
    def get_signature(self, key, value):
        # No MAC at all: provides no integrity protection by design.
        return b''
class HMACAlgorithm(SigningAlgorithm):
    """This class provides signature generation using HMACs."""
    #: The digest method to use with the MAC algorithm. This defaults to sha1
    #: but can be changed for any other function in the hashlib module.
    #: NOTE(review): sha1 is a legacy default; callers wanting a stronger
    #: digest can pass e.g. hashlib.sha256.
    default_digest_method = staticmethod(hashlib.sha1)
    def __init__(self, digest_method=None):
        if digest_method is None:
            digest_method = self.default_digest_method
        self.digest_method = digest_method
    def get_signature(self, key, value):
        """Return the raw HMAC digest of *value* keyed with *key*."""
        mac = hmac.new(key, msg=value, digestmod=self.digest_method)
        return mac.digest()
class Signer(object):
    """This class can sign bytes and unsign it and validate the signature
    provided.

    Salt can be used to namespace the hash, so that a signed string is only
    valid for a given namespace.  Leaving this at the default value or re-using
    a salt value across different parts of your application where the same
    signed value in one part can mean something different in another part
    is a security risk.

    See :ref:`the-salt` for an example of what the salt is doing and how you
    can utilize it.

    .. versionadded:: 0.14
        `key_derivation` and `digest_method` were added as arguments to the
        class constructor.

    .. versionadded:: 0.18
        `algorithm` was added as an argument to the class constructor.
    """

    #: The digest method to use for the signer.  This defaults to sha1 but can
    #: be changed for any other function in the hashlib module.
    #:
    #: .. versionchanged:: 0.14
    default_digest_method = staticmethod(hashlib.sha1)

    #: Controls how the key is derived.  The default is Django style
    #: concatenation.  Possible values are ``concat``, ``django-concat``
    #: and ``hmac``.  This is used for deriving a key from the secret key
    #: with an added salt.
    #:
    #: .. versionadded:: 0.14
    default_key_derivation = 'django-concat'

    def __init__(self, secret_key, salt=None, sep='.', key_derivation=None,
                 digest_method=None, algorithm=None):
        """Store the key material and signing configuration.

        Unset options fall back to the class-level defaults; the algorithm
        defaults to an HMAC over the configured digest method.
        """
        self.secret_key = want_bytes(secret_key)
        self.sep = sep
        self.salt = 'itsdangerous.Signer' if salt is None else salt
        if key_derivation is None:
            key_derivation = self.default_key_derivation
        self.key_derivation = key_derivation
        if digest_method is None:
            digest_method = self.default_digest_method
        self.digest_method = digest_method
        if algorithm is None:
            algorithm = HMACAlgorithm(self.digest_method)
        self.algorithm = algorithm

    def derive_key(self):
        """This method is called to derive the key.  If you're unhappy with
        the default key derivation choices you can override them here.
        Keep in mind that the key derivation in itsdangerous is not intended
        to be used as a security method to make a complex key out of a short
        password.  Instead you should use large random secret keys.
        """
        salt = want_bytes(self.salt)
        if self.key_derivation == 'concat':
            # digest(salt || secret)
            return self.digest_method(salt + self.secret_key).digest()
        elif self.key_derivation == 'django-concat':
            # digest(salt || 'signer' || secret) -- Django's historical scheme
            return self.digest_method(salt + b'signer' +
                                      self.secret_key).digest()
        elif self.key_derivation == 'hmac':
            # HMAC(secret, salt) -- the cryptographically preferred option
            mac = hmac.new(self.secret_key, digestmod=self.digest_method)
            mac.update(salt)
            return mac.digest()
        elif self.key_derivation == 'none':
            # Use the secret key directly, ignoring the salt.
            return self.secret_key
        else:
            raise TypeError('Unknown key derivation method')

    def get_signature(self, value):
        """Returns the signature for the given value"""
        value = want_bytes(value)
        key = self.derive_key()
        sig = self.algorithm.get_signature(key, value)
        # Signatures are transported base64 encoded.
        return base64_encode(sig)

    def sign(self, value):
        """Signs the given string."""
        # Layout of the result: <value><sep><base64 signature>
        return value + want_bytes(self.sep) + self.get_signature(value)

    def verify_signature(self, value, sig):
        """Verifies the signature for the given value."""
        key = self.derive_key()
        try:
            sig = base64_decode(sig)
        except Exception:
            # A signature that does not even base64 decode can never match.
            return False
        return self.algorithm.verify_signature(key, value, sig)

    def unsign(self, signed_value):
        """Unsigns the given string."""
        signed_value = want_bytes(signed_value)
        sep = want_bytes(self.sep)
        if sep not in signed_value:
            raise BadSignature('No %r found in value' % self.sep)
        # rsplit: the signature is always the last separator-delimited part,
        # so the value itself may contain the separator.
        value, sig = signed_value.rsplit(sep, 1)
        if self.verify_signature(value, sig):
            return value
        raise BadSignature('Signature %r does not match' % sig,
                           payload=value)

    def validate(self, signed_value):
        """Just validates the given signed value.  Returns `True` if the
        signature exists and is valid, `False` otherwise."""
        try:
            self.unsign(signed_value)
            return True
        except BadSignature:
            return False
class TimestampSigner(Signer):
    """Works like the regular :class:`Signer` but also records the time
    of the signing and can be used to expire signatures.  The unsign
    method can raise a :exc:`SignatureExpired` exception if the unsigning
    failed because the signature is expired.  This exception is a subclass
    of :exc:`BadSignature`.
    """

    def get_timestamp(self):
        """Returns the current timestamp.  This implementation returns the
        seconds since 1/1/2011.  The function must return an integer.
        """
        return int(time.time() - EPOCH)

    def timestamp_to_datetime(self, ts):
        """Used to convert the timestamp from `get_timestamp` into a
        datetime object.
        """
        return datetime.utcfromtimestamp(ts + EPOCH)

    def sign(self, value):
        """Signs the given string and also attaches a time information."""
        value = want_bytes(value)
        timestamp = base64_encode(int_to_bytes(self.get_timestamp()))
        sep = want_bytes(self.sep)
        # Layout: <value><sep><timestamp><sep><signature>; the signature
        # covers both the value and the timestamp.
        value = value + sep + timestamp
        return value + sep + self.get_signature(value)

    def unsign(self, value, max_age=None, return_timestamp=False):
        """Works like the regular :meth:`~Signer.unsign` but can also
        validate the time.  See the base docstring of the class for
        the general behavior.  If `return_timestamp` is set to `True`
        the timestamp of the signature will be returned as naive
        :class:`datetime.datetime` object in UTC.
        """
        try:
            result = Signer.unsign(self, value)
            sig_error = None
        except BadSignature as e:
            # Defer raising: we still try to split out the timestamp so the
            # eventual exception can carry it as ``date_signed``.
            sig_error = e
            result = e.payload or b''
        sep = want_bytes(self.sep)
        # If there is no timestamp in the result there is something
        # seriously wrong.  In case there was a signature error, we raise
        # that one directly, otherwise we have a weird situation in which
        # we shouldn't have come except someone uses a time-based serializer
        # on non-timestamp data, so catch that.
        if not sep in result:
            if sig_error:
                raise sig_error
            raise BadTimeSignature('timestamp missing', payload=result)
        value, timestamp = result.rsplit(sep, 1)
        try:
            timestamp = bytes_to_int(base64_decode(timestamp))
        except Exception:
            timestamp = None
        # Signature is *not* okay.  Raise a proper error now that we have
        # split the value and the timestamp.
        if sig_error is not None:
            raise BadTimeSignature(text_type(sig_error), payload=value,
                                   date_signed=timestamp)
        # Signature was okay but the timestamp is actually not there or
        # malformed.  Should not happen, but well.  We handle it nonetheless
        if timestamp is None:
            raise BadTimeSignature('Malformed timestamp', payload=value)
        # Check timestamp is not older than max_age
        if max_age is not None:
            age = self.get_timestamp() - timestamp
            if age > max_age:
                raise SignatureExpired(
                    'Signature age %s > %s seconds' % (age, max_age),
                    payload=value,
                    date_signed=self.timestamp_to_datetime(timestamp))
        if return_timestamp:
            return value, self.timestamp_to_datetime(timestamp)
        return value

    def validate(self, signed_value, max_age=None):
        """Just validates the given signed value.  Returns `True` if the
        signature exists and is valid, `False` otherwise."""
        try:
            self.unsign(signed_value, max_age=max_age)
            return True
        except BadSignature:
            return False
class Serializer(object):
    """Serialization interface on top of the signer.

    The API is similar to json/pickle and other modules, but structured
    slightly differently internally.  To change how payloads are parsed
    and produced, override :meth:`load_payload` and :meth:`dump_payload`.

    This implementation uses simplejson if available for dumping and
    loading and falls back to the standard library's json module
    otherwise.

    Starting with 0.14 you do not need to subclass this class in order to
    switch out or customize the :class:`Signer`.  You can instead pass a
    different class to the constructor as well as keyword arguments as a
    dictionary that should be forwarded::

        s = Serializer(signer_kwargs={'key_derivation': 'hmac'})

    .. versionchanged:: 0.14:
       The `signer` and `signer_kwargs` parameters were added to the
       constructor.
    """

    #: Serializer module or class used when none is passed to the
    #: constructor.  Currently defaults to :mod:`json`.
    default_serializer = json

    #: The :class:`Signer` class used when none is passed to the
    #: constructor.
    #:
    #: .. versionadded:: 0.14
    default_signer = Signer

    def __init__(self, secret_key, salt=b'itsdangerous', serializer=None,
                 signer=None, signer_kwargs=None):
        self.secret_key = want_bytes(secret_key)
        self.salt = want_bytes(salt)
        self.serializer = (self.default_serializer if serializer is None
                           else serializer)
        self.is_text_serializer = is_text_serializer(self.serializer)
        self.signer = self.default_signer if signer is None else signer
        self.signer_kwargs = signer_kwargs or {}

    def load_payload(self, payload, serializer=None):
        """Deserialize *payload*, raising :class:`BadPayload` when it is
        not valid.  *serializer* overrides the serializer stored on the
        instance.  The encoded payload is always byte based.
        """
        if serializer is None:
            serializer = self.serializer
            is_text = self.is_text_serializer
        else:
            is_text = is_text_serializer(serializer)
        try:
            # Text serializers expect str, so decode the raw bytes first.
            return serializer.loads(
                payload.decode('utf-8') if is_text else payload)
        except Exception as err:
            raise BadPayload('Could not load the payload because an '
                             'exception occurred on unserializing the data',
                             original_error=err)

    def dump_payload(self, obj):
        """Serialize *obj*; the result is always a bytestring.  Output of
        a text based serializer is utf-8 encoded automatically.
        """
        return want_bytes(self.serializer.dumps(obj))

    def make_signer(self, salt=None):
        """Create a new signer instance to be used for this operation.
        The default implementation instantiates :attr:`signer`.
        """
        return self.signer(self.secret_key,
                           salt=self.salt if salt is None else salt,
                           **self.signer_kwargs)

    def dumps(self, obj, salt=None):
        """Serialize *obj* with the internal serializer and sign it.  The
        result is text for text based serializers and bytes otherwise.
        """
        signed = self.make_signer(salt).sign(
            want_bytes(self.dump_payload(obj)))
        return signed.decode('utf-8') if self.is_text_serializer else signed

    def dump(self, obj, f, salt=None):
        """Like :meth:`dumps` but writes into the open file *f*.  The file
        handle has to be compatible with what the serializer produces.
        """
        f.write(self.dumps(obj, salt))

    def loads(self, s, salt=None):
        """Reverse of :meth:`dumps`; raises :exc:`BadSignature` when the
        signature validation fails.
        """
        return self.load_payload(self.make_signer(salt).unsign(want_bytes(s)))

    def load(self, f, salt=None):
        """Like :meth:`loads` but reads from the open file *f*."""
        return self.loads(f.read(), salt)

    def loads_unsafe(self, s, salt=None):
        """Like :meth:`loads` but without verifying the signature.  This is
        potentially very dangerous to use depending on how your serializer
        works.  The return value is ``(signature_okay, payload)`` instead
        of just the payload; this function never raises.

        Use it for debugging only and if you know that your serializer
        module is not exploitable (eg: do not use it with a pickle
        serializer).

        .. versionadded:: 0.15
        """
        return self._loads_unsafe_impl(s, salt)

    def _loads_unsafe_impl(self, s, salt, load_kwargs=None,
                           load_payload_kwargs=None):
        """Low level helper shared by :meth:`loads_unsafe` and serializer
        subclasses.
        """
        try:
            return True, self.loads(s, salt=salt, **(load_kwargs or {}))
        except BadSignature as err:
            if err.payload is None:
                return False, None
            # The signature is broken, but the payload might still decode.
            try:
                return False, self.load_payload(err.payload,
                                                **(load_payload_kwargs or {}))
            except BadPayload:
                return False, None

    def load_unsafe(self, f, *args, **kwargs):
        """Like :meth:`loads_unsafe` but loads from a file.

        .. versionadded:: 0.15
        """
        return self.loads_unsafe(f.read(), *args, **kwargs)
class TimedSerializer(Serializer):
    """Serializer variant backed by :class:`TimestampSigner` instead of
    the plain :class:`Signer`.
    """

    default_signer = TimestampSigner

    def loads(self, s, max_age=None, return_timestamp=False, salt=None):
        """Reverse of :meth:`dumps`; raises :exc:`BadSignature` when the
        signature validation fails.  With *max_age* given, the signature
        must not be older than that many seconds, otherwise
        :exc:`SignatureExpired` (a :exc:`BadSignature` subclass) is
        raised.  All arguments are forwarded to the signer's
        :meth:`~TimestampSigner.unsign` method.
        """
        signer = self.make_signer(salt)
        base64d, ts = signer.unsign(s, max_age, return_timestamp=True)
        payload = self.load_payload(base64d)
        return (payload, ts) if return_timestamp else payload

    def loads_unsafe(self, s, max_age=None, salt=None):
        """Signature-ignoring variant of :meth:`loads`; see
        :meth:`Serializer.loads_unsafe`."""
        return self._loads_unsafe_impl(s, salt,
                                       load_kwargs={'max_age': max_age},
                                       load_payload_kwargs={})
class JSONWebSignatureSerializer(Serializer):
    """This serializer implements JSON Web Signature (JWS) support.  Only
    supports the JWS Compact Serialization.
    """

    #: Mapping of supported JWS ``alg`` values to signing algorithms.
    jws_algorithms = {
        'HS256': HMACAlgorithm(hashlib.sha256),
        'HS384': HMACAlgorithm(hashlib.sha384),
        'HS512': HMACAlgorithm(hashlib.sha512),
        'none': NoneAlgorithm(),
    }

    #: The default algorithm to use for signature generation
    default_algorithm = 'HS256'

    default_serializer = compact_json

    def __init__(self, secret_key, salt=None, serializer=None,
                 signer=None, signer_kwargs=None, algorithm_name=None):
        Serializer.__init__(self, secret_key, salt, serializer,
                            signer, signer_kwargs)
        if algorithm_name is None:
            algorithm_name = self.default_algorithm
        self.algorithm_name = algorithm_name
        self.algorithm = self.make_algorithm(algorithm_name)

    def load_payload(self, payload, return_header=False):
        """Decode a ``header.payload`` pair, optionally also returning the
        parsed JWS header dict.  Raises :exc:`BadHeader` or
        :exc:`BadPayload` on malformed input.
        """
        payload = want_bytes(payload)
        if b'.' not in payload:
            raise BadPayload('No "." found in value')
        # Compact serialization: base64(header) '.' base64(payload)
        base64d_header, base64d_payload = payload.split(b'.', 1)
        try:
            json_header = base64_decode(base64d_header)
        except Exception as e:
            raise BadHeader('Could not base64 decode the header because of '
                            'an exception', original_error=e)
        try:
            json_payload = base64_decode(base64d_payload)
        except Exception as e:
            raise BadPayload('Could not base64 decode the payload because of '
                             'an exception', original_error=e)
        try:
            # The header is always plain JSON regardless of the serializer.
            header = Serializer.load_payload(self, json_header,
                                             serializer=json)
        except BadData as e:
            raise BadHeader('Could not unserialize header because it was '
                            'malformed', original_error=e)
        if not isinstance(header, dict):
            raise BadHeader('Header payload is not a JSON object',
                            header=header)
        payload = Serializer.load_payload(self, json_payload)
        if return_header:
            return payload, header
        return payload

    def dump_payload(self, header, obj):
        """Encode *header* and *obj* into the JWS compact form
        ``base64(header).base64(payload)``."""
        base64d_header = base64_encode(self.serializer.dumps(header))
        base64d_payload = base64_encode(self.serializer.dumps(obj))
        return base64d_header + b'.' + base64d_payload

    def make_algorithm(self, algorithm_name):
        """Look up a signing algorithm by its JWS ``alg`` name."""
        try:
            return self.jws_algorithms[algorithm_name]
        except KeyError:
            raise NotImplementedError('Algorithm not supported')

    def make_signer(self, salt=None, algorithm=None):
        """Create the signer; JWS uses ``.`` as separator and, when no salt
        is configured, skips key derivation entirely."""
        if salt is None:
            salt = self.salt
        key_derivation = 'none' if salt is None else None
        if algorithm is None:
            algorithm = self.algorithm
        return self.signer(self.secret_key, salt=salt, sep='.',
                           key_derivation=key_derivation, algorithm=algorithm)

    def make_header(self, header_fields):
        """Build the JWS header dict, forcing ``alg`` to the configured
        algorithm name."""
        header = header_fields.copy() if header_fields else {}
        header['alg'] = self.algorithm_name
        return header

    def dumps(self, obj, salt=None, header_fields=None):
        """Like :meth:`~Serializer.dumps` but creates a JSON Web Signature.  It
        also allows for specifying additional fields to be included in the JWS
        Header.
        """
        header = self.make_header(header_fields)
        signer = self.make_signer(salt, self.algorithm)
        return signer.sign(self.dump_payload(header, obj))

    def loads(self, s, salt=None, return_header=False):
        """Reverse of :meth:`dumps`.  If requested via `return_header` it will
        return a tuple of payload and header.
        """
        payload, header = self.load_payload(
            self.make_signer(salt, self.algorithm).unsign(want_bytes(s)),
            return_header=True)
        # Reject tokens signed with a different algorithm than configured.
        if header.get('alg') != self.algorithm_name:
            raise BadHeader('Algorithm mismatch', header=header,
                            payload=payload)
        if return_header:
            return payload, header
        return payload

    def loads_unsafe(self, s, salt=None, return_header=False):
        """Signature-ignoring variant of :meth:`loads`; see
        :meth:`Serializer.loads_unsafe`."""
        kwargs = {'return_header': return_header}
        return self._loads_unsafe_impl(s, salt, kwargs, kwargs)
class TimedJSONWebSignatureSerializer(JSONWebSignatureSerializer):
    """Works like the regular :class:`JSONWebSignatureSerializer` but also
    records the time of the signing and can be used to expire signatures.

    JWS currently does not specify this behavior but it mentions a possible
    extension like this in the spec.  The expiry date is encoded into the
    header similarly to what `draft-ietf-oauth-json-web-token
    <http://self-issued.info/docs/draft-ietf-oauth-json-web-token.html#expDef>`_
    specifies.  The unsign method can raise a :exc:`SignatureExpired`
    exception if the unsigning failed because the signature is expired.
    This exception is a subclass of :exc:`BadSignature`.
    """

    #: Default lifetime of a signature, in seconds (one hour).
    DEFAULT_EXPIRES_IN = 3600

    def __init__(self, secret_key, expires_in=None, **kwargs):
        JSONWebSignatureSerializer.__init__(self, secret_key, **kwargs)
        self.expires_in = (self.DEFAULT_EXPIRES_IN if expires_in is None
                           else expires_in)

    def make_header(self, header_fields):
        """Extend the JWS header with ``iat`` (issued at) and ``exp``
        (expires) claims."""
        header = JSONWebSignatureSerializer.make_header(self, header_fields)
        issued_at = self.now()
        header['iat'] = issued_at
        header['exp'] = issued_at + self.expires_in
        return header

    def loads(self, s, salt=None, return_header=False):
        """Like the base ``loads`` but additionally validates the ``exp``
        claim against the current time."""
        payload, header = JSONWebSignatureSerializer.loads(
            self, s, salt, return_header=True)
        if 'exp' not in header:
            raise BadSignature('Missing expiry date', payload=payload)
        valid_int_date = (isinstance(header['exp'], number_types)
                          and header['exp'] > 0)
        if not valid_int_date:
            raise BadSignature('expiry date is not an IntDate',
                               payload=payload)
        if header['exp'] < self.now():
            raise SignatureExpired('Signature expired', payload=payload,
                                   date_signed=self.get_issue_date(header))
        return (payload, header) if return_header else payload

    def get_issue_date(self, header):
        """Return the ``iat`` claim as a naive UTC datetime, or ``None``
        when it is absent or not numeric."""
        rv = header.get('iat')
        if isinstance(rv, number_types):
            return datetime.utcfromtimestamp(int(rv))

    def now(self):
        """Current time as integer Unix timestamp."""
        return int(time.time())
class URLSafeSerializerMixin(object):
    """Mixed in with a regular serializer it makes the payload URL safe.

    The payload is base64 encoded and, when that actually saves space,
    zlib compressed first; a compressed payload is flagged with a leading
    ``.`` character.
    """

    def load_payload(self, payload):
        was_compressed = payload.startswith(b'.')
        if was_compressed:
            payload = payload[1:]
        try:
            json = base64_decode(payload)
        except Exception as e:
            raise BadPayload('Could not base64 decode the payload because of '
                             'an exception', original_error=e)
        if was_compressed:
            try:
                json = zlib.decompress(json)
            except Exception as e:
                raise BadPayload('Could not zlib decompress the payload before '
                                 'decoding the payload', original_error=e)
        return super(URLSafeSerializerMixin, self).load_payload(json)

    def dump_payload(self, obj):
        json = super(URLSafeSerializerMixin, self).dump_payload(obj)
        compressed = zlib.compress(json)
        # Only compress when it wins more than the one-byte '.' marker.
        use_compressed = len(compressed) < (len(json) - 1)
        if use_compressed:
            json = compressed
        base64d = base64_encode(json)
        return b'.' + base64d if use_compressed else base64d
class URLSafeSerializer(URLSafeSerializerMixin, Serializer):
    """Works like :class:`Serializer` but dumps and loads into a URL
    safe string consisting of the upper and lowercase character of the
    alphabet as well as ``'_'``, ``'-'`` and ``'.'``.
    """
    #: Compact JSON keeps the base64 encoded payload as short as possible.
    default_serializer = compact_json
class URLSafeTimedSerializer(URLSafeSerializerMixin, TimedSerializer):
    """Works like :class:`TimedSerializer` but dumps and loads into a URL
    safe string consisting of the upper and lowercase character of the
    alphabet as well as ``'_'``, ``'-'`` and ``'.'``.
    """
    #: Compact JSON keeps the base64 encoded payload as short as possible.
    default_serializer = compact_json
|
SlivTime/iktomi | refs/heads/master | iktomi/web/url_templates.py | 3 | # -*- coding: utf-8 -*-
import six
if six.PY2:
from urllib import quote, unquote
else:# pragma: no cover
from urllib.parse import quote, unquote
import re
import logging
from .url_converters import default_converters, ConvertError
logger = logging.getLogger(__name__)
def urlquote(value):
    """Percent-encode *value* (text or integer) as UTF-8."""
    text = six.text_type(value) if isinstance(value, six.integer_types) \
           else value
    return quote(text.encode('utf-8'))
class UrlBuildingError(Exception): pass
_split_pattern = re.compile(r'(<[^<]*>)')
#NOTE: taken from werkzeug
_converter_pattern = re.compile(r'''^<
(?:
(?P<converter>[a-zA-Z_][a-zA-Z0-9_]+) # converter name
(?:\((?P<args>.*?)\))? # converter args
\: # delimiter
)?
(?P<variable>[a-zA-Z_][a-zA-Z0-9_]*) # variable name
>$''', re.VERBOSE | re.U)
_static_url_pattern = re.compile(r'^[^<]*?$')
def construct_re(url_template, match_whole_str=False, converters=None,
                 default_converter='string', anonymous=False):
    '''
    url_template - str or unicode representing template

    Constructed pattern expects urlencoded string!

    returns (compiled re pattern,
             dict {url param name: converter instance},
             list of builder params: static strings interleaved with
             (variable name, converter instance) tuples)

    If anonymous=True is set, regexp will be compiled without names of
    variables.  This is handy for example, if you want to dump an url map
    to JSON.

    Raises ValueError for a malformed placeholder.
    '''
    # needed for reverse url building
    builder_params = []
    # found url params and their converters
    url_params = {}
    result = r'^'
    # NOTE: the loop index formerly produced by enumerate() was unused,
    # so we iterate over the parts directly.
    for part in _split_pattern.split(url_template):
        if _static_url_pattern.match(part):
            #NOTE: right order:
            # - make part str if it was unicode
            # - urlquote part
            # - escape all re-specific chars in part
            result += re.escape(urlquote(part))
            builder_params.append(part)
            continue
        is_converter = _converter_pattern.match(part)
        if is_converter:
            groups = is_converter.groupdict()
            converter_name = groups['converter'] or default_converter
            conv_object = init_converter(converters[converter_name],
                                         groups['args'])
            variable = groups['variable']
            builder_params.append((variable, conv_object))
            url_params[variable] = conv_object
            if anonymous:
                result += conv_object.regex
            else:
                result += '(?P<{}>{})'.format(variable, conv_object.regex)
            continue
        raise ValueError('Incorrect url template {!r}'.format(url_template))
    if match_whole_str:
        result += '$'
    return re.compile(result), url_params, builder_params
def init_converter(conv_class, args):
    """Instantiate *conv_class*, parsing *args* (the raw text between the
    parentheses of a placeholder) into positional and keyword arguments.
    """
    if not args:
        return conv_class()
    #XXX: taken from werkzeug.  Bare names inside the argument string
    # evaluate to their own name, thanks to the identity-mapping object
    # used as the local namespace of the eval below.
    storage = type('_Storage', (), {'__getitem__': lambda s, x: x})()
    pos_args, kw_args = eval(u'(lambda *a, **kw: (a, kw))({})'.format(args),
                             {}, storage)
    return conv_class(*pos_args, **kw_args)
class UrlTemplate(object):
    """Compiled URL template: matches urlencoded paths and builds URLs
    back from keyword arguments (reverse routing)."""

    def __init__(self, template, match_whole_str=True, converters=None,
                 default_converter='string'):
        self.template = template
        self.match_whole_str = match_whole_str
        self._allowed_converters = self._init_converters(converters)
        self._pattern, self._url_params, self._builder_params = \
                construct_re(template,
                             match_whole_str=match_whole_str,
                             converters=self._allowed_converters,
                             default_converter=default_converter)

    def match(self, path, **kw):
        '''
        path - str (urlencoded)

        Returns (matched string, dict of converted params) on success,
        (None, {}) on mismatch or converter failure.  Extra keyword
        arguments are forwarded to each converter's ``to_python``.
        '''
        m = self._pattern.match(path)
        if m:
            kwargs = m.groupdict()
            # convert params
            for url_arg_name, value_urlencoded in kwargs.items():
                conv_obj = self._url_params[url_arg_name]
                unicode_value = unquote(value_urlencoded)
                if isinstance(unicode_value, six.binary_type):
                    # XXX ?? -- on py2 unquote may return bytes; decode them
                    # leniently before handing off to the converter.
                    unicode_value = unicode_value.decode('utf-8', 'replace')
                try:
                    kwargs[url_arg_name] = conv_obj.to_python(unicode_value, **kw)
                except ConvertError as err:
                    # A failed conversion means the whole path does not match.
                    logger.debug('ConvertError in parameter "%s" '
                                 'by %r, value "%s"',
                                 url_arg_name,
                                 err.converter.__class__,
                                 err.value)
                    return None, {}
            return m.group(), kwargs
        return None, {}

    def __call__(self, **kwargs):
        'Url building with url params values taken from kwargs. (reverse)'
        result = ''
        for part in self._builder_params:
            if isinstance(part, tuple):
                # Placeholder: (variable name, converter instance)
                var, conv_obj = part
                try:
                    value = kwargs[var]
                except KeyError:
                    if conv_obj.default is not conv_obj.NotSet:
                        value = conv_obj.default
                    else:
                        raise UrlBuildingError('Missing argument for '
                                               'URL builder: {}'.format(var))
                result += conv_obj.to_url(value)
            else:
                # Static template text is appended verbatim.
                result += part
        # result - unicode not quotted string
        return result

    def _init_converters(self, converters):
        # Start from the library defaults and layer user converters on top.
        convs = default_converters.copy()
        if converters is not None:
            convs.update(converters)
        return convs

    def __eq__(self, other):
        return self.template == other.template and \
               self.match_whole_str == other.match_whole_str

    def __repr__(self):
        return '{}({!r}, match_whole_str={!r})'.format(
            self.__class__.__name__, self.template,
            self.match_whole_str)
|
ic-hep/DIRAC | refs/heads/rel-v6r15 | Resources/Computing/BatchSystems/Host.py | 3 | #########################################################################
#
# $HeadURL$
# Host.py
# 4.11.2014
# Author: A.T.
#
#########################################################################
""" Host - class for managing jobs on a host. Host objects are invoked
with LocalComputingElement or SSHComputingElement objects
"""
__RCSID__ = "$Id$"
import commands, os, glob, shutil, signal, subprocess, stat, json, multiprocessing
from datetime import datetime, timedelta
# Clean job info and output after so many days
CLEAN_DELAY = timedelta( 7 )
class Host( object ):
  """Manage detached job processes on the local host (Python 2 code).

  Jobs are started through a generated ``run_detached.sh`` wrapper and
  tracked via small JSON ``<stamp>.info`` files holding the PID,
  submission time, job id and core count.
  """

  def __init__( self ):
    self.nCores = 1
    try:
      self.nCores = multiprocessing.cpu_count()
    except:
      # Best effort: keep the default of a single core if the count
      # cannot be determined on this platform.
      pass

  def submitJob( self, **kwargs ):
    """Start up to NJobs detached processes and record their info files.

    Returns a dict with 'Status' (0 on success) and either the list of
    started job stamps under 'Jobs' or an error 'Message'.
    """
    resultDict = {}
    args = dict( kwargs )
    MANDATORY_PARAMETERS = [ 'Executable', 'SharedDir', 'OutputDir', 'ErrorDir', 'WorkDir',
                             'InfoDir', 'ExecutionContext', 'JobStamps' ]
    for argument in MANDATORY_PARAMETERS:
      if not argument in args:
        resultDict['Status'] = -1
        resultDict['Message'] = 'No %s' % argument
        return resultDict
    nJobs = args.get( 'NJobs', 1 )
    stamps = args['JobStamps']
    context = args.get( 'ExecutionContext', 'Local' )
    # Environment variable name advertising the job id, e.g. LOCAL_JOBID.
    jobidName = context.upper() + '_JOBID'
    nCores = args.get( 'NCores', 1 )
    # Prepare the executor command: a small shell wrapper that detaches
    # the executable (setsid, redirected stdio) and prints its PID.
    runFileName = os.path.join( args['SharedDir'], 'run_detached.sh' )
    runFileName = os.path.expandvars( runFileName )
    if os.path.isfile( runFileName ):
      os.unlink( runFileName )
    runFile = open( runFileName, 'w' )
    runFile.write( """
( exec </dev/null
# echo $2
exec > $2
# echo $3
exec 2> $3
# echo $1
exec setsid $1
) &
kill -0 $! > /dev/null 2>&1 || exit 1
echo $!
exit 0
""" )
    runFile.close()
    os.chmod( runFileName, stat.S_IXUSR | stat.S_IRUSR )
    jobs = []
    output = ''
    args['RunFile'] = runFileName
    for _i in range( int(nJobs) ):
      args['Stamp'] = stamps[_i]
      envDict = os.environ
      envDict[jobidName] = stamps[_i]
      try:
        # Each job runs in its own working directory named by its stamp.
        jobDir = '%(WorkDir)s/%(Stamp)s' % args
        jobDir = os.path.expandvars( jobDir )
        os.makedirs( jobDir )
        os.chdir( jobDir )
        popenObject = subprocess.Popen( [ "%(RunFile)s %(Executable)s %(OutputDir)s/%(Stamp)s.out %(ErrorDir)s/%(Stamp)s.err" % args ],
                                        stdout = subprocess.PIPE,
                                        shell = True,
                                        env = envDict )
        # The wrapper prints the detached process PID on stdout.
        pid = popenObject.communicate()[0]
      except OSError, x:
        output = str(x)
        break
      pid = int( pid )
      if pid:
        # Store the job info
        jobInfo = { 'PID': pid,
                    'SubmissionTime': datetime.utcnow().strftime( "%Y-%m-%d %H:%M:%S" ),
                    'JOBID': stamps[_i],
                    'NCores': nCores
                  }
        jobString = json.dumps( jobInfo )
        pidFileName = "%(InfoDir)s/%(Stamp)s.info" % args
        pidFileName = os.path.expandvars( pidFileName )
        pidFile = open( pidFileName, 'w' )
        pidFile.write( jobString )
        pidFile.close()
        jobs.append( stamps[_i] )
      else:
        break
    if jobs:
      resultDict['Status'] = 0
      resultDict['Jobs'] = jobs
    else:
      resultDict['Status'] = 1
      resultDict['Message'] = output
    return resultDict

  def __cleanJob( self, stamp, infoDir, workDir, outputDir = None, errorDir = None ):
    """Remove the work directory, info file and optional out/err files of
    the job identified by *stamp*."""
    jobDir = os.path.join( workDir, stamp )
    if os.path.isdir( jobDir ):
      shutil.rmtree( jobDir )
    pidFile = os.path.join( infoDir, '%s.info' % stamp )
    if os.path.isfile( pidFile ):
      os.unlink( pidFile )
    if outputDir:
      outFile = os.path.join( outputDir, '%s.out' % stamp )
      if os.path.isfile( outFile ):
        os.unlink( outFile )
    if errorDir:
      errFile = os.path.join( errorDir, '%s.err' % stamp )
      if os.path.isfile( errFile ):
        os.unlink( errFile )

  def __getJobInfo( self, infoDir, stamp ):
    """Read and parse the JSON info file of a job; returns {} when the
    file does not exist."""
    jobInfo = {}
    infoFileName = os.path.join( infoDir, '%s.info' % stamp )
    infoFileName = os.path.expandvars( infoFileName )
    if os.path.exists( infoFileName ):
      infoFile = open( infoFileName, 'r' )
      jobInfo = infoFile.read().strip()
      infoFile.close()
      jobInfo = json.loads( jobInfo )
    return jobInfo

  def getCEStatus( self, **kwargs ):
    """ Get the overall CE status
    """
    resultDict = { 'Running': 0, 'Waiting': 0 }
    MANDATORY_PARAMETERS = [ 'InfoDir', 'WorkDir', 'OutputDir', 'ErrorDir', 'User' ]
    for argument in MANDATORY_PARAMETERS:
      if not argument in kwargs:
        resultDict['Status'] = -1
        resultDict['Message'] = 'No %s' % argument
        return resultDict
    user = kwargs.get( 'User' )
    infoDir = kwargs.get( 'InfoDir' )
    workDir = kwargs.get( 'WorkDir' )
    outputDir = kwargs.get( 'OutputDir' )
    errorDir = kwargs.get( 'ErrorDir' )
    running = 0
    usedCores = 0
    infoDir = os.path.expandvars( infoDir )
    infoFiles = glob.glob( '%s/*.info' % infoDir )
    for infoFileName in infoFiles:
      infoFileName = os.path.expandvars( infoFileName )
      infoFile = open( infoFileName, 'r' )
      jobInfo = infoFile.read().strip()
      infoFile.close()
      jobInfo = json.loads( jobInfo )
      pid = jobInfo['PID']
      # ps exits 0 here; the line count tells whether the PID is alive.
      cmd = 'ps -f -p %s --no-headers | wc -l' % pid
      status,output = commands.getstatusoutput( cmd )
      if status == 0:
        if output.strip() == '1':
          running += 1
          usedCores += jobInfo['NCores']
        else:
          # Process is gone: clean up its leftovers once old enough.
          stamp = jobInfo['JOBID']
          jobLife = datetime.utcnow() - datetime.strptime( jobInfo['SubmissionTime'], "%Y-%m-%d %H:%M:%S" )
          if jobLife > CLEAN_DELAY:
            self.__cleanJob( stamp, infoDir, workDir, outputDir, errorDir )
      else:
        resultDict['Status'] = status
        return resultDict
    resultDict['Status'] = 0
    resultDict['Running'] = running
    availableCores = self.nCores - usedCores
    resultDict['AvailableCores'] = availableCores
    return resultDict

  def __checkPid( self, pid, user ):
    """Classify a PID as 'Running', 'Done' or 'Unknown' (pid 0) by probing
    the process table for the given user."""
    if pid == 0:
      return "Unknown"
    status, output = commands.getstatusoutput( 'ps -f -p %s | grep %s | wc -l' % ( pid, user ) )
    if status == 0 and output.strip() == "1":
      return "Running"
    else:
      return "Done"

  def getJobStatus( self, **kwargs ):
    """Return the status ('Running'/'Done'/'Unknown') of each requested
    job stamp under the 'Jobs' key."""
    resultDict = {}
    MANDATORY_PARAMETERS = [ 'InfoDir', 'JobIDList', 'User' ]
    for argument in MANDATORY_PARAMETERS:
      if not argument in kwargs:
        resultDict['Status'] = -1
        resultDict['Message'] = 'No %s' % argument
        return resultDict
    user = kwargs.get( 'User' )
    infoDir = kwargs.get( 'InfoDir' )
    jobStamps = kwargs.get( 'JobIDList' )
    jobDict = {}
    for stamp in jobStamps:
      pid = self.__getJobInfo( infoDir, stamp ).get( 'PID', 0 )
      jobDict[stamp] = self.__checkPid( pid, user )
    resultDict['Status'] = 0
    resultDict['Jobs'] = jobDict
    return resultDict

  def killJob( self, **kwargs ):
    """SIGKILL every still-running job in JobIDList and clean up its
    files; already-finished jobs are reported as 'Done'."""
    resultDict = {}
    MANDATORY_PARAMETERS = [ 'InfoDir', 'WorkDir', 'OutputDir',
                             'ErrorDir', 'JobIDList', 'User' ]
    for argument in MANDATORY_PARAMETERS:
      if not argument in kwargs:
        resultDict['Status'] = -1
        resultDict['Message'] = 'No %s' % argument
        return resultDict
    user = kwargs.get( 'User' )
    infoDir = kwargs.get( 'InfoDir' )
    workDir = kwargs.get( 'WorkDir' )
    outputDir = kwargs.get( 'OutputDir' )
    errorDir = kwargs.get( 'ErrorDir' )
    jobStamps = kwargs.get( 'JobIDList' )
    jobDict = {}
    for stamp in jobStamps:
      pid = self.__getJobInfo( infoDir, stamp ).get( 'PID', 0 )
      if self.__checkPid( pid, user ) == 'Running':
        os.kill( pid, signal.SIGKILL )
        self.__cleanJob( stamp, infoDir, workDir, outputDir, errorDir )
        jobDict[stamp] = 'Killed'
      else:
        jobDict[stamp] = 'Done'
    resultDict['Status'] = 0
    resultDict['Successful'] = jobStamps
    resultDict['Failed'] = []
    resultDict['Jobs'] = jobDict
    return resultDict
|
xiaokeng/robotframework | refs/heads/master | atest/testdata/test_libraries/MyLibDir/SubModuleLib.py | 37 | def keyword_in_mylibdir_submodulelib():
pass
|
Fisiu/calendar-oswiecim | refs/heads/master | webapp/calendars/migrations/0027_organizer_url.py | 2 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds an optional ``url`` field
    # (verbose name "Adres www") to the ``Organizer`` model.

    dependencies = [
        ('calendars', '0026_auto_20160222_1953'),
    ]

    operations = [
        migrations.AddField(
            model_name='organizer',
            name='url',
            field=models.URLField(null=True, blank=True, verbose_name='Adres www'),
        ),
    ]
|
osvalr/odoo | refs/heads/8.0 | addons/project_timesheet/report/__init__.py | 441 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import task_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
FGMEMBERS/c150 | refs/heads/master | Models/Panel/Instruments/asi/asi.py | 15 | #!/usr/bin/env python
# Draws the SVG face of a Cessna 150 airspeed indicator using the
# svginstr helper library (Python 2 script; `Instrument` and `Error`
# come from the star import below).
from svginstr import *
import sys

__author__ = "Melchior FRANZ < mfranz # aon : at >"
__url__ = "http://gitorious.org/svginstr/"
__version__ = "0.2"
__license__ = "GPL v2+"
__doc__ = """
"""

try:
    a = Instrument("asi.svg", 512, 512, "test face; " + __version__)
    a.disc(98, color = 'black')
    a.disc(1)

    # define mapping function: map scale value 30 - 160 to angle 0-320 degree.
    # However, the values from 100-160 are compressed slightly, so the lambda function is just 120 values
    a.angle = lambda x: x * 300.0 / 115.0 - 145.0

    # inside line
    l = 50
    # compression factor applied to the 100+ knot portion of the scale
    compress = 0.8

    # colored speed-range arcs (green = normal, yellow = caution);
    # the red tick below presumably marks the never-exceed speed — TODO confirm
    a.arc(44, 100 + 27 * compress, l+3, width = 8, color = "green")
    a.arc(100 + 27 * compress, 100 + 58 * compress, l+3, width = 8, color = "yellow")
    a.arc(33, 85, l, width = 3, color = "white")
    # minor ticks every 5 kt (compressed spacing above 100 kt)
    for i in range(35, 100, 5):
        a.tick(i, l, 58, 2)
    for i in range(100, 152, int(5 * compress + 0.5)):
        a.tick(i, l, 58, 2)
    # major ticks every 10 kt
    for i in range(40, 100, 10):
        a.tick(i, l, 65, 2)
    for i in range(100, 155, int(10 * compress + 0.5)):
        a.tick(i, l, 65, 2)
    a.tick(100 + 58 * compress, l, 60, color="red")

    # mph conversion (1 kt = 1/0.8689 mph) for the inner MPH sub-scale
    mph = 0.8689
    k = 30
    for i in range(40, 100, int(10 * mph)):
        a.tick(i, k, k + 6, 1)
    for i in range(40 + int(65 * mph), 150, int(10 * mph * compress)):
        a.tick(i, k, k + 6, 1)

    # fc-list tells you the names of available fonts on Linux (fc ... font cache)
    # outer (knots) scale labels
    s = 13
    a.at(0,-70).text("AIRSPEED", size = 10, font_family = "Lucida Sans", color = "white")
    a.at(0,-55).text("KNOTS", size = 10, font_family = "Lucida Sans", color = "white")
    a.at(60,-42).text(40, size = s, font_family = "Lucida Sans", color = "white")
    a.at(75,20).text(60, size = s, font_family = "Lucida Sans", color = "white")
    a.at(35,72).text(80, size = s, font_family = "Lucida Sans", color = "white")
    a.at(-40,72).text(100, size = s, font_family = "Lucida Sans", color = "white")
    a.at(-75,30).text(120, size = s, font_family = "Lucida Sans", color = "white")
    a.at(-75,-20).text(140, size = s, font_family = "Lucida Sans", color = "white")
    a.at(-45,-57).text(160, size = s, font_family = "Lucida Sans", color = "white")

    # mph markings
    s = 7
    a.at(16,-18).text(40, size = s, font_family = "Lucida Sans", color = "white")
    a.at(22,2).text(60, size = s, font_family = "Lucida Sans", color = "white")
    a.at(16,20).text(80, size = s, font_family = "Lucida Sans", color = "white")
    a.at(0,27).text(100, size = s, font_family = "Lucida Sans", color = "white")
    a.at(-14,22).text(120, size = s, font_family = "Lucida Sans", color = "white")
    a.at(-19,13).text(140, size = s, font_family = "Lucida Sans", color = "white")
    a.at(-22,0).text(160, size = s, font_family = "Lucida Sans", color = "white")
    a.at(-17,-14).text(180, size = s, font_family = "Lucida Sans", color = "white")
    a.at(0,-20).text("MPH", size = s, font_family = "Lucida Sans", color = "white")

    #a.at(75,20).text(60, size = s, font_family = "Lucida Sans", color = "white")
    #a.at(35,72).text(80, size = s, font_family = "Lucida Sans", color = "white")
    #a.at(-40,72).text(100, size = s, font_family = "Lucida Sans", color = "white")
    #a.at(-75,30).text(120, size = s, font_family = "Lucida Sans", color = "white")
    #a.at(-75,-20).text(140, size = s, font_family = "Lucida Sans", color = "white")
    #a.at(-45,-55).text(160, size = s, font_family = "Lucida Sans", color = "white")

except Error as e:
    # `Error` is svginstr's exception type; report in red on stderr (Python 2 print).
    print >>sys.stderr, "\033[31;1m%s\033[m\n" % e
|
colinfitzpatrick/otherphone | refs/heads/master | Message.py | 1 | #
# Message.py
#
# Copyright xsynergy ltd 2010. All Rights Reserved.
#
# Setup
# Install PySerial http://pyserial.sourceforge.net
# Install Python Messaging https://github.com/pmarti/python-messaging
import sys
import logging
from messaging.sms import SmsSubmit
from messaging.sms import SmsDeliver
logger = logging.getLogger('MY_OTHER_PHONE_APP')
# Class for dealing with SMS Message
# Include PDU text.
# Represents a single SMS message and converts it to/from the PDU
# representation used by the GSM modem (via python-messaging).
class Message:
    kNONE = "none"

    # Class-level defaults; instances overwrite these as needed.
    _sMessage = ""
    _sFrom = ""
    _sTo = ""
    _sId = ""
    # New (unread) or Read Message: True = new/unread, False = read
    _bStatus = True

    def __init__(self):
        logger.debug('New Message Class Created')
        # Bug fix: the original assigned to throwaway local variables here,
        # which had no effect; reset the instance attributes instead.
        self._sMessage = ""
        self._sFrom = ""
        self._sTo = ""
        self._sId = ""

    # Create a message for sending
    def createMessage(self, to, message):
        logger.info('Creating message to "%s" "%s"' % (to, message))
        self._sTo = to
        self._sMessage = message

    # Import a message received from the device.
    # `details` is the raw status line from the modem (comma-separated,
    # first field "<prefix>:<id>", second field the read/unread flag).
    # Returns True on success, False if the PDU could not be decoded.
    def importMessage(self, pduText, details=''):
        logger.debug('importMessage()-%s (%s)' % (pduText, details))
        try:
            message = SmsDeliver(pduText)
            self._sMessage = message.text
            self._sFrom = message.number
            msgDetails = details.split(",")
            self._sId = msgDetails[0].split(":")[1].strip()
            # '0' means already read; anything else means new/unread
            self._bStatus = msgDetails[1] != '0'
        except Exception as e:
            logger.exception(e)
            logger.error('Failed to decode incoming message')
            return False
        logger.debug(self.toString())
        return True

    # Export message to a device.
    # May return an empty string on encoding failure.
    def exportMessage(self):
        logger.debug('exportMessage()')
        try:
            message = SmsSubmit(self._sTo, self._sMessage)
            return message.to_pdu()
        except Exception:
            # Bug fix: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.
            logger.error('Failed to encode message ' + self.toString())
            return ''

    # Print the Message, for debugging
    def toString(self):
        return '[from: %s] [to: %s] [ID: %s] [Read: %s] %s' % (self._sFrom, self._sTo, self._sId, self._bStatus, self._sMessage)

    def getFrom(self):
        logger.debug('getFrom() ' + self.toString())
        return self._sFrom

    def getTo(self):
        logger.debug('getTo() ' + self.toString())
        # Bug fix: was `return self.sTo`, which raised AttributeError.
        return self._sTo

    def getMessage(self):
        logger.debug('getMessage() ' + self.toString())
        return self._sMessage

    def getID(self):
        logger.debug('getID() ' + self.toString())
        return self._sId

    def isRead(self):
        logger.debug('isRead() ' + self.toString())
        return self._bStatus
|
illustris-bh-wisc/illustris-bh | refs/heads/master | readsubfHDF5.py | 1 | # Python HDF5 subfind reader
# (requires util/hdf5lib.py)
#
# import readsubfHDF5
# cat = readsubfHDF5.subfind_catalog("./output/", 60)
# print cat.SubhaloPos
#
#
# Mark Vogelsberger (mvogelsb@cfa.harvard.edu)
import numpy as np
import os
import sys
import hdf5lib
####################
#SUBHALO DATABLOCKS#
####################
#descriptions of subhalo datablocks -> add new datablocks here!
#format -> "HDF5_NAME":["DATATYPE", DIMENSION]
# Subfind subhalo fields that this reader knows about.
# Values are [element type tag, per-entry dimension]; "ID" entries use the
# 32/64-bit unsigned integer type selected by the `long_ids` flag.
sub_datablocks = {"SubhaloLen":["INT",1],
                  "SubhaloMass":["FLOAT",1],
                  "SubhaloMassinRad":["FLOAT",1],
                  "SubhaloPos":["FLOAT",3],
                  "SubhaloVel":["FLOAT",3],
                  "SubhaloLenType":["INT",6],
                  "SubhaloMassType":["FLOAT",6],
                  "SubhaloCM":["FLOAT",3],
                  "SubhaloSpin":["FLOAT",3],
                  "SubhaloVelDisp":["FLOAT",1],
                  "SubhaloVmax":["FLOAT",1],
                  "SubhaloVmaxRad":["FLOAT",1],
                  "SubhaloHalfmassRad":["FLOAT",1],
                  "SubhaloHalfmassRadType":["FLOAT",6],
                  "SubhaloMassInRadType":["FLOAT", 6],
                  "SubhaloMassInRad":["FLOAT",1],
                  "SubhaloMassInHalfRadType":["FLOAT", 6],
                  "SubhaloMassInHalfRad":["FLOAT", 1],
                  "SubhaloIDMostbound":["ID",1],
                  "SubhaloGrNr":["INT",1],
                  "SubhaloParent":["INT",1],
                  "SubhaloSFR":["FLOAT",1],
                  "SubhaloSFRinRad":["FLOAT",1],
                  "SubhaloGasMetallicity":["FLOAT",1],
                  "SubhaloGasMetallicitySfr":["FLOAT",1],
                  "SubhaloStarMetallicity":["FLOAT",1],
                  "SubhaloGasMetalFractions":["FLOAT",9],
                  "SubhaloGasMetalFractionsSfr":["FLOAT",9],
                  "SubhaloGasMetalFractionsSfrWeighted":["FLOAT",9],
                  "SubhaloStarMetalFractions":["FLOAT",9],
                  "SubhaloStarMetallicityHalfRad":["FLOAT",1],
                  "SubhaloBHMass":["FLOAT",1],
                  "SubhaloBHMdot":["FLOAT",1],
                  "SubhaloStellarPhotometricsMassInRad":["FLOAT",1],
                  "SubhaloStellarPhotometrics":["FLOAT",8]} #band luminosities: U, B, V, K, g, r, i, z
##################
#GROUP DATABLOCKS#
##################
#descriptions of subhalo datablocks -> add new datablocks here!
#format -> "HDF5_NAME":["DATATYPE", DIMENSION]
# FoF group fields that this reader knows about.
# Same [type tag, dimension] layout as sub_datablocks above.
grp_datablocks = {"GroupLen":["INT",1],
                  "GroupMass":["FLOAT",1],
                  "GroupPos":["FLOAT",3],
                  "GroupVel":["FLOAT",3],
                  "GroupLenType":["INT",6],
                  "GroupMassType":["FLOAT",6],
                  "Group_M_Mean200":["FLOAT",1],
                  "Group_R_Mean200":["FLOAT",1],
                  "Group_M_Crit200":["FLOAT",1],
                  "Group_R_Crit200":["FLOAT",1],
                  "Group_M_TopHat200":["FLOAT",1],
                  "Group_R_TopHat200":["FLOAT",1],
                  "Group_M_Crit500":["FLOAT",1],
                  "Group_R_Crit500":["FLOAT",1],
                  "GroupNsubs":["INT",1],
                  "GroupFirstSub":["INT",1],
                  "GroupSFR":["FLOAT",1],
                  "GroupGasMetallicity":["FLOAT",1],
                  "GroupStarMetallicity":["FLOAT",1],
                  "GroupGasMetalFractions":["FLOAT",9],
                  "GroupStarMetalFractions":["FLOAT",9],
                  "GroupBHMass":["FLOAT",1],
                  "GroupBHMdot":["FLOAT",1],
                  "GroupFuzzOffsetType":["INT64",6]}
class subfind_catalog:
    """Reader for Subfind group/subhalo HDF5 catalogues.

    Loads the FoF group and/or subhalo tables of snapshot `snapnum` found
    under `basedir`, concatenating all file pieces.  Every datablock present
    in the files becomes an attribute of this object (e.g. ``cat.SubhaloPos``).

    @param long_ids: store IDs as uint64 instead of uint32
    @param double_output: selects the float dtype.  NOTE(review): the wiring
           looks inverted (True -> float32, False -> float64); preserved
           as-is for backward compatibility — confirm against writers.
    @param grpcat: read the FoF group table
    @param subcat: read the subhalo table
    @param name: catalogue file basename
    @param keysel: optional list of datablock names to restrict reading
    """

    def _dtype(self, type_tag, dim):
        """Map a datablock type tag ("FLOAT"/"INT"/"INT64"/"ID") and
        per-entry dimension to a concrete numpy dtype."""
        base = {"FLOAT": self.double_type,
                "INT": np.int32,
                # Bug fix: the original subhalo (no-keysel) branch allocated
                # INT64 blocks as np.int32, truncating 64-bit values; every
                # other branch used np.int64.
                "INT64": np.int64,
                "ID": self.id_type}[type_tag]
        return np.dtype((base, dim))

    def _allocate(self, f, group, datablocks, keysel, count, vardict):
        """Create an empty attribute array for every selected datablock
        present under HDF5 group `group` (first catalogue file only)."""
        keys = datablocks.keys() if keysel is None else keysel
        for key in keys:
            if hdf5lib.Contains(f, group, key):
                type_tag, dim = datablocks[key]
                vars(self)[key] = np.empty(count, dtype=self._dtype(type_tag, dim))
                vardict[key] = vars(self)[key]

    def _fill(self, f, group, datablocks, keysel, count, skip, vardict):
        """Copy this file's slice of every selected datablock into the
        preallocated arrays, starting at offset `skip`."""
        keys = datablocks.keys() if keysel is None else keysel
        for key in keys:
            if hdf5lib.Contains(f, group, key):
                dim = datablocks[key][1]
                a = hdf5lib.GetData(f, group + "/" + key)
                if dim == 1:
                    vardict[key][skip:skip + count] = a[:]
                else:
                    for d in range(0, dim):
                        vardict[key][skip:skip + count, d] = a[:, d]

    def __init__(self, basedir, snapnum, long_ids = False, double_output = False, grpcat = True, subcat = True, name = "fof_subhalo_tab", keysel = None):
        self.filebase = basedir + "/groups_" + str(snapnum).zfill(3) + "/" + name + "_" + str(snapnum).zfill(3) + "."

        if long_ids: self.id_type = np.uint64
        else: self.id_type = np.uint32
        if double_output: self.double_type = np.float32
        else: self.double_type = np.float64

        filenum = 0
        doneflag = False
        skip_gr = 0
        skip_sub = 0
        vardict = {}

        while not doneflag:
            curfile = self.filebase + str(filenum) + ".hdf5"
            if (not os.path.exists(curfile)):
                # Fall back to a single, unsplit catalogue file.
                self.filebase = basedir + "/" + name + "_" + str(snapnum).zfill(3)
                curfile = self.filebase + ".hdf5"
            if (not os.path.exists(curfile)):
                print("file not found: " + curfile)
                sys.exit()

            f = hdf5lib.OpenFile(curfile)
            ngroups = hdf5lib.GetAttr(f, "Header", "Ngroups_ThisFile")
            nsubs = hdf5lib.GetAttr(f, "Header", "Nsubgroups_ThisFile")
            nfiles = hdf5lib.GetAttr(f, "Header", "NumFiles")

            if filenum == 0:
                # Totals are only read once; they size the output arrays.
                self.ngroups = hdf5lib.GetAttr(f, "Header", "Ngroups_Total")
                self.nids = hdf5lib.GetAttr(f, "Header", "Nids_Total")
                self.nsubs = hdf5lib.GetAttr(f, "Header", "Nsubgroups_Total")
                if (grpcat == True):
                    self._allocate(f, "Group", grp_datablocks, keysel,
                                   self.ngroups, vardict)
                if (subcat == True):
                    self._allocate(f, "Subhalo", sub_datablocks, keysel,
                                   self.nsubs, vardict)

            # Append this file's slice of each table.
            if (grpcat == True) and ngroups > 0:
                self._fill(f, "Group", grp_datablocks, keysel,
                           ngroups, skip_gr, vardict)
                skip_gr += ngroups
            if (subcat == True) and nsubs > 0:
                self._fill(f, "Subhalo", sub_datablocks, keysel,
                           nsubs, skip_sub, vardict)
                skip_sub += nsubs

            f.close()
            filenum += 1
            if filenum == nfiles: doneflag = True
|
abligh/xen | refs/heads/master | tools/python/xen/xend/XendDomainInfo.py | 15 | #===========================================================================
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#============================================================================
# Copyright (C) 2004, 2005 Mike Wray <mike.wray@hp.com>
# Copyright (C) 2005-2007 XenSource Ltd
#============================================================================
"""Representation of a single domain.
Includes support for domain construction, using
open-ended configurations.
Author: Mike Wray <mike.wray@hp.com>
"""
import logging
import time
import threading
import thread
import re
import copy
import os
import stat
import shutil
import traceback
from types import StringTypes
import xen.lowlevel.xc
from xen.util import asserts, auxbin, mkdir
from xen.util.blkif import parse_uname
import xen.util.xsm.xsm as security
from xen.util import xsconstants
from xen.util import mkdir
from xen.util.pci import serialise_pci_opts, pci_opts_list_to_sxp, \
append_default_pci_opts, \
pci_dict_to_bdf_str, pci_dict_to_xc_str, \
pci_convert_sxp_to_dict, pci_convert_dict_to_sxp, \
pci_dict_cmp, PCI_DEVFN, PCI_SLOT, PCI_FUNC, parse_hex
from xen.xend import balloon, sxp, uuid, image, arch
from xen.xend import XendOptions, XendNode, XendConfig
from xen.xend.XendConfig import scrub_password
from xen.xend.XendBootloader import bootloader, bootloader_tidy
from xen.xend.XendError import XendError, VmError
from xen.xend.XendDevices import XendDevices
from xen.xend.XendTask import XendTask
from xen.xend.xenstore.xstransact import xstransact, complete
from xen.xend.xenstore.xsutil import GetDomainPath, IntroduceDomain, SetTarget, ResumeDomain
from xen.xend.xenstore.xswatch import xswatch
from xen.xend.XendConstants import *
from xen.xend.XendAPIConstants import *
from xen.xend.XendCPUPool import XendCPUPool
from xen.xend.server.DevConstants import xenbusState
from xen.xend.server.BlktapController import TapdiskController
from xen.xend.XendVMMetrics import XendVMMetrics
from xen.xend import XendAPIStore
from xen.xend.XendPPCI import XendPPCI
from xen.xend.XendDPCI import XendDPCI
from xen.xend.XendPSCSI import XendPSCSI
from xen.xend.XendDSCSI import XendDSCSI, XendDSCSI_HBA
MIGRATE_TIMEOUT = 30.0
BOOTLOADER_LOOPBACK_DEVICE = '/dev/xvdp'
xc = xen.lowlevel.xc.xc()
xoptions = XendOptions.instance()
log = logging.getLogger("xend.XendDomainInfo")
#log.setLevel(logging.TRACE)
def create(config):
    """Creates and start a VM using the supplied configuration.

    @param config: A configuration object involving lists of tuples.
    @type  config: list of lists, eg ['vm', ['image', 'xen.gz']]

    @rtype:  XendDomainInfo
    @return: An up and running XendDomainInfo instance
    @raise VmError: Invalid configuration or failure to start.
    """
    from xen.xend import XendDomain

    cfg = XendConfig.XendConfig(sxp_obj = config)

    # Refuse to create a second live domain with the same name or UUID.
    existing = XendDomain.instance().domain_lookup_nr(cfg["name_label"])
    if existing is None or existing.domid is None:
        existing = XendDomain.instance().domain_lookup_nr(cfg["uuid"])
    if existing is not None and existing.domid is not None:
        raise VmError("Domain '%s' already exists with ID '%d'" % (cfg["name_label"], existing.domid))

    log.debug("XendDomainInfo.create(%s)", scrub_password(config))
    vm = XendDomainInfo(cfg)
    try:
        vm.start()
    except:
        # Tear down the partially built domain before propagating the error.
        log.exception('Domain construction failed')
        vm.destroy()
        raise
    return vm
def create_from_dict(config_dict):
    """Creates and start a VM using the supplied configuration.

    @param config_dict: An configuration dictionary.

    @rtype:  XendDomainInfo
    @return: An up and running XendDomainInfo instance
    @raise VmError: Invalid configuration or failure to start.
    """
    log.debug("XendDomainInfo.create_from_dict(%s)",
              scrub_password(config_dict))

    cfg = XendConfig.XendConfig(xapi = config_dict)
    vm = XendDomainInfo(cfg)
    try:
        vm.start()
    except:
        # Clean up the half-constructed domain, then re-raise.
        log.exception('Domain construction failed')
        vm.destroy()
        raise
    return vm
def recreate(info, priv):
    """Create the VM object for an existing domain.  The domain must not
    be dying, as the paths in the store should already have been removed,
    and asking us to recreate them causes problems.

    @param xeninfo: Parsed configuration
    @type  xeninfo: Dictionary
    @param priv: Is a privileged domain (Dom 0)
    @type  priv: bool

    @rtype:  XendDomainInfo
    @return: A up and running XendDomainInfo instance
    @raise VmError: Invalid configuration.
    @raise XendError: Errors with configuration.
    """

    log.debug("XendDomainInfo.recreate(%s)", scrub_password(info))

    assert not info['dying']

    xeninfo = XendConfig.XendConfig(dominfo = info)
    xeninfo['is_control_domain'] = priv
    xeninfo['is_a_template'] = False
    xeninfo['auto_power_on'] = False
    domid = xeninfo['domid']
    uuid1 = uuid.fromString(xeninfo['uuid'])
    needs_reinitialising = False

    dompath = GetDomainPath(domid)
    if not dompath:
        raise XendError('No domain path in store for existing '
                        'domain %d' % domid)

    log.info("Recreating domain %d, UUID %s. at %s" %
             (domid, xeninfo['uuid'], dompath))

    # need to verify the path and uuid if not Domain-0
    # if the required uuid and vm aren't set, then that means
    # we need to recreate the dom with our own values
    #
    # NOTE: this is probably not desirable, really we should just
    # abort or ignore, but there may be cases where xenstore's
    # entry disappears (eg. xenstore-rm /)
    #
    try:
        vmpath = xstransact.Read(dompath, "vm")
        if not vmpath:
            if not priv:
                log.warn('/local/domain/%d/vm is missing. recreate is '
                         'confused, trying our best to recover' % domid)
            needs_reinitialising = True
            # raising XendError here jumps straight to the `except` below,
            # skipping the remaining consistency checks
            raise XendError('reinit')

        uuid2_str = xstransact.Read(vmpath, "uuid")
        if not uuid2_str:
            log.warn('%s/uuid/ is missing. recreate is confused, '
                     'trying our best to recover' % vmpath)
            needs_reinitialising = True
            raise XendError('reinit')

        uuid2 = uuid.fromString(uuid2_str)
        if uuid1 != uuid2:
            log.warn('UUID in /vm does not match the UUID in /dom/%d.'
                     'Trying out best to recover' % domid)
            needs_reinitialising = True
    except XendError:
        pass # our best shot at 'goto' in python :)

    vm = XendDomainInfo(xeninfo, domid, dompath, augment = True, priv = priv,
                        vmpath = vmpath)

    if needs_reinitialising:
        # xenstore state was inconsistent; rebuild the /vm and /dom entries
        vm._recreateDom()
        vm._removeVm()
        vm._storeVmDetails()
        vm._storeDomDetails()

    vm.image = image.create(vm, vm.info)
    vm.image.recreate()

    vm._registerWatches()
    vm.refreshShutdown(xeninfo)

    # register the domain in the list
    from xen.xend import XendDomain
    XendDomain.instance().add_domain(vm)

    return vm
def restore(config):
    """Create a domain and a VM object to do a restore.

    @param config: Domain SXP configuration
    @type  config: list of lists. (see C{create})

    @rtype:  XendDomainInfo
    @return: A up and running XendDomainInfo instance
    @raise VmError: Invalid configuration or failure to start.
    @raise XendError: Errors with configuration.
    """
    log.debug("XendDomainInfo.restore(%s)", scrub_password(config))

    vm = XendDomainInfo(XendConfig.XendConfig(sxp_obj = config),
                        resume = True)
    try:
        vm.resume()
    except:
        # Resume failed: destroy the partially restored domain.
        vm.destroy()
        raise
    return vm
def createDormant(domconfig):
    """Create a dormant/inactive XenDomainInfo without creating VM.
    This is for creating instances of persistent domains that are not
    yet start.

    @param domconfig: Parsed configuration
    @type  domconfig: XendConfig object

    @rtype:  XendDomainInfo
    @return: A up and running XendDomainInfo instance
    @raise XendError: Errors with configuration.
    """
    log.debug("XendDomainInfo.createDormant(%s)", scrub_password(domconfig))

    # domid does not make sense for non-running domains.
    domconfig.pop('domid', None)
    return XendDomainInfo(domconfig)
def domain_by_name(name):
    """Look a domain up by name.

    @param name: Name of the domain
    @type  name: string
    @return: XendDomainInfo or None
    """
    from xen.xend import XendDomain
    xd = XendDomain.instance()
    return xd.domain_lookup_by_name_nr(name)
def shutdown_reason(code):
    """Map a numeric shutdown code onto its human-readable reason.

    @param code: shutdown code
    @type  code: int
    @return: shutdown reason ("?" for unknown codes)
    @rtype: string
    """
    reason = DOMAIN_SHUTDOWN_REASONS.get(code, "?")
    return reason
def dom_get(dom):
    """Get info from xen for an existing domain.

    @param dom: domain id
    @type  dom: int
    @return: info or None
    @rtype: dictionary
    """
    try:
        infos = xc.domain_getinfo(dom, 1)
        # domain_getinfo returns domains starting from `dom`; make sure the
        # first entry really is the domain we asked for.
        if infos and infos[0]['domid'] == dom:
            return infos[0]
    except Exception as err:
        # ignore missing domain
        log.trace("domain_getinfo(%d) failed, ignoring: %s", dom, str(err))
    return None
from xen.xend.server.pciif import parse_pci_name, PciDevice,\
get_assigned_pci_devices, get_all_assigned_pci_devices
def do_FLR(domid, is_hvm):
    """Issue a Function Level Reset for every PCI device assigned to
    domain `domid`.

    @raise VmError: a device could not be located or parsed.
    """
    for dev_str in get_assigned_pci_devices(domid):
        try:
            device = PciDevice(parse_pci_name(dev_str))
        except Exception as e:
            raise VmError("pci: failed to locate device and "+
                "parse it's resources - "+str(e))
        device.do_FLR(is_hvm, xoptions.get_pci_dev_assign_strict_check())
class XendDomainInfo:
"""An object represents a domain.
@TODO: try to unify dom and domid, they mean the same thing, but
xc refers to it as dom, and everywhere else, including
xenstore it is domid. The best way is to change xc's
python interface.
@ivar info: Parsed configuration
@type info: dictionary
@ivar domid: Domain ID (if VM has started)
@type domid: int or None
@ivar paused_by_admin: Is this Domain paused by command or API
@type paused_by_admin: bool
@ivar guest_bitsize: the bitsize of guest
@type guest_bitsize: int or None
@ivar alloc_mem: the memory domain allocated when booting
@type alloc_mem: int or None
@ivar vmpath: XenStore path to this VM.
@type vmpath: string
@ivar dompath: XenStore path to this Domain.
@type dompath: string
@ivar image: Reference to the VM Image.
@type image: xen.xend.image.ImageHandler
@ivar store_port: event channel to xenstored
@type store_port: int
@ivar console_port: event channel to xenconsoled
@type console_port: int
@ivar store_mfn: xenstored mfn
@type store_mfn: int
@ivar console_mfn: xenconsoled mfn
@type console_mfn: int
@ivar notes: OS image notes
@type notes: dictionary
@ivar vmWatch: reference to a watch on the xenstored vmpath
@type vmWatch: xen.xend.xenstore.xswatch
@ivar shutdownWatch: reference to watch on the xenstored domain shutdown
@type shutdownWatch: xen.xend.xenstore.xswatch
@ivar shutdownStartTime: UNIX Time when domain started shutting down.
@type shutdownStartTime: float or None
@ivar restart_in_progress: Is a domain restart thread running?
@type restart_in_progress: bool
# @ivar state: Domain state
# @type state: enum(DOM_STATE_HALTED, DOM_STATE_RUNNING, ...)
@ivar state_updated: lock for self.state
@type state_updated: threading.Condition
@ivar refresh_shutdown_lock: lock for polling shutdown state
@type refresh_shutdown_lock: threading.Condition
@ivar _deviceControllers: device controller cache for this domain
@type _deviceControllers: dict 'string' to DevControllers
"""
    def __init__(self, info, domid = None, dompath = None, augment = False,
                 priv = False, resume = False, vmpath = None):
        """Constructor for a domain

        @param info: parsed configuration
        @type info: dictionary
        @keyword domid: Set initial domain id (if any)
        @type domid: int
        @keyword dompath: Set initial dompath (if any)
        @type dompath: string
        @keyword augment: Augment given info with xenstored VM info
        @type augment: bool
        @keyword priv: Is a privileged domain (Dom 0)
        @type priv: bool
        @keyword resume: Is this domain being resumed?
        @type resume: bool
        """

        self.info = info
        if domid == None:
            self.domid = self.info.get('domid')
        else:
            self.domid = domid
        self.guest_bitsize = None
        self.alloc_mem = None
        self.paused_by_admin = False

        # Populate-on-demand is enabled for HVM guests whose static maximum
        # exceeds the initial dynamic allocation.
        maxmem = self.info.get('memory_static_max', 0)
        memory = self.info.get('memory_dynamic_max', 0)
        if self.info.is_hvm() and maxmem > memory:
            self.pod_enabled = True
        else:
            self.pod_enabled = False

        #REMOVE: uuid is now generated in XendConfig
        #if not self._infoIsSet('uuid'):
        #    self.info['uuid'] = uuid.toString(uuid.create())

        # Find a unique /vm/<uuid>/<integer> path if not specified.
        # This avoids conflict between pre-/post-migrate domains when doing
        # localhost relocation.
        self.vmpath = vmpath
        i = 0
        while self.vmpath == None:
            self.vmpath = XS_VMROOT + self.info['uuid']
            if i != 0:
                self.vmpath = self.vmpath + '-' + str(i)
            try:
                # If the candidate path is already occupied, retry with the
                # next "-<i>" suffix.
                if self._readVm("uuid"):
                    self.vmpath = None
                    i = i + 1
            except:
                pass

        self.dompath = dompath
        self.image = None
        self.store_port = None
        self.store_mfn = None
        self.console_port = None
        self.console_mfn = None

        self.native_protocol = None

        self.vmWatch = None
        self.shutdownWatch = None
        self.shutdownStartTime = None
        self._resume = resume
        self.restart_in_progress = False

        self.state_updated = threading.Condition()
        self.refresh_shutdown_lock = threading.Condition()
        self._stateSet(DOM_STATE_HALTED)

        self._deviceControllers = {}

        # Legacy state flags, all cleared initially.
        for state in DOM_STATES_OLD:
            self.info[state] = 0

        if augment:
            self._augmentInfo(priv)

        self._checkName(self.info['name_label'])

        self.metrics = XendVMMetrics(uuid.createString(), self)
#
# Public functions available through XMLRPC
#
    def start(self, is_managed = False):
        """Attempt to start the VM, performing the appropriate
        initialisation if it is not already started.

        @keyword is_managed: persist the running configuration via XendDomain
        @raise XendError: the VM is already running.
        """
        from xen.xend import XendDomain

        if self._stateGet() in (XEN_API_VM_POWER_STATE_HALTED, XEN_API_VM_POWER_STATE_SUSPENDED, XEN_API_VM_POWER_STATE_CRASHED):
            try:
                # Each construction phase is reported as task progress.
                XendTask.log_progress(0, 30, self._constructDomain)
                XendTask.log_progress(31, 60, self._initDomain)

                XendTask.log_progress(61, 70, self._storeVmDetails)
                XendTask.log_progress(71, 80, self._storeDomDetails)
                XendTask.log_progress(81, 90, self._registerWatches)
                XendTask.log_progress(91, 100, self.refreshShutdown)

                xendomains = XendDomain.instance()

                # save running configuration if XendDomains believe domain is
                # persistent
                if is_managed:
                    xendomains.managed_config_save(self)
            except:
                # Destroy the partially built domain before re-raising.
                log.exception('VM start failed')
                self.destroy()
                raise
        else:
            raise XendError('VM already running')
    def resume(self):
        """Resumes a domain that has come back from suspension.

        @raise XendError: the domain is not in a resumable state.
        """
        state = self._stateGet()
        if state in (DOM_STATE_SUSPENDED, DOM_STATE_HALTED):
            try:
                self._constructDomain()

                try:
                    self._setCPUAffinity()
                except:
                    # usually a CPU we want to set affinity to does not exist
                    # we just ignore it so that the domain can still be restored
                    log.warn("Cannot restore CPU affinity")

                self._setSchedParams()
                self._storeVmDetails()
                self._createChannels()
                self._createDevices()
                self._storeDomDetails()
                self._endRestore()
            except:
                # Clean up the half-resumed domain, then re-raise.
                log.exception('VM resume failed')
                self.destroy()
                raise
        else:
            raise XendError('VM is not suspended; it is %s'
                            % XEN_API_VM_POWER_STATE[state])
    def shutdown(self, reason):
        """Shutdown a domain by signalling this via xenstored.

        @param reason: one of DOMAIN_SHUTDOWN_REASONS values
        @raise XendError: domain already down, is Domain 0, or bad reason.
        """
        log.debug('XendDomainInfo.shutdown(%s)', reason)
        if self._stateGet() in (DOM_STATE_SHUTDOWN, DOM_STATE_HALTED,):
            raise XendError('Domain cannot be shutdown')

        if self.domid == 0:
            raise XendError('Domain 0 cannot be shutdown')

        if reason not in DOMAIN_SHUTDOWN_REASONS.values():
            raise XendError('Invalid reason: %s' % reason)
        # Request the guest itself to shut down via the xenstore control node.
        self.storeDom("control/shutdown", reason)

        # HVM domain shuts itself down only if it has PV drivers
        if self.info.is_hvm():
            hvm_pvdrv = xc.hvm_get_param(self.domid, HVM_PARAM_CALLBACK_IRQ)
            hvm_s_state = xc.hvm_get_param(self.domid, HVM_PARAM_ACPI_S_STATE)
            if not hvm_pvdrv or hvm_s_state != 0:
                # No PV drivers (or in an ACPI sleep state): force the
                # shutdown from the hypervisor side instead.
                code = REVERSE_DOMAIN_SHUTDOWN_REASONS[reason]
                log.info("HVM save:remote shutdown dom %d!", self.domid)
                xc.domain_shutdown(self.domid, code)
def pause(self):
"""Pause domain
@raise XendError: Failed pausing a domain
"""
try:
if(self.domid):
# get all blktap2 devices
dev = xstransact.List(self.vmpath + '/device/tap2')
for x in dev:
path = self.getDeviceController('tap2').readBackend(x, 'params')
if path and path.startswith(TapdiskController.TAP_DEV):
TapdiskController.pause(path)
except Exception, ex:
log.warn('Could not pause blktap disk.');
try:
xc.domain_pause(self.domid)
self._stateSet(DOM_STATE_PAUSED)
except Exception, ex:
log.exception(ex)
raise XendError("Domain unable to be paused: %s" % str(ex))
def unpause(self):
"""Unpause domain
@raise XendError: Failed unpausing a domain
"""
try:
if(self.domid):
dev = xstransact.List(self.vmpath + '/device/tap2')
for x in dev:
path = self.getDeviceController('tap2').readBackend(x, 'params')
if path and path.startswith(TapdiskController.TAP_DEV):
TapdiskController.unpause(path)
except Exception, ex:
log.warn('Could not unpause blktap disk: %s' % str(ex));
try:
xc.domain_unpause(self.domid)
self._stateSet(DOM_STATE_RUNNING)
except Exception, ex:
log.exception(ex)
raise XendError("Domain unable to be unpaused: %s" % str(ex))
def send_sysrq(self, key):
""" Send a Sysrq equivalent key via xenstored."""
if self._stateGet() not in (DOM_STATE_RUNNING, DOM_STATE_PAUSED):
raise XendError("Domain '%s' is not started" % self.info['name_label'])
asserts.isCharConvertible(key)
self.storeDom("control/sysrq", '%c' % key)
    def pci_device_configure_boot(self):
        # Attach, at boot time, every PCI device configured for this HVM
        # guest, grouping multi-function devices behind their head function.

        if not self.info.is_hvm():
            return

        devid = '0'
        first = True
        dev_info = self._getDeviceInfo_pci(devid)
        if dev_info is None:
            return

        # get the virtual slot info from xenstore
        dev_uuid = sxp.child_value(dev_info, 'uuid')
        pci_conf = self.info['devices'][dev_uuid][1]
        pci_devs = pci_conf['devs']

        # Keep a set of keys that are done rather than
        # just iterating through set(map(..., pci_devs))
        # to preserve any order information present.
        done = set()
        for key in map(lambda x: x['key'], pci_devs):
            if key in done:
                continue
            done |= set([key])
            # All functions sharing this key; the popped one is the head.
            dev = filter(lambda x: x['key'] == key, pci_devs)
            head_dev = dev.pop()
            dev_sxp = pci_convert_dict_to_sxp(head_dev, 'Initialising',
                                              'Booting')
            self.pci_device_configure(dev_sxp, first_dev = first)
            first = False

            # That is all for single-function virtual devices
            if len(dev) == 0:
                continue

            if int(head_dev['vdevfn'], 16) & AUTO_PHP_SLOT:
                # The head was auto-assigned a slot; re-read the config to
                # learn which slot it got, so sibling functions can share it.
                new_dev_info = self._getDeviceInfo_pci(devid)
                if new_dev_info is None:
                    continue
                new_dev_uuid = sxp.child_value(new_dev_info, 'uuid')
                new_pci_conf = self.info['devices'][new_dev_uuid][1]
                new_pci_devs = new_pci_conf['devs']

                new_head_dev = filter(lambda x: pci_dict_cmp(x, head_dev),
                                      new_pci_devs)[0]

                if int(new_head_dev['vdevfn'], 16) & AUTO_PHP_SLOT:
                    continue

                vdevfn = PCI_SLOT(int(new_head_dev['vdevfn'], 16))
                new_dev = []
                for i in dev:
                    # Rewrite each sibling's devfn onto the head's slot,
                    # preserving its own function number.
                    i['vdevfn'] = '0x%02x' % \
                                  PCI_DEVFN(vdevfn,
                                            PCI_FUNC(int(i['vdevfn'], 16)))
                    new_dev.append(i)

                dev = new_dev

            for i in dev:
                dev_sxp = pci_convert_dict_to_sxp(i, 'Initialising', 'Booting')
                self.pci_device_configure(dev_sxp)
    def hvm_pci_device_create(self, dev_config):
        """Hot-plug a single PCI device into a running HVM guest.

        @param dev_config: parsed device configuration; only devs[0] is used
        @raise VmError: non-HVM guest, virtual slot conflict, duplicate
                        insertion, or the device cannot be assigned.
        """
        log.debug("XendDomainInfo.hvm_pci_device_create: %s"
                  % scrub_password(dev_config))

        if not self.info.is_hvm():
            raise VmError("hvm_pci_device_create called on non-HVM guest")

        #all the PCI devs share one conf node
        devid = '0'

        new_dev = dev_config['devs'][0]
        dev_info = self._getDeviceInfo_pci(devid)#from self.info['devices']

        #check conflict before trigger hotplug event
        if dev_info is not None:
            dev_uuid = sxp.child_value(dev_info, 'uuid')
            pci_conf = self.info['devices'][dev_uuid][1]
            pci_devs = pci_conf['devs']
            for x in pci_devs:
                # A fixed (non-auto) vdevfn must not be occupied already.
                if (int(x['vdevfn'], 16) == int(new_dev['vdevfn'], 16) and
                   not int(x['vdevfn'], 16) & AUTO_PHP_SLOT):
                    raise VmError("vdevfn %s already have a device." %
                                  (new_dev['vdevfn']))

                if (pci_dict_cmp(x, new_dev)):
                    raise VmError("device is already inserted")

        # Test whether the devices can be assigned.
        self.pci_dev_check_attachability_and_do_FLR(new_dev)

        return self.hvm_pci_device_insert_dev(new_dev)
def iommu_check_pod_mode(self):
    """ Disallow PCI device assignment if pod is enabled. """
    if not self.pod_enabled:
        return
    raise VmError("failed to assign device since pod is enabled")
def pci_dev_check_assignability_and_do_FLR(self, config):
    """Static assignment path (the 'pci' string in the guest config
    file): check whether the device(s) in config can be assigned to
    the guest and, if so, do an FLR on them via the PCI controller.
    """
    self.iommu_check_pod_mode()
    controller = self.getDeviceController('pci')
    return controller.dev_check_assignability_and_do_FLR(config)
def pci_dev_check_attachability_and_do_FLR(self, new_dev):
    """ In the case of dynamic device assignment(i.e., xm pci-attach), we
    check if the device can be attached to guest or not; if yes, we do_FLR
    the device.

    @param new_dev: pci device dict (domain/bus/slot/func etc. keys)
    @raise VmError: if any attachability check fails
    """
    self.iommu_check_pod_mode()

    # Test whether the devices can be assigned
    pci_name = pci_dict_to_bdf_str(new_dev)
    _all_assigned_pci_devices = get_all_assigned_pci_devices(self.domid)
    if pci_name in _all_assigned_pci_devices:
        raise VmError("failed to assign device %s that has"
                      " already been assigned to other domain." % pci_name)

    # Test whether the device is owned by pciback or pci-stub.
    try:
        pci_device = PciDevice(new_dev)
    except Exception, e:
        raise VmError("pci: failed to locate device and "+
                      "parse its resources - "+str(e))
    if pci_device.driver!='pciback' and pci_device.driver!='pci-stub':
        raise VmError(("pci: PCI Backend and pci-stub don't own device %s")\
                      %pci_device.name)

    strict_check = xoptions.get_pci_dev_assign_strict_check()
    # Check non-page-aligned MMIO BAR.
    if pci_device.has_non_page_aligned_bar and strict_check:
        raise VmError("pci: %s: non-page-aligned MMIO BAR found." % \
                      pci_device.name)

    # PV guest has less checkings.
    if not self.info.is_hvm():
        # try to do FLR for PV guest
        pci_device.do_FLR(self.info.is_hvm(), strict_check)
        return

    if not strict_check:
        # Remaining checks only apply in strict-check mode.
        return

    # Check if there is intermediate PCIe switch bewteen the device and
    # Root Complex.
    if pci_device.is_behind_switch_lacking_acs():
        err_msg = 'pci: to avoid potential security issue, %s is not'+\
                  ' allowed to be assigned to guest since it is behind'+\
                  ' PCIe switch that does not support or enable ACS.'
        raise VmError(err_msg % pci_device.name)

    # Check the co-assignment.
    # To pci-attach a device D to domN, we should ensure each of D's
    # co-assignment devices hasn't been assigned, or has been assigned to
    # domN.
    coassignment_list = pci_device.find_coassigned_devices()
    pci_device.devs_check_driver(coassignment_list)
    assigned_pci_device_str_list = self._get_assigned_pci_devices()
    for pci_str in coassignment_list:
        if not (pci_str in _all_assigned_pci_devices):
            # Not assigned anywhere: no conflict.
            continue
        if not pci_str in assigned_pci_device_str_list:
            raise VmError(("pci: failed to pci-attach %s to domain %s" + \
                           " because one of its co-assignment device %s has been" + \
                           " assigned to other domain." \
                           )% (pci_device.name, self.info['name_label'], pci_str))

    # try to do FLR for HVM guest
    pci_device.do_FLR(self.info.is_hvm(), strict_check)
def hvm_pci_device_insert(self, dev_config):
    """Insert (hot-plug) the first PCI device of dev_config into this
    running HVM guest.

    @param dev_config: dict whose 'devs' list holds pci device dicts
    @return: the virtual devfn the device was inserted at
    @raise VmError: if the guest is not HVM
    """
    log.debug("XendDomainInfo.hvm_pci_device_insert: %s"
              % scrub_password(dev_config))

    if not self.info.is_hvm():
        # Fixed copy-paste error: the message used to name
        # hvm_pci_device_create, obscuring which call actually failed.
        raise VmError("hvm_pci_device_insert called on non-HVM guest")

    new_dev = dev_config['devs'][0]

    return self.hvm_pci_device_insert_dev(new_dev)
def hvm_pci_device_insert_dev(self, new_dev):
    """Assign PCI device new_dev to the IOMMU and tell the device
    model to plug it in; return the virtual devfn used.

    For an inactive domain (domid is None) nothing is done and the
    requested vdevfn is returned unchanged.

    @raise VmError: if IOMMU assignment fails or the device model
        reports an error instead of a vdevfn
    """
    log.debug("XendDomainInfo.hvm_pci_device_insert_dev: %s"
              % scrub_password(new_dev))

    if self.domid is not None:
        opts = ''
        optslist = []
        pci_defopts = []
        # Platform-wide defaults are merged with per-device opts.
        if 'pci_msitranslate' in self.info['platform']:
            pci_defopts.append(['msitranslate',
                                str(self.info['platform']['pci_msitranslate'])])
        if 'pci_power_mgmt' in self.info['platform']:
            pci_defopts.append(['power_mgmt',
                                str(self.info['platform']['pci_power_mgmt'])])
        if new_dev.has_key('opts'):
            optslist += new_dev['opts']

        if optslist or pci_defopts:
            opts = ',' + serialise_pci_opts(
                append_default_pci_opts(optslist, pci_defopts))

        # bdf_str: "DDDD:BB:SS.F@vdevfn[,opts]" as consumed by qemu-dm.
        bdf_str = "%s@%02x%s" % (pci_dict_to_bdf_str(new_dev),
                                 int(new_dev['vdevfn'], 16), opts)
        log.debug("XendDomainInfo.hvm_pci_device_insert_dev: %s" % bdf_str)
        bdf = xc.assign_device(self.domid, pci_dict_to_xc_str(new_dev))
        if bdf > 0:
            raise VmError("Failed to assign device to IOMMU (%s)" % bdf_str)
        log.debug("pci: assign device %s" % bdf_str)
        self.image.signalDeviceModel('pci-ins', 'pci-inserted', bdf_str)

        # The device model writes the vdevfn it chose (hex) into this
        # xenstore node; anything non-hex is an error message.
        vdevfn = xstransact.Read("/local/domain/0/device-model/%i/parameter"
                                 % self.getDomid())
        try:
            vdevfn_int = int(vdevfn, 16)
        except ValueError:
            raise VmError(("Cannot pass-through PCI function '%s'. " +
                           "Device model reported an error: %s") %
                          (bdf_str, vdevfn))
    else:
        vdevfn = new_dev['vdevfn']

    return vdevfn
def device_create(self, dev_config):
    """Create a new device.

    @param dev_config: device configuration
    @type dev_config: SXP object (parsed config)
    @return: sxpr of the created device from its controller
    @raise VmError: on invalid config or device creation failure
    """
    log.debug("XendDomainInfo.device_create: %s" % scrub_password(dev_config))
    dev_type = sxp.name(dev_config)

    if dev_type == 'vif':
        # Validate the MAC address before registering anything.
        for x in dev_config:
            if x != 'vif' and x[0] == 'mac':
                if not re.match('^([0-9a-f]{2}:){5}[0-9a-f]{2}$', x[1], re.I):
                    log.error("Virtual network interface creation error - invalid MAC Address entered: %s", x[1])
                    raise VmError("Cannot create a new virtual network interface - MAC address is not valid!");

    dev_uuid = self.info.device_add(dev_type, cfg_sxp = dev_config)
    dev_config_dict = self.info['devices'][dev_uuid][1]
    log.debug("XendDomainInfo.device_create: %s" % scrub_password(dev_config_dict))

    if self.domid is not None:
        try:
            dev_config_dict['devid'] = devid = \
                self._createDevice(dev_type, dev_config_dict)
            if dev_type == 'tap2':
                # createDevice may create a blktap1 device if blktap2 is not
                # installed or if the blktap driver is not supported in
                # blktap1
                dev_type = self.getBlockDeviceClass(devid)
            self._waitForDevice(dev_type, devid)
        except VmError, ex:
            # Roll back the XendConfig registration done above before
            # re-raising, so a failed create leaves no stale entries.
            del self.info['devices'][dev_uuid]
            if dev_type == 'pci':
                for dev in dev_config_dict['devs']:
                    XendAPIStore.deregister(dev['uuid'], 'DPCI')
            elif dev_type == 'vscsi':
                for dev in dev_config_dict['devs']:
                    XendAPIStore.deregister(dev['uuid'], 'DSCSI')
            elif dev_type == 'tap' or dev_type == 'tap2':
                self.info['vbd_refs'].remove(dev_uuid)
            else:
                self.info['%s_refs' % dev_type].remove(dev_uuid)
            raise ex
    else:
        devid = None

    xen.xend.XendDomain.instance().managed_config_save(self)
    return self.getDeviceController(dev_type).sxpr(devid)
def pci_device_configure(self, dev_sxp, devid = 0, first_dev = False):
    """Configure an existing pci device.

    @param dev_sxp: device configuration
    @type dev_sxp: SXP object (parsed config)
    @param devid: device id
    @type devid: int
    @param first_dev: True when plugging the first function of a
        virtual slot at boot; creates the shared pci backend node
    @return: Returns True if successfully updated device
    @rtype: boolean
    """
    log.debug("XendDomainInfo.pci_device_configure: %s"
              % scrub_password(dev_sxp))

    dev_class = sxp.name(dev_sxp)

    if dev_class != 'pci':
        return False

    pci_state = sxp.child_value(dev_sxp, 'state')
    pci_sub_state = sxp.child_value(dev_sxp, 'sub_state')
    existing_dev_info = self._getDeviceInfo_pci(devid)

    if existing_dev_info is None and pci_state != 'Initialising':
        raise XendError("Cannot detach when pci platform does not exist")

    pci_dev = sxp.children(dev_sxp, 'dev')[0]
    dev_config = pci_convert_sxp_to_dict(dev_sxp)
    dev = dev_config['devs'][0]

    stubdomid = self.getStubdomDomid()
    # Do HVM specific processing
    if self.info.is_hvm():
        from xen.xend import XendDomain
        if pci_state == 'Initialising':
            if stubdomid is not None :
                # Mirror the operation into the stub domain first.
                XendDomain.instance().domain_lookup(stubdomid).pci_device_configure(dev_sxp[:])

            # HVM PCI device attachment
            if pci_sub_state == 'Booting':
                vdevfn = self.hvm_pci_device_insert(dev_config)
            else:
                vdevfn = self.hvm_pci_device_create(dev_config)
            # Update vdevfn
            dev['vdevfn'] = vdevfn
            for n in sxp.children(pci_dev):
                if(n[0] == 'vdevfn'):
                    n[1] = vdevfn
        else:
            # HVM PCI device detachment
            existing_dev_uuid = sxp.child_value(existing_dev_info, 'uuid')
            existing_pci_conf = self.info['devices'][existing_dev_uuid][1]
            existing_pci_devs = existing_pci_conf['devs']
            new_devs = filter(lambda x: pci_dict_cmp(x, dev),
                              existing_pci_devs)
            # BUGFIX: this used to test "len(new_devs) < 0", which can
            # never be true, so detaching a device that is not attached
            # crashed below with IndexError instead of raising the
            # intended VmError.
            if len(new_devs) == 0:
                raise VmError("Device %s is not connected" %
                              pci_dict_to_bdf_str(dev))
            new_dev = new_devs[0]
            # Only tell qemu-dm to unplug function 0.
            # When unplugging a function, all functions in the
            # same vslot must be unplugged, and function 0 must
            # be one of the functions present when a vslot is
            # hot-plugged.  Telling qemu-dm to unplug function 0
            # also tells it to unplug all other functions in the
            # same vslot.
            if (PCI_FUNC(int(new_dev['vdevfn'], 16)) == 0):
                self.hvm_destroyPCIDevice(new_dev)
            if stubdomid is not None :
                XendDomain.instance().domain_lookup(stubdomid).pci_device_configure(dev_sxp[:])
            # Update vdevfn
            dev['vdevfn'] = new_dev['vdevfn']
            for n in sxp.children(pci_dev):
                if(n[0] == 'vdevfn'):
                    n[1] = new_dev['vdevfn']
    else:
        # Do PV specific checking
        if pci_state == 'Initialising':
            # PV PCI device attachment
            self.pci_dev_check_attachability_and_do_FLR(dev)

    # If pci platform does not exist, create and exit.
    if existing_dev_info is None :
        self.device_create(dev_sxp)
        return True

    if first_dev is True :
        # Create the backend node for the shared pci configuration when
        # the first function of a slot is plugged.
        existing_dev_uuid = sxp.child_value(existing_dev_info, 'uuid')
        existing_pci_conf = self.info['devices'][existing_dev_uuid][1]
        devid = self._createDevice('pci', existing_pci_conf)
        self.info['devices'][existing_dev_uuid][1]['devid'] = devid

    if self.domid is not None:
        # use DevController.reconfigureDevice to change device config
        dev_control = self.getDeviceController(dev_class)
        dev_uuid = dev_control.reconfigureDevice(devid, dev_config)
        if not self.info.is_hvm() and not self.info.is_stubdom():
            # in PV case, wait until backend state becomes connected.
            dev_control.waitForDevice_reconfigure(devid)
        num_devs = dev_control.cleanupDevice(devid)

        # update XendConfig with new device info
        if dev_uuid:
            new_dev_sxp = dev_control.configuration(devid)
            self.info.device_update(dev_uuid, new_dev_sxp)

        # If there is no device left, destroy pci and remove config.
        if num_devs == 0:
            if self.info.is_hvm():
                self.destroyDevice('pci', devid, True)
            else:
                self.destroyDevice('pci', devid)
            del self.info['devices'][dev_uuid]
    else:
        # Inactive domain: edit the stored sxp config directly.
        new_dev_sxp = ['pci']
        for cur_dev in sxp.children(existing_dev_info, 'dev'):
            if pci_state == 'Closing':
                # Drop the entry matching the detached BDF.
                if int(dev['domain'], 16) == int(sxp.child_value(cur_dev, 'domain'), 16) and \
                   int(dev['bus'], 16) == int(sxp.child_value(cur_dev, 'bus'), 16) and \
                   int(dev['slot'], 16) == int(sxp.child_value(cur_dev, 'slot'), 16) and \
                   int(dev['func'], 16) == int(sxp.child_value(cur_dev, 'func'), 16):
                    continue
            new_dev_sxp.append(cur_dev)

        if pci_state == 'Initialising' and pci_sub_state != 'Booting':
            for new_dev in sxp.children(dev_sxp, 'dev'):
                new_dev_sxp.append(new_dev)

        dev_uuid = sxp.child_value(existing_dev_info, 'uuid')
        self.info.device_update(dev_uuid, new_dev_sxp)

        # If there is no device left, remove config.
        if len(sxp.children(new_dev_sxp, 'dev')) == 0:
            del self.info['devices'][dev_uuid]

    xen.xend.XendDomain.instance().managed_config_save(self)

    return True
def vscsi_device_configure(self, dev_sxp):
    """Configure an existing vscsi device.

    (Structure follows the pci configure path; the original comment
    here read "quoted pci funciton".)

    @param dev_sxp: device configuration
    @type dev_sxp: SXP object (parsed config)
    @return: True if successfully updated, False if not a vscsi sxp
    """
    def _is_vscsi_defined(dev_info, p_devs = None, v_devs = None):
        # True if any dev entry of dev_info matches one of the given
        # physical (p-dev) or virtual (v-dev) device names.
        if not dev_info:
            return False
        for dev in sxp.children(dev_info, 'dev'):
            if p_devs is not None:
                if sxp.child_value(dev, 'p-dev') in p_devs:
                    return True
            if v_devs is not None:
                if sxp.child_value(dev, 'v-dev') in v_devs:
                    return True
        return False

    def _vscsi_be(be):
        # Normalise a backend reference to a domid string when the
        # domain can be looked up, else return it as given.
        be_xdi = xen.xend.XendDomain.instance().domain_lookup_nr(be)
        if be_xdi is not None:
            be_domid = be_xdi.getDomid()
            if be_domid is not None:
                return str(be_domid)
        return str(be)

    dev_class = sxp.name(dev_sxp)
    if dev_class != 'vscsi':
        return False

    dev_config = self.info.vscsi_convert_sxp_to_dict(dev_sxp)
    devs = dev_config['devs']
    v_devs = [d['v-dev'] for d in devs]
    state = devs[0]['state']
    req_devid = int(devs[0]['devid'])
    cur_dev_sxp = self._getDeviceInfo_vscsi(req_devid)

    if state == xenbusState['Initialising']:
        # new create
        # If request devid does not exist, create and exit.
        p_devs = [d['p-dev'] for d in devs]
        for dev_type, dev_info in self.info.all_devices_sxpr():
            if dev_type != 'vscsi':
                continue
            if _is_vscsi_defined(dev_info, p_devs = p_devs):
                raise XendError('The physical device "%s" is already defined' % \
                                p_devs[0])
        if cur_dev_sxp is None:
            self.device_create(dev_sxp)
            return True

        if _is_vscsi_defined(cur_dev_sxp, v_devs = v_devs):
            raise XendError('The virtual device "%s" is already defined' % \
                            v_devs[0])

        # feature-host mode must match the existing vscsi node.
        if int(dev_config['feature-host']) != \
           int(sxp.child_value(cur_dev_sxp, 'feature-host')):
            raise XendError('The physical device "%s" cannot define '
                            'because mode is different' % devs[0]['p-dev'])

        new_be = dev_config.get('backend', None)
        if new_be is not None:
            cur_be = sxp.child_value(cur_dev_sxp, 'backend', None)
            if cur_be is None:
                cur_be = xen.xend.XendDomain.DOM0_ID
            new_be_dom = _vscsi_be(new_be)
            cur_be_dom = _vscsi_be(cur_be)
            if new_be_dom != cur_be_dom:
                raise XendError('The physical device "%s" cannot define '
                                'because backend is different' % devs[0]['p-dev'])

    elif state == xenbusState['Closing']:
        if not _is_vscsi_defined(cur_dev_sxp, v_devs = v_devs):
            raise XendError("Cannot detach vscsi device does not exist")

    if self.domid is not None:
        # use DevController.reconfigureDevice to change device config
        dev_control = self.getDeviceController(dev_class)
        dev_uuid = dev_control.reconfigureDevice(req_devid, dev_config)
        dev_control.waitForDevice_reconfigure(req_devid)
        num_devs = dev_control.cleanupDevice(req_devid)

        # update XendConfig with new device info
        if dev_uuid:
            new_dev_sxp = dev_control.configuration(req_devid)
            self.info.device_update(dev_uuid, new_dev_sxp)

        # If there is no device left, destroy vscsi and remove config.
        if num_devs == 0:
            self.destroyDevice('vscsi', req_devid)
            del self.info['devices'][dev_uuid]

    else:
        # Inactive domain: rebuild the stored sxp config directly.
        new_dev_sxp = ['vscsi']
        cur_mode = sxp.children(cur_dev_sxp, 'feature-host')[0]
        new_dev_sxp.append(cur_mode)
        try:
            cur_be = sxp.children(cur_dev_sxp, 'backend')[0]
            new_dev_sxp.append(cur_be)
        except IndexError:
            pass

        for cur_dev in sxp.children(cur_dev_sxp, 'dev'):
            if state == xenbusState['Closing']:
                if int(cur_mode[1]) == 1:
                    continue
                if sxp.child_value(cur_dev, 'v-dev') in v_devs:
                    continue
            new_dev_sxp.append(cur_dev)

        if state == xenbusState['Initialising']:
            for new_dev in sxp.children(dev_sxp, 'dev'):
                new_dev_sxp.append(new_dev)

        dev_uuid = sxp.child_value(cur_dev_sxp, 'uuid')
        self.info.device_update(dev_uuid, new_dev_sxp)

        # If there is only 'vscsi' in new_dev_sxp, remove the config.
        if len(sxp.children(new_dev_sxp, 'dev')) == 0:
            del self.info['devices'][dev_uuid]

    xen.xend.XendDomain.instance().managed_config_save(self)

    return True
def vusb_device_configure(self, dev_sxp, devid):
    """Configure a virtual root port.
    """
    if sxp.name(dev_sxp) != 'vusb':
        return False

    # Build a {'port-N': bus} mapping from the (port (num bus) ...) sxp.
    port_map = {}
    for entry in sxp.child(dev_sxp, 'port')[1:]:
        try:
            num, bus = entry
            port_map['port-%i' % int(num)] = str(bus)
        except TypeError:
            # Skip entries that cannot be unpacked into (num, bus).
            pass

    self.getDeviceController('vusb').reconfigureDevice(devid, port_map)
    return True
def device_configure(self, dev_sxp, devid = None):
    """Configure an existing device.

    pci/vscsi/vusb configs are dispatched to their dedicated methods;
    everything else is treated as a block device reconfiguration.

    @param dev_sxp: device configuration
    @type dev_sxp: SXP object (parsed config)
    @param devid: device id
    @type devid: int
    @return: Returns True if successfully updated device
    @rtype: boolean
    """

    # convert device sxp to a dict
    dev_class = sxp.name(dev_sxp)
    dev_config = {}

    if dev_class == 'pci':
        return self.pci_device_configure(dev_sxp)

    if dev_class == 'vscsi':
        return self.vscsi_device_configure(dev_sxp)

    if dev_class == 'vusb':
        return self.vusb_device_configure(dev_sxp, devid)

    for opt_val in dev_sxp[1:]:
        try:
            dev_config[opt_val[0]] = opt_val[1]
        except IndexError:
            pass

    dev_control = self.getDeviceController(dev_class)
    if devid is None:
        # Derive the device number from the virtual device name.
        dev = dev_config.get('dev', '')
        if not dev:
            raise VmError('Block device must have virtual details specified')
        if 'ioemu:' in dev:
            (_, dev) = dev.split(':', 1)
        try:
            (dev, _) = dev.split(':', 1)  # Remove ":disk" or ":cdrom"
        except ValueError:
            pass
        devid = dev_control.convertToDeviceNumber(dev)
    dev_info = self._getDeviceInfo_vbd(devid)
    if dev_info is None:
        raise VmError("Device %s not connected" % devid)
    dev_uuid = sxp.child_value(dev_info, 'uuid')

    if self.domid is not None:
        # use DevController.reconfigureDevice to change device config
        dev_control.reconfigureDevice(devid, dev_config)
    else:
        (_, new_b, new_f) = dev_control.getDeviceDetails(dev_config)
        # For an inactive domain only a read-only cdrom media change
        # is permitted; any other reconfiguration is refused.
        if (new_f['device-type'] == 'cdrom' and
            sxp.child_value(dev_info, 'dev').endswith(':cdrom') and
            new_b['mode'] == 'r' and
            sxp.child_value(dev_info, 'mode') == 'r'):
            pass
        else:
            raise VmError('Refusing to reconfigure device %s:%d to %s' %
                          (dev_class, devid, dev_config))

    # update XendConfig with new device info
    self.info.device_update(dev_uuid, dev_sxp)
    xen.xend.XendDomain.instance().managed_config_save(self)
    return True
def waitForDevices(self):
    """Wait for this domain's configured devices to connect.

    @raise VmError: if any device fails to initialise.
    """
    for device_class in XendDevices.valid_devices():
        controller = self.getDeviceController(device_class)
        controller.waitForDevices()
def hvm_destroyPCIDevice(self, pci_dev):
    """Ask the device model to unplug PCI device pci_dev from this HVM
    guest, after verifying its co-assignment constraints.

    @param pci_dev: pci device dict (domain/bus/slot/func keys)
    @return: 0 on success
    @raise VmError: if the guest is not HVM, the device cannot be
        located, or a co-assigned device is still attached
    """
    log.debug("hvm_destroyPCIDevice: %s", pci_dev)

    if not self.info.is_hvm():
        raise VmError("hvm_destroyPCIDevice called on non-HVM guest")

    # Check the co-assignment.
    # To pci-detach a device D from domN, we should ensure: for each DD in the
    # list of D's co-assignment devices, DD is not assigned (to domN).
    #
    from xen.xend.server.pciif import PciDevice
    try:
        pci_device = PciDevice(pci_dev)
    except Exception, e:
        raise VmError("pci: failed to locate device and "+
                      "parse its resources - "+str(e))
    coassignment_list = pci_device.find_coassigned_devices()
    coassignment_list.remove(pci_device.name)
    assigned_pci_device_str_list = self._get_assigned_pci_devices()
    for pci_str in coassignment_list:
        if xoptions.get_pci_dev_assign_strict_check() and \
           pci_str in assigned_pci_device_str_list:
            raise VmError(("pci: failed to pci-detach %s from domain %s" + \
                           " because one of its co-assignment device %s is still " + \
                           " assigned to the domain." \
                           )% (pci_device.name, self.info['name_label'], pci_str))

    bdf_str = pci_dict_to_bdf_str(pci_dev)
    log.info("hvm_destroyPCIDevice:%s:%s!", pci_dev, bdf_str)
    if self.domid is not None:
        # Only a running domain has a device model to signal.
        self.image.signalDeviceModel('pci-rem', 'pci-removed', bdf_str)

    return 0
def destroyDevice(self, deviceClass, devid, force = False, rm_cfg = False):
    """Destroy a device, optionally removing its stored configuration.

    @param deviceClass: device class name (e.g. 'vif', 'vbd', 'tap2')
    @param devid: device id or name
    @param force: forceful destruction (skips waiting for removal)
    @param rm_cfg: also remove the device from the managed config
    @return: controller destroyDevice() result (None if domain inactive)
    """
    log.debug("XendDomainInfo.destroyDevice: deviceClass = %s, device = %s",
              deviceClass, devid)

    if rm_cfg:
        # Convert devid to device number. A device number is
        # needed to remove its configuration.
        dev = self.getDeviceController(deviceClass).convertToDeviceNumber(devid)

        # Save current sxprs. A device number and a backend
        # path are needed to remove its configuration but sxprs
        # do not have those after calling destroyDevice.
        sxprs = self.getDeviceSxprs(deviceClass)

    rc = None
    if self.domid is not None:

        #new blktap implementation may need a sysfs write after everything is torn down.
        if deviceClass == 'tap2':
            dev = self.getDeviceController(deviceClass).convertToDeviceNumber(devid)
            path = self.getDeviceController(deviceClass).readBackend(dev, 'params')
            frontpath = self.getDeviceController(deviceClass).frontendPath(dev)
            backpath = xstransact.Read(frontpath, "backend")
            thread.start_new_thread(self.getDeviceController(deviceClass).finishDeviceCleanup, (backpath, path))

        rc = self.getDeviceController(deviceClass).destroyDevice(devid, force)
        if not force and rm_cfg:
            # The backend path, other than the device itself,
            # has to be passed because its accompanied frontend
            # path may be void until its removal is actually
            # issued. It is probable because destroyDevice is
            # issued first.
            for dev_num, dev_info in sxprs:
                dev_num = int(dev_num)
                if dev_num == dev:
                    for x in dev_info:
                        if x[0] == 'backend':
                            backend = x[1]
                            break
                    break
            self._waitForDevice_destroy(deviceClass, devid, backend)

    if rm_cfg and deviceClass != "vif2":
        if deviceClass == 'vif':
            if self.domid is not None:
                # Find the stored vif info via its MAC address.
                mac = ''
                for dev_num, dev_info in sxprs:
                    dev_num = int(dev_num)
                    if dev_num == dev:
                        for x in dev_info:
                            if x[0] == 'mac':
                                mac = x[1]
                                break
                        break
                dev_info = self._getDeviceInfo_vif(mac)
            else:
                _, dev_info = sxprs[dev]
        else:  # 'vbd' or 'tap' or 'tap2'
            dev_info = self._getDeviceInfo_vbd(dev)
            # To remove the UUID of the device from refs,
            # deviceClass must be always 'vbd'.
            deviceClass = 'vbd'
        if dev_info is None:
            raise XendError("Device %s is not defined" % devid)

        dev_uuid = sxp.child_value(dev_info, 'uuid')
        del self.info['devices'][dev_uuid]
        self.info['%s_refs' % deviceClass].remove(dev_uuid)
        xen.xend.XendDomain.instance().managed_config_save(self)

    return rc
def getDeviceSxprs(self, deviceClass):
    """Return the sxprs for all devices of deviceClass.

    For 'pci' the shared config node's devs list is returned; for a
    running/paused/crashed domain the live controller sxprs are used;
    otherwise the list is rebuilt from the stored configuration as
    [device_number, dev_info] pairs.
    """
    if deviceClass == 'pci':
        dev_info = self._getDeviceInfo_pci('0')#from self.info['devices']
        if dev_info is None:
            return []
        dev_uuid = sxp.child_value(dev_info, 'uuid')
        pci_devs = self.info['devices'][dev_uuid][1]['devs']
        return pci_devs
    if self._stateGet() in (DOM_STATE_RUNNING, DOM_STATE_PAUSED, DOM_STATE_CRASHED):
        return self.getDeviceController(deviceClass).sxprs()
    else:
        sxprs = []
        dev_num = 0
        for dev_type, dev_info in self.info.all_devices_sxpr():
            # 'vbd' matches all block device flavours (vbd/tap/tap2).
            if (deviceClass == 'vbd' and dev_type not in ['vbd', 'tap', 'tap2']) or \
               (deviceClass != 'vbd' and dev_type != deviceClass):
                continue

            if deviceClass == 'vscsi':
                vscsi_devs = ['devs', []]
                for vscsi_dev in sxp.children(dev_info, 'dev'):
                    vscsi_dev.append(['frontstate', None])
                    vscsi_devs[1].append(vscsi_dev)
                    dev_num = int(sxp.child_value(vscsi_dev, 'devid'))
                vscsi_mode = sxp.children(dev_info, 'feature-host')[0]
                sxprs.append([dev_num, [vscsi_devs, vscsi_mode]])
            elif deviceClass == 'vbd':
                dev = sxp.child_value(dev_info, 'dev')
                if 'ioemu:' in dev:
                    (_, dev) = dev.split(':', 1)
                try:
                    (dev_name, _) = dev.split(':', 1)  # Remove ":disk" or ":cdrom"
                except ValueError:
                    dev_name = dev
                dev_num = self.getDeviceController('vbd').convertToDeviceNumber(dev_name)
                sxprs.append([dev_num, dev_info])
            else:
                sxprs.append([dev_num, dev_info])
                dev_num += 1
        return sxprs
def getBlockDeviceClass(self, devid):
    """Return the device class ('vbd', 'tap', 'tap2', ...) backing the
    block device devid, or None implicitly if it cannot be found.
    """
    # if the domain is running we can get the device class from xenstore.
    # This is more accurate, as blktap1 devices show up as blktap2 devices
    # in the config.
    if self._stateGet() in (DOM_STATE_RUNNING, DOM_STATE_PAUSED, DOM_STATE_CRASHED):
        # All block devices have a vbd frontend, so we know the frontend path
        dev = self.getDeviceController('vbd').convertToDeviceNumber(devid)
        frontendPath = "%s/device/vbd/%s" % (self.dompath, dev)
        for devclass in XendDevices.valid_devices():
            for dev in xstransact.List("%s/device/%s" % (self.vmpath, devclass)):
                devFrontendPath = xstransact.Read("%s/device/%s/%s/frontend" % (self.vmpath, devclass, dev))
                if frontendPath == devFrontendPath:
                    return devclass

    else: # the domain is not active so we must get the device class
          # from the config
        # To get a device number from the devid,
        # we temporarily use the device controller of VBD.
        dev = self.getDeviceController('vbd').convertToDeviceNumber(devid)
        dev_info = self._getDeviceInfo_vbd(dev)
        if dev_info:
            return dev_info[0]
def _getDeviceInfo_vif(self, mac):
    """Return the sxp info of the vif whose MAC matches mac, else None."""
    for kind, info in self.info.all_devices_sxpr():
        if kind == 'vif' and sxp.child_value(info, 'mac') == mac:
            return info
def _getDeviceInfo_vbd(self, devid):
    """Return sxp info for the block device (vbd/tap/tap2) whose device
    number matches devid, else None."""
    for kind, info in self.info.all_devices_sxpr():
        if kind not in ('vbd', 'tap', 'tap2'):
            continue
        # Strip any ":disk"/":cdrom" suffix before converting the name.
        name = sxp.child_value(info, 'dev').split(':')[0]
        number = self.getDeviceController(kind).convertToDeviceNumber(name)
        if number == devid:
            return info
def _getDeviceInfo_pci(self, devid):
for dev_type, dev_info in self.info.all_devices_sxpr():
if dev_type != 'pci':
continue
return dev_info
return None
def _getDeviceInfo_vscsi(self, devid):
    """Return sxp info for the vscsi node whose first dev entry has the
    given devid, else None."""
    wanted = int(devid)
    for kind, info in self.info.all_devices_sxpr():
        if kind != 'vscsi':
            continue
        entries = sxp.children(info, 'dev')
        if int(sxp.child_value(entries[0], 'devid')) == wanted:
            return info
    return None
def _getDeviceInfo_vusb(self, devid):
for dev_type, dev_info in self.info.all_devices_sxpr():
if dev_type != 'vusb':
continue
return dev_info
return None
def _get_assigned_pci_devices(self, devid = 0):
    """Return the BDF strings of PCI devices assigned to this domain:
    a live query when the domain is running, otherwise derived from
    the stored configuration."""
    if self.domid is not None:
        return get_assigned_pci_devices(self.domid)

    pci_info = self._getDeviceInfo_pci(devid)
    if pci_info is None:
        return []
    conf_uuid = sxp.child_value(pci_info, 'uuid')
    devs = self.info['devices'][conf_uuid][1]['devs']
    return [pci_dict_to_bdf_str(d) for d in devs]
def setMemoryTarget(self, target):
    """Set the memory target of this domain.
    @param target: In MiB.
    @raise XendError: if shrinking dom0 below its configured minimum
    """
    log.debug("Setting memory target of domain %s (%s) to %d MiB.",
              self.info['name_label'], str(self.domid), target)

    MiB = 1024 * 1024
    memory_cur = self.get_memory_dynamic_max() / MiB

    if self.domid == 0:
        # Never let dom0 shrink below the configured minimum.
        dom0_min_mem = xoptions.get_dom0_min_mem()
        if target < memory_cur and dom0_min_mem > target:
            raise XendError("memory_dynamic_max too small")

    self._safe_set_memory('memory_dynamic_min', target * MiB)
    self._safe_set_memory('memory_dynamic_max', target * MiB)

    if self.domid >= 0:
        if target > memory_cur:
            # Growing: free enough host memory first (KiB units).
            balloon.free((target - memory_cur) * 1024, self)
        self.storeVm("memory", target)
        # xenstore memory/target is in KiB, hence the << 10.
        self.storeDom("memory/target", target << 10)
        xc.domain_set_target_mem(self.domid,
                                 (target * 1024))
    xen.xend.XendDomain.instance().managed_config_save(self)
def setMemoryMaximum(self, limit):
    """Set the maximum memory limit of this domain
    @param limit: In MiB.
    @raise XendError: if the hypervisor call fails (the previous
        static max is restored first)
    """
    log.debug("Setting memory maximum of domain %s (%s) to %d MiB.",
              self.info['name_label'], str(self.domid), limit)

    maxmem_cur = self.get_memory_static_max()
    MiB = 1024 * 1024
    self._safe_set_memory('memory_static_max', limit * MiB)

    if self.domid >= 0:
        # Hypervisor interface takes KiB.
        maxmem = int(limit) * 1024
        try:
            return xc.domain_setmaxmem(self.domid, maxmem)
        except Exception, ex:
            # Roll back the config change before reporting the failure.
            self._safe_set_memory('memory_static_max', maxmem_cur)
            raise XendError(str(ex))
    # Only reached for an inactive domain (the active path returns).
    xen.xend.XendDomain.instance().managed_config_save(self)
def getVCPUInfo(self):
    """Return an sxp ['domain', ...] describing every VCPU of this
    domain.  Live data comes from the hypervisor; for an inactive
    domain placeholder values (0 / -1) are filled in.

    @raise XendError: wrapping any RuntimeError from the query
    """
    try:
        # We include the domain name and ID, to help xm.
        sxpr = ['domain',
                ['domid', self.domid],
                ['name', self.info['name_label']],
                ['vcpu_count', self.info['VCPUs_max']]]

        for i in range(0, self.info['VCPUs_max']):
            if self.domid is not None:
                info = xc.vcpu_getinfo(self.domid, i)

                sxpr.append(['vcpu',
                             ['number', i],
                             ['online', info['online']],
                             ['blocked', info['blocked']],
                             ['running', info['running']],
                             ['cpu_time', info['cpu_time'] / 1e9],
                             ['cpu', info['cpu']],
                             ['cpumap', info['cpumap']]])
            else:
                sxpr.append(['vcpu',
                             ['number', i],
                             ['online', 0],
                             ['blocked', 0],
                             ['running', 0],
                             ['cpu_time', 0.0],
                             ['cpu', -1],
                             ['cpumap', self.info['cpus'][i] and \
                              self.info['cpus'][i] or range(64)]])

        return sxpr
    except RuntimeError, exn:
        raise XendError(str(exn))
def getDomInfo(self):
    """Fetch this domain's info record from the hypervisor via dom_get."""
    return dom_get(self.domid)
#
# Internal helper functions ... TODO: re-categorise these.
#
def _augmentInfo(self, priv):
    """Augment self.info, as given to us through L{recreate}, with
    values taken from the store. This recovers those values known
    to xend but not to the hypervisor.

    @param priv: True for the privileged domain (dom0), whose memory
        and vcpu settings come from Xen rather than the store
    """
    augment_entries = XendConfig.LEGACY_XENSTORE_VM_PARAMS[:]
    if priv:
        # dom0's memory/vcpu values are authoritative in Xen itself.
        augment_entries.remove('memory')
        augment_entries.remove('maxmem')
        augment_entries.remove('vcpus')
        augment_entries.remove('vcpu_avail')
    vm_config = self._readVMDetails([(k, XendConfig.LEGACY_CFG_TYPES[k])
                                     for k in augment_entries])

    # make returned lists into a dictionary
    vm_config = dict(zip(augment_entries, vm_config))

    for arg in augment_entries:
        val = vm_config[arg]
        if val != None:
            # Map legacy config keys onto their XenAPI equivalents.
            if arg in XendConfig.LEGACY_CFG_TO_XENAPI_CFG:
                xapiarg = XendConfig.LEGACY_CFG_TO_XENAPI_CFG[arg]
                self.info[xapiarg] = val
            elif arg == "memory":
                self.info["static_memory_min"] = val
            elif arg == "maxmem":
                self.info["static_memory_max"] = val
            else:
                self.info[arg] = val

    # read CPU Affinity
    self.info['cpus'] = []
    vcpus_info = self.getVCPUInfo()
    for vcpu_info in sxp.children(vcpus_info, 'vcpu'):
        self.info['cpus'].append(sxp.child_value(vcpu_info, 'cpumap'))

    # For dom0, we ignore any stored value for the vcpus fields, and
    # read the current value from Xen instead. This allows boot-time
    # settings to take precedence over any entries in the store.
    if priv:
        xeninfo = dom_get(self.domid)
        self.info['VCPUs_max'] = xeninfo['online_vcpus']
        self.info['vcpu_avail'] = (1 << xeninfo['online_vcpus']) - 1

    # read image value
    image_sxp = self._readVm('image')
    if image_sxp:
        self.info.update_with_image_sxp(sxp.from_string(image_sxp))

    # read devices
    devices = []
    for devclass in XendDevices.valid_devices():
        devconfig = self.getDeviceController(devclass).configurations()
        if devconfig:
            devices.extend(devconfig)

    if not self.info['devices'] and devices is not None:
        for device in devices:
            self.info.device_add(device[0], cfg_sxp = device)

    self._update_consoles()
def _update_consoles(self, transaction = None):
    """Synchronise the VT100 and VNC console information in self.info
    with the ports published in xenstore.  No-op for dom0 and for a
    domain without a domid.

    @param transaction: optional xenstore transaction to read through
    """
    if self.domid == None or self.domid == 0:
        return

    # Update VT100 port if it exists
    if transaction is None:
        self.console_port = self.readDom('console/port')
    else:
        self.console_port = self.readDomTxn(transaction, 'console/port')
    if self.console_port is not None:
        serial_consoles = self.info.console_get_all('vt100')
        if not serial_consoles:
            cfg = self.info.console_add('vt100', self.console_port)
            self._createDevice('console', cfg)
        else:
            console_uuid = serial_consoles[0].get('uuid')
            self.info.console_update(console_uuid, 'location',
                                     self.console_port)
            # Notify xenpv device model that console info is ready
            if not self.info.is_hvm() and self.info.has_rfb():
                console_ctrl = self.getDeviceController('console')
                # The value is unchanged. Just for xenstore watcher
                console_ctrl.writeBackend(0, 'uuid', console_uuid)

    # Update VNC port if it exists and write to xenstore
    if transaction is None:
        vnc_port = self.readDom('console/vnc-port')
    else:
        vnc_port = self.readDomTxn(transaction, 'console/vnc-port')
    if vnc_port is not None:
        for dev_uuid, (dev_type, dev_info) in self.info['devices'].items():
            if dev_type == 'vfb':
                old_location = dev_info.get('location')
                listen_host = dev_info.get('vnclisten', \
                                XendOptions.instance().get_vnclisten_address())
                new_location = '%s:%s' % (listen_host, str(vnc_port))
                if old_location == new_location:
                    # Already up to date; only the first vfb is handled.
                    break

                dev_info['location'] = new_location
                self.info.device_update(dev_uuid, cfg_xenapi = dev_info)
                vfb_ctrl = self.getDeviceController('vfb')
                vfb_ctrl.reconfigureDevice(0, dev_info)
                break
#
# Functions to read/update xenstore /vm/*
#
def _readVm(self, *args):
    """Read value(s) at the given key(s) under this VM's /vm xenstore path."""
    return xstransact.Read(self.vmpath, *args)
def _writeVm(self, *args):
    """Write key/value pair(s) under this VM's /vm xenstore path."""
    return xstransact.Write(self.vmpath, *args)
def _removeVm(self, *args):
    """Remove the given key(s) under this VM's /vm xenstore path."""
    return xstransact.Remove(self.vmpath, *args)
def _gatherVm(self, *args):
    """Gather values for the given specs under this VM's /vm xenstore path."""
    return xstransact.Gather(self.vmpath, *args)
def _listRecursiveVm(self, *args):
    """Recursively list entries under this VM's /vm xenstore path."""
    return xstransact.ListRecursive(self.vmpath, *args)
def storeVm(self, *args):
    """Store key/value pair(s) under this VM's /vm xenstore path."""
    return xstransact.Store(self.vmpath, *args)
def permissionsVm(self, *args):
    """Set permissions on node(s) under this VM's /vm xenstore path."""
    return xstransact.SetPermissions(self.vmpath, *args)
#
# Functions to read/update xenstore /dom/*
#
def readDom(self, *args):
    """Read value(s) at the given key(s) under this domain's xenstore path."""
    return xstransact.Read(self.dompath, *args)
def gatherDom(self, *args):
    """Gather values for the given specs under this domain's xenstore path."""
    return xstransact.Gather(self.dompath, *args)
def _writeDom(self, *args):
    """Write key/value pair(s) under this domain's xenstore path."""
    return xstransact.Write(self.dompath, *args)
def _removeDom(self, *args):
    """Remove the given key(s) under this domain's xenstore path."""
    return xstransact.Remove(self.dompath, *args)
def storeDom(self, *args):
    """Store key/value pair(s) under this domain's xenstore path."""
    return xstransact.Store(self.dompath, *args)
def readDomTxn(self, transaction, *args):
    """Read the given keys under self.dompath within transaction."""
    full_paths = [self.dompath + "/" + key for key in args]
    return transaction.read(*full_paths)
def gatherDomTxn(self, transaction, *args):
    """Gather the given specs under self.dompath within transaction."""
    full_paths = [self.dompath + "/" + key for key in args]
    return transaction.gather(*full_paths)
def _writeDomTxn(self, transaction, *args):
paths = map(lambda x: self.dompath + "/" + x, args)
return transaction.write(*paths)
def _removeDomTxn(self, transaction, *args):
paths = map(lambda x: self.dompath + "/" + x, args)
return transaction.remove(*paths)
def storeDomTxn(self, transaction, *args):
paths = map(lambda x: self.dompath + "/" + x, args)
return transaction.store(*paths)
def _recreateDom(self):
complete(self.dompath, lambda t: self._recreateDomFunc(t))
def _recreateDomFunc(self, t):
t.remove()
t.mkdir()
t.set_permissions({'dom' : self.domid, 'read' : True})
t.write('vm', self.vmpath)
# NB. Solaris guests use guest/ and hvmpv/ xenstore directories
# XCP Windows paravirtualized guests use data/
for i in [ 'device', 'control', 'error', 'memory', 'guest', \
'hvmpv', 'data' ]:
t.mkdir(i)
t.set_permissions(i, {'dom' : self.domid})
    def _storeDomDetails(self):
        """Populate this domain's /local/domain/<domid> xenstore tree with
        identity, console/store channel, platform-feature flags, ELF-note
        derived image keys, and per-vcpu availability."""
        to_store = {
            'domid': str(self.domid),
            'vm': self.vmpath,
            'name': self.info['name_label'],
            'console/limit': str(xoptions.get_console_limit() * 1024),
            'memory/target': str(self.info['memory_dynamic_max'] / 1024),
            'description': str(self.info['description']),
            }
        # Helper: record v under key n, mapping booleans to "1"/"0" and
        # silently skipping None values.
        def f(n, v):
            if v is not None:
                if type(v) == bool:
                    to_store[n] = v and "1" or "0"
                else:
                    to_store[n] = str(v)
        # Figure out if we need to tell xenconsoled to ignore this guest's
        # console - device model will handle console if it is running
        constype = "ioemu"
        if 'device_model' not in self.info['platform']:
            constype = "xenconsoled"
        f('console/port', self.console_port)
        f('console/ring-ref', self.console_mfn)
        f('console/type', constype)
        f('store/port', self.store_port)
        f('store/ring-ref', self.store_mfn)
        f('control/platform-feature-xs_reset_watches', True)
        if arch.type == "x86":
            f('control/platform-feature-multiprocessor-suspend', True)
        # elfnotes
        for n, v in self.info.get_notes().iteritems():
            n = n.lower().replace('_', '-')
            if n == 'features':
                for v in v.split('|'):
                    v = v.replace('_', '-')
                    if v.startswith('!'):
                        # A '!'-prefixed feature is recorded as disabled.
                        f('image/%s/%s' % (n, v[1:]), False)
                    else:
                        f('image/%s/%s' % (n, v), True)
            else:
                f('image/%s' % n, v)
        if self.info.has_key('security_label'):
            f('security_label', self.info['security_label'])
        to_store.update(self._vcpuDomDetails())
        log.debug("Storing domain details: %s", scrub_password(to_store))
        self._writeDom(to_store)
def _vcpuDomDetails(self):
def availability(n):
if self.info['vcpu_avail'] & (1 << n):
return 'online'
else:
return 'offline'
result = {}
for v in range(0, self.info['VCPUs_max']):
result["cpu/%d/availability" % v] = availability(v)
return result
#
# xenstore watches
#
    def _registerWatches(self):
        """Register a watch on this VM's entries in the store, and the
        domain's control/shutdown node, so that when they are changed
        externally, we keep up to date.  This should only be called by {@link
        #create}, {@link #recreate}, or {@link #restore}, once the domain's
        details have been written, but before the new instance is returned."""
        # Fires _storeChanged when anything under /vm/<uuid> changes.
        self.vmWatch = xswatch(self.vmpath, self._storeChanged)
        # Fires _handleShutdownWatch when a shutdown request is posted.
        self.shutdownWatch = xswatch(self.dompath + '/control/shutdown',
                                     self._handleShutdownWatch)
    def _storeChanged(self, _):
        """xswatch callback for the /vm path: pull legacy config values that
        were changed externally in xenstore back into self.info, and re-store
        the domain details if anything changed.  Returns 1 so the watch
        stays registered."""
        log.trace("XendDomainInfo.storeChanged");
        changed = False
        # Check whether values in the configuration have
        # changed in Xenstore.
        cfg_vm = ['name', 'on_poweroff', 'on_reboot', 'on_crash',
                  'rtc/timeoffset']
        vm_details = self._readVMDetails([(k,XendConfig.LEGACY_CFG_TYPES[k])
                                          for k in cfg_vm])
        # convert two lists into a python dictionary
        vm_details = dict(zip(cfg_vm, vm_details))
        for arg, val in vm_details.items():
            if arg in XendConfig.LEGACY_CFG_TO_XENAPI_CFG:
                xapiarg = XendConfig.LEGACY_CFG_TO_XENAPI_CFG[arg]
                if val != None and val != self.info[xapiarg]:
                    self.info[xapiarg] = val
                    changed = True
            elif arg == "memory":
                if val != None and val != self.info["static_memory_min"]:
                    self.info["static_memory_min"] = val
                    changed = True
            elif arg == "maxmem":
                if val != None and val != self.info["static_memory_max"]:
                    self.info["static_memory_max"] = val
                    changed = True
        # Check whether image definition has been updated
        image_sxp = self._readVm('image')
        if image_sxp and image_sxp != sxp.to_string(self.info.image_sxpr()):
            self.info.update_with_image_sxp(sxp.from_string(image_sxp))
            changed = True
        # Update the rtc_timeoffset to be preserved across reboot.
        # NB. No need to update xenstore domain section.
        val = int(vm_details.get("rtc/timeoffset", 0))
        self.info["platform"]["rtc_timeoffset"] = val
        if changed:
            # Update the domain section of the store, as this contains some
            # parameters derived from the VM configuration.
            self.refresh_shutdown_lock.acquire()
            try:
                state = self._stateGet()
                # Skip the re-store once the domain is already down.
                if state not in (DOM_STATE_SHUTDOWN, DOM_STATE_HALTED,):
                    self._storeDomDetails()
            finally:
                self.refresh_shutdown_lock.release()
        return 1
    def _handleShutdownWatch(self, _):
        """xswatch callback for control/shutdown: record when the shutdown
        request started and schedule a refreshShutdown check at the timeout.
        Returns True so the watch stays registered."""
        log.debug('XendDomainInfo.handleShutdownWatch')
        reason = self.readDom('control/shutdown')
        # 'suspend' is handled by the checkpoint machinery, not here.
        if reason and reason != 'suspend':
            sst = self.readDom('xend/shutdown_start_time')
            now = time.time()
            if sst:
                # A start time was already recorded; time out relative to it.
                self.shutdownStartTime = float(sst)
                timeout = float(sst) + SHUTDOWN_TIMEOUT - now
            else:
                self.shutdownStartTime = now
                self.storeDom('xend/shutdown_start_time', now)
                timeout = SHUTDOWN_TIMEOUT
            log.trace(
                "Scheduling refreshShutdown on domain %d in %ds.",
                self.domid, timeout)
            threading.Timer(timeout, self.refreshShutdown).start()
        return True
#
# Public Attributes for the VM
#
    def getDomid(self):
        """Return this domain's id."""
        return self.domid
def getStubdomDomid(self):
dom_list = xstransact.List('/local/domain')
for d in dom_list:
target = xstransact.Read('/local/domain/' + d + '/target')
if target is not None and int(target) == self.domid:
return int(d)
return None
def setName(self, name, to_store = True):
self._checkName(name)
self.info['name_label'] = name
if to_store:
self.storeVm("name", name)
    def getName(self):
        """Return the domain's name label."""
        return self.info['name_label']
    def getDomainPath(self):
        """Return this domain's xenstore path (/local/domain/<domid>)."""
        return self.dompath
    def getShutdownReason(self):
        """Return the pending control/shutdown request, or None."""
        return self.readDom('control/shutdown')
    def getStorePort(self):
        """Return the xenstore event-channel port.
        For use only by image.py and XendCheckpoint.py."""
        return self.store_port
    def getConsolePort(self):
        """Return the console event-channel port.
        For use only by image.py and XendCheckpoint.py"""
        return self.console_port
    def getFeatures(self):
        """Return the configured feature string.
        For use only by image.py."""
        return self.info['features']
    def getVCpuCount(self):
        """Return the maximum number of vcpus configured for this domain."""
        return self.info['VCPUs_max']
    def getVCpuAvail(self):
        """Return the vcpu availability bitmask (bit n = vcpu n online)."""
        return self.info['vcpu_avail']
def setVCpuCount(self, vcpus):
def vcpus_valid(n):
if vcpus <= 0:
raise XendError('Zero or less VCPUs is invalid')
if self.domid >= 0 and vcpus > self.info['VCPUs_max']:
raise XendError('Cannot set vcpus greater than max vcpus on running domain')
vcpus_valid(vcpus)
self.info['vcpu_avail'] = (1 << vcpus) - 1
if self.domid >= 0:
self.storeVm('vcpu_avail', self.info['vcpu_avail'])
self._writeDom(self._vcpuDomDetails())
self.info['VCPUs_live'] = vcpus
else:
if self.info['VCPUs_max'] > vcpus:
# decreasing
del self.info['cpus'][vcpus:]
elif self.info['VCPUs_max'] < vcpus:
# increasing
for c in range(self.info['VCPUs_max'], vcpus):
self.info['cpus'].append(list())
self.info['VCPUs_max'] = vcpus
xen.xend.XendDomain.instance().managed_config_save(self)
log.info("Set VCPU count on domain %s to %d", self.info['name_label'],
vcpus)
    def getMemoryTarget(self):
        """Get this domain's target memory size, in KB."""
        # info stores memory in bytes; convert to KB here.
        return self.info['memory_dynamic_max'] / 1024
    def getMemoryMaximum(self):
        """Get this domain's maximum memory size, in KB."""
        # remember, info now stores memory in bytes
        return self.info['memory_static_max'] / 1024
    def getResume(self):
        """Return the resume flag as a string."""
        return str(self._resume)
    def setResume(self, isresume):
        """Set the resume flag."""
        self._resume = isresume
    def getCpus(self):
        """Return the per-vcpu cpu-affinity lists."""
        return self.info['cpus']
    def setCpus(self, cpumap):
        """Set the per-vcpu cpu-affinity lists."""
        self.info['cpus'] = cpumap
    def getCap(self):
        """Return the credit-scheduler cpu cap."""
        return self.info['vcpus_params']['cap']
    def setCap(self, cpu_cap):
        """Set the credit-scheduler cpu cap (in-memory only)."""
        self.info['vcpus_params']['cap'] = cpu_cap
    def getWeight(self):
        """Return the credit-scheduler cpu weight."""
        return self.info['vcpus_params']['weight']
    def setWeight(self, cpu_weight):
        """Set the credit-scheduler cpu weight (in-memory only)."""
        self.info['vcpus_params']['weight'] = cpu_weight
    def getRestartCount(self):
        """Return the stored restart counter from the /vm tree."""
        return self._readVm('xend/restart_count')
    def refreshShutdown(self, xeninfo = None):
        """ Checks the domain for whether a shutdown is required.
        Called from XendDomainInfo and also image.py for HVM images.
        @param xeninfo: pre-fetched hypervisor domain info dict; fetched via
            dom_get if None.
        """
        # If set at the end of this method, a restart is required, with the
        # given reason. This restart has to be done out of the scope of
        # refresh_shutdown_lock.
        restart_reason = None
        self.refresh_shutdown_lock.acquire()
        try:
            if xeninfo is None:
                xeninfo = dom_get(self.domid)
                if xeninfo is None:
                    # The domain no longer exists. This will occur if we have
                    # scheduled a timer to check for shutdown timeouts and the
                    # shutdown succeeded. It will also occur if someone
                    # destroys a domain beneath us. We clean up the domain,
                    # just in case, but we can't clean up the VM, because that
                    # VM may have migrated to a different domain on this
                    # machine.
                    self.cleanupDomain()
                    self._stateSet(DOM_STATE_HALTED)
                    return
            if xeninfo['dying']:
                # Dying means that a domain has been destroyed, but has not
                # yet been cleaned up by Xen. This state could persist
                # indefinitely if, for example, another domain has some of its
                # pages mapped. We might like to diagnose this problem in the
                # future, but for now all we do is make sure that it's not us
                # holding the pages, by calling cleanupDomain. We can't
                # clean up the VM, as above.
                self.cleanupDomain()
                self._stateSet(DOM_STATE_SHUTDOWN)
                return
            elif xeninfo['crashed']:
                if self.readDom('xend/shutdown_completed'):
                    # We've seen this shutdown already, but we are preserving
                    # the domain for debugging. Leave it alone.
                    return
                log.warn('Domain has crashed: name=%s id=%d.',
                         self.info['name_label'], self.domid)
                self._writeVm(LAST_SHUTDOWN_REASON, 'crash')
                restart_reason = 'crash'
                self._stateSet(DOM_STATE_HALTED)
            elif xeninfo['shutdown']:
                self._stateSet(DOM_STATE_SHUTDOWN)
                if self.readDom('xend/shutdown_completed'):
                    # We've seen this shutdown already, but we are preserving
                    # the domain for debugging. Leave it alone.
                    return
                else:
                    reason = shutdown_reason(xeninfo['shutdown_reason'])
                    log.info('Domain has shutdown: name=%s id=%d reason=%s.',
                             self.info['name_label'], self.domid, reason)
                    self._writeVm(LAST_SHUTDOWN_REASON, reason)
                    self._clearRestart()
                    if reason == 'suspend':
                        self._stateSet(DOM_STATE_SUSPENDED)
                        # Don't destroy the domain. XendCheckpoint will do
                        # this once it has finished. However, stop watching
                        # the VM path now, otherwise we will end up with one
                        # watch for the old domain, and one for the new.
                        self._unwatchVm()
                    elif reason in ('poweroff', 'reboot'):
                        restart_reason = reason
                    else:
                        self.destroy()
            elif self.dompath is None:
                # We have yet to manage to call introduceDomain on this
                # domain. This can happen if a restore is in progress, or has
                # failed. Ignore this domain.
                pass
            else:
                # Domain is alive. If we are shutting it down, log a message
                # if it seems unresponsive.
                if xeninfo['paused']:
                    self._stateSet(DOM_STATE_PAUSED)
                else:
                    self._stateSet(DOM_STATE_RUNNING)
                if self.shutdownStartTime:
                    timeout = (SHUTDOWN_TIMEOUT - time.time() +
                               self.shutdownStartTime)
                    if (timeout < 0 and not self.readDom('xend/unresponsive')):
                        log.info(
                            "Domain shutdown timeout expired: name=%s id=%s",
                            self.info['name_label'], self.domid)
                        self.storeDom('xend/unresponsive', 'True')
        finally:
            self.refresh_shutdown_lock.release()
        # Restart outside the lock, on a separate thread, and only if no
        # other restart is already underway.
        if restart_reason and not self.restart_in_progress:
            self.restart_in_progress = True
            threading.Thread(target = self._maybeRestart,
                             args = (restart_reason,)).start()
#
# Restart functions - handling whether we come back up on shutdown.
#
    def _clearRestart(self):
        # Forget the recorded shutdown start time so a future shutdown
        # request gets a fresh timeout.
        self._removeDom("xend/shutdown_start_time")
def _maybeDumpCore(self, reason):
if reason == 'crash':
if xoptions.get_enable_dump() or self.get_on_crash() \
in ['coredump_and_destroy', 'coredump_and_restart']:
try:
self.dumpCore()
except XendError:
# This error has been logged -- there's nothing more
# we can do in this context.
pass
    def _maybeRestart(self, reason):
        """Carry out the configured actions_after_{shutdown,reboot,crash}
        behaviour for *reason*; unknown or missing actions destroy the
        domain."""
        # Before taking configured action, dump core if configured to do so.
        #
        self._maybeDumpCore(reason)
        # Dispatch to the correct method based upon the configured on_{reason}
        # behaviour.
        actions = {"destroy" : self.destroy,
                   "restart" : self._restart,
                   "preserve" : self._preserve,
                   "rename-restart" : self._renameRestart,
                   "coredump-destroy" : self.destroy,
                   "coredump-restart" : self._restart}
        action_conf = {
            'poweroff': 'actions_after_shutdown',
            'reboot': 'actions_after_reboot',
            'crash': 'actions_after_crash',
        }
        action_target = self.info.get(action_conf.get(reason))
        func = actions.get(action_target, None)
        if func and callable(func):
            func()
        else:
            self.destroy() # default to destroy
    def _renameRestart(self):
        """Restart, renaming and preserving the old domain for debugging."""
        self._restart(True)
    def _restart(self, rename = False):
        """Restart the domain after it has exited.
        @param rename True if the old domain is to be renamed and preserved,
        False if it is to be destroyed.
        """
        from xen.xend import XendDomain
        # Guard against restart loops: if a previous restart died midway,
        # RESTART_IN_PROGRESS is still set in the /vm tree.
        if self._readVm(RESTART_IN_PROGRESS):
            log.error('Xend failed during restart of domain %s. '
                      'Refusing to restart to avoid loops.',
                      str(self.domid))
            self.destroy()
            return
        old_domid = self.domid
        self._writeVm(RESTART_IN_PROGRESS, 'True')
        # Refuse to restart a domain that exited almost immediately, again
        # to avoid tight restart loops.
        elapse = time.time() - self.info['start_time']
        if elapse < MINIMUM_RESTART_TIME:
            log.error('VM %s restarting too fast (Elapsed time: %f seconds). '
                      'Refusing to restart to avoid loops.',
                      self.info['name_label'], elapse)
            self.destroy()
            return
        prev_vm_xend = self._listRecursiveVm('xend')
        new_dom_info = self.info
        try:
            if rename:
                new_dom_info = self._preserveForRestart()
            else:
                self._unwatchVm()
                self.destroy()
            # new_dom's VM will be the same as this domain's VM, except where
            # the rename flag has instructed us to call preserveForRestart.
            # In that case, it is important that we remove the
            # RESTART_IN_PROGRESS node from the new domain, not the old one,
            # once the new one is available.
            new_dom = None
            try:
                new_dom = XendDomain.instance().domain_create_from_dict(
                    new_dom_info)
                # Carry the old /vm xend/* subtree over to the new domain.
                for x in prev_vm_xend[0][1]:
                    new_dom._writeVm('xend/%s' % x[0], x[1])
                new_dom.waitForDevices()
                new_dom.unpause()
                rst_cnt = new_dom._readVm('xend/restart_count')
                rst_cnt = int(rst_cnt) + 1
                new_dom._writeVm('xend/restart_count', str(rst_cnt))
                new_dom._removeVm(RESTART_IN_PROGRESS)
            except:
                if new_dom:
                    new_dom._removeVm(RESTART_IN_PROGRESS)
                    new_dom.destroy()
                else:
                    self._removeVm(RESTART_IN_PROGRESS)
                raise
        except:
            log.exception('Failed to restart domain %s.', str(old_domid))
    def _preserveForRestart(self):
        """Preserve a domain that has been shut down, by giving it a new UUID,
        cloning the VM details, and giving it a new name.  This allows us to
        keep this domain for debugging, but restart a new one in its place
        preserving the restart semantics (name and UUID preserved).
        @return: config dict (carrying the original name and UUID) for the
            replacement domain
        """
        new_uuid = uuid.createString()
        new_name = 'Domain-%s' % new_uuid
        log.info("Renaming dead domain %s (%d, %s) to %s (%s).",
                 self.info['name_label'], self.domid, self.info['uuid'],
                 new_name, new_uuid)
        self._unwatchVm()
        self._releaseDevices()
        # Remove existing vm node in xenstore
        self._removeVm()
        # The clone keeps the original identity; this (dead) domain takes
        # the freshly generated name/UUID.
        new_dom_info = self.info.copy()
        new_dom_info['name_label'] = self.info['name_label']
        new_dom_info['uuid'] = self.info['uuid']
        self.info['name_label'] = new_name
        self.info['uuid'] = new_uuid
        self.vmpath = XS_VMROOT + new_uuid
        # Write out new vm node to xenstore
        self._storeVmDetails()
        self._preserve()
        return new_dom_info
    def _preserve(self):
        """Mark this dead domain as preserved (kept around for debugging)."""
        log.info("Preserving dead domain %s (%d).", self.info['name_label'],
                 self.domid)
        self._unwatchVm()
        # refreshShutdown checks this flag and leaves the domain alone.
        self.storeDom('xend/shutdown_completed', 'True')
        self._stateSet(DOM_STATE_HALTED)
#
# Debugging ..
#
    def dumpCore(self, corefile = None):
        """Create a core dump for this domain.
        @param corefile: target path; defaults to a timestamped file under
            /var/xen/dump/<name>.
        @raise: XendError if core dumping failed.
        """
        if not corefile:
            # To prohibit directory traversal
            based_name = os.path.basename(self.info['name_label'])
            coredir = "/var/xen/dump/%s" % (based_name)
            if not os.path.exists(coredir):
                try:
                    mkdir.parents(coredir, stat.S_IRWXU)
                except Exception, ex:
                    log.error("Cannot create directory: %s" % str(ex))
            if not os.path.isdir(coredir):
                # Use former directory to dump core
                coredir = '/var/xen/dump'
            this_time = time.strftime("%Y-%m%d-%H%M.%S", time.localtime())
            corefile = "%s/%s-%s.%s.core" % (coredir, this_time,
                                             self.info['name_label'], self.domid)
        if os.path.isdir(corefile):
            raise XendError("Cannot dump core in a directory: %s" %
                            corefile)
        try:
            try:
                # Flag the dump in the /vm tree while it is in progress.
                self._writeVm(DUMPCORE_IN_PROGRESS, 'True')
                xc.domain_dumpcore(self.domid, corefile)
            except RuntimeError, ex:
                # Rename the partial dump so it is not mistaken for a
                # complete one.
                corefile_incomp = corefile+'-incomplete'
                try:
                    os.rename(corefile, corefile_incomp)
                except:
                    pass
                log.error("core dump failed: id = %s name = %s: %s",
                          self.domid, self.info['name_label'], str(ex))
                raise XendError("Failed to dump core: %s" % str(ex))
        finally:
            self._removeVm(DUMPCORE_IN_PROGRESS)
#
# Device creation/deletion functions
#
def _createDevice(self, deviceClass, devConfig):
return self.getDeviceController(deviceClass).createDevice(devConfig)
def _waitForDevice(self, deviceClass, devid):
return self.getDeviceController(deviceClass).waitForDevice(devid)
def _waitForDeviceUUID(self, dev_uuid):
deviceClass, config = self.info['devices'].get(dev_uuid)
self._waitForDevice(deviceClass, config['devid'])
def _waitForDevice_destroy(self, deviceClass, devid, backpath):
return self.getDeviceController(deviceClass).waitForDevice_destroy(
devid, backpath)
def _reconfigureDevice(self, deviceClass, devid, devconfig):
return self.getDeviceController(deviceClass).reconfigureDevice(
devid, devconfig)
    def _createDevices(self):
        """Create the devices for a vm.
        Non-vscsi devices are created in ordered_device_refs order; vscsi
        devices are collected first and created in sorted devid order.
        @raise: VmError for invalid devices
        """
        if self.image:
            self.image.prepareEnvironment()
        vscsi_uuidlist = {}
        vscsi_devidlist = []
        ordered_refs = self.info.ordered_device_refs()
        for dev_uuid in ordered_refs:
            devclass, config = self.info['devices'][dev_uuid]
            if devclass in XendDevices.valid_devices() and devclass != 'vscsi':
                log.info("createDevice: %s : %s" % (devclass, scrub_password(config)))
                dev_uuid = config.get('uuid')
                if devclass == 'pci':
                    self.pci_dev_check_assignability_and_do_FLR(config)
                # For HVM guests the device model handles PCI devices, so
                # they are not created through a controller here.
                if devclass != 'pci' or not self.info.is_hvm() :
                    devid = self._createDevice(devclass, config)
                    # store devid in XendConfig for caching reasons
                    if dev_uuid in self.info['devices']:
                        self.info['devices'][dev_uuid][1]['devid'] = devid
            elif devclass == 'vscsi':
                vscsi_config = config.get('devs', [])[0]
                devid = vscsi_config.get('devid', '')
                dev_uuid = config.get('uuid')
                vscsi_uuidlist[devid] = dev_uuid
                vscsi_devidlist.append(devid)
        # Create vscsi devices in sorted devid order (/dev/sdxx naming in
        # the guest depends on creation order).
        if len(vscsi_uuidlist) > 0:
            vscsi_devidlist.sort()
            for vscsiid in vscsi_devidlist:
                dev_uuid = vscsi_uuidlist[vscsiid]
                devclass, config = self.info['devices'][dev_uuid]
                log.info("createDevice: %s : %s" % (devclass, scrub_password(config)))
                dev_uuid = config.get('uuid')
                devid = self._createDevice(devclass, config)
                # store devid in XendConfig for caching reasons
                if dev_uuid in self.info['devices']:
                    self.info['devices'][dev_uuid][1]['devid'] = devid
        if self.image:
            self.image.createDeviceModel()
        #if have pass-through devs, need the virtual pci slots info from qemu
        self.pci_device_configure_boot()
    def _releaseDevices(self, suspend = False):
        """Release all domain's devices.  Nothrow guarantee."""
        if self.image:
            try:
                log.debug("Destroying device model")
                self.image.destroyDeviceModel()
            except Exception, e:
                log.exception("Device model destroy failed %s" % str(e))
        else:
            log.debug("No device model")
        log.debug("Releasing devices")
        # Read-only transaction used just to enumerate the device classes;
        # it is always aborted below.
        t = xstransact("%s/device" % self.vmpath)
        try:
            for devclass in XendDevices.valid_devices():
                for dev in t.list(devclass):
                    try:
                        log.debug("Removing %s", dev);
                        self.destroyDevice(devclass, dev, False);
                    except:
                        # Log and swallow any exceptions in removal --
                        # there's nothing more we can do.
                        log.exception("Device release failed: %s; %s; %s",
                                      self.info['name_label'],
                                      devclass, dev)
        finally:
            t.abort()
def getDeviceController(self, name):
"""Get the device controller for this domain, and if it
doesn't exist, create it.
@param name: device class name
@type name: string
@rtype: subclass of DevController
"""
if name not in self._deviceControllers:
devController = XendDevices.make_controller(name, self)
if not devController:
raise XendError("Unknown device type: %s" % name)
self._deviceControllers[name] = devController
return self._deviceControllers[name]
#
# Migration functions (public)
#
def testMigrateDevices(self, network, dst):
""" Notify all device about intention of migration
@raise: XendError for a device that cannot be migrated
"""
for (n, c) in self.info.all_devices_sxpr():
rc = self.migrateDevice(n, c, network, dst, DEV_MIGRATE_TEST, self.getName())
if rc != 0:
raise XendError("Device of type '%s' refuses migration." % n)
    def migrateDevices(self, network, dst, step, domName=''):
        """Notify the devices about migration
        On failure, replay the previous step's recovery on every device that
        had already been notified (and the failing one), then re-raise.
        """
        # ctr counts how many devices were successfully notified.
        ctr = 0
        try:
            for (dev_type, dev_conf) in self.info.all_devices_sxpr():
                self.migrateDevice(dev_type, dev_conf, network, dst,
                                   step, domName)
                ctr = ctr + 1
        except:
            for dev_type, dev_conf in self.info.all_devices_sxpr():
                # Devices past the failure point recover against the
                # previous step.
                if ctr == 0:
                    step = step - 1
                ctr = ctr - 1
                self._recoverMigrateDevice(dev_type, dev_conf, network,
                                           dst, step, domName)
            raise
def migrateDevice(self, deviceClass, deviceConfig, network, dst,
step, domName=''):
return self.getDeviceController(deviceClass).migrate(deviceConfig,
network, dst, step, domName)
def _recoverMigrateDevice(self, deviceClass, deviceConfig, network,
dst, step, domName=''):
return self.getDeviceController(deviceClass).recover_migrate(
deviceConfig, network, dst, step, domName)
def setChangeHomeServer(self, chs):
if chs is not None:
self.info['change_home_server'] = bool(chs)
else:
if self.info.has_key('change_home_server'):
del self.info['change_home_server']
## private:
    def _constructDomain(self):
        """Construct the domain.
        Creates the domain in the hypervisor, moves it to its cpupool,
        rebuilds its xenstore dompath, applies platform tunables (tsc/timer/
        viridian/nestedhvm/hpet/...), validates scheduler params and PCI
        assignability, and registers the domain with XendDomain.
        @raise: VmError on error
        """
        log.debug('XendDomainInfo.constructDomain')
        self.shutdownStartTime = None
        self.restart_in_progress = False
        hap = 0
        hvm = self.info.is_hvm()
        if hvm:
            hap = self.info.is_hap()
            info = xc.xeninfo()
            if 'hvm' not in info['xen_caps']:
                raise VmError("HVM guest support is unavailable: is VT/AMD-V "
                              "supported by your CPU and enabled in your "
                              "BIOS?")
        # Hack to pre-reserve some memory for initial domain creation.
        # There is an implicit memory overhead for any domain creation. This
        # overhead is greater for some types of domain than others. For
        # example, an x86 HVM domain will have a default shadow-pagetable
        # allocation of 4MB. We free up 16MB here to be on the safe side.
        balloon.free(16*1024, self) # 16MB should be plenty
        ssidref = 0
        if security.on() == xsconstants.XS_POLICY_USE:
            ssidref = security.calc_dom_ssidref_from_info(self.info)
            if security.has_authorization(ssidref) == False:
                raise VmError("VM is not authorized to run.")
        s3_integrity = 0
        if self.info.has_key('s3_integrity'):
            s3_integrity = self.info['s3_integrity']
        # oos (out-of-sync shadow) defaults to enabled; the flag passed to
        # Xen is the inverse (oos_off).
        oos = self.info['platform'].get('oos', 1)
        oos_off = 1 - int(oos)
        # look-up pool id to use
        pool_name = self.info['pool_name']
        if len(pool_name) == 0:
            pool_name = "Pool-0"
        pool = XendCPUPool.lookup_pool(pool_name)
        if pool is None:
            raise VmError("unknown pool %s" % pool_name)
        pool_id = pool.query_pool_id()
        if pool_id is None:
            raise VmError("pool %s not activated" % pool_name)
        flags = (int(hvm) << 0) | (int(hap) << 1) | (int(s3_integrity) << 2) | (int(oos_off) << 3)
        try:
            # domid=0 here asks Xen to allocate the next free domid.
            self.domid = xc.domain_create(
                domid = 0,
                ssidref = ssidref,
                handle = uuid.fromString(self.info['uuid']),
                flags = flags,
                #cpupool = pool_id,
                target = self.info.target())
        except Exception, e:
            # may get here if due to ACM the operation is not permitted
            if security.on() == xsconstants.XS_POLICY_ACM:
                raise VmError('Domain in conflict set with running domain?')
            log.exception(e)
        if not self.domid or self.domid < 0:
            failmsg = 'Creating domain failed: name=%s' % self.info['name_label']
            if self.domid:
                failmsg += ', error=%i' % int(self.domid)
            raise VmError(failmsg)
        try:
            xc.cpupool_movedomain(pool_id, self.domid)
        except Exception, e:
            raise VmError('Moving domain to target pool failed')
        self.dompath = GetDomainPath(self.domid)
        self._recreateDom()
        # Set TSC mode of domain
        tsc_mode = self.info["platform"].get("tsc_mode")
        if arch.type == "x86" and tsc_mode is not None:
            xc.domain_set_tsc_info(self.domid, int(tsc_mode))
        # Set timer configuration of domain
        timer_mode = self.info["platform"].get("timer_mode")
        if hvm and timer_mode is not None:
            xc.hvm_set_param(self.domid, HVM_PARAM_TIMER_MODE,
                             long(timer_mode))
        if arch.type == "x86" and hvm:
            # Set Viridian interface configuration of domain
            viridian = self.info["platform"].get("viridian")
            if viridian is not None:
                xc.hvm_set_param(self.domid, HVM_PARAM_VIRIDIAN, long(viridian))
            # Set nestedhvm of domain
            nestedhvm = self.info["platform"].get("nestedhvm")
            if nestedhvm is not None:
                xc.hvm_set_param(self.domid, HVM_PARAM_NESTEDHVM, long(nestedhvm))
        # If nomigrate is set, disable migration
        nomigrate = self.info["platform"].get("nomigrate")
        if nomigrate is not None and long(nomigrate) != 0:
            xc.domain_disable_migrate(self.domid)
        # Optionally enable virtual HPET
        hpet = self.info["platform"].get("hpet")
        if hvm and hpet is not None:
            xc.hvm_set_param(self.domid, HVM_PARAM_HPET_ENABLED,
                             long(hpet))
        # Optionally enable periodic vpt aligning
        vpt_align = self.info["platform"].get("vpt_align")
        if hvm and vpt_align is not None:
            xc.hvm_set_param(self.domid, HVM_PARAM_VPT_ALIGN,
                             long(vpt_align))
        # Set maximum number of vcpus in domain
        xc.domain_max_vcpus(self.domid, int(self.info['VCPUs_max']))
        # Check for cpu_{cap|weight} validity for credit scheduler
        if XendNode.instance().xenschedinfo() == 'credit':
            cap = self.getCap()
            weight = self.getWeight()
            assert type(weight) == int
            assert type(cap) == int
            if weight < 1 or weight > 65535:
                raise VmError("Cpu weight out of range, valid values are within range from 1 to 65535")
            if cap < 0 or cap > self.getVCpuCount() * 100:
                raise VmError("Cpu cap out of range, valid range is from 0 to %s for specified number of vcpus" %
                              (self.getVCpuCount() * 100))
        # Test whether the devices can be assigned with VT-d
        self.info.update_platform_pci()
        pci = self.info["platform"].get("pci")
        pci_str = ''
        if pci and len(pci) > 0:
            pci = map(lambda x: x[0:4], pci)  # strip options
            pci_str = str(pci)
        # This test is done for both pv and hvm guest.
        for p in pci:
            pci_name = '%04x:%02x:%02x.%x' % \
                (parse_hex(p[0]), parse_hex(p[1]), parse_hex(p[2]), parse_hex(p[3]))
            try:
                pci_device = PciDevice(parse_pci_name(pci_name))
            except Exception, e:
                raise VmError("pci: failed to locate device and "+
                              "parse its resources - "+str(e))
            if pci_device.driver!='pciback' and pci_device.driver!='pci-stub':
                raise VmError(("pci: PCI Backend and pci-stub don't own device %s")\
                              %pci_device.name)
            if pci_name in get_all_assigned_pci_devices():
                raise VmError("failed to assign device %s that has"
                              " already been assigned to other domain." % pci_name)
        if hvm and pci_str != '':
            bdf = xc.test_assign_device(0, pci_str)
            if bdf != 0:
                if bdf == -1:
                    raise VmError("failed to assign device: maybe the platform"
                                  " doesn't support VT-d, or VT-d isn't enabled"
                                  " properly?")
                # Decode the failing device's bus/device/function from the
                # packed bdf value returned by the hypervisor.
                bus = (bdf >> 16) & 0xff
                devfn = (bdf >> 8) & 0xff
                dev = (devfn >> 3) & 0x1f
                func = devfn & 0x7
                raise VmError("failed to assign device %02x:%02x.%x: maybe it has"
                              " already been assigned to other domain, or maybe"
                              " it doesn't exist." % (bus, dev, func))
        # register the domain in the list
        from xen.xend import XendDomain
        XendDomain.instance().add_domain(self)
    def _introduceDomain(self):
        """Introduce the domain to xenstored (store ring mfn + event port)."""
        assert self.domid is not None
        assert self.store_mfn is not None
        assert self.store_port is not None
        try:
            IntroduceDomain(self.domid, self.store_mfn, self.store_port)
        except RuntimeError, exn:
            raise XendError(str(exn))
    def _setTarget(self, target):
        """Set this domain's target domain and record it in the dom path."""
        assert self.domid is not None
        try:
            SetTarget(self.domid, target)
            self.storeDom('target', target)
        except RuntimeError, exn:
            raise XendError(str(exn))
    def _setCPUAffinity(self):
        """ Repin domain vcpus if a restricted cpus list is provided.
        Returns the chosen node number.
        NOTE(review): 'index' is initialised to 0 and never reassigned, so
        the NUMA branch's best_node is not reflected in the return value --
        looks like it should return best_node; confirm against callers.
        """
        def has_cpus():
            # True if any vcpu has an explicit (non-empty) cpu list.
            if self.info['cpus'] is not None:
                for c in self.info['cpus']:
                    if c:
                        return True
            return False
        def has_cpumap():
            # True if any vcpus_params key carries an explicit cpumap.
            if self.info.has_key('vcpus_params'):
                for k, v in self.info['vcpus_params'].items():
                    if k.startswith('cpumap'):
                        return True
            return False
        index = 0
        if has_cpumap():
            for v in range(0, self.info['VCPUs_max']):
                if self.info['vcpus_params'].has_key('cpumap%i' % v):
                    cpumask = map(int, self.info['vcpus_params']['cpumap%i' % v].split(','))
                    xc.vcpu_setaffinity(self.domid, v, cpumask)
        elif has_cpus():
            for v in range(0, self.info['VCPUs_max']):
                if self.info['cpus'][v]:
                    xc.vcpu_setaffinity(self.domid, v, self.info['cpus'][v])
        else:
            # No explicit pinning: pick the least-loaded NUMA node(s) that
            # can hold the domain and pin all vcpus there.
            def find_relaxed_node(node_list):
                # Rank nodes by an estimated load (vcpus of other domains
                # per cpu), penalising empty nodes and nodes outside
                # node_list; returns node indices, least loaded first.
                import sys
                nr_nodes = info['max_node_index'] + 1
                if node_list is None:
                    node_list = range(0, nr_nodes)
                nodeload = [0]
                nodeload = nodeload * nr_nodes
                from xen.xend import XendDomain
                doms = XendDomain.instance().list('all')
                for dom in filter (lambda d: d.domid != self.domid, doms):
                    cpuinfo = dom.getVCPUInfo()
                    for vcpu in sxp.children(cpuinfo, 'vcpu'):
                        if sxp.child_value(vcpu, 'online') == 0: continue
                        cpumap = list(sxp.child_value(vcpu,'cpumap'))
                        for i in range(0, nr_nodes):
                            node_cpumask = node_to_cpu[i]
                            for j in node_cpumask:
                                if j in cpumap:
                                    nodeload[i] += 1
                                    break
                for i in range(0, nr_nodes):
                    if len(node_to_cpu[i]) == 0:
                        nodeload[i] += 8
                    else:
                        nodeload[i] = int(nodeload[i] * 16 / len(node_to_cpu[i]))
                        if i not in node_list:
                            nodeload[i] += 8
                return map(lambda x: x[0], sorted(enumerate(nodeload), key=lambda x:x[1]))
            info = xc.numainfo()
            if info['max_node_index'] > 0 and XendCPUPool.number_of_pools() < 2:
                node_memory_list = info['node_memfree']
                node_to_cpu = []
                for i in range(0, info['max_node_index'] + 1):
                    node_to_cpu.append([])
                for cpu, node in enumerate(xc.topologyinfo()['cpu_to_node']):
                    node_to_cpu[node].append(cpu)
                needmem = self.image.getRequiredAvailableMemory(self.info['memory_dynamic_max']) / 1024
                # Only nodes with enough free memory and at least one cpu
                # are candidates.
                candidate_node_list = []
                for i in range(0, info['max_node_index'] + 1):
                    if node_memory_list[i] >= needmem and len(node_to_cpu[i]) > 0:
                        candidate_node_list.append(i)
                best_node = find_relaxed_node(candidate_node_list)[0]
                cpumask = node_to_cpu[best_node]
                # Widen the mask with further nodes until it can hold all
                # vcpus.
                best_nodes = find_relaxed_node(filter(lambda x: x != best_node, range(0,info['max_node_index']+1)))
                for node_idx in best_nodes:
                    if len(cpumask) >= self.info['VCPUs_max']:
                        break
                    cpumask = cpumask + node_to_cpu[node_idx]
                    log.debug("allocating additional NUMA node %d", node_idx)
                for v in range(0, self.info['VCPUs_max']):
                    xc.vcpu_setaffinity(self.domid, v, cpumask)
        return index
    def _freeDMAmemory(self, node):
        """Balloon dom0 down until the given NUMA node has ~64MB free below
        4GB, so a PV guest with PCI devices can place its SWIOTLB there."""
        # If we are PV and have PCI devices the guest will
        # turn on a SWIOTLB. The SWIOTLB _MUST_ be located in the DMA32
        # zone (under 4GB). To do so, we need to balloon down Dom0 to where
        # there is enough (64MB) memory under the 4GB mark. This balloon-ing
        # might take more memory out than just 64MB though :-(
        if not self.info.is_pv_and_has_pci():
            return
        retries = 2000
        ask_for_mem = 0
        need_mem = 0
        try:
            while (retries > 0):
                physinfo = xc.physinfo()
                free_mem = physinfo['free_memory']
                max_node_id = physinfo['max_node_id']
                node_to_dma32_mem = physinfo['node_to_dma32_mem']
                if (node > max_node_id):
                    return
                # Extra 2MB above 64MB seems to do the trick.
                need_mem = 64 * 1024 + 2048 - node_to_dma32_mem[node]
                # our starting point. We ask just for the difference to
                # have an extra 64MB under 4GB.
                ask_for_mem = max(need_mem, ask_for_mem);
                if (need_mem > 0):
                    log.debug('_freeDMAmemory (%d) Need %dKiB DMA memory. '
                              'Asking for %dKiB', retries, need_mem,
                              ask_for_mem)
                    balloon.free(ask_for_mem, self)
                    # Ask for a bit more on every retry.
                    ask_for_mem = ask_for_mem + 2048
                else:
                    # OK. We got enough DMA memory.
                    break
                retries = retries - 1
        except:
            # This is best-try after all.
            need_mem = max(1, need_mem)
            pass
        if (need_mem > 0):
            log.warn('We tried our best to balloon down DMA memory to '
                     'accomodate your PV guest. We need %dKiB extra memory.',
                     need_mem)
    def _setSchedParams(self):
        """Push this domain's weight (and cap for credit) to the scheduler
        currently active on the node."""
        if XendNode.instance().xenschedinfo() == 'credit':
            from xen.xend import XendDomain
            XendDomain.instance().domain_sched_credit_set(self.getDomid(),
                                                          self.getWeight(),
                                                          self.getCap())
        elif XendNode.instance().xenschedinfo() == 'credit2':
            # credit2 takes no cap parameter.
            from xen.xend import XendDomain
            XendDomain.instance().domain_sched_credit2_set(self.getDomid(),
                                                           self.getWeight())
def _initDomain(self):
    """Build the domain: run the bootloader, create the image, pin
    VCPUs, set scheduler params, reserve memory, create channels and
    devices, and mark the domain RUNNING.

    Raises VmError on any build failure (RuntimeErrors are wrapped
    into VmError); temporary images are cleaned up on error.
    """
    log.debug('XendDomainInfo.initDomain: %s %s',
              self.domid,
              self.info['vcpus_params']['weight'])
    self._configureBootloader()
    try:
        self.image = image.create(self, self.info)
        # repin domain vcpus if a restricted cpus list is provided
        # this is done prior to memory allocation to aide in memory
        # distribution for NUMA systems.
        node = self._setCPUAffinity()
        # Set scheduling parameters.
        self._setSchedParams()
        # Use architecture- and image-specific calculations to determine
        # the various headrooms necessary, given the raw configured
        # values. maxmem, memory, and shadow are all in KiB.
        # but memory_static_max etc are all stored in bytes now.
        memory = self.image.getRequiredAvailableMemory(
            self.info['memory_dynamic_max'] / 1024)
        maxmem = self.image.getRequiredAvailableMemory(
            self.info['memory_static_max'] / 1024)
        shadow = self.image.getRequiredShadowMemory(
            self.info['shadow_memory'] * 1024,
            self.info['memory_static_max'] / 1024)
        log.debug("_initDomain:shadow_memory=0x%x, memory_static_max=0x%x, memory_static_min=0x%x.", self.info['shadow_memory'], self.info['memory_static_max'], self.info['memory_static_min'],)
        # Round shadow up to a multiple of a MiB, as shadow_mem_control
        # takes MiB and we must not round down and end up under-providing.
        shadow = ((shadow + 1023) / 1024) * 1024
        # set memory limit
        xc.domain_setmaxmem(self.domid, maxmem)
        vtd_mem = 0
        info = xc.physinfo()
        if 'hvm_directio' in info['virt_caps']:
            # Reserve 1 page per MiB of RAM for separate VT-d page table.
            vtd_mem = 4 * (self.info['memory_static_max'] / 1024 / 1024)
            # Round vtd_mem up to a multiple of a MiB.
            vtd_mem = ((vtd_mem + 1023) / 1024) * 1024
        self.guest_bitsize = self.image.getBitSize()
        # Make sure there's enough RAM available for the domain
        balloon.free(memory + shadow + vtd_mem, self)
        # Set up the shadow memory
        shadow_cur = xc.shadow_mem_control(self.domid, shadow / 1024)
        self.info['shadow_memory'] = shadow_cur
        # machine address size
        if self.info.has_key('machine_address_size'):
            log.debug("_initDomain: setting maximum machine address size %d" % self.info['machine_address_size'])
            xc.domain_set_machine_address_size(self.domid, self.info['machine_address_size'])
        if self.info.has_key('suppress_spurious_page_faults') and self.info['suppress_spurious_page_faults']:
            log.debug("_initDomain: suppressing spurious page faults")
            xc.domain_suppress_spurious_page_faults(self.domid)
        self._createChannels()
        # Build the guest image; yields the xenstore/console ring MFNs.
        channel_details = self.image.createImage()
        self.store_mfn = channel_details['store_mfn']
        if 'console_mfn' in channel_details:
            self.console_mfn = channel_details['console_mfn']
        if 'notes' in channel_details:
            self.info.set_notes(channel_details['notes'])
        if 'native_protocol' in channel_details:
            self.native_protocol = channel_details['native_protocol'];
        self._introduceDomain()
        if self.info.target():
            self._setTarget(self.info.target())
        # Ensure dom0 leaves enough DMA32 memory on this NUMA node.
        self._freeDMAmemory(node)
        self._createDevices()
        self.image.cleanupTmpImages()
        self.info['start_time'] = time.time()
        self._stateSet(DOM_STATE_RUNNING)
    except VmError, exn:
        log.exception("XendDomainInfo.initDomain: exception occurred")
        if self.image:
            self.image.cleanupTmpImages()
        raise exn
    except RuntimeError, exn:
        log.exception("XendDomainInfo.initDomain: exception occurred")
        if self.image:
            self.image.cleanupTmpImages()
        raise VmError(str(exn))
def cleanupDomain(self):
    """Cleanup domain resources; release devices. Idempotent. Nothrow
    guarantee."""
    # Serialise against the shutdown-refresh machinery.
    self.refresh_shutdown_lock.acquire()
    try:
        self.unwatchShutdown()
        self._releaseDevices()
        bootloader_tidy(self)
        if self.image:
            self.image = None
        try:
            self._removeDom()
        except:
            # Best-effort removal: log and continue (nothrow guarantee).
            log.exception("Removing domain path failed.")
        self._stateSet(DOM_STATE_HALTED)
        self.domid = None # Do not push into _stateSet()!
    finally:
        self.refresh_shutdown_lock.release()
def unwatchShutdown(self):
    """Remove the watch on the domain's control/shutdown node, if any.
    Idempotent. Nothrow guarantee. Expects to be protected by the
    refresh_shutdown_lock."""
    # Detach the watch first so the attribute is cleared even if
    # unwatching itself fails.
    watch, self.shutdownWatch = self.shutdownWatch, None
    if not watch:
        return
    try:
        watch.unwatch()
    except:
        log.exception("Unwatching control/shutdown failed.")
def waitForShutdown(self):
    """Block until this domain leaves the RUNNING/PAUSED states,
    polling the state under state_updated at 1s granularity."""
    self.state_updated.acquire()
    try:
        while True:
            if self._stateGet() not in (DOM_STATE_RUNNING, DOM_STATE_PAUSED):
                break
            self.state_updated.wait(timeout=1.0)
    finally:
        self.state_updated.release()
def waitForSuspend(self):
    """Wait for the guest to respond to a suspend request by
    shutting down. If the guest hasn't re-written control/shutdown
    after a certain amount of time, it's obviously not listening and
    won't suspend, so we give up. HVM guests with no PV drivers
    should already be shutdown.
    """
    # control/shutdown still reads "suspend" until the guest acks the
    # request by rewriting (clearing) the node.
    state = "suspend"
    nr_tries = 60
    self.state_updated.acquire()
    try:
        while self._stateGet() in (DOM_STATE_RUNNING,DOM_STATE_PAUSED):
            self.state_updated.wait(1.0)
            if state == "suspend":
                if nr_tries == 0:
                    msg = ('Timeout waiting for domain %s to suspend'
                           % self.domid)
                    # Withdraw the request before giving up.
                    self._writeDom('control/shutdown', '')
                    raise XendError(msg)
                state = self.readDom('control/shutdown')
                nr_tries -= 1
    finally:
        self.state_updated.release()
#
# TODO: recategorise - called from XendCheckpoint
#
def completeRestore(self, store_mfn, console_mfn):
    """Finish a restore: record the ring MFNs, re-introduce the domain
    to xenstore, recreate the image/device model and watches.

    Called from XendCheckpoint (see the TODO note above) once the
    domain's memory has been restored.
    """
    log.debug("XendDomainInfo.completeRestore")
    self.store_mfn = store_mfn
    self.console_mfn = console_mfn
    self._introduceDomain()
    self.image = image.create(self, self.info)
    if self.image:
        # True => device model starts from its saved state.
        self.image.createDeviceModel(True)
    self._storeDomDetails()
    self._registerWatches()
    self.refreshShutdown()
    log.debug("XendDomainInfo.completeRestore done")
def _endRestore(self):
    # Restore has finished: clear the resume flag.
    self.setResume(False)
#
# VM Destroy
#
def _prepare_phantom_paths(self):
    """Collect the xenstore paths (backend and frontend) of phantom
    VBDs attached to this domain, so they can be removed after the
    normal devices (see _cleanup_phantom_devs)."""
    # get associated devices to destroy
    # build list of phantom devices to be removed after normal devices
    plist = []
    if self.domid is not None:
        t = xstransact("%s/device/vbd" % GetDomainPath(self.domid))
        try:
            for dev in t.list():
                backend_phantom_vbd = xstransact.Read("%s/device/vbd/%s/phantom_vbd" \
                                      % (self.dompath, dev))
                if backend_phantom_vbd is not None:
                    frontend_phantom_vbd = xstransact.Read("%s/frontend" \
                                      % backend_phantom_vbd)
                    plist.append(backend_phantom_vbd)
                    plist.append(frontend_phantom_vbd)
        finally:
            # Read-only traversal: abort rather than commit.
            t.abort()
    return plist
def _cleanup_phantom_devs(self, plist):
# remove phantom devices
if not plist == []:
time.sleep(2)
for paths in plist:
if paths.find('backend') != -1:
# Modify online status /before/ updating state (latter is watched by
# drivers, so this ordering avoids a race).
xstransact.Write(paths, 'online', "0")
xstransact.Write(paths, 'state', str(xenbusState['Closing']))
# force
xstransact.Remove(paths)
def destroy(self):
    """Cleanup VM and destroy domain. Nothrow guarantee."""
    if self.domid is None:
        return
    from xen.xend import XendDomain
    log.debug("XendDomainInfo.destroy: domid=%s", str(self.domid))
    # Record phantom-device paths before any teardown happens.
    paths = self._prepare_phantom_paths()
    if self.dompath is not None:
        try:
            xc.domain_destroy_hook(self.domid)
            # Pause first so the guest is stopped during FLR/destroy.
            xc.domain_pause(self.domid)
            do_FLR(self.domid, self.info.is_hvm())
            xc.domain_destroy(self.domid)
            for state in DOM_STATES_OLD:
                self.info[state] = 0
            self._stateSet(DOM_STATE_HALTED)
        except:
            # Nothrow guarantee: log and continue with cleanup.
            log.exception("XendDomainInfo.destroy: domain destruction failed.")
        XendDomain.instance().remove_domain(self)
        self.cleanupDomain()
    if self.info.is_hvm() or self.guest_bitsize != 32:
        if self.alloc_mem:
            # Return the memory taken from the pool at allocation time.
            import MemoryPool
            log.debug("%s KiB need to add to Memory pool" %self.alloc_mem)
            MemoryPool.instance().increase_memory(self.alloc_mem)
    self._cleanup_phantom_devs(paths)
    self._cleanupVm()
    # Transient domains, or ones changing home server, are removed
    # from the managed-domain list entirely.
    if ("transient" in self.info["other_config"] and \
        bool(self.info["other_config"]["transient"])) or \
       ("change_home_server" in self.info and \
        bool(self.info["change_home_server"])):
        XendDomain.instance().domain_delete_by_dominfo(self)
def resetDomain(self):
    """Hard-reset: destroy this domain and recreate it from its
    current config.  Best-effort: failures are logged, not raised."""
    log.debug("XendDomainInfo.resetDomain(%s)", str(self.domid))
    old_domid = self.domid
    # Preserve the xend-private VM store subtree across the reset.
    prev_vm_xend = self._listRecursiveVm('xend')
    new_dom_info = self.info
    try:
        self._unwatchVm()
        self.destroy()
        new_dom = None
        try:
            from xen.xend import XendDomain
            # Let the new domain get a fresh id.
            new_dom_info['domid'] = None
            new_dom = XendDomain.instance().domain_create_from_dict(
                new_dom_info)
            for x in prev_vm_xend[0][1]:
                new_dom._writeVm('xend/%s' % x[0], x[1])
            new_dom.waitForDevices()
            new_dom.unpause()
        except:
            # Creation failed half-way: tear the new domain down.
            if new_dom:
                new_dom.destroy()
            raise
    except:
        log.exception('Failed to reset domain %s.', str(old_domid))
def resumeDomain(self):
    """Resume a suspended domain (e.g. after live checkpoint, or after
    a later error during save or migrate); checks that the domain is
    currently suspended first, so safe to call from anywhere."""
    log.debug("XendDomainInfo.resumeDomain(%s)", str(self.domid))
    xeninfo = dom_get(self.domid)
    if xeninfo is None:
        return
    if not xeninfo['shutdown']:
        return
    reason = shutdown_reason(xeninfo['shutdown_reason'])
    if reason != 'suspend':
        return
    try:
        # could also fetch a parsed note from xenstore
        # Fast resume: guest advertised SUSPEND_CANCEL, so its devices
        # were never torn down and need not be recreated.
        fast = self.info.get_notes().get('SUSPEND_CANCEL') and 1 or 0
        if not fast:
            self._releaseDevices()
            self.testDeviceComplete()
            self.testvifsComplete()
            log.debug("XendDomainInfo.resumeDomain: devices released")
            self._resetChannels()
            self._removeDom('control/shutdown')
            self._removeDom('device-misc/vif/nextDeviceID')
            self._createChannels()
            self._introduceDomain()
            self._storeDomDetails()
            self._createDevices()
            log.debug("XendDomainInfo.resumeDomain: devices created")
        xc.domain_resume(self.domid, fast)
        ResumeDomain(self.domid)
    except:
        log.exception("XendDomainInfo.resume: xc.domain_resume failed on domain %s." % (str(self.domid)))
    # Device model is resumed even if the hypervisor resume failed.
    self.image.resumeDeviceModel()
    log.debug("XendDomainInfo.resumeDomain: completed")
#
# Channels for xenstore and console
#
def _createChannels(self):
    """Create the channels to the domain.
    """
    # Unbound event channels for the xenstore and console rings.
    self.store_port = self._createChannel()
    self.console_port = self._createChannel()
def _createChannel(self):
"""Create an event channel to the domain.
"""
try:
if self.domid != None:
return xc.evtchn_alloc_unbound(domid = self.domid,
remote_dom = 0)
except:
log.exception("Exception in alloc_unbound(%s)", str(self.domid))
raise
def _resetChannels(self):
"""Reset all event channels in the domain.
"""
try:
if self.domid != None:
return xc.evtchn_reset(dom = self.domid)
except:
log.exception("Exception in evtcnh_reset(%s)", str(self.domid))
raise
#
# Bootloader configuration
#
def _configureBootloader(self):
    """Run the bootloader if we're configured to do so.

    HVM domains and PV domains with an explicit kernel are left
    alone; otherwise the (default pygrub) bootloader is run against
    the first bootable disk, mounting it through dom0 if necessary,
    and the resulting kernel/ramdisk/args are merged into self.info.
    Raises VmError when no bootable disk or no bootloader config is
    found.
    """
    blexec = self.info['PV_bootloader']
    bootloader_args = self.info['PV_bootloader_args']
    kernel = self.info['PV_kernel']
    ramdisk = self.info['PV_ramdisk']
    args = self.info['PV_args']
    boot = self.info['HVM_boot_policy']
    if boot:
        # HVM booting.
        pass
    elif not blexec and kernel:
        # Boot from dom0. Nothing left to do -- the kernel and ramdisk
        # will be picked up by image.py.
        pass
    else:
        # Boot using bootloader
        if not blexec or blexec == 'pygrub':
            blexec = auxbin.pathTo('pygrub')
        blcfg = None
        # The first bootable disk is the one handed to the bootloader.
        disks = [x for x in self.info['vbd_refs']
                 if self.info['devices'][x][1]['bootable']]
        if not disks:
            msg = "Had a bootloader specified, but no disks are bootable"
            log.error(msg)
            raise VmError(msg)
        devinfo = self.info['devices'][disks[0]]
        devtype = devinfo[0]
        disk = devinfo[1]['uname']
        (fn, types) = parse_uname(disk)
        def _shouldMount(types):
            # Raw files and physical devices can be read directly;
            # other backends need mounting via a block script.
            if types[0] in ('file', 'phy'):
                return False
            if types[0] in ('tap', 'tap2'):
                if types[1] in ('aio', 'sync'):
                    return False
                else:
                    return True
            return os.access('/etc/xen/scripts/block-%s' % types[0], os.X_OK)
        mounted = _shouldMount(types)
        mounted_vbd_uuid = 0
        if mounted:
            # This is a file, not a device. pygrub can cope with a
            # file if it's raw, but if it's QCOW or other such formats
            # used through blktap, then we need to mount it first.
            log.info("Mounting %s on %s." %
                     (fn, BOOTLOADER_LOOPBACK_DEVICE))
            vbd = {
                'mode': 'RO',
                'device': BOOTLOADER_LOOPBACK_DEVICE,
                }
            from xen.xend import XendDomain
            dom0 = XendDomain.instance().privilegedDomain()
            mounted_vbd_uuid = dom0.create_vbd(vbd, disk);
            dom0._waitForDeviceUUID(mounted_vbd_uuid)
            fn = BOOTLOADER_LOOPBACK_DEVICE
        try:
            blcfg = bootloader(blexec, fn, self, False,
                               bootloader_args, kernel, ramdisk, args)
        finally:
            # Always unmount the loopback device, even on failure.
            if mounted:
                log.info("Unmounting %s from %s." %
                         (fn, BOOTLOADER_LOOPBACK_DEVICE))
                _, vbd_info = dom0.info['devices'][mounted_vbd_uuid]
                dom0.destroyDevice(dom0.getBlockDeviceClass(vbd_info['devid']),
                                   BOOTLOADER_LOOPBACK_DEVICE, force = True)
        if blcfg is None:
            msg = "Had a bootloader specified, but can't find disk"
            log.error(msg)
            raise VmError(msg)
        # Merge the bootloader-selected image config into ours.
        self.info.update_with_image_sxp(blcfg, True)
#
# VM Functions
#
def _readVMDetails(self, params):
    """Read the specified parameters from the store.

    Returns the gathered values, or [] when the store data is
    corrupt (the error is logged, not raised).
    """
    try:
        return self._gatherVm(*params)
    except ValueError:
        # One of the int/float entries in params has a corresponding store
        # entry that is invalid. We recover, because older versions of
        # Xend may have put the entry there (memory/target, for example),
        # but this is in general a bad situation to have reached.
        log.exception(
            "Store corrupted at %s! Domain %d's configuration may be "
            "affected.", self.vmpath, self.domid)
        return []
def _cleanupVm(self):
    """Cleanup VM resources. Idempotent. Nothrow guarantee."""
    self._unwatchVm()
    try:
        self._removeVm()
    except:
        # Best-effort: log and continue (nothrow guarantee).
        log.exception("Removing VM path failed.")
def checkLiveMigrateMemory(self):
    """ Make sure there's enough memory to migrate this domain """
    overhead_kb = 0
    if arch.type == "x86":
        # 1MB per vcpu plus 4Kib/Mib of RAM. This is higher than
        # the minimum that Xen would allocate if no value were given.
        overhead_kb = self.info['VCPUs_max'] * 1024 + \
                      (self.info['memory_static_max'] / 1024 / 1024) * 4
        # Round up to a whole MiB.
        overhead_kb = ((overhead_kb + 1023) / 1024) * 1024
        # The domain might already have some shadow memory
        overhead_kb -= xc.shadow_mem_control(self.domid) * 1024
    if overhead_kb > 0:
        # Balloon dom0 to make room for the overhead.
        balloon.free(overhead_kb, self)
def _unwatchVm(self):
"""Remove the watch on the VM path, if any. Idempotent. Nothrow
guarantee."""
try:
try:
if self.vmWatch:
self.vmWatch.unwatch()
finally:
self.vmWatch = None
except:
log.exception("Unwatching VM path failed.")
def testDeviceComplete(self):
""" For Block IO migration safety we must ensure that
the device has shutdown correctly, i.e. all blocks are
flushed to disk
"""
start = time.time()
while True:
test = 0
diff = time.time() - start
vbds = self.getDeviceController('vbd').deviceIDs()
taps = self.getDeviceController('tap').deviceIDs()
tap2s = self.getDeviceController('tap2').deviceIDs()
for i in vbds + taps + tap2s:
test = 1
log.info("Dev %s still active, looping...", i)
time.sleep(0.1)
if test == 0:
break
if diff >= MIGRATE_TIMEOUT:
log.info("Dev still active but hit max loop timeout")
break
def testvifsComplete(self):
""" In case vifs are released and then created for the same
domain, we need to wait the device shut down.
"""
start = time.time()
while True:
test = 0
diff = time.time() - start
for i in self.getDeviceController('vif').deviceIDs():
test = 1
log.info("Dev %s still active, looping...", i)
time.sleep(0.1)
if test == 0:
break
if diff >= MIGRATE_TIMEOUT:
log.info("Dev still active but hit max loop timeout")
break
def _storeVmDetails(self):
    """Write this VM's legacy configuration entries to the xenstore
    VM path, then restrict their permissions."""
    to_store = {}
    # Translate Xen-API config keys back to the legacy store keys.
    for key in XendConfig.LEGACY_XENSTORE_VM_PARAMS:
        info_key = XendConfig.LEGACY_CFG_TO_XENAPI_CFG.get(key, key)
        if self._infoIsSet(info_key):
            to_store[key] = str(self.info[info_key])
    if self._infoIsSet("static_memory_min"):
        to_store["memory"] = str(self.info["static_memory_min"])
    if self._infoIsSet("static_memory_max"):
        to_store["maxmem"] = str(self.info["static_memory_max"])
    image_sxpr = self.info.image_sxpr()
    if image_sxpr:
        to_store['image'] = sxp.to_string(image_sxpr)
    # First boot only: initialise the restart counter.
    if not self._readVm('xend/restart_count'):
        to_store['xend/restart_count'] = str(0)
    # Passwords are scrubbed from the log output, not from the store.
    log.debug("Storing VM details: %s", scrub_password(to_store))
    self._writeVm(to_store)
    self._setVmPermissions()
def _setVmPermissions(self):
    """Allow the guest domain to read its UUID. We don't allow it to
    access any other entry, for security."""
    # Read-only grant to this domain on just the uuid node.
    xstransact.SetPermissions('%s/uuid' % self.vmpath,
                              { 'dom' : self.domid,
                                'read' : True,
                                'write' : False })
#
# Utility functions
#
def __getattr__(self, name):
    """Intercept reads of the legacy 'state' attribute.

    'state' is no longer stored; redirect to _stateGet() and log a
    warning with a traceback so offending callers can be found.  All
    other missing attributes raise AttributeError as usual.
    """
    if name == "state":
        log.warn("Somebody tried to read XendDomainInfo.state... should use _stateGet()!!!")
        log.warn("".join(traceback.format_stack()))
        return self._stateGet()
    else:
        raise AttributeError(name)
def __setattr__(self, name, value):
    """Intercept writes to the legacy 'state' attribute.

    Redirect to _stateSet() (the old log text wrongly pointed at
    _stateGet()) and log a warning with a traceback; every other
    attribute is stored normally in the instance dict.
    """
    if name == "state":
        log.warn("Somebody tried to set XendDomainInfo.state... should use _stateSet()!!!")
        log.warn("".join(traceback.format_stack()))
        self._stateSet(value)
    else:
        self.__dict__[name] = value
def _stateSet(self, state):
    """Record a power-state transition: wake any waiters and emit a
    Xen API 'power_state' event, but only when the state changed."""
    self.state_updated.acquire()
    try:
        # TODO Not sure this is correct...
        # _stateGet is live now. Why not fire event
        # even when it hasn't changed?
        if self._stateGet() != state:
            self.state_updated.notifyAll()
            import XendAPI
            XendAPI.event_dispatch('mod', 'VM', self.info['uuid'],
                                   'power_state')
    finally:
        self.state_updated.release()
def _stateGet(self):
    """Reconstitute the Xen API power state from live xc info.

    A missing or shut-down domain is HALTED unless a managed
    checkpoint exists (then SUSPENDED); otherwise the state follows
    the crashed/paused flags from the hypervisor.
    """
    info = dom_get(self.getDomid())
    if info is None or info['shutdown']:
        # Not executing: distinguish SUSPENDED from HALTED by the
        # presence of a saved checkpoint image.
        from xen.xend import XendDomain
        managed_config_path = \
            XendDomain.instance()._managed_check_point_path( \
                self.get_uuid())
        if os.path.exists(managed_config_path):
            return XEN_API_VM_POWER_STATE_SUSPENDED
        return XEN_API_VM_POWER_STATE_HALTED
    if info['crashed']:
        return XEN_API_VM_POWER_STATE_CRASHED
    if info['paused']:
        return XEN_API_VM_POWER_STATE_PAUSED
    return XEN_API_VM_POWER_STATE_RUNNING
def _infoIsSet(self, name):
return name in self.info and self.info[name] is not None
def _checkName(self, name):
    """Check if a vm name is valid. Valid names contain alphabetic
    characters, digits, or characters in '_-.:+'.
    The same name cannot be used for more than one vm at the same time.
    @param name: name
    @raise: VmError if invalid
    """
    from xen.xend import XendDomain
    if name is None or name == '':
        raise VmError('Missing VM Name')
    if re.search(r'^[A-Za-z0-9_\-\.\:\+]+$', name) is None:
        raise VmError('Invalid VM Name')
    existing = XendDomain.instance().domain_lookup_nr(name)
    if existing and existing.info['uuid'] != self.info['uuid']:
        suffix = ""
        if existing.domid is not None:
            suffix = " as domain %s" % str(existing.domid)
        raise VmError("VM name '%s' already exists%s" % (name, suffix))
def update(self, info = None, refresh = True, transaction = None):
    """Update with info from xc.domain_getinfo().
    """
    log.trace("XendDomainInfo.update(%s) on domain %s", info,
              str(self.domid))
    if not info:
        info = dom_get(self.domid)
        if not info:
            return
    # A negative maxmem marks "unlimited": clamp to total host memory.
    if info["maxmem_kb"] < 0:
        info["maxmem_kb"] = XendNode.instance() \
                            .physinfo_dict()['total_memory'] * 1024
    # make sure state is reset for info
    # TODO: we should eventually get rid of old_dom_states
    self.info.update_config(info)
    self._update_consoles(transaction)
    if refresh:
        self.refreshShutdown(info)
    log.trace("XendDomainInfo.update done on domain %s: %s",
              str(self.domid), self.info)
def sxpr(self, ignore_store = False, legacy_only = True):
    """Render this domain's config as an s-expression via XendConfig."""
    return self.info.to_sxp(domain = self,
                            ignore_devices = ignore_store,
                            legacy_only = legacy_only)
# Xen API
# ----------------------------------------------------------------
def get_uuid(self):
    """Return this VM's UUID, minting and storing one if absent."""
    if not self.info.get('uuid'):
        # if it doesn't exist, make one up
        self.info['uuid'] = uuid.createString()
    return self.info['uuid']
def get_memory_static_max(self):
    # Static maximum memory in bytes (see _initDomain: the
    # memory_static_* values are stored in bytes); 0 if unset.
    return self.info.get('memory_static_max', 0)
def get_memory_static_min(self):
    # Static minimum memory in bytes; 0 if unset.
    return self.info.get('memory_static_min', 0)
def get_memory_dynamic_max(self):
    # Dynamic maximum memory in bytes; 0 if unset.
    return self.info.get('memory_dynamic_max', 0)
def get_memory_dynamic_min(self):
    # Dynamic minimum memory in bytes; 0 if unset.
    return self.info.get('memory_dynamic_min', 0)
# only update memory-related config values if they maintain sanity
def _safe_set_memory(self, key, newval):
oldval = self.info.get(key, 0)
try:
self.info[key] = newval
self.info._memory_sanity_check()
except Exception, ex:
self.info[key] = oldval
raise
def set_memory_static_max(self, val):
    # val in bytes; rolled back by _safe_set_memory on sanity failure.
    self._safe_set_memory('memory_static_max', val)
def set_memory_static_min(self, val):
    self._safe_set_memory('memory_static_min', val)
def set_memory_dynamic_max(self, val):
    self._safe_set_memory('memory_dynamic_max', val)
def set_memory_dynamic_min(self, val):
    self._safe_set_memory('memory_dynamic_min', val)
def get_vcpus_params(self):
    """Return scheduler params: live from xc for a running domain,
    otherwise the stored config values."""
    if self.getDomid() is None:
        return self.info['vcpus_params']
    return xc.sched_credit_domain_get(self.getDomid())
def get_cpu_pool(self):
    """Return the cpupool of a running domain, or None if not running."""
    if self.getDomid() is None:
        return None
    return dom_get(self.domid)['cpupool']
def get_power_state(self):
    # Map the internal DOM_STATE_* value to the Xen API string.
    return XEN_API_VM_POWER_STATE[self._stateGet()]
def get_platform(self):
    # Platform key/value config; empty dict if unset.
    return self.info.get('platform', {})
def get_pci_bus(self):
    return self.info.get('pci_bus', '')
def get_tools_version(self):
    return self.info.get('tools_version', {})
def get_metrics(self):
    # UUID of the associated metrics object.
    return self.metrics.get_uuid();
def get_security_label(self, xspol=None):
    """Return this domain's XSM security label (under policy xspol)."""
    import xen.util.xsm.xsm as security
    return security.get_security_label(self, xspol)
def set_security_label(self, seclab, old_seclab, xspol=None,
                       xspol_old=None):
    """
    Set the security label of a domain from its old to
    a new value.
    @param seclab  New security label formatted in the form
                   <policy type>:<policy name>:<vm label>
    @param old_seclab  The current security label that the
                       VM must have.
    @param xspol   An optional policy under which this
                   update should be done. If not given,
                   then the current active policy is used.
    @param xspol_old The old policy; only to be passed during
                     the updating of a policy
    @return Returns return code, a string with errors from
            the hypervisor's operation, old label of the
            domain
    """
    rc = 0
    errors = ""
    old_label = ""
    new_ssidref = 0
    domid = self.getDomid()
    res_labels = None
    is_policy_update = (xspol_old != None)
    from xen.xend.XendXSPolicyAdmin import XSPolicyAdminInstance
    state = self._stateGet()
    # Relabel only HALTED or RUNNING or PAUSED domains
    if domid != 0 and \
       state not in \
       [ DOM_STATE_HALTED, DOM_STATE_RUNNING, DOM_STATE_PAUSED, \
         DOM_STATE_SUSPENDED ]:
        log.warn("Relabeling domain not possible in state '%s'" %
                 DOM_STATES[state])
        return (-xsconstants.XSERR_VM_WRONG_STATE, "", "", 0)
    # Remove security label. Works only for halted or suspended domains
    if not seclab or seclab == "":
        if state not in [ DOM_STATE_HALTED, DOM_STATE_SUSPENDED ]:
            return (-xsconstants.XSERR_VM_WRONG_STATE, "", "", 0)
        if self.info.has_key('security_label'):
            old_label = self.info['security_label']
            # Check label against expected one.
            if old_label != old_seclab:
                return (-xsconstants.XSERR_BAD_LABEL, "", "", 0)
            del self.info['security_label']
            xen.xend.XendDomain.instance().managed_config_save(self)
        return (xsconstants.XSERR_SUCCESS, "", "", 0)
    # A label has the form <policy type>:<policy name>:<vm label>.
    tmp = seclab.split(":")
    if len(tmp) != 3:
        return (-xsconstants.XSERR_BAD_LABEL_FORMAT, "", "", 0)
    typ, policy, label = tmp
    poladmin = XSPolicyAdminInstance()
    if not xspol:
        xspol = poladmin.get_policy_by_name(policy)
    try:
        # Hold the policy write lock for the whole relabel operation.
        xen.xend.XendDomain.instance().policy_lock.acquire_writer()
        if state in [ DOM_STATE_RUNNING, DOM_STATE_PAUSED ]:
            #if domain is running or paused try to relabel in hypervisor
            if not xspol:
                return (-xsconstants.XSERR_POLICY_NOT_LOADED, "", "", 0)
            if typ != xspol.get_type_name() or \
               policy != xspol.get_name():
                return (-xsconstants.XSERR_BAD_LABEL, "", "", 0)
            if typ == xsconstants.ACM_POLICY_ID:
                new_ssidref = xspol.vmlabel_to_ssidref(label)
                if new_ssidref == xsconstants.INVALID_SSIDREF:
                    return (-xsconstants.XSERR_BAD_LABEL, "", "", 0)
                # Check that all used resources are accessible under the
                # new label
                if not is_policy_update and \
                   not security.resources_compatible_with_vmlabel(xspol,
                          self, label):
                    return (-xsconstants.XSERR_BAD_LABEL, "", "", 0)
                #Check label against expected one. Can only do this
                # if the policy hasn't changed underneath in the meantime
                if xspol_old == None:
                    old_label = self.get_security_label()
                    if old_label != old_seclab:
                        log.info("old_label != old_seclab: %s != %s" %
                                 (old_label, old_seclab))
                        return (-xsconstants.XSERR_BAD_LABEL, "", "", 0)
                # relabel domain in the hypervisor
                rc, errors = security.relabel_domains([[domid, new_ssidref]])
                log.info("rc from relabeling in HV: %d" % rc)
            else:
                return (-xsconstants.XSERR_POLICY_TYPE_UNSUPPORTED, "", "", 0)
        if rc == 0:
            # HALTED, RUNNING or PAUSED
            if domid == 0:
                if xspol:
                    self.info['security_label'] = seclab
                    ssidref = poladmin.set_domain0_bootlabel(xspol, label)
                else:
                    return (-xsconstants.XSERR_POLICY_NOT_LOADED, "", "", 0)
            else:
                if self.info.has_key('security_label'):
                    old_label = self.info['security_label']
                    # Check label against expected one, unless wildcard
                    if old_label != old_seclab:
                        return (-xsconstants.XSERR_BAD_LABEL, "", "", 0)
                self.info['security_label'] = seclab
                try:
                    xen.xend.XendDomain.instance().managed_config_save(self)
                except:
                    # Saving the managed config is best-effort here.
                    pass
        return (rc, errors, old_label, new_ssidref)
    finally:
        xen.xend.XendDomain.instance().policy_lock.release()
def get_on_shutdown(self):
    """Action on clean shutdown; defaults to XEN_API_ON_NORMAL_EXIT[-1]."""
    action = self.info.get('actions_after_shutdown')
    if action and action in XEN_API_ON_NORMAL_EXIT:
        return action
    return XEN_API_ON_NORMAL_EXIT[-1]
def get_on_reboot(self):
    """Action on reboot; defaults to XEN_API_ON_NORMAL_EXIT[-1]."""
    action = self.info.get('actions_after_reboot')
    if action and action in XEN_API_ON_NORMAL_EXIT:
        return action
    return XEN_API_ON_NORMAL_EXIT[-1]
def get_on_suspend(self):
    # TODO: not supported
    """Action on suspend; defaults to XEN_API_ON_NORMAL_EXIT[-1]."""
    action = self.info.get('actions_after_suspend')
    if action and action in XEN_API_ON_NORMAL_EXIT:
        return action
    return XEN_API_ON_NORMAL_EXIT[-1]
def get_on_crash(self):
    """Action on crash, filtered through XEN_API_ON_CRASH_BEHAVIOUR_FILTER;
    defaults to XEN_API_ON_CRASH_BEHAVIOUR[0]."""
    action = self.info.get('actions_after_crash')
    if action and action in \
       XEN_API_ON_CRASH_BEHAVIOUR + restart_modes:
        return XEN_API_ON_CRASH_BEHAVIOUR_FILTER[action]
    return XEN_API_ON_CRASH_BEHAVIOUR[0]
def get_dev_config_by_uuid(self, dev_class, dev_uuid):
    """ Get's a device configuration either from XendConfig or
    from the DevController.
    @param dev_class: device class, either, 'vbd' or 'vif'
    @param dev_uuid: device UUID
    @rtype: dictionary
    """
    dev_type, dev_config = self.info['devices'].get(dev_uuid, (None, None))
    # shortcut if the domain isn't started because
    # the devcontrollers will have no better information
    # than XendConfig.
    if self._stateGet() in (XEN_API_VM_POWER_STATE_HALTED,
                            XEN_API_VM_POWER_STATE_SUSPENDED):
        if dev_config:
            return copy.deepcopy(dev_config)
        return None
    # instead of using dev_class, we use the dev_type
    # that is from XendConfig.
    controller = self.getDeviceController(dev_type)
    if not controller:
        return None
    all_configs = controller.getAllDeviceConfigurations()
    if not all_configs:
        return None
    # Overlay the controller's live view on top of the stored config.
    updated_dev_config = copy.deepcopy(dev_config)
    for _devid, _devcfg in all_configs.items():
        if _devcfg.get('uuid') == dev_uuid:
            updated_dev_config.update(_devcfg)
            updated_dev_config['id'] = _devid
            return updated_dev_config
    # No live entry matched: fall through with the stored config copy.
    return updated_dev_config
def get_dev_xenapi_config(self, dev_class, dev_uuid):
    """Return a device's config dict augmented with Xen API fields:
    the VM ref, defaulted vif/vbd attributes and, for running
    domains, live I/O statistics.  Returns {} if the device has no
    config."""
    config = self.get_dev_config_by_uuid(dev_class, dev_uuid)
    if not config:
        return {}
    config['VM'] = self.get_uuid()
    if dev_class == 'vif':
        if not config.has_key('name'):
            config['name'] = config.get('vifname', '')
        if not config.has_key('MAC'):
            config['MAC'] = config.get('mac', '')
        if not config.has_key('type'):
            config['type'] = 'paravirtualised'
        if not config.has_key('device'):
            devid = config.get('id')
            if devid != None:
                config['device'] = 'eth%s' % devid
            else:
                config['device'] = ''
        if not config.has_key('network'):
            try:
                bridge = config.get('bridge', None)
                if bridge is None:
                    # Derive the bridge from the host bridge state.
                    from xen.util import Brctl
                    if_to_br = dict([(i,b)
                                     for (b,ifs) in Brctl.get_state().items()
                                     for i in ifs])
                    vifname = "vif%s.%s" % (self.getDomid(),
                                            config.get('id'))
                    bridge = if_to_br.get(vifname, None)
                config['network'] = \
                    XendNode.instance().bridge_to_network(
                    config.get('bridge')).get_uuid()
            except Exception:
                log.exception('bridge_to_network')
                # Ignore this for now -- it may happen if the device
                # has been specified using the legacy methods, but at
                # some point we're going to have to figure out how to
                # handle that properly.
        config['MTU'] = 1500 # TODO
        if self._stateGet() not in (XEN_API_VM_POWER_STATE_HALTED,):
            xennode = XendNode.instance()
            # NOTE(review): devid is only bound above when 'device' was
            # missing from config -- otherwise this would raise
            # NameError; verify against callers.
            rx_bps, tx_bps = xennode.get_vif_util(self.domid, devid)
            config['io_read_kbs'] = rx_bps/1024
            config['io_write_kbs'] = tx_bps/1024
            rx, tx = xennode.get_vif_stat(self.domid, devid)
            config['io_total_read_kbs'] = rx/1024
            config['io_total_write_kbs'] = tx/1024
        else:
            config['io_read_kbs'] = 0.0
            config['io_write_kbs'] = 0.0
            config['io_total_read_kbs'] = 0.0
            config['io_total_write_kbs'] = 0.0
        config['security_label'] = config.get('security_label', '')
    if dev_class == 'vbd':
        if self._stateGet() not in (XEN_API_VM_POWER_STATE_HALTED,):
            controller = self.getDeviceController(dev_class)
            devid, _1, _2 = controller.getDeviceDetails(config)
            xennode = XendNode.instance()
            rd_blkps, wr_blkps = xennode.get_vbd_util(self.domid, devid)
            config['io_read_kbs'] = rd_blkps
            config['io_write_kbs'] = wr_blkps
        else:
            config['io_read_kbs'] = 0.0
            config['io_write_kbs'] = 0.0
        config['VDI'] = config.get('VDI', '')
        config['device'] = config.get('dev', '')
        # Strip a qemu 'ioemu:' prefix from the device name.
        if config['device'].startswith('ioemu:'):
            _, vbd_device = config['device'].split(':', 1)
            config['device'] = vbd_device
        # A trailing ':cdrom' (etc.) carries the media type.
        if ':' in config['device']:
            vbd_name, vbd_type = config['device'].split(':', 1)
            config['device'] = vbd_name
            if vbd_type == 'cdrom':
                config['type'] = XEN_API_VBD_TYPE[0]
            else:
                config['type'] = XEN_API_VBD_TYPE[1]
        config['driver'] = 'paravirtualised' # TODO
        config['image'] = config.get('uname', '')
        if config.get('mode', 'r') == 'r':
            config['mode'] = 'RO'
        else:
            config['mode'] = 'RW'
    return config
def get_dev_property(self, dev_class, dev_uuid, field):
    """Return one field of a device's Xen API config; raises XendError
    when the field is absent."""
    config = self.get_dev_xenapi_config(dev_class, dev_uuid)
    if field not in config:
        raise XendError('Invalid property for device: %s' % field)
    return config[field]
def set_dev_property(self, dev_class, dev_uuid, field, value):
    # Write directly into the stored (type, config) tuple's config dict.
    # NOTE(review): dev_class is unused; lookup is by UUID alone.
    self.info['devices'][dev_uuid][1][field] = value
def get_vcpus_util(self):
    """Return per-VCPU utilisation, keyed by VCPU number as a string.
    Empty when the domain is not running or VCPUs_max is unknown."""
    xennode = XendNode.instance()
    utilisation = {}
    if 'VCPUs_max' in self.info and self.domid is not None:
        for vcpu in range(self.info['VCPUs_max']):
            utilisation[str(vcpu)] = xennode.get_vcpu_util(self.domid, vcpu)
    return utilisation
def get_consoles(self):
    # Console device UUID refs from config.
    return self.info.get('console_refs', [])
def get_vifs(self):
    # VIF device UUID refs from config.
    return self.info.get('vif_refs', [])
def get_vbds(self):
    # VBD device UUID refs from config.
    return self.info.get('vbd_refs', [])
def get_dpcis(self):
    # Passthrough PCI devices associated with this VM's uuid.
    return XendDPCI.get_by_VM(self.info.get('uuid'))
def get_dscsis(self):
    # Passthrough SCSI devices associated with this VM's uuid.
    return XendDSCSI.get_by_VM(self.info.get('uuid'))
def get_dscsi_HBAs(self):
    # Passthrough SCSI HBAs associated with this VM's uuid.
    return XendDSCSI_HBA.get_by_VM(self.info.get('uuid'))
def create_vbd(self, xenapi_vbd, vdi_image_path):
    """Create a VBD using a VDI from XendStorageRepository.
    @param xenapi_vbd: vbd struct from the Xen API
    @param vdi_image_path: VDI UUID
    @rtype: string
    @return: uuid of the device
    """
    xenapi_vbd['image'] = vdi_image_path
    # 'tap...' image paths go through the tap2 controller, others via vbd.
    if vdi_image_path.startswith('tap'):
        dev_uuid = self.info.device_add('tap2', cfg_xenapi = xenapi_vbd)
    else:
        dev_uuid = self.info.device_add('vbd', cfg_xenapi = xenapi_vbd)
    if not dev_uuid:
        raise XendError('Failed to create device')
    # Hot-plug immediately when the domain is live.
    if self._stateGet() in (XEN_API_VM_POWER_STATE_RUNNING,
                            XEN_API_VM_POWER_STATE_PAUSED):
        _, config = self.info['devices'][dev_uuid]
        if vdi_image_path.startswith('tap'):
            dev_control = self.getDeviceController('tap2')
        else:
            dev_control = self.getDeviceController('vbd')
        try:
            devid = dev_control.createDevice(config)
            dev_type = self.getBlockDeviceClass(devid)
            self._waitForDevice(dev_type, devid)
            self.info.device_update(dev_uuid,
                                    cfg_xenapi = {'devid': devid})
        except Exception, exn:
            # Roll the config entry back on hotplug failure.
            log.exception(exn)
            del self.info['devices'][dev_uuid]
            self.info['vbd_refs'].remove(dev_uuid)
            raise
    return dev_uuid
def create_phantom_vbd_with_vdi(self, xenapi_vbd, vdi_image_path):
    """Create a VBD using a VDI from XendStorageRepository.
    @param xenapi_vbd: vbd struct from the Xen API
    @param vdi_image_path: VDI UUID
    @rtype: string
    @return: uuid of the device
    """
    xenapi_vbd['image'] = vdi_image_path
    # Phantom devices are tracked separately from normal devices.
    dev_uuid = self.info.phantom_device_add('tap', cfg_xenapi = xenapi_vbd)
    if not dev_uuid:
        raise XendError('Failed to create device')
    if self._stateGet() == XEN_API_VM_POWER_STATE_RUNNING:
        _, config = self.info['devices'][dev_uuid]
        # NOTE(review): returns the devid here, not the device uuid
        # promised by the docstring -- verify against callers.
        config['devid'] = self.getDeviceController('tap').createDevice(config)
    return config['devid']
def create_vif(self, xenapi_vif):
    """Create VIF device from the passed struct in Xen API format.

    @param xenapi_vif: Xen API VIF Struct.
    @rtype: string
    @return: UUID
    """
    dev_uuid = self.info.device_add('vif', cfg_xenapi = xenapi_vif)
    if not dev_uuid:
        raise XendError('Failed to create device')

    # Hot-plug only when the domain is live; otherwise the device is
    # only recorded in the stored configuration.
    if self._stateGet() in (XEN_API_VM_POWER_STATE_RUNNING,
                            XEN_API_VM_POWER_STATE_PAUSED):
        _, config = self.info['devices'][dev_uuid]
        dev_control = self.getDeviceController('vif')

        try:
            devid = dev_control.createDevice(config)
            dev_control.waitForDevice(devid)
            # Record the backend-assigned devid in the stored config.
            self.info.device_update(dev_uuid,
                                    cfg_xenapi = {'devid': devid})
        except Exception, exn:
            # Roll back the config entry if the hot-plug failed.
            log.exception(exn)
            del self.info['devices'][dev_uuid]
            self.info['vif_refs'].remove(dev_uuid)
            raise

    return dev_uuid
def create_console(self, xenapi_console):
    """Create a console device from a Xen API struct.

    @return: uuid of device
    @rtype: string
    """
    # Consoles may only be attached while the domain is halted.
    if self._stateGet() != DOM_STATE_HALTED:
        raise VmError("Can only add console to a halted domain.")
    dev_uuid = self.info.device_add('console', cfg_xenapi=xenapi_console)
    if dev_uuid:
        return dev_uuid
    raise XendError('Failed to create device')
def set_console_other_config(self, console_uuid, other_config):
    """Replace the 'other_config' field of the given console device."""
    self.info.console_update(console_uuid, 'other_config', other_config)
def create_dpci(self, xenapi_pci):
    """Create pci device from the passed struct in Xen API format.

    @param xenapi_pci: DPCI struct from Xen API
    @rtype: bool
    #@rtype: string
    @return: True if successfully created device
    #@return: UUID
    """
    dpci_uuid = uuid.createString()

    # Flatten the options dict into [key, value] pairs, then into SXP.
    dpci_opts = []
    opts_dict = xenapi_pci.get('options')
    for k in opts_dict.keys():
        dpci_opts.append([k, opts_dict[k]])
    opts_sxp = pci_opts_list_to_sxp(dpci_opts)

    # Convert xenapi to sxp
    ppci = XendAPIStore.get(xenapi_pci.get('PPCI'), 'PPCI')

    dev_sxp = ['dev',
               ['domain', '0x%02x' % ppci.get_domain()],
               ['bus', '0x%02x' % ppci.get_bus()],
               ['slot', '0x%02x' % ppci.get_slot()],
               ['func', '0x%1x' % ppci.get_func()],
               ['vdevfn', '0x%02x' % xenapi_pci.get('hotplug_slot')],
               ['key', xenapi_pci['key']],
               ['uuid', dpci_uuid]]

    dev_sxp = sxp.merge(dev_sxp, opts_sxp)

    target_pci_sxp = ['pci', dev_sxp, ['state', 'Initialising'] ]

    if self._stateGet() != XEN_API_VM_POWER_STATE_RUNNING:
        # Halted: all PCI devices share one 'pci' config entry, so
        # either create it or append this device to the existing one.
        old_pci_sxp = self._getDeviceInfo_pci(0)

        if old_pci_sxp is None:
            dev_uuid = self.info.device_add('pci', cfg_sxp = target_pci_sxp)
            if not dev_uuid:
                raise XendError('Failed to create device')

        else:
            new_pci_sxp = ['pci']
            for existing_dev in sxp.children(old_pci_sxp, 'dev'):
                new_pci_sxp.append(existing_dev)
            new_pci_sxp.append(sxp.child0(target_pci_sxp, 'dev'))

            dev_uuid = sxp.child_value(old_pci_sxp, 'uuid')
            self.info.device_update(dev_uuid, new_pci_sxp)

        xen.xend.XendDomain.instance().managed_config_save(self)

    else:
        # Running: hot-plug via device_configure.
        try:
            self.device_configure(target_pci_sxp)

        except Exception, exn:
            # NOTE(review): the original exception detail is discarded
            # here (not even logged) — consider logging exn.
            raise XendError('Failed to create device')

    return dpci_uuid
def create_dscsi(self, xenapi_dscsi):
    """Create scsi device from the passed struct in Xen API format.

    @param xenapi_dscsi: DSCSI struct from Xen API
    @rtype: string
    @return: UUID
    """
    dscsi_uuid = uuid.createString()

    # Convert xenapi to sxp
    pscsi = XendAPIStore.get(xenapi_dscsi.get('PSCSI'), 'PSCSI')
    # devid is the virtual host number: first field of the v-HCTL tuple.
    devid = int(xenapi_dscsi.get('virtual_HCTL').split(':')[0])
    target_vscsi_sxp = \
        ['vscsi',
            ['dev',
                ['devid', devid],
                ['p-devname', pscsi.get_dev_name()],
                ['p-dev', pscsi.get_physical_HCTL()],
                ['v-dev', xenapi_dscsi.get('virtual_HCTL')],
                ['state', xenbusState['Initialising']],
                ['uuid', dscsi_uuid]
            ],
            ['feature-host', 0]
        ]

    if self._stateGet() != XEN_API_VM_POWER_STATE_RUNNING:
        # Halted: merge into (or create) the vscsi entry for this devid.
        cur_vscsi_sxp = self._getDeviceInfo_vscsi(devid)

        if cur_vscsi_sxp is None:
            dev_uuid = self.info.device_add('vscsi', cfg_sxp = target_vscsi_sxp)
            if not dev_uuid:
                raise XendError('Failed to create device')

        else:
            new_vscsi_sxp = ['vscsi', ['feature-host', 0]]
            for existing_dev in sxp.children(cur_vscsi_sxp, 'dev'):
                new_vscsi_sxp.append(existing_dev)
            new_vscsi_sxp.append(sxp.child0(target_vscsi_sxp, 'dev'))

            dev_uuid = sxp.child_value(cur_vscsi_sxp, 'uuid')
            self.info.device_update(dev_uuid, new_vscsi_sxp)

        xen.xend.XendDomain.instance().managed_config_save(self)

    else:
        # Running: hot-plug via device_configure.
        try:
            self.device_configure(target_vscsi_sxp)

        except Exception, exn:
            log.exception('create_dscsi: %s', exn)
            raise XendError('Failed to create device')

    return dscsi_uuid
def create_dscsi_HBA(self, xenapi_dscsi):
    """Create scsi devices from the passed struct in Xen API format.

    @param xenapi_dscsi: DSCSI_HBA struct from Xen API
    @rtype: string
    @return: UUID
    """
    dscsi_HBA_uuid = uuid.createString()

    # Convert xenapi to sxp
    # feature-host 1 = pass the whole HBA through; 0 = per-LUN mapping.
    feature_host = xenapi_dscsi.get('assignment_mode', 'HOST') == 'HOST' and 1 or 0
    target_vscsi_sxp = \
        ['vscsi',
            ['feature-host', feature_host],
            ['uuid', dscsi_HBA_uuid],
        ]
    pscsi_HBA = XendAPIStore.get(xenapi_dscsi.get('PSCSI_HBA'), 'PSCSI_HBA')
    devid = pscsi_HBA.get_physical_host()
    # One 'dev' node per physical LUN on the HBA; the virtual HCTL
    # mirrors the physical one.
    for pscsi_uuid in pscsi_HBA.get_PSCSIs():
        pscsi = XendAPIStore.get(pscsi_uuid, 'PSCSI')
        pscsi_HCTL = pscsi.get_physical_HCTL()
        dscsi_uuid = uuid.createString()
        dev = \
            ['dev',
                ['devid', devid],
                ['p-devname', pscsi.get_dev_name()],
                ['p-dev', pscsi_HCTL],
                ['v-dev', pscsi_HCTL],
                ['state', xenbusState['Initialising']],
                ['uuid', dscsi_uuid]
            ]
        target_vscsi_sxp.append(dev)

    if self._stateGet() != XEN_API_VM_POWER_STATE_RUNNING:
        # Halted: only record in the stored configuration.
        if not self.info.device_add('vscsi', cfg_sxp = target_vscsi_sxp):
            raise XendError('Failed to create device')
        xen.xend.XendDomain.instance().managed_config_save(self)

    else:
        # Running: hot-plug via device_configure.
        try:
            self.device_configure(target_vscsi_sxp)

        except Exception, exn:
            log.exception('create_dscsi_HBA: %s', exn)
            raise XendError('Failed to create device')

    return dscsi_HBA_uuid
def change_vdi_of_vbd(self, xenapi_vbd, vdi_image_path):
    """Change current VDI with the new VDI.

    @param xenapi_vbd: vbd struct from the Xen API
    @param vdi_image_path: path of VDI
    """
    dev_uuid = xenapi_vbd['uuid']
    if dev_uuid not in self.info['devices']:
        raise XendError('Device does not exist')

    # Convert xenapi to sxp
    if vdi_image_path.startswith('tap'):
        dev_class = 'tap'
    else:
        dev_class = 'vbd'
    # Media change is modelled as a read-only cdrom reconfiguration.
    dev_sxp = [
        dev_class,
        ['uuid', dev_uuid],
        ['uname', vdi_image_path],
        ['dev', '%s:cdrom' % xenapi_vbd['device']],
        ['mode', 'r'],
        ['VDI', xenapi_vbd['VDI']]
    ]

    if self._stateGet() in (XEN_API_VM_POWER_STATE_RUNNING,
                            XEN_API_VM_POWER_STATE_PAUSED):
        # Live domain: reconfigure the running device.
        self.device_configure(dev_sxp)
    else:
        # Halted: just update the stored configuration.
        self.info.device_update(dev_uuid, dev_sxp)
def destroy_device_by_uuid(self, dev_type, dev_uuid):
    """Remove the device dev_uuid of class dev_type from this domain.

    Hot-unplugs first when the domain is live; the stored config entry
    and ref-list entry are removed unconditionally (see finally).
    """
    if dev_uuid not in self.info['devices']:
        raise XendError('Device does not exist')

    try:
        if self._stateGet() in (XEN_API_VM_POWER_STATE_RUNNING,
                                XEN_API_VM_POWER_STATE_PAUSED):
            _, config = self.info['devices'][dev_uuid]
            devid = config.get('devid')
            if devid != None:
                self.getDeviceController(dev_type).destroyDevice(devid, force = False)
            else:
                raise XendError('Unable to get devid for device: %s:%s' %
                                (dev_type, dev_uuid))
    finally:
        # Config is purged even if the live unplug raised, so a broken
        # device cannot wedge the domain's stored configuration.
        del self.info['devices'][dev_uuid]
        self.info['%s_refs' % dev_type].remove(dev_uuid)
def destroy_vbd(self, dev_uuid):
    """Destroy the VBD with the given device UUID."""
    self.destroy_device_by_uuid('vbd', dev_uuid)
def destroy_vif(self, dev_uuid):
    """Destroy the VIF with the given device UUID."""
    self.destroy_device_by_uuid('vif', dev_uuid)
def destroy_dpci(self, dev_uuid):
    """Detach the DPCI identified by dev_uuid from this domain.

    Rebuilds the shared 'pci' config entry without the target device;
    hot-unplugs via device_configure when the domain is running.
    """
    dpci = XendAPIStore.get(dev_uuid, 'DPCI')
    ppci = XendAPIStore.get(dpci.get_PPCI(), 'PPCI')

    old_pci_sxp = self._getDeviceInfo_pci(0)
    # All PCI devices share one config entry; its uuid replaces the
    # DPCI uuid from here on.
    dev_uuid = sxp.child_value(old_pci_sxp, 'uuid')
    target_dev = None
    new_pci_sxp = ['pci']
    for dev in sxp.children(old_pci_sxp, 'dev'):
        # Match the device to remove by its domain/bus/slot/func address.
        pci_dev = {}
        pci_dev['domain'] = sxp.child_value(dev, 'domain')
        pci_dev['bus'] = sxp.child_value(dev, 'bus')
        pci_dev['slot'] = sxp.child_value(dev, 'slot')
        pci_dev['func'] = sxp.child_value(dev, 'func')
        if ppci.get_name() == pci_dict_to_bdf_str(pci_dev):
            target_dev = dev
        else:
            new_pci_sxp.append(dev)

    if target_dev is None:
        raise XendError('Failed to destroy device')

    target_pci_sxp = ['pci', target_dev, ['state', 'Closing']]

    if self._stateGet() != XEN_API_VM_POWER_STATE_RUNNING:
        self.info.device_update(dev_uuid, new_pci_sxp)
        # Drop the whole entry when no PCI devices remain.
        if len(sxp.children(new_pci_sxp, 'dev')) == 0:
            del self.info['devices'][dev_uuid]
        xen.xend.XendDomain.instance().managed_config_save(self)

    else:
        try:
            self.device_configure(target_pci_sxp)

        except Exception, exn:
            # NOTE(review): exception detail is discarded here.
            raise XendError('Failed to destroy device')
def destroy_dscsi(self, dev_uuid):
    """Detach the DSCSI device dev_uuid (a single virtual LUN)."""
    dscsi = XendAPIStore.get(dev_uuid, 'DSCSI')
    devid = dscsi.get_virtual_host()
    vHCTL = dscsi.get_virtual_HCTL()
    cur_vscsi_sxp = self._getDeviceInfo_vscsi(devid)
    # The vscsi config entry is shared per virtual host; use its uuid.
    dev_uuid = sxp.child_value(cur_vscsi_sxp, 'uuid')

    # Rebuild the entry without the target LUN (matched by v-dev).
    target_dev = None
    new_vscsi_sxp = ['vscsi', ['feature-host', 0]]
    for dev in sxp.children(cur_vscsi_sxp, 'dev'):
        if vHCTL == sxp.child_value(dev, 'v-dev'):
            target_dev = dev
        else:
            new_vscsi_sxp.append(dev)

    if target_dev is None:
        raise XendError('Failed to destroy device')

    target_dev.append(['state', xenbusState['Closing']])
    target_vscsi_sxp = ['vscsi', target_dev, ['feature-host', 0]]

    if self._stateGet() != XEN_API_VM_POWER_STATE_RUNNING:
        self.info.device_update(dev_uuid, new_vscsi_sxp)
        # Drop the whole entry when no LUNs remain.
        if len(sxp.children(new_vscsi_sxp, 'dev')) == 0:
            del self.info['devices'][dev_uuid]
        xen.xend.XendDomain.instance().managed_config_save(self)

    else:
        try:
            self.device_configure(target_vscsi_sxp)

        except Exception, exn:
            log.exception('destroy_dscsi: %s', exn)
            raise XendError('Failed to destroy device')
def destroy_dscsi_HBA(self, dev_uuid):
    """Detach all DSCSI devices belonging to the DSCSI_HBA dev_uuid."""
    dscsi_HBA = XendAPIStore.get(dev_uuid, 'DSCSI_HBA')
    devid = dscsi_HBA.get_virtual_host()
    cur_vscsi_sxp = self._getDeviceInfo_vscsi(devid)
    feature_host = sxp.child_value(cur_vscsi_sxp, 'feature-host')

    if self._stateGet() != XEN_API_VM_POWER_STATE_RUNNING:
        # Halted: replace with an empty entry, then drop it entirely.
        new_vscsi_sxp = ['vscsi', ['feature-host', feature_host]]
        self.info.device_update(dev_uuid, new_vscsi_sxp)
        del self.info['devices'][dev_uuid]
        xen.xend.XendDomain.instance().managed_config_save(self)

    else:
        # If feature_host is 1, all devices are destroyed by just
        # one reconfiguration.
        # If feature_host is 0, we should reconfigure all devices
        # one-by-one to destroy all devices.
        # See reconfigureDevice@VSCSIController.
        for dev in sxp.children(cur_vscsi_sxp, 'dev'):
            target_vscsi_sxp = [
                'vscsi',
                dev + [['state', xenbusState['Closing']]],
                ['feature-host', feature_host]
            ]
            try:
                self.device_configure(target_vscsi_sxp)

            except Exception, exn:
                log.exception('destroy_dscsi_HBA: %s', exn)
                raise XendError('Failed to destroy device')

            if feature_host:
                break
def destroy_xapi_instances(self):
    """Destroy Xen-API instances stored in XendAPIStore.
    """
    # Xen-API classes based on XendBase have their instances stored
    # in XendAPIStore. Cleanup these instances here, if they are supposed
    # to be destroyed when the parent domain is dead.
    #
    # Most of the virtual devices (vif, vbd, vfb, etc) are not based on
    # XendBase and there's no need to remove them from XendAPIStore.

    # Local import — presumably avoids a circular import at module
    # load time; confirm before hoisting to the top of the file.
    from xen.xend import XendDomain
    if XendDomain.instance().is_valid_vm(self.info.get('uuid')):
        # domain still exists.
        return

    # Destroy the VMMetrics instance.
    if XendAPIStore.get(self.metrics.get_uuid(), self.metrics.getClass()) \
            is not None:
        self.metrics.destroy()

    # Destroy DPCI instances.
    for dpci_uuid in XendDPCI.get_by_VM(self.info.get('uuid')):
        XendAPIStore.deregister(dpci_uuid, "DPCI")

    # Destroy DSCSI instances.
    for dscsi_uuid in XendDSCSI.get_by_VM(self.info.get('uuid')):
        XendAPIStore.deregister(dscsi_uuid, "DSCSI")

    # Destroy DSCSI_HBA instances.
    for dscsi_HBA_uuid in XendDSCSI_HBA.get_by_VM(self.info.get('uuid')):
        XendAPIStore.deregister(dscsi_HBA_uuid, "DSCSI_HBA")
def has_device(self, dev_class, dev_uuid):
    """Return True if dev_uuid is among this domain's refs for dev_class."""
    refs_key = '%s_refs' % dev_class.lower()
    return dev_uuid in self.info[refs_key]
def __str__(self):
    """Human-readable summary: domid, name, max dynamic memory, state."""
    return '<domain id=%s name=%s memory=%s state=%s>' % \
           (str(self.domid), self.info['name_label'],
            str(self.info['memory_dynamic_max']), DOM_STATES[self._stateGet()])

__repr__ = __str__
|
sileht/gnocchi | refs/heads/master | tools/duration_perf_analyse.py | 2 | #!/usr/bin/env python
#
# Copyright (c) 2014 eNovance
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Tools to analyse the result of multiple call of duration_perf_test.py:
#
# $ clients=10
# $ parallel --progress -j $clients python duration_perf_test.py \
# --result myresults/client{} ::: $(seq 0 $clients)
# $ python duration_perf_analyse.py myresults
# * get_measures:
# Time
# count 1000.000000
# mean 0.032090
# std 0.028287
# ...
#
import argparse
import os
import pandas
def main():
    """Merge and summarise per-client CSVs produced by duration_perf_test.py.

    Walks the result directory, groups ``*_<method>.csv`` files by method
    (get_measures / write_measures / write_metric), writes one
    ``<method>_merged.csv`` per method (one duration column per client,
    inner-joined on Index) and prints pandas' describe() over all
    client durations.
    """
    parser = argparse.ArgumentParser()
    # nargs='?' makes the positional genuinely optional; without it
    # argparse requires the argument and silently ignores default=.
    parser.add_argument('result',
                        nargs='?',
                        help=('Path of the results of perf_tool.py.'),
                        default='result')
    args = parser.parse_args()

    data = {
        'get_measures': [],
        'write_measures': [],
        'write_metric': [],
    }
    for root, _dirs, files in os.walk(args.result):
        for name in files:
            for method in data:
                if name.endswith('_%s.csv' % method):
                    filepath = os.path.join(root, name)
                    frame = pandas.read_csv(filepath)
                    # Name the Duration column after the client so the
                    # merged table gets one column per client.
                    cname = name.replace('_%s.csv' % method, '')
                    frame.rename(columns={'Duration': cname}, inplace=True)
                    data[method].append(frame)

    for method in data:
        merged = pandas.DataFrame(columns=['Index', 'Duration'])
        per_client = []
        for frame in data[method]:
            frame.dropna(axis=1, inplace=True)
            frame.drop('Count', axis=1, inplace=True)
            merged = merged.merge(frame, on='Index')
            # Rename the client column back to Duration so all client
            # frames can be stacked row-wise for the global summary.
            cname = frame.columns.values[1]
            frame.rename(columns={cname: 'Duration'}, inplace=True)
            per_client.append(frame.drop('Index', axis=1))
        # pandas.concat replaces DataFrame.append (deprecated in 1.4,
        # removed in 2.0); row-wise stacking behaviour is identical.
        stacked = pandas.concat(
            [pandas.DataFrame(columns=['Duration'])] + per_client)
        merged.to_csv(os.path.join(args.result, '%s_merged.csv' % method),
                      index=False)
        print("* %s:" % method)
        print(stacked.describe())
        print("")


if __name__ == '__main__':
    main()
|
pombredanne/pants | refs/heads/master | tests/python/pants_test/backend/graph_info/tasks/test_list_owners.py | 15 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from textwrap import dedent
from pants.backend.graph_info.tasks.list_owners import ListOwners
from pants.backend.python.targets.python_library import PythonLibrary
from pants.base.exceptions import TaskError
from pants.build_graph.build_file_aliases import BuildFileAliases
from pants_test.tasks.task_test_base import ConsoleTaskTestBase
class ListOwnersTest(ConsoleTaskTestBase):
    """Tests for the ListOwners console task: mapping source files to the
    targets that own them, in both plain-text and JSON output formats.
    """

    @classmethod
    def task_type(cls):
        # Task under test.
        return ListOwners

    @property
    def alias_groups(self):
        # The fixtures below only need the python_library alias.
        return BuildFileAliases(targets={'python_library': PythonLibrary})

    def setUp(self):
        # Fixture graph: files owned by zero, one, or several targets,
        # including owners declared in a parent directory ('a' owns
        # files under 'a/c').
        super(ListOwnersTest, self).setUp()

        def add_to_build_file(path, name, *sources):
            # Helper shadowing self.add_to_build_file: writes a
            # python_library stanza named *name* at *path* with the
            # given source files.
            all_sources = ["'{}'".format(source) for source in list(sources)]
            self.add_to_build_file(path, dedent("""
              python_library(name='{name}',
                sources=[{all_sources}]
              )
              """.format(name=name, all_sources=','.join(all_sources))))

        add_to_build_file('a', 'b', 'b.txt')
        add_to_build_file('a/c', 'd', 'd.txt')
        add_to_build_file('a/c', 'd2', 'd.txt')
        add_to_build_file('a/c', 'e', 'e.txt', 'f.txt', 'g.txt')
        add_to_build_file('a', 'c', 'c/c.txt')
        add_to_build_file('a', 'h', 'c/h.txt')
        add_to_build_file('a/c', 'h', 'h.txt')

    def test_no_targets(self):
        # Unowned file -> empty text output.
        self.assert_console_output(passthru_args=['a/a.txt'])

    def test_no_targets_output_format_json(self):
        self.assert_console_output(dedent("""
          {
            "a/a.txt": []
          }""").lstrip('\n'),
          passthru_args=['a/a.txt'],
          options={'output_format': 'json'}
        )

    def test_one_target(self):
        self.assert_console_output('a:b', passthru_args=['a/b.txt'])

    def test_one_target_output_format_json(self):
        self.assert_console_output(dedent("""
          {
            "a/b.txt": [
              "a:b"
            ]
          }""").lstrip('\n'),
          passthru_args=['a/b.txt'],
          options={'output_format': 'json'}
        )

    def test_multiple_targets(self):
        # d.txt is claimed by both a/c:d and a/c:d2.
        self.assert_console_output('a/c:d', 'a/c:d2', passthru_args=['a/c/d.txt'])

    def test_multiple_targets_output_format_json(self):
        self.assert_console_output(dedent("""
          {
            "a/c/d.txt": [
              "a/c:d",
              "a/c:d2"
            ]
          }""").lstrip('\n'),
          passthru_args=['a/c/d.txt'],
          options={'output_format': 'json'}
        )

    def test_target_in_parent_directory(self):
        self.assert_console_output('a:c', passthru_args=['a/c/c.txt'])

    def test_multiple_targets_one_in_parent_directory(self):
        self.assert_console_output('a:h', 'a/c:h', passthru_args=['a/c/h.txt'])

    def test_target_with_multiple_sources(self):
        self.assert_console_output('a/c:e', passthru_args=['a/c/e.txt'])

    def test_no_sources(self):
        # At least one source file argument is required.
        self.assert_console_raises(TaskError, passthru_args=[])

    def test_too_many_sources_output_format_text(self):
        # Text output only supports a single source file.
        self.assert_console_raises(TaskError, passthru_args=['a/a.txt', 'a/b.txt'])

    def test_multiple_sources_output_format_json(self):
        # JSON output supports multiple source files.
        self.assert_console_output(dedent("""
          {
            "a/b.txt": [
              "a:b"
            ],
            "a/a.txt": []
          }""").lstrip('\n'),
          passthru_args=['a/a.txt', 'a/b.txt'],
          options={'output_format': 'json'}
        )
|
F5Networks/f5-common-python | refs/heads/development | f5/bigip/tm/security/scrubber.py | 1 | # coding=utf-8
#
# Copyright 2015-2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""BIG-IP® Advanced Firewall Manager™ (AFM®) module.
REST URI
``http://localhost/mgmt/tm/security/scrubber``
GUI Path
``Security --> Option --> Network Firewall --> External Redirection
--> Scrubbing Profile``
REST Kind
``tm:security:scrubbercollectionstate:*``
"""
from f5.bigip.resource import Collection
from f5.bigip.resource import OrganizingCollection
from f5.bigip.resource import Resource
class Scrubber(OrganizingCollection):
    """BIG-IP® AFM® Scrubber organizing collection."""
    def __init__(self, security):
        super(Scrubber, self).__init__(security)
        # Sub-collections reachable from this organizing collection.
        self._meta_data['allowed_lazy_attributes'] = [Profile_s]
class Profile_s(Collection):
    """BIG-IP® AFM® Scrubber Profile collection"""
    def __init__(self, scrubber):
        super(Profile_s, self).__init__(scrubber)
        # REST kind -> member resource class mapping.
        registry = {
            'tm:security:scrubber:profile:profilestate': Profile,
        }
        self._meta_data['allowed_lazy_attributes'] = [Profile]
        self._meta_data['attribute_registry'] = registry
class Profile(Resource):
    """BIG-IP® AFM® Scrubber Profile resource"""
    def __init__(self, profile_s):
        super(Profile, self).__init__(profile_s)
        self._meta_data['required_json_kind'] = \
            'tm:security:scrubber:profile:profilestate'
        # Loading a profile needs both partition and name.
        self._meta_data['required_load_parameters'].update(('partition', 'name'))
        # REST kind -> subordinate collection class.
        # NOTE(review): the rt-domain key below ends in
        # 'scrubber_rt_domaincollectionstate' (underscores) while every
        # sibling kind uses hyphens — verify against the BIG-IP kind
        # string before relying on rt-domain lazy loading.
        self._meta_data['attribute_registry'] = \
            {'tm:security:scrubber:profile:scrubber-rt-domain:scrubber_rt_domaincollectionstate': Scrubber_Rt_Domain_s,
             'tm:security:scrubber:profile:scrubber-categories:scrubber-categoriescollectionstate': Scrubber_Categories_s,
             'tm:security:scrubber:profile:scrubber-virtual-server:scrubber-virtual-servercollectionstate': Scrubber_Virtual_Server_s,
             'tm:security:scrubber:profile:scrubber-netflow-protected-server:scrubber-netflow-protected-servercollectionstate':
                 Scrubber_Netflow_Protected_Server_s}
        self._meta_data['allowed_lazy_attributes'] = [
            Scrubber_Rt_Domain_s,
            Scrubber_Virtual_Server_s,
            Scrubber_Categories_s,
            Scrubber_Netflow_Protected_Server_s]
class Scrubber_Rt_Domain_s(Collection):
    """BIG-IP® AFM® Scrubber Profile Route Domain collection"""
    def __init__(self, profile):
        super(Scrubber_Rt_Domain_s, self).__init__(profile)
        # Member resource class and the REST kind that maps onto it.
        self._meta_data['allowed_lazy_attributes'] = [Scrubber_Rt_Domain]
        self._meta_data['attribute_registry'] = \
            {'tm:security:scrubber:profile:scrubber-rt-domain:scrubber-rt-domainstate':
                Scrubber_Rt_Domain}
class Scrubber_Rt_Domain(Resource):
    """BIG-IP® AFM® Scrubber Profile Route Domain resource"""
    def __init__(self, scrubber_rt_domain_s):
        super(Scrubber_Rt_Domain, self).__init__(scrubber_rt_domain_s)
        self._meta_data['allowed_lazy_attributes'] = [Scrubber_Rd_Network_Prefix_s]
        self._meta_data['required_json_kind'] = \
            'tm:security:scrubber:profile:scrubber-rt-domain:scrubber-rt-domainstate'
        # Nested network-prefix collection reachable from a route domain.
        self._meta_data['attribute_registry'] = \
            {'tm:security:scrubber:profile:scrubber-rt-domain:scrubber-rd-network-prefix:scrubber-rd-network-prefixcollectionstate':
                Scrubber_Rd_Network_Prefix_s}
        # Creation requires the target route domain as well as a name.
        self._meta_data['required_creation_parameters'].update(('name', 'routeDomain'))
class Scrubber_Rd_Network_Prefix_s(Collection):
    """BIG-IP® AFM® Scrubber Rd Network Prefix collection"""
    def __init__(self, scrubber_rt_domain):
        super(Scrubber_Rd_Network_Prefix_s, self).__init__(scrubber_rt_domain)
        # Member resource class and the REST kind that maps onto it.
        self._meta_data['allowed_lazy_attributes'] = [Scrubber_Rd_Network_Prefix]
        self._meta_data['attribute_registry'] = \
            {'tm:security:scrubber:profile:scrubber-rt-domain:scrubber-rd-network-prefix:scrubber-rd-network-prefixstate':
                Scrubber_Rd_Network_Prefix}
class Scrubber_Rd_Network_Prefix(Resource):
    """BIG-IP® AFM® Scrubber Rd Network Prefix resource"""
    def __init__(self, scrubber_rd_network_prefix_s):
        super(Scrubber_Rd_Network_Prefix, self).__init__(scrubber_rd_network_prefix_s)
        kind = ('tm:security:scrubber:profile:scrubber-rt-domain:'
                'scrubber-rd-network-prefix:scrubber-rd-network-prefixstate')
        self._meta_data['required_json_kind'] = kind
        # A prefix needs its redirect next-hop plus the destination subnet.
        required = ('name', 'nextHop', 'dstIp', 'mask')
        self._meta_data['required_creation_parameters'].update(required)
class Scrubber_Virtual_Server_s(Collection):
    """BIG-IP® AFM® Scrubber Profile Virtual Server collection"""
    def __init__(self, profile):
        super(Scrubber_Virtual_Server_s, self).__init__(profile)
        # Member resource class and the REST kind that maps onto it.
        self._meta_data['allowed_lazy_attributes'] = [Scrubber_Virtual_Server]
        self._meta_data['attribute_registry'] = \
            {'tm:security:scrubber:profile:scrubber-virtual-server:scrubber-virtual-serverstate':
                Scrubber_Virtual_Server}
class Scrubber_Virtual_Server(Resource):
    """BIG-IP® AFM® Scrubber Profile Virtual Server resource"""
    def __init__(self, scrubber_virtual_server_s):
        super(Scrubber_Virtual_Server, self).__init__(scrubber_virtual_server_s)
        kind = ('tm:security:scrubber:profile:scrubber-virtual-server:'
                'scrubber-virtual-serverstate')
        self._meta_data['required_json_kind'] = kind
        # Creation binds the scrubber entry to an existing virtual server.
        self._meta_data['required_creation_parameters'].update(('name', 'vsName'))
class Scrubber_Categories_s(Collection):
    """BIG-IP® AFM® Scrubber Profile Categories collection"""
    def __init__(self, profile):
        super(Scrubber_Categories_s, self).__init__(profile)
        # Member resource class and the REST kind that maps onto it.
        self._meta_data['allowed_lazy_attributes'] = [Scrubber_Categories]
        self._meta_data['attribute_registry'] = \
            {'tm:security:scrubber:profile:scrubber-categories:scrubber-categoriesstate':
                Scrubber_Categories}
class Scrubber_Categories(Resource):
    """BIG-IP® AFM® Scrubber Profile Categories resource"""
    def __init__(self, scrubber_categories_s):
        super(Scrubber_Categories, self).__init__(scrubber_categories_s)
        kind = ('tm:security:scrubber:profile:scrubber-categories:'
                'scrubber-categoriesstate')
        self._meta_data['required_json_kind'] = kind
        # Creation binds a blacklist category to a route domain.
        required = ('name', 'blacklistCategory', 'routeDomainName')
        self._meta_data['required_creation_parameters'].update(required)
class Scrubber_Netflow_Protected_Server_s(Collection):
    """BIG-IP® AFM® Scrubber Profile Netflow Protected Server collection"""
    def __init__(self, profile):
        super(Scrubber_Netflow_Protected_Server_s, self).__init__(profile)
        # Member resource class and the REST kind that maps onto it.
        self._meta_data['allowed_lazy_attributes'] = [Scrubber_Netflow_Protected_Server]
        self._meta_data['attribute_registry'] = \
            {'tm:security:scrubber:profile:scrubber-netflow-protected-server:scrubber-netflow-protected-serverstate':
                Scrubber_Netflow_Protected_Server}
class Scrubber_Netflow_Protected_Server(Resource):
    """BIG-IP® AFM® Scrubber Profile Netflow Protected Server resource"""
    def __init__(self, scrubber_netflow_protected_server_s):
        super(Scrubber_Netflow_Protected_Server, self).__init__(scrubber_netflow_protected_server_s)
        self._meta_data['required_json_kind'] = \
            'tm:security:scrubber:profile:scrubber-netflow-protected-server:scrubber-netflow-protected-serverstate'
        # Creation binds the entry to an existing netflow protected server.
        self._meta_data['required_creation_parameters'].update(('name', 'npsName'))
|
ZHAW-INES/rioxo-uClinux-dist | refs/heads/rtsp | user/python/python-2.4.4/Mac/Tools/macfreeze/macgen_bin.py | 4 | """macgen_bin - Generate application from shared libraries"""
import os
import sys
import string
import types
import macfs
from MACFS import *
import MacOS
from Carbon import Res
import py_resource
import cfmfile
import buildtools
def generate(input, output, module_dict=None, architecture='fat', debug=0):
    """Freeze the script *input* into the standalone Mac application
    *output*, linking dynamic modules as CFM shared-library fragments.

    module_dict maps module name -> module object (computed with
    macmodulefinder when None); architecture is 'fat', 'pwpc' or 'm68k'.
    """
    # try to remove old file
    try:
        os.remove(output)
    except:
        pass

    if module_dict is None:
        import macmodulefinder
        print "Searching for modules..."
        module_dict, missing = macmodulefinder.process(input, [], [], 1)
        if missing:
            # Let the user decide whether to freeze anyway.
            import EasyDialogs
            missing.sort()
            answer = EasyDialogs.AskYesNoCancel("Some modules could not be found; continue anyway?\n(%s)"
                    % string.join(missing, ", "))
            if answer <> 1:
                sys.exit(0)

    # Plan the layout: which modules are dynamic, which code fragments
    # and resource files they need.
    applettemplatepath = buildtools.findtemplate()
    corepath = findpythoncore()
    dynamicmodules, dynamicfiles, extraresfiles = findfragments(module_dict, architecture)

    # Build the applet shell from the template plus the main script.
    print 'Adding "__main__"'
    buildtools.process(applettemplatepath, input, output, 0)

    outputref = Res.FSpOpenResFile(output, 3)
    try:
        Res.UseResFile(outputref)

        print "Adding Python modules"
        addpythonmodules(module_dict)

        print "Adding PythonCore resources"
        copyres(corepath, outputref, ['cfrg', 'Popt', 'GU\267I'], 1)

        print "Adding resources from shared libraries"
        for ppcpath, cfm68kpath in extraresfiles:
            if os.path.exists(ppcpath):
                copyres(ppcpath, outputref, ['cfrg'], 1)
            elif os.path.exists(cfm68kpath):
                copyres(cfm68kpath, outputref, ['cfrg'], 1)

        print "Fixing sys.path prefs"
        Res.UseResFile(outputref)
        try:
            res = Res.Get1Resource('STR#', 228) # from PythonCore
        except Res.Error: pass
        else:
            res.RemoveResource()
        # setting pref file name to empty string
        res = Res.Get1NamedResource('STR ', "PythonPreferenceFileName")
        res.data = Pstring("")
        res.ChangedResource()
        # The frozen app's sys.path is just the application itself.
        syspathpref = "$(APPLICATION)"
        res = Res.Resource("\000\001" + Pstring(syspathpref))
        res.AddResource("STR#", 229, "sys.path preference")

        print "Creating 'PYD ' resources"
        # Each dynamic module gets a 'PYD ' resource holding the names
        # of its PPC and CFM68K code fragments.
        for modname, (ppcfrag, cfm68kfrag) in dynamicmodules.items():
            res = Res.Resource(Pstring(ppcfrag) + Pstring(cfm68kfrag))
            id = 0
            while id < 128:
                # Ids below 128 are reserved; retry until Unique1ID
                # hands out one in the free range.
                id = Res.Unique1ID('PYD ')
            res.AddResource('PYD ', id, modname)
    finally:
        Res.CloseResFile(outputref)

    print "Merging code fragments"
    cfmfile.mergecfmfiles([applettemplatepath, corepath] + dynamicfiles.keys(),
            output, architecture)

    print "done!"
def findfragments(module_dict, architecture):
    """Collect code-fragment information for every dynamic module.

    Returns (dynamicmodules, dynamicfiles, extraresfiles):
    dynamicmodules maps module name -> (ppc fragment, cfm68k fragment);
    dynamicfiles maps library path -> fragment name (memo cache);
    extraresfiles lists (ppc, cfm68k) paths whose resources to copy.
    """
    dynamicmodules = {}
    dynamicfiles = {}
    extraresfiles = []
    for name, module in module_dict.items():
        if module.gettype() <> 'dynamic':
            continue
        path = resolvealiasfile(module.__file__)
        dir, filename = os.path.split(path)
##		ppcfile, cfm68kfile = makefilenames(filename)
        # CFM68K support is disabled: use the file as the PPC library
        # and a dummy placeholder for the 68K side.
        ppcfile = filename
        cfm68kfile = "dummy.cfm68k.slb"

        # ppc stuff
        ppcpath = os.path.join(dir, ppcfile)
        if architecture <> 'm68k':
            ppcfrag, dynamicfiles = getfragname(ppcpath, dynamicfiles)
        else:
            ppcfrag = "_no_fragment_"
        # 68k stuff
        cfm68kpath = os.path.join(dir, cfm68kfile)
        if architecture <> 'pwpc':
            cfm68kfrag, dynamicfiles = getfragname(cfm68kpath, dynamicfiles)
        else:
            cfm68kfrag = "_no_fragment_"
        dynamicmodules[name] = ppcfrag, cfm68kfrag
        if (ppcpath, cfm68kpath) not in extraresfiles:
            extraresfiles.append((ppcpath, cfm68kpath))
    return dynamicmodules, dynamicfiles, extraresfiles
def getfragname(path, dynamicfiles):
    """Return (fragment-name, updated cache) for the shared library at *path*.

    dynamicfiles maps library path -> CFM fragment name and acts as a
    memoisation cache; missing libraries map to '_no_fragment_'.
    """
    if not dynamicfiles.has_key(path):
        if os.path.exists(path):
            lib = cfmfile.CfrgResource(path)
            # First fragment in the 'cfrg' resource names the library.
            fragname = lib.fragments[0].name
        else:
            print "shared lib not found:", path
            fragname = "_no_fragment_"
        dynamicfiles[path] = fragname
    else:
        fragname = dynamicfiles[path]
    return fragname, dynamicfiles
def addpythonmodules(module_dict):
    """Add each pure module/package in module_dict as a resource.

    __main__ is skipped (it comes from the applet template); so is
    anything without a corresponding .py source file.
    """
    # XXX should really use macgen_rsrc.generate(), this does the same, but skips __main__
    items = module_dict.items()
    items.sort()
    for name, module in items:
        mtype = module.gettype()
        if mtype not in ['module', 'package'] or name == "__main__":
            continue
        location = module.__file__
        if location[-4:] == '.pyc':
            # Attempt corresponding .py
            location = location[:-1]
        if location[-3:] != '.py':
            print '*** skipping', location
            continue
        print 'Adding module "%s"' % name
        id, name = py_resource.frompyfile(location, name, preload=0,
                ispackage=mtype=='package')
def Pstring(str):
    """Return *str* as a Pascal string: a length byte followed by the chars.

    Raises TypeError when the string exceeds the 255-char Str255 limit.
    """
    if len(str) > 255:
        raise TypeError, "Str255 must be at most 255 chars long"
    return chr(len(str)) + str
##def makefilenames(name):
## lname = string.lower(name)
## pos = string.find(lname, ".ppc.")
## if pos > 0:
## return name, name[:pos] + '.CFM68K.' + name[pos+5:]
## pos = string.find(lname, ".cfm68k.")
## if pos > 0:
## return name[:pos] + '.ppc.' + name[pos+8:], name
## raise ValueError, "can't make ppc/cfm68k filenames"
def copyres(input, output, *args, **kwargs):
    """Wrapper around buildtools.copyres accepting paths or refnums.

    String arguments are opened as resource files (input read-only,
    output read/write) and are closed again afterwards; refnum
    arguments are passed through untouched.
    """
    openedin = openedout = 0
    if type(input) == types.StringType:
        input = Res.FSpOpenResFile(input, 1)
        openedin = 1
    if type(output) == types.StringType:
        output = Res.FSpOpenResFile(output, 3)
        openedout = 1
    try:
        apply(buildtools.copyres, (input, output) + args, kwargs)
    finally:
        if openedin:
            Res.CloseResFile(input)
        if openedout:
            Res.CloseResFile(output)
def findpythoncore():
"""find the PythonCore shared library, possibly asking the user if we can't find it"""
try:
vRefNum, dirID = macfs.FindFolder(kOnSystemDisk, kSharedLibrariesFolderType, 0)
except macfs.error:
extpath = ":"
else:
extpath = macfs.FSSpec((vRefNum, dirID, "")).as_pathname()
version = string.split(sys.version)[0]
if MacOS.runtimemodel == 'carbon':
corename = "PythonCoreCarbon " + version
elif MacOS.runtimemodel == 'ppc':
corename = "PythonCore " + version
else:
raise "Unknown MacOS.runtimemodel", MacOS.runtimemodel
corepath = os.path.join(extpath, corename)
if not os.path.exists(corepath):
corepath = EasyDialogs.AskFileForOpen(message="Please locate PythonCore:",
typeList=("shlb",))
if not corepath:
raise KeyboardInterrupt, "cancelled"
return resolvealiasfile(corepath)
def resolvealiasfile(path):
    """Resolve a Finder alias file to its target pathname.

    Returns *path* unchanged when resolution fails.
    """
    try:
        target, _isdir, _isalias = macfs.ResolveAliasFile(path)
    except macfs.error:
        return path
    return target.as_pathname()
|
sidnarayanan/BAdNet | refs/heads/master | train/smh/zh_sr.py | 1 | #!/usr/bin/env python
from keras.models import Model, load_model
from keras.callbacks import ModelCheckpoint
#from subtlenet.backend.keras_objects import *
#from subtlenet.backend.losses import *
from keras.layers import Dense, BatchNormalization, Input
from keras.utils import np_utils
from keras.optimizers import Adam
import keras.backend as K
from tensorflow.python.framework import graph_util, graph_io
import os, sys
import numpy as np
from collections import namedtuple
import subtlenet.utils as utils
# Run keras/TF on the CPU only.
utils.set_processor('cpu')

# Train/validation split fraction; 1 (or 0) disables the split so that
# tidx and vidx both cover every event (see Sample below).
VALSPLIT = 1 #0.7
# Multi-class softmax vs binary signal/background classification.
MULTICLASS = False
# Regression (linear output, MSE) instead of classification.
REGRESSION = True
def _make_parent(path):
    """Create the parent directory of *path* (like ``mkdir -p``).

    Replaces the original ``os.system('mkdir -p ...')`` call: no shell
    (so paths with spaces or metacharacters are safe) and no subprocess
    spawn.  An already-existing directory is not an error, matching the
    best-effort semantics of the shell version.
    """
    parent = '/'.join(path.split('/')[:-1])
    if not parent:
        return
    try:
        os.makedirs(parent)
    except OSError:
        # Directory already exists (or a racing creator made it).
        pass
class Sample(object):
    """One dataset split loaded from ``<base>/<name>_{x,y,w}.npy``.

    X holds features, Y targets (raw values for regression, one-hot
    labels for classification), W per-event weights.  ``idx`` is a
    random permutation of event indices used to carve out training
    (tidx) and validation (vidx) subsets per the module-level VALSPLIT
    fraction.
    """

    def __init__(self, name, base, max_Y):
        self.name = name
        self.X = np.load('%s/%s_%s.npy'%(base, name, 'x'))
        if REGRESSION:
            self.Y = np.load('%s/%s_%s.npy'%(base, name, 'y'))
        else:
            if MULTICLASS:
                self.Y = np_utils.to_categorical(
                        np.load('%s/%s_%s.npy'%(base, name, 'y')),
                        max_Y
                    )
            else:
                # Binarise labels (signal = y > 0).  Fix: use the
                # builtin int — the np.int alias was deprecated in
                # NumPy 1.20 and removed in 1.24, and was always
                # identical to the builtin anyway.
                self.Y = np_utils.to_categorical(
                        (np.load('%s/%s_%s.npy'%(base, name, 'y')) > 0).astype(int),
                        2
                    )
        self.W = np.load('%s/%s_%s.npy'%(base, name, 'w'))
        self.idx = np.random.permutation(self.Y.shape[0])

    @property
    def tidx(self):
        """Training indices (all events when VALSPLIT is 0 or 1)."""
        if VALSPLIT == 1 or VALSPLIT == 0:
            return self.idx
        else:
            return self.idx[int(VALSPLIT*len(self.idx)):]

    @property
    def vidx(self):
        """Validation indices (all events when VALSPLIT is 0 or 1)."""
        if VALSPLIT == 1 or VALSPLIT == 0:
            return self.idx
        else:
            return self.idx[:int(VALSPLIT*len(self.idx))]

    def infer(self, model):
        """Run *model* on X and cache the predictions as self.Yhat."""
        self.Yhat = model.predict(self.X)

    def standardize(self, mu, std):
        """Standardise features in place: X <- (X - mu) / std."""
        self.X = (self.X - mu) / std
class ClassModel(object):
    """Dense feed-forward network for regression or classification.

    Builds ``n_hidden`` fully connected layers of width ``n_inputs`` with
    batch normalization, ending in either a single linear output
    (REGRESSION) or an ``n_targets``-way softmax. Also provides training,
    inference, and keras/tensorflow serialization helpers.
    """
    def __init__(self, n_inputs, n_hidden, n_targets):
        self._hidden = 0
        self.n_inputs = n_inputs
        # Binary classification unless MULTICLASS is enabled at module level.
        self.n_targets = n_targets if MULTICLASS else 2
        self.n_hidden = n_hidden
        self.inputs = Input(shape=(n_inputs,), name='input')
        h = self.inputs
        h = BatchNormalization(momentum=0.6)(h)
        # n_hidden-1 relu layers, then one tanh layer (bounded activations).
        for _ in xrange(n_hidden-1):
            h = Dense(n_inputs, activation='relu')(h)
            h = BatchNormalization()(h)
        h = Dense(n_inputs, activation='tanh')(h)
        h = BatchNormalization()(h)
        if REGRESSION:
            self.outputs = Dense(1, activation='linear', name='output')(h)
            self.model = Model(inputs=self.inputs, outputs=self.outputs)
            self.model.compile(optimizer=Adam(),
                               loss='mse')
        else:
            self.outputs = Dense(self.n_targets, activation='softmax', name='output')(h)
            self.model = Model(inputs=self.inputs, outputs=self.outputs)
            self.model.compile(optimizer=Adam(),
                               loss='categorical_crossentropy')
        self.model.summary()
    def train(self, samples):
        """Fit the model on the concatenated samples; log history to history.log."""
        tX = np.vstack([s.X[s.tidx] for s in samples])
        tW = np.concatenate([s.W[s.tidx] for s in samples])
        vX = np.vstack([s.X[s.vidx] for s in samples])
        vW = np.concatenate([s.W[s.vidx] for s in samples])
        if REGRESSION:
            tY = np.concatenate([s.Y[s.tidx] for s in samples])
            vY = np.concatenate([s.Y[s.vidx] for s in samples])
        else:
            tY = np.vstack([s.Y[s.tidx] for s in samples])
            vY = np.vstack([s.Y[s.vidx] for s in samples])
        if not REGRESSION:
            # Reweight each class so its total training weight is 100.
            # NOTE(review): validation weights are scaled by the *training*
            # total for the same class — presumably intentional, to keep the
            # relative normalization consistent across splits; confirm.
            for i in xrange(tY.shape[1]):
                tot = np.sum(tW[tY[:,i] == 1])
                tW[tY[:,i] == 1] *= 100/tot
                vW[vY[:,i] == 1] *= 100/tot
        history = self.model.fit(tX, tY, sample_weight=tW,
                                 batch_size=1024, epochs=20, shuffle=True,
                                 validation_data=(vX, vY, vW))
        # Dump the keras history as a CSV-like log, one column per metric.
        with open('history.log','w') as flog:
            history = history.history
            flog.write(','.join(history.keys())+'\n')
            for l in zip(*history.values()):
                flog.write(','.join([str(x) for x in l])+'\n')
    def save_as_keras(self, path):
        """Save the full keras model (architecture + weights) to *path*."""
        _make_parent(path)
        self.model.save(path)
        print 'Saved to',path
    def save_as_tf(self,path):
        """Export the model as a frozen tensorflow graph (constants only)."""
        _make_parent(path)
        sess = K.get_session()
        print [l.op.name for l in self.model.inputs],'->',[l.op.name for l in self.model.outputs]
        graph = graph_util.convert_variables_to_constants(sess,
                                                          sess.graph.as_graph_def(),
                                                          [n.op.name for n in self.model.outputs])
        p0 = '/'.join(path.split('/')[:-1])
        p1 = path.split('/')[-1]
        graph_io.write_graph(graph, p0, p1, as_text=False)
        print 'Saved to',path
    def predict(self, *args, **kwargs):
        # Thin pass-through to keras Model.predict.
        return self.model.predict(*args, **kwargs)
    def load_model(self, path):
        # Replace the in-memory model with one loaded from disk.
        self.model = load_model(path)
def plot(binning, fn, samples, outpath, xlabel=None, ylabel=None):
    """Plot normalized, weighted histograms of one quantity per sample.

    *fn* is either an integer column index into ``s.X`` or a callable
    mapping a Sample to an array. Validation events (``s.vidx``) are used,
    weighted by ``s.W``. Writes both a linear and a log-y version of the
    figure under *outpath*.
    """
    hists = {}
    for s in samples:
        h = utils.NH1(binning)
        if type(fn) == int:
            h.fill_array(s.X[s.vidx,fn], weights=s.W[s.vidx])
        else:
            h.fill_array(fn(s), weights=s.W[s.vidx])
        # Normalize each histogram before overlaying.
        h.scale()
        hists[s.name] = h
    p = utils.Plotter()
    for i,s in enumerate(samples):
        p.add_hist(hists[s.name], s.name, i)
    _make_parent(outpath)
    p.plot(xlabel=xlabel, ylabel=ylabel,
           output = outpath)
    p.plot(xlabel=xlabel, ylabel=ylabel,
           output = outpath + '_logy',
           logy=True)
def get_mu_std(samples):
    """Return per-feature (mean, std) computed over all samples' X, as float64."""
    stacked = np.vstack([s.X for s in samples]).astype(np.float64)
    return np.mean(stacked, axis=0), np.std(stacked, axis=0)
if __name__ == '__main__':
    from argparse import ArgumentParser
    parser = ArgumentParser()
    parser.add_argument('--train', action='store_true')
    parser.add_argument('--plot', action='store_true')
    parser.add_argument('--version', type=int, default=0)
    parser.add_argument('--hidden', type=int, default=4)
    args = parser.parse_args()
    # Hard-coded site-specific input/output locations.
    basedir = '/data/t3home000/snarayan/smh_dnn/v13/'
    figsdir = '/home/snarayan/public_html/figs/smh/v10/evt_%i/'%(args.version)
    modeldir = 'models/evt/v%i/'%(args.version)
    samples = ['VH', 'Diboson', 'SingleTop', 'TT', 'WJets', 'ZtoNuNu']
    samples = [Sample(s, basedir, len(samples)) for s in samples]
    n_inputs = samples[0].X.shape[1]
    # NOTE(review): args.hidden is parsed above but never used; the network
    # depth is hard-coded to 2 here.
    n_hidden = 2
    print 'Standardizing...'
    # Standardize all samples with moments computed over the full dataset.
    mu, std = get_mu_std(samples)
    [s.standardize(mu, std) for s in samples]
    model = ClassModel(n_inputs, n_hidden, len(samples))
    if args.train:
        print 'Training...'
        model.train(samples)
        model.save_as_keras(modeldir+'/weights.h5')
        model.save_as_tf(modeldir+'/graph.pb')
    else:
        print 'Loading...'
        model.load_model(modeldir+'weights.h5')
    if args.plot:
        print 'Inferring...'
        for s in samples:
            s.infer(model)
        # Reverse so the plotting order stacks backgrounds first.
        samples.reverse()
        if REGRESSION:
            plot(np.linspace(60, 160, 20),
                 lambda s : s.Yhat[s.vidx][:,0],
                 samples, figsdir+'mass_regressed', xlabel='Regressed mass')
            plot(np.linspace(60, 160, 20),
                 lambda s : s.Y[s.vidx],
                 samples, figsdir+'mass_truth', xlabel='True mass')
        else:
            for i in xrange(len(samples) if MULTICLASS else 2):
                plot(np.linspace(0, 1, 20),
                     lambda s, i=i : s.Yhat[s.vidx,i],
                     samples, figsdir+'class_%i'%i, xlabel='Class %i'%i)
    #
    # for i in xrange(n_inputs):
    #     plot(np.linspace(-2, 2, 20),
    #         lambda s, i=i : s.X[s.vidx,i],
    #         samples, figsdir+'feature_%i'%i, xlabel='Feature %i'%i)
    #
|
meskio/leap_pycommon | refs/heads/develop | src/leap/common/testing/https_server.py | 6 | # -*- coding: utf-8 -*-
# leap.common.testing.https_server.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
A simple HTTPS server to be used in tests
"""
from BaseHTTPServer import HTTPServer
import os
import ssl
import SocketServer
import threading
import unittest
_where = os.path.split(__file__)[0]
def where(filename):
return os.path.join(_where, filename)
class HTTPSServer(HTTPServer):
    """HTTPServer variant that serves TLS using the bundled test certificates."""
    def server_bind(self):
        # Bind the plain TCP socket first, then wrap it for server-side TLS
        # using the self-signed certificate/key shipped next to this module.
        SocketServer.TCPServer.server_bind(self)
        # NOTE(review): ssl.wrap_socket is deprecated (removed in Python 3.12);
        # acceptable here since this module targets Python 2.
        self.socket = ssl.wrap_socket(
            self.socket, server_side=True,
            certfile=where("leaptestscert.pem"),
            keyfile=where("leaptestskey.pem"),
            ca_certs=where("cacert.pem"),
            ssl_version=ssl.PROTOCOL_SSLv23)
class TestServerThread(threading.Thread):
    """Runs an HTTPSServer in a background thread for the duration of a test.

    Publishes the chosen (host, port) on the test case object and signals
    ``test_object.server_started`` once the server is ready.
    """
    def __init__(self, test_object, request_handler):
        threading.Thread.__init__(self)
        self.request_handler = request_handler
        self.test_object = test_object
    def run(self):
        # Port 0 lets the OS pick a free ephemeral port.
        self.server = HTTPSServer(('localhost', 0), self.request_handler)
        host, port = self.server.socket.getsockname()
        self.test_object.HOST, self.test_object.PORT = host, port
        self.test_object.server_started.set()
        # Drop the back-reference so the test case is not kept alive.
        self.test_object = None
        try:
            self.server.serve_forever(0.05)
        finally:
            self.server.server_close()
    def stop(self):
        # Unblocks serve_forever(); safe to call from another thread.
        self.server.shutdown()
class BaseHTTPSServerTestCase(unittest.TestCase):
    """TestCase that runs an HTTPS server in a background thread.

    Derived classes must provide a ``request_handler`` attribute.
    """
    def setUp(self):
        # Start the server thread, then block until it has bound a port.
        self.server_started = threading.Event()
        self.thread = TestServerThread(self, self.request_handler)
        self.thread.start()
        self.server_started.wait()
    def tearDown(self):
        self.thread.stop()
    def get_server(self):
        """Return the running server's address as ``host:port``."""
        host = "localhost" if self.HOST == "127.0.0.1" else self.HOST
        return "%s:%s" % (host, self.PORT)
if __name__ == "__main__":
    # Allow running this module's tests directly.
    unittest.main()
|
jiangyy/oiutils | refs/heads/master | oi/__init__.py | 2 | """
Oiutils
"""
|
Eficent/odoomrp-wip | refs/heads/8.0 | mrp_bom_catch_product_code/models/__init__.py | 29 | # -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from . import mrp_bom
|
alextricity25/parse_apache_configs | refs/heads/master | parse_apache_configs/test/test_parse_config.py | 1 | from os import listdir
from os.path import isfile, join
import unittest
from parse_apache_configs import parse_config
import pprint
class testParseConfig(unittest.TestCase):
    """Smoke tests: every file under ./test_conf_files must parse without error."""
    #print "ENTERING TEST_PARSE_CONFIG" + "-"*8
    def test_parse_config(self):
        # Parse each sample apache config directly from its file path.
        test_files = [ f for f in listdir("./test_conf_files") if isfile(join("./test_conf_files", f)) ]
        for file_name in test_files:
            pac = parse_config.ParseApacheConfig("./test_conf_files/" + file_name)
            conf_list = pac.parse_config()
    def test_parse_config_string_file(self):
        # Parse each sample apache config from an in-memory string instead.
        test_files = [ f for f in listdir("./test_conf_files") if isfile(join("./test_conf_files", f)) ]
        for file_name in test_files:
            full_file_path = "./test_conf_files/" + file_name
            with open(full_file_path, 'r') as fp:
                file_as_string = fp.read()
            pac = parse_config.ParseApacheConfig(apache_file_as_string=file_as_string)
            conf_list = pac.parse_config()
            #print conf_list
            #pp(conf_list)
            #TODO make sure we get back the right netstedList
|
rosmo/ansible | refs/heads/devel | lib/ansible/modules/monitoring/bigpanda.py | 92 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: bigpanda
author: "Hagai Kariti (@hkariti)"
short_description: Notify BigPanda about deployments
version_added: "1.8"
description:
- Notify BigPanda when deployments start and end (successfully or not). Returns a deployment object containing all the parameters for future module calls.
options:
component:
description:
- "The name of the component being deployed. Ex: billing"
required: true
aliases: ['name']
version:
description:
- The deployment version.
required: true
token:
description:
- API token.
required: true
state:
description:
- State of the deployment.
required: true
choices: ['started', 'finished', 'failed']
hosts:
description:
- Name of affected host name. Can be a list.
required: false
default: machine's hostname
aliases: ['host']
env:
description:
- The environment name, typically 'production', 'staging', etc.
required: false
owner:
description:
- The person responsible for the deployment.
required: false
description:
description:
- Free text description of the deployment.
required: false
url:
description:
- Base URL of the API server.
required: False
default: https://api.bigpanda.io
validate_certs:
description:
- If C(no), SSL certificates for the target url will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
type: bool
# informational: requirements for nodes
requirements: [ ]
'''
EXAMPLES = '''
- bigpanda:
component: myapp
version: '1.3'
token: '{{ bigpanda_token }}'
state: started
- bigpanda:
component: myapp
version: '1.3'
token: '{{ bigpanda_token }}'
state: finished
# If outside servers aren't reachable from your machine, use delegate_to and override hosts:
- bigpanda:
component: myapp
version: '1.3'
token: '{{ bigpanda_token }}'
hosts: '{{ ansible_hostname }}'
state: started
delegate_to: localhost
register: deployment
- bigpanda:
component: '{{ deployment.component }}'
version: '{{ deployment.version }}'
token: '{{ deployment.token }}'
state: finished
delegate_to: localhost
'''
# ===========================================
# Module execution.
#
import json
import socket
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
from ansible.module_utils.urls import fetch_url
def main():
    """Ansible module entry point: report a deployment start/end to BigPanda.

    Builds the request body from module params, posts it to the BigPanda
    events API, and exits via module.exit_json / module.fail_json.
    """
    module = AnsibleModule(
        argument_spec=dict(
            component=dict(required=True, aliases=['name']),
            version=dict(required=True),
            token=dict(required=True, no_log=True),
            state=dict(required=True, choices=['started', 'finished', 'failed']),
            hosts=dict(required=False, default=[socket.gethostname()], aliases=['host']),
            env=dict(required=False),
            owner=dict(required=False),
            description=dict(required=False),
            message=dict(required=False),
            source_system=dict(required=False, default='ansible'),
            validate_certs=dict(default='yes', type='bool'),
            url=dict(required=False, default='https://api.bigpanda.io'),
        ),
        supports_check_mode=True,
    )
    token = module.params['token']
    state = module.params['state']
    url = module.params['url']
    # Build the common request body
    body = dict()
    for k in ('component', 'version', 'hosts'):
        v = module.params[k]
        if v is not None:
            body[k] = v
    # Normalize hosts to a list (a single host may be passed as a string).
    if not isinstance(body['hosts'], list):
        body['hosts'] = [body['hosts']]
    # Insert state-specific attributes to body
    if state == 'started':
        for k in ('source_system', 'env', 'owner', 'description'):
            v = module.params[k]
            if v is not None:
                body[k] = v
        request_url = url + '/data/events/deployments/start'
    else:
        message = module.params['message']
        if message is not None:
            body['errorMessage'] = message
        if state == 'finished':
            body['status'] = 'success'
        else:
            body['status'] = 'failure'
        request_url = url + '/data/events/deployments/end'
    # Build the deployment object we return
    deployment = dict(token=token, url=url)
    deployment.update(body)
    # Expose errorMessage back to the playbook under the friendlier key.
    if 'errorMessage' in deployment:
        message = deployment.pop('errorMessage')
        deployment['message'] = message
    # If we're in check mode, just exit pretending like we succeeded
    if module.check_mode:
        module.exit_json(changed=True, **deployment)
    # Send the data to bigpanda
    data = json.dumps(body)
    headers = {'Authorization': 'Bearer %s' % token, 'Content-Type': 'application/json'}
    try:
        response, info = fetch_url(module, request_url, data=data, headers=headers)
        if info['status'] == 200:
            module.exit_json(changed=True, **deployment)
        else:
            module.fail_json(msg=json.dumps(info))
    except Exception as e:
        module.fail_json(msg=to_native(e), exception=traceback.format_exc())
if __name__ == '__main__':
    # Standard Ansible module entry point.
    main()
|
dapuck/pyleus | refs/heads/develop | examples/top_urls/top_urls/top_global.py | 9 | from __future__ import absolute_import
import logging
from operator import itemgetter
from pyleus.storm import SimpleBolt
log = logging.getLogger('top_global_bolt')
class TopGlobalBolt(SimpleBolt):
    """Merges per-task top-N URL rankings into a single global top-N.

    Accumulates rankings from upstream tasks and, on every tick, emits the
    current global ranking and starts a fresh window.
    """

    OPTIONS = ["N"]
    OUTPUT_FIELDS = ["top_N"]

    def initialize(self):
        self.top_N = []
        self.N = self.options["N"]

    def process_tick(self):
        log.debug("-------------")
        log.debug(self.top_N)
        # Emit the current global ranking, then reset for the next window.
        self.emit((self.top_N,))
        self.top_N = []

    def process_tuple(self, tup):
        task_ranking, = tup.values
        log.debug("Task {0}: {1}".format(tup.task, task_ranking))
        # Merge the incoming ranking and keep only the N entries with the
        # largest counts (descending by the second tuple element).
        merged = self.top_N + task_ranking
        merged.sort(key=itemgetter(1), reverse=True)
        self.top_N = merged[:self.N]
if __name__ == '__main__':
    # Configure file logging before handing control to the pyleus runtime.
    logging.basicConfig(
        level=logging.DEBUG,
        filename='/tmp/top_urls_top_global.log',
        filemode='a',
    )
    TopGlobalBolt().run()
|
ioannistsanaktsidis/invenio | refs/heads/prod | modules/weblinkback/lib/weblinkback_webinterface.py | 24 | # -*- coding: utf-8 -*-
## Comments and reviews for records.
## This file is part of Invenio.
## Copyright (C) 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""WebLinkback - Web Interface"""
from invenio.messages import gettext_set_language
from invenio.webinterface_handler import wash_urlargd, WebInterfaceDirectory
from invenio.webuser import getUid, collect_user_info, page_not_authorized
from invenio.weblinkback import check_user_can_view_linkbacks, \
perform_sendtrackback, \
perform_request_display_record_linbacks, \
perform_request_display_approved_latest_added_linkbacks_to_accessible_records, \
perform_sendtrackback_disabled
from invenio.weblinkback_dblayer import approve_linkback, \
reject_linkback
from invenio.weblinkback_config import CFG_WEBLINKBACK_LATEST_COUNT_DEFAULT, \
CFG_WEBLINKBACK_SUBSCRIPTION_DEFAULT_ARGUMENT_NAME
from invenio.urlutils import redirect_to_url, make_canonical_urlargd
from invenio.config import CFG_SITE_URL, \
CFG_SITE_SECURE_URL, \
CFG_SITE_LANG, \
CFG_SITE_RECORD, \
CFG_WEBLINKBACK_TRACKBACK_ENABLED
from invenio.search_engine import guess_primary_collection_of_a_record, \
create_navtrail_links, \
get_colID
from invenio.webpage import pageheaderonly, pagefooteronly
from invenio.websearchadminlib import get_detailed_page_tabs
from invenio.access_control_engine import acc_authorize_action
import invenio.template
webstyle_templates = invenio.template.load('webstyle')
websearch_templates = invenio.template.load('websearch')
weblinkback_templates = invenio.template.load('weblinkback')
class WebInterfaceRecordLinkbacksPages(WebInterfaceDirectory):
    """Define the set of record/number/linkbacks pages."""
    _exports = ['', 'display', 'index', 'approve', 'reject', 'sendtrackback']
    def __init__(self, recid = -1):
        # Record id this set of pages operates on.
        self.recid = recid
    def index(self, req, form):
        """
        Redirect to display function
        """
        return self.display(req, form)
    def display(self, req, form):
        """
        Display the linkbacks of a record and admin approve/reject features
        """
        argd = wash_urlargd(form, {})
        _ = gettext_set_language(argd['ln'])
        # Check authorization
        uid = getUid(req)
        user_info = collect_user_info(req)
        (auth_code, auth_msg) = check_user_can_view_linkbacks(user_info, self.recid)
        if auth_code and user_info['email'] == 'guest':
            # Ask to login
            target = CFG_SITE_SECURE_URL + '/youraccount/login' + \
                make_canonical_urlargd({'ln': argd['ln'],
                                        'referer': CFG_SITE_URL + user_info['uri']}, {})
            return redirect_to_url(req, target)
        elif auth_code:
            return page_not_authorized(req,
                                       referer="../",
                                       uid=uid,
                                       text=auth_msg,
                                       ln=argd['ln'])
        # Moderation controls are rendered only for authorized admins.
        show_admin = False
        (auth_code, auth_msg) = acc_authorize_action(req, 'moderatelinkbacks', collection = guess_primary_collection_of_a_record(self.recid))
        if not auth_code:
            show_admin = True
        body = perform_request_display_record_linbacks(req, self.recid, show_admin, weblinkback_templates=weblinkback_templates, ln=argd['ln'])
        title = websearch_templates.tmpl_record_page_header_content(req, self.recid, argd['ln'])[0]
        # navigation, tabs, top and bottom part
        navtrail = create_navtrail_links(cc=guess_primary_collection_of_a_record(self.recid), ln=argd['ln'])
        if navtrail:
            navtrail += ' &gt; '
        navtrail += '<a class="navtrail" href="%s/%s/%s?ln=%s">'% (CFG_SITE_URL, CFG_SITE_RECORD, self.recid, argd['ln'])
        navtrail += title
        navtrail += '</a>'
        navtrail += ' &gt; <a class="navtrail">Linkbacks</a>'
        mathjaxheader, jqueryheader = weblinkback_templates.tmpl_get_mathjaxheader_jqueryheader()
        # Build the detailed-record tab bar, ordered by the configured order.
        unordered_tabs = get_detailed_page_tabs(get_colID(guess_primary_collection_of_a_record(self.recid)),
                                                self.recid,
                                                ln=argd['ln'])
        ordered_tabs_id = [(tab_id, values['order']) for (tab_id, values) in unordered_tabs.iteritems()]
        ordered_tabs_id.sort(lambda x, y: cmp(x[1], y[1]))
        link_ln = ''
        if argd['ln'] != CFG_SITE_LANG:
            link_ln = '?ln=%s' % argd['ln']
        tabs = [(unordered_tabs[tab_id]['label'], \
                 '%s/%s/%s/%s%s' % (CFG_SITE_URL, CFG_SITE_RECORD, self.recid, tab_id, link_ln), \
                 tab_id in ['linkbacks'],
                 unordered_tabs[tab_id]['enabled']) \
                for (tab_id, values) in ordered_tabs_id
                if unordered_tabs[tab_id]['visible'] == True]
        top = webstyle_templates.detailed_record_container_top(self.recid,
                                                               tabs,
                                                               argd['ln'])
        bottom = webstyle_templates.detailed_record_container_bottom(self.recid,
                                                                     tabs,
                                                                     argd['ln'])
        return pageheaderonly(title=title,
                              navtrail=navtrail,
                              uid=uid,
                              verbose=1,
                              metaheaderadd = mathjaxheader + jqueryheader,
                              req=req,
                              language=argd['ln'],
                              navmenuid='search',
                              navtrail_append_title_p=0) + \
               websearch_templates.tmpl_search_pagestart(argd['ln']) + \
               top + body + bottom + \
               websearch_templates.tmpl_search_pageend(argd['ln']) + \
               pagefooteronly(language=argd['ln'], req=req)
    # Return the same page whether we ask for /CFG_SITE_RECORD/123/linkbacks or /CFG_SITE_RECORD/123/linkbacks/
    __call__ = index
    def approve(self, req, form):
        """
        Approve a linkback
        """
        argd = wash_urlargd(form, {'linkbackid': (int, -1)})
        authorization = self.check_authorization_moderatelinkbacks(req, argd)
        if not authorization:
            approve_linkback(argd['linkbackid'], collect_user_info(req))
            return self.display(req, form)
        else:
            return authorization
    def reject(self, req, form):
        """
        Reject a linkback
        """
        argd = wash_urlargd(form, {'linkbackid': (int, -1)})
        authorization = self.check_authorization_moderatelinkbacks(req, argd)
        if not authorization:
            reject_linkback(argd['linkbackid'], collect_user_info(req))
            return self.display(req, form)
        else:
            return authorization
    def check_authorization_moderatelinkbacks(self, req, argd):
        """
        Check if user has authorization moderate linkbacks
        @return if yes: nothing, if guest: login redirect, otherwise page_not_authorized
        """
        # Check authorization
        # NOTE: implicitly returns None when the user is authorized; callers
        # test the return value for truthiness.
        uid = getUid(req)
        user_info = collect_user_info(req)
        (auth_code, auth_msg) = acc_authorize_action(req, 'moderatelinkbacks', collection = guess_primary_collection_of_a_record(self.recid))
        if auth_code and user_info['email'] == 'guest':
            # Ask to login
            target = CFG_SITE_SECURE_URL + '/youraccount/login' + \
                make_canonical_urlargd({'ln': argd['ln'],
                                        'referer': CFG_SITE_URL + user_info['uri']}, {})
            return redirect_to_url(req, target)
        elif auth_code:
            return page_not_authorized(req,
                                       referer="../",
                                       uid=uid,
                                       text=auth_msg,
                                       ln=argd['ln'])
    def sendtrackback(self, req, form):
        """
        Send a new trackback
        """
        if CFG_WEBLINKBACK_TRACKBACK_ENABLED:
            argd = wash_urlargd(form, {'url': (str, CFG_WEBLINKBACK_SUBSCRIPTION_DEFAULT_ARGUMENT_NAME),
                                       'title': (str, CFG_WEBLINKBACK_SUBSCRIPTION_DEFAULT_ARGUMENT_NAME),
                                       'excerpt': (str, CFG_WEBLINKBACK_SUBSCRIPTION_DEFAULT_ARGUMENT_NAME),
                                       'blog_name': (str, CFG_WEBLINKBACK_SUBSCRIPTION_DEFAULT_ARGUMENT_NAME),
                                       'id': (str, CFG_WEBLINKBACK_SUBSCRIPTION_DEFAULT_ARGUMENT_NAME),
                                       'source': (str, CFG_WEBLINKBACK_SUBSCRIPTION_DEFAULT_ARGUMENT_NAME),
                                       })
            perform_sendtrackback(req, self.recid, argd['url'], argd['title'], argd['excerpt'], argd['blog_name'], argd['id'], argd['source'], argd['ln'])
        else:
            perform_sendtrackback_disabled(req)
class WebInterfaceRecentLinkbacksPages(WebInterfaceDirectory):
    """Define the set of global /linkbacks pages."""
    _exports = ['', 'display', 'index']
    def index(self, req, form):
        """
        Redirect to display function
        """
        return self.display(req, form)
    def display(self, req, form):
        """
        Display approved latest added linkbacks of the invenio instance
        """
        # 'rg' is the number of linkbacks to show.
        argd = wash_urlargd(form, {'rg': (int, CFG_WEBLINKBACK_LATEST_COUNT_DEFAULT)})
        # count must be positive
        if argd['rg'] < 0:
            argd['rg'] = -argd['rg']
        _ = gettext_set_language(argd['ln'])
        user_info = collect_user_info(req)
        body = perform_request_display_approved_latest_added_linkbacks_to_accessible_records(argd['rg'], argd['ln'], user_info, weblinkback_templates=weblinkback_templates)
        navtrail = 'Recent Linkbacks'
        mathjaxheader, jqueryheader = weblinkback_templates.tmpl_get_mathjaxheader_jqueryheader()
        return pageheaderonly(title=navtrail,
                              navtrail=navtrail,
                              verbose=1,
                              metaheaderadd = mathjaxheader + jqueryheader,
                              req=req,
                              language=argd['ln'],
                              navmenuid='search',
                              navtrail_append_title_p=0) + \
               websearch_templates.tmpl_search_pagestart(argd['ln']) + \
               body + \
               websearch_templates.tmpl_search_pageend(argd['ln']) + \
               pagefooteronly(language=argd['ln'], req=req)
    # Return the same page whether we ask for /linkbacks or /linkbacks/
    __call__ = index
|
jasonwzhy/django | refs/heads/master | tests/model_package/models/publication.py | 586 | from django.db import models
class Publication(models.Model):
    """Minimal publication model used by the model_package tests."""
    # Short human-readable title of the publication.
    title = models.CharField(max_length=30)
|
openhatch/new-mini-tasks | refs/heads/master | vendor/packages/Django/django/contrib/localflavor/jp/forms.py | 194 | """
JP-specific Form helpers
"""
from __future__ import absolute_import
from django.contrib.localflavor.jp.jp_prefectures import JP_PREFECTURES
from django.forms.fields import RegexField, Select
from django.utils.translation import ugettext_lazy as _
class JPPostalCodeField(RegexField):
    """
    A form field that validates its input is a Japanese postcode.

    Accepts 7 digits, with or without a hyphen.
    """
    default_error_messages = {
        'invalid': _('Enter a postal code in the format XXXXXXX or XXX-XXXX.'),
    }
    def __init__(self, max_length=None, min_length=None, *args, **kwargs):
        # Either the hyphenated XXX-XXXX form or a bare 7-digit string.
        super(JPPostalCodeField, self).__init__(r'^\d{3}-\d{4}$|^\d{7}$',
            max_length, min_length, *args, **kwargs)
    def clean(self, value):
        """
        Validates the input and returns a string that contains only numbers.
        Returns an empty string for empty values.
        """
        v = super(JPPostalCodeField, self).clean(value)
        # Normalize to the 7-digit form by stripping the optional hyphen.
        return v.replace('-', '')
class JPPrefectureSelect(Select):
    """
    A Select widget that uses a list of Japanese prefectures as its choices.
    """
    def __init__(self, attrs=None):
        # Choices come from the static JP_PREFECTURES list.
        super(JPPrefectureSelect, self).__init__(attrs, choices=JP_PREFECTURES)
|
i3visio/usufy | refs/heads/master | usufy/bin/_classgenerator.py | 1 | # -*- coding: cp1252 -*-
#
##################################################################################
#
# This file is part of usufy.py.
#
# Usufy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##################################################################################
import argparse
"""
Fields to read from the CSV file:
- Name of the file
- Name of the platform (e. g.: Boonex)
- Tags (e.g.: "social", "contact")
- Usufy URL (e.g.: "http://twitter.com/" + self.NICK_WILDCARD)
- notFoundText (e.g.: "<title>Platform</title>", "Error 404")
The current version of this program
# This file was automatically generated using _classgenerator.py v0.1.0 as part of usufy.py.
"""
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Automatic generator of classes', prog='_classgenerator.py', epilog="The file must contain the following information:Name of the file;Name of the platform;Tags;Usufy URL;notFoundTextNote that the part after the ';' should be a python-like valid string.", add_help=False)
    parser._optionals.title = "Input options (one required)"
    # adding the option
    parser.add_argument('-f', '--file', metavar='<path_to_input_file_name>', action='store', type=argparse.FileType('r'), help='path to the file where the list of Classes is stored (one per line).', required=True)
    parser.add_argument('-t', '--template', metavar='<path_to_template_file_name>', default='./_template-v1.3.0.py', action='store', type=argparse.FileType('r'), help='path to the template file.', required=False)
    groupAbout = parser.add_argument_group('About arguments', 'Showing additional information about this program.')
    groupAbout.add_argument('-h', '--help', action='help', help='shows this help and exists.')
    groupAbout.add_argument('--version', action='version', version='%(prog)s 0.1.0', help='shows the version of the program and exists.')
    args = parser.parse_args()
    # Load the current config so new wrappers can be appended to it below.
    with open("../utils/config_usufy.py", "r") as tempF:
        config = tempF.read()
    # Reading the _template-vX.X.X.py file
    base = args.template.read()
    # Reading the platforms to be processed
    platforms = args.file.read().splitlines()
    for linea in platforms:
        # eliminating the comments
        # NOTE(review): a blank line in the input would raise IndexError here;
        # the file is assumed to contain no empty lines.
        if linea[0] != "#":
            # loading the blanck template
            aux = base
            # Processing
            print "Processing:\t" + linea
            # Expected format: filename;PlatformName;tags;url;notFoundText
            info = linea.split(";")
            # Creating the wrapper
            print "\tStep 1:\tCreating: " + info[0].lower() + ".py..."
            with open(info[0].lower() + ".py", "w") as oF:
                aux = aux.replace("<HERE_GOES_THE_PLATFORM_NAME>", info[1])
                aux = aux.replace("<HERE_GO_THE_TAGS>", info[2])
                aux = aux.replace("<HERE_GOES_THE_URL>", info[3])
                aux = aux.replace("<HERE_GO_THE_NOT_FOUND>", info[4])
                oF.write(aux)
            # Editing the config_usufy
            print "\tStep 2:\tAdding the recently generated wrapper to a new config_usufy.py..."
            importText = "from wrappers." + info[0] + " import " + info[1]
            config = config.replace("# <ADD_HERE_THE_NEW_IMPORTS>", importText + "\n# <ADD_HERE_THE_NEW_IMPORTS>")
            appendText = "listAll.append(" + info[1] +"())"
            config = config.replace("# <ADD_HERE_THE_NEW_PLATFORMS>", appendText + "\n\t# <ADD_HERE_THE_NEW_PLATFORMS>")
    print "Writing down the recently generated config_usufy.py..."
    with open("config_usufy.py", "w") as oF:
        oF.write(config)
|
sukima/TiCachedImages | refs/heads/master | plugins/ti.alloy/plugin.py | 1729 | import os, sys, subprocess, hashlib
import subprocess
def check_output(*popenargs, **kwargs):
    r"""Run a command and return its stdout as a byte string.

    Pure-python backport of Python 2.7's ``subprocess.check_output`` for
    older interpreters. Raises ``subprocess.CalledProcessError`` (with the
    captured output attached as ``.output``) when the command exits with a
    non-zero status.

    >>> check_output(['/usr/bin/python', '--version'])
    Python 2.6.2
    """
    proc = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
    captured, _unused_err = proc.communicate()
    exit_code = proc.poll()
    if exit_code:
        cmd = kwargs.get("args")
        if cmd is None:
            cmd = popenargs[0]
        err = subprocess.CalledProcessError(exit_code, cmd)
        err.output = captured
        raise err
    return captured
def compile(config):
    """Titanium build plugin hook: run the Alloy compiler for the project.

    Skips compilation when build/.alloynewcli exists (the new Alloy CLI has
    already compiled); otherwise locates the ``alloy`` and ``node`` binaries
    (env var, PATH, then well-known install locations) and invokes
    ``alloy compile`` with platform-specific settings from *config*.
    """
    paths = {}
    binaries = ["alloy","node"]
    dotAlloy = os.path.abspath(os.path.join(config['project_dir'], 'build', '.alloynewcli'))
    if os.path.exists(dotAlloy):
        print "[DEBUG] build/.alloynewcli file found, skipping plugin..."
        os.remove(dotAlloy)
    else:
        for binary in binaries:
            try:
                # see if the environment variable is defined
                paths[binary] = os.environ["ALLOY_" + ("NODE_" if binary == "node" else "") + "PATH"]
            except KeyError as ex:
                # next try PATH, and then our guess paths
                if sys.platform == "darwin" or sys.platform.startswith('linux'):
                    userPath = os.environ["HOME"]
                    guessPaths = [
                        "/usr/local/bin/"+binary,
                        "/opt/local/bin/"+binary,
                        userPath+"/local/bin/"+binary,
                        "/opt/bin/"+binary,
                        "/usr/bin/"+binary,
                        "/usr/local/share/npm/bin/"+binary
                    ]
                    try:
                        binaryPath = check_output(["which",binary], stderr=subprocess.STDOUT).strip()
                        print "[DEBUG] %s installed at '%s'" % (binary,binaryPath)
                    except:
                        print "[WARN] Couldn't find %s on your PATH:" % binary
                        print "[WARN] %s" % os.environ["PATH"]
                        print "[WARN]"
                        print "[WARN] Checking for %s in a few default locations:" % binary
                        for p in guessPaths:
                            sys.stdout.write("[WARN] %s -> " % p)
                            if os.path.exists(p):
                                binaryPath = p
                                print "FOUND"
                                break
                            else:
                                print "not found"
                                binaryPath = None
                    if binaryPath is None:
                        print "[ERROR] Couldn't find %s" % binary
                        sys.exit(1)
                    else:
                        paths[binary] = binaryPath
                # no guesses on windows, just use the PATH
                elif sys.platform == "win32":
                    paths["alloy"] = "alloy.cmd"
        f = os.path.abspath(os.path.join(config['project_dir'], 'app'))
        if os.path.exists(f):
            print "[INFO] alloy app found at %s" % f
            rd = os.path.abspath(os.path.join(config['project_dir'], 'Resources'))
            # Defaults; overridden per-platform below.
            devicefamily = 'none'
            simtype = 'none'
            version = '0'
            deploytype = 'development'
            if config['platform']==u'ios':
                version = config['iphone_version']
                devicefamily = config['devicefamily']
                deploytype = config['deploytype']
            if config['platform']==u'android':
                builder = config['android_builder']
                version = builder.tool_api_level
                deploytype = config['deploy_type']
            if config['platform']==u'mobileweb':
                builder = config['mobileweb_builder']
                deploytype = config['deploytype']
            cfg = "platform=%s,version=%s,simtype=%s,devicefamily=%s,deploytype=%s," % (config['platform'],version,simtype,devicefamily,deploytype)
            # On windows alloy.cmd is directly executable; elsewhere run it via node.
            if sys.platform == "win32":
                cmd = [paths["alloy"], "compile", f, "--no-colors", "--config", cfg]
            else:
                cmd = [paths["node"], paths["alloy"], "compile", f, "--no-colors", "--config", cfg]
            print "[INFO] Executing Alloy compile:"
            print "[INFO] %s" % " ".join(cmd)
            try:
                print check_output(cmd, stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError as ex:
                if hasattr(ex, 'output'):
                    print ex.output
                print "[ERROR] Alloy compile failed"
                retcode = 1
                if hasattr(ex, 'returncode'):
                    retcode = ex.returncode
                sys.exit(retcode)
            except EnvironmentError as ex:
                print "[ERROR] Unexpected error with Alloy compiler plugin: %s" % ex.strerror
                sys.exit(2)
|
ychen820/microblog | refs/heads/master | y/google-cloud-sdk/platform/google_appengine/lib/django-1.3/django/contrib/databrowse/sites.py | 329 | from django import http
from django.db import models
from django.contrib.databrowse.datastructures import EasyModel
from django.shortcuts import render_to_response
from django.utils.safestring import mark_safe
class AlreadyRegistered(Exception):
    """Raised when a model is registered with databrowse a second time."""


class NotRegistered(Exception):
    """Raised when unregistering a model that was never registered."""
class DatabrowsePlugin(object):
    """Base class for databrowse plugins.

    Subclasses override the hook methods below to contribute URLs, index
    snippets and model-specific views.
    """

    def urls(self, plugin_name, easy_instance_field):
        """Return a list of absolute URLs for this plugin's views of the
        given EasyInstanceField object, or None when the field gets no
        plugin-specific URLs."""
        return None

    def model_index_html(self, request, model, site):
        """Return a snippet of HTML to include on the model index page."""
        return ''

    def model_view(self, request, model_databrowse, url):
        """Handle main URL routing for a plugin's model-specific pages.

        Subclasses must implement this.
        """
        raise NotImplementedError
class ModelDatabrowse(object):
    """Per-model databrowse controller that dispatches URLs to plugins."""

    plugins = {}

    def __init__(self, model, site):
        self.model = model
        self.site = site

    def root(self, request, url):
        """
        Handles main URL routing for the databrowse app.

        `url` is the remainder of the URL -- e.g. 'objects/3'.
        """
        if url is None:
            return self.main_view(request)
        # Peel off the plugin name; everything after the first slash is
        # passed to the plugin untouched (None when there is no slash).
        if '/' in url:
            plugin_name, rest_of_url = url.split('/', 1)
        else:
            plugin_name, rest_of_url = url, None
        plugin = self.plugins.get(plugin_name)
        if plugin is None:
            raise http.Http404('A plugin with the requested name does not exist.')
        return plugin.model_view(request, self, rest_of_url)

    def main_view(self, request):
        """Render the model detail page, including each plugin's snippet."""
        easy_model = EasyModel(self.site, self.model)
        snippets = [p.model_index_html(request, self.model, self.site)
                    for p in self.plugins.values()]
        html_snippets = mark_safe(u'\n'.join(snippets))
        return render_to_response('databrowse/model_detail.html', {
            'model': easy_model,
            'root_url': self.site.root_url,
            'plugin_html': html_snippets,
        })
class DatabrowseSite(object):
    """Site-wide registry mapping model classes to ModelDatabrowse handlers."""

    def __init__(self):
        self.registry = {}  # model_class -> databrowse_class
        self.root_url = None

    def register(self, model_or_iterable, databrowse_class=None, **options):
        """
        Registers the given model(s) with the given databrowse site.

        The model(s) should be Model classes, not instances.

        If a databrowse class isn't given, it will use DefaultModelDatabrowse
        (the default databrowse options).

        If a model is already registered, this will raise AlreadyRegistered.
        """
        databrowse_class = databrowse_class or DefaultModelDatabrowse
        # issubclass() raises TypeError when handed a non-class, so guard
        # with isinstance(); this lets callers pass a list of models.
        if isinstance(model_or_iterable, type) and issubclass(model_or_iterable, models.Model):
            model_or_iterable = [model_or_iterable]
        for model in model_or_iterable:
            if model in self.registry:
                raise AlreadyRegistered('The model %s is already registered' % model.__name__)
            self.registry[model] = databrowse_class

    def unregister(self, model_or_iterable):
        """
        Unregisters the given model(s).

        If a model isn't already registered, this will raise NotRegistered.
        """
        # Same non-class guard as register(): a bare issubclass() call on a
        # list of models would raise TypeError.
        if isinstance(model_or_iterable, type) and issubclass(model_or_iterable, models.Model):
            model_or_iterable = [model_or_iterable]
        for model in model_or_iterable:
            if model not in self.registry:
                raise NotRegistered('The model %s is not registered' % model.__name__)
            del self.registry[model]

    def root(self, request, url):
        """
        Handles main URL routing for the databrowse app.

        `url` is the remainder of the URL -- e.g. 'comments/comment/'.
        """
        # Remember the site prefix so views can build absolute URLs.
        self.root_url = request.path[:len(request.path) - len(url)]
        url = url.rstrip('/')  # Trim trailing slash, if it exists.
        if url == '':
            return self.index(request)
        elif '/' in url:
            return self.model_page(request, *url.split('/', 2))
        raise http.Http404('The requested databrowse page does not exist.')

    def index(self, request):
        """Render the homepage listing every registered model."""
        m_list = [EasyModel(self, m) for m in self.registry.keys()]
        return render_to_response('databrowse/homepage.html', {'model_list': m_list, 'root_url': self.root_url})

    def model_page(self, request, app_label, model_name, rest_of_url=None):
        """
        Handles the model-specific functionality of the databrowse site,
        delegating to the appropriate ModelDatabrowse class.
        """
        model = models.get_model(app_label, model_name)
        if model is None:
            raise http.Http404("App %r, model %r, not found." % (app_label, model_name))
        try:
            databrowse_class = self.registry[model]
        except KeyError:
            raise http.Http404("This model exists but has not been registered with databrowse.")
        return databrowse_class(model, self).root(request, rest_of_url)
# Global databrowse site instance referenced by URLconfs.
site = DatabrowseSite()

# NOTE(review): these plugin imports sit below the site definition --
# presumably to avoid a circular import; confirm before reordering.
from django.contrib.databrowse.plugins.calendars import CalendarPlugin
from django.contrib.databrowse.plugins.objects import ObjectDetailPlugin
from django.contrib.databrowse.plugins.fieldchoices import FieldChoicePlugin


class DefaultModelDatabrowse(ModelDatabrowse):
    # Default plugin set, keyed by the URL segment that routes to each one.
    plugins = {'objects': ObjectDetailPlugin(), 'calendars': CalendarPlugin(), 'fields': FieldChoicePlugin()}
|
ares/robottelo | refs/heads/master | tests/foreman/cli/test_puppetclass.py | 3 | # -*- encoding: utf-8 -*-
"""Test class for Puppet Classes CLI
:Requirement: Puppetclass
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: CLI
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
from robottelo.cli.environment import Environment
from robottelo.cli.factory import (
make_org,
make_smart_variable,
publish_puppet_module)
from robottelo.cli.puppet import Puppet
from robottelo.constants import CUSTOM_PUPPET_REPO
from robottelo.decorators import (
tier2,
run_only_on
)
from robottelo.test import CLITestCase
class PuppetClassTestCase(CLITestCase):
    """Implements puppet class tests in CLI."""

    @classmethod
    def setUpClass(cls):
        """Import a parametrized puppet class.

        Publishes the robottelo 'generic_1' puppet module into a fresh
        organization, then records the resulting environment and puppet
        class info for use by the tests below.
        """
        super(PuppetClassTestCase, cls).setUpClass()
        cls.puppet_modules = [
            {'author': 'robottelo', 'name': 'generic_1'},
        ]
        cls.org = make_org()
        cv = publish_puppet_module(
            cls.puppet_modules, CUSTOM_PUPPET_REPO, cls.org['id'])
        # The environment is looked up via the content view the module was
        # published into.
        cls.env = Environment.list({
            'search': u'content_view="{0}"'.format(cv['name'])
        })[0]
        cls.puppet = Puppet.info({
            'name': cls.puppet_modules[0]['name'],
            'environment': cls.env['name'],
        })

    @run_only_on('sat')
    @tier2
    def test_positive_list_smart_class_parameters(self):
        """List smart class parameters associated with the puppet class.

        :id: 56b370c2-8fc6-49be-9676-242178cc709a

        :expectedresults: Smart class parameters listed for the class.
        """
        class_sc_parameters = Puppet.sc_params({
            u'puppet-class': self.puppet['name']})
        self.assertGreater(len(class_sc_parameters), 0)

    @run_only_on('sat')
    @tier2
    def test_positive_list_smart_variables(self):
        """List smart variables associated with the puppet class.

        :id: cb2b41c0-29cc-4c0b-a7c8-38403d6dda5b

        :expectedresults: Smart variables listed for the class.
        """
        # A smart variable must be created first so the listing is
        # non-empty.
        make_smart_variable({'puppet-class': self.puppet['name']})
        class_smart_variables = Puppet.smart_variables({
            u'puppet-class': self.puppet['name']})
        self.assertGreater(len(class_smart_variables), 0)
|
robhudson/django | refs/heads/master | tests/template_tests/filter_tests/test_upper.py | 388 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.template.defaultfilters import upper
from django.test import SimpleTestCase
from django.utils.safestring import mark_safe
from ..utils import setup
class UpperTests(SimpleTestCase):
    """
    The "upper" filter messes up entities (which are case-sensitive),
    so it's not safe for non-escaping purposes.
    """

    @setup({'upper01': '{% autoescape off %}{{ a|upper }} {{ b|upper }}{% endautoescape %}'})
    def test_upper01(self):
        # With autoescaping off, both values render raw; upper() also
        # uppercases the letters inside the '&amp;' entity, breaking it.
        output = self.engine.render_to_string('upper01', {'a': 'a & b', 'b': mark_safe('a &amp; b')})
        self.assertEqual(output, 'A & B A &AMP; B')

    @setup({'upper02': '{{ a|upper }} {{ b|upper }}'})
    def test_upper02(self):
        # With autoescaping on, the unsafe value's '&' is escaped to
        # '&amp;', while the safe value keeps its (now broken) entity.
        output = self.engine.render_to_string('upper02', {'a': 'a & b', 'b': mark_safe('a &amp; b')})
        self.assertEqual(output, 'A &amp; B A &AMP; B')
class FunctionTests(SimpleTestCase):
    """Unit tests calling the upper() filter function directly."""

    def test_upper(self):
        self.assertEqual(upper('Mixed case input'), 'MIXED CASE INPUT')

    def test_unicode(self):
        # lowercase e umlaut
        self.assertEqual(upper('\xeb'), '\xcb')

    def test_non_string_input(self):
        # Non-string values are coerced to text before uppercasing.
        self.assertEqual(upper(123), '123')
|
habanero-rice/hcpp | refs/heads/master | test/performance-regression/full-apps/qmcpack/nexus/library/auxiliary.py | 1942 | |
ybellavance/python-for-android | refs/heads/master | python3-alpha/python3-src/Lib/test/test_fileio.py | 49 | # Adapted from test_file.py by Daniel Stutzbach
import sys
import os
import errno
import unittest
from array import array
from weakref import proxy
from functools import wraps
from test.support import TESTFN, check_warnings, run_unittest, make_bad_fd
from _io import FileIO as _FileIO
class AutoFileTests(unittest.TestCase):
# file tests for which a test file is automatically set up
def setUp(self):
self.f = _FileIO(TESTFN, 'w')
def tearDown(self):
if self.f:
self.f.close()
os.remove(TESTFN)
def testWeakRefs(self):
# verify weak references
p = proxy(self.f)
p.write(bytes(range(10)))
self.assertEqual(self.f.tell(), p.tell())
self.f.close()
self.f = None
self.assertRaises(ReferenceError, getattr, p, 'tell')
def testSeekTell(self):
self.f.write(bytes(range(20)))
self.assertEqual(self.f.tell(), 20)
self.f.seek(0)
self.assertEqual(self.f.tell(), 0)
self.f.seek(10)
self.assertEqual(self.f.tell(), 10)
self.f.seek(5, 1)
self.assertEqual(self.f.tell(), 15)
self.f.seek(-5, 1)
self.assertEqual(self.f.tell(), 10)
self.f.seek(-5, 2)
self.assertEqual(self.f.tell(), 15)
def testAttributes(self):
# verify expected attributes exist
f = self.f
self.assertEqual(f.mode, "wb")
self.assertEqual(f.closed, False)
# verify the attributes are readonly
for attr in 'mode', 'closed':
self.assertRaises((AttributeError, TypeError),
setattr, f, attr, 'oops')
def testReadinto(self):
# verify readinto
self.f.write(bytes([1, 2]))
self.f.close()
a = array('b', b'x'*10)
self.f = _FileIO(TESTFN, 'r')
n = self.f.readinto(a)
self.assertEqual(array('b', [1, 2]), a[:n])
def test_none_args(self):
self.f.write(b"hi\nbye\nabc")
self.f.close()
self.f = _FileIO(TESTFN, 'r')
self.assertEqual(self.f.read(None), b"hi\nbye\nabc")
self.f.seek(0)
self.assertEqual(self.f.readline(None), b"hi\n")
self.assertEqual(self.f.readlines(None), [b"bye\n", b"abc"])
def test_reject(self):
self.assertRaises(TypeError, self.f.write, "Hello!")
def testRepr(self):
self.assertEqual(repr(self.f), "<_io.FileIO name=%r mode=%r>"
% (self.f.name, self.f.mode))
del self.f.name
self.assertEqual(repr(self.f), "<_io.FileIO fd=%r mode=%r>"
% (self.f.fileno(), self.f.mode))
self.f.close()
self.assertEqual(repr(self.f), "<_io.FileIO [closed]>")
def testErrors(self):
f = self.f
self.assertTrue(not f.isatty())
self.assertTrue(not f.closed)
#self.assertEqual(f.name, TESTFN)
self.assertRaises(ValueError, f.read, 10) # Open for reading
f.close()
self.assertTrue(f.closed)
f = _FileIO(TESTFN, 'r')
self.assertRaises(TypeError, f.readinto, "")
self.assertTrue(not f.closed)
f.close()
self.assertTrue(f.closed)
def testMethods(self):
methods = ['fileno', 'isatty', 'read', 'readinto',
'seek', 'tell', 'truncate', 'write', 'seekable',
'readable', 'writable']
self.f.close()
self.assertTrue(self.f.closed)
for methodname in methods:
method = getattr(self.f, methodname)
# should raise on closed file
self.assertRaises(ValueError, method)
def testOpendir(self):
# Issue 3703: opening a directory should fill the errno
# Windows always returns "[Errno 13]: Permission denied
# Unix calls dircheck() and returns "[Errno 21]: Is a directory"
try:
_FileIO('.', 'r')
except IOError as e:
self.assertNotEqual(e.errno, 0)
self.assertEqual(e.filename, ".")
else:
self.fail("Should have raised IOError")
#A set of functions testing that we get expected behaviour if someone has
#manually closed the internal file descriptor. First, a decorator:
def ClosedFD(func):
@wraps(func)
def wrapper(self):
#forcibly close the fd before invoking the problem function
f = self.f
os.close(f.fileno())
try:
func(self, f)
finally:
try:
self.f.close()
except IOError:
pass
return wrapper
def ClosedFDRaises(func):
@wraps(func)
def wrapper(self):
#forcibly close the fd before invoking the problem function
f = self.f
os.close(f.fileno())
try:
func(self, f)
except IOError as e:
self.assertEqual(e.errno, errno.EBADF)
else:
self.fail("Should have raised IOError")
finally:
try:
self.f.close()
except IOError:
pass
return wrapper
@ClosedFDRaises
def testErrnoOnClose(self, f):
f.close()
@ClosedFDRaises
def testErrnoOnClosedWrite(self, f):
f.write(b'a')
@ClosedFDRaises
def testErrnoOnClosedSeek(self, f):
f.seek(0)
@ClosedFDRaises
def testErrnoOnClosedTell(self, f):
f.tell()
@ClosedFDRaises
def testErrnoOnClosedTruncate(self, f):
f.truncate(0)
@ClosedFD
def testErrnoOnClosedSeekable(self, f):
f.seekable()
@ClosedFD
def testErrnoOnClosedReadable(self, f):
f.readable()
@ClosedFD
def testErrnoOnClosedWritable(self, f):
f.writable()
@ClosedFD
def testErrnoOnClosedFileno(self, f):
f.fileno()
@ClosedFD
def testErrnoOnClosedIsatty(self, f):
self.assertEqual(f.isatty(), False)
def ReopenForRead(self):
try:
self.f.close()
except IOError:
pass
self.f = _FileIO(TESTFN, 'r')
os.close(self.f.fileno())
return self.f
@ClosedFDRaises
def testErrnoOnClosedRead(self, f):
f = self.ReopenForRead()
f.read(1)
@ClosedFDRaises
def testErrnoOnClosedReadall(self, f):
f = self.ReopenForRead()
f.readall()
@ClosedFDRaises
def testErrnoOnClosedReadinto(self, f):
f = self.ReopenForRead()
a = array('b', b'x'*10)
f.readinto(a)
class OtherFileTests(unittest.TestCase):
def testAbles(self):
try:
f = _FileIO(TESTFN, "w")
self.assertEqual(f.readable(), False)
self.assertEqual(f.writable(), True)
self.assertEqual(f.seekable(), True)
f.close()
f = _FileIO(TESTFN, "r")
self.assertEqual(f.readable(), True)
self.assertEqual(f.writable(), False)
self.assertEqual(f.seekable(), True)
f.close()
f = _FileIO(TESTFN, "a+")
self.assertEqual(f.readable(), True)
self.assertEqual(f.writable(), True)
self.assertEqual(f.seekable(), True)
self.assertEqual(f.isatty(), False)
f.close()
if sys.platform != "win32":
try:
f = _FileIO("/dev/tty", "a")
except EnvironmentError:
# When run in a cron job there just aren't any
# ttys, so skip the test. This also handles other
# OS'es that don't support /dev/tty.
pass
else:
self.assertEqual(f.readable(), False)
self.assertEqual(f.writable(), True)
if sys.platform != "darwin" and \
'bsd' not in sys.platform and \
not sys.platform.startswith('sunos'):
# Somehow /dev/tty appears seekable on some BSDs
self.assertEqual(f.seekable(), False)
self.assertEqual(f.isatty(), True)
f.close()
finally:
os.unlink(TESTFN)
def testModeStrings(self):
# check invalid mode strings
for mode in ("", "aU", "wU+", "rw", "rt"):
try:
f = _FileIO(TESTFN, mode)
except ValueError:
pass
else:
f.close()
self.fail('%r is an invalid file mode' % mode)
def testUnicodeOpen(self):
# verify repr works for unicode too
f = _FileIO(str(TESTFN), "w")
f.close()
os.unlink(TESTFN)
def testBytesOpen(self):
# Opening a bytes filename
try:
fn = TESTFN.encode("ascii")
except UnicodeEncodeError:
# Skip test
return
f = _FileIO(fn, "w")
try:
f.write(b"abc")
f.close()
with open(TESTFN, "rb") as f:
self.assertEqual(f.read(), b"abc")
finally:
os.unlink(TESTFN)
def testInvalidFd(self):
self.assertRaises(ValueError, _FileIO, -10)
self.assertRaises(OSError, _FileIO, make_bad_fd())
if sys.platform == 'win32':
import msvcrt
self.assertRaises(IOError, msvcrt.get_osfhandle, make_bad_fd())
def testBadModeArgument(self):
# verify that we get a sensible error message for bad mode argument
bad_mode = "qwerty"
try:
f = _FileIO(TESTFN, bad_mode)
except ValueError as msg:
if msg.args[0] != 0:
s = str(msg)
if TESTFN in s or bad_mode not in s:
self.fail("bad error message for invalid mode: %s" % s)
# if msg.args[0] == 0, we're probably on Windows where there may be
# no obvious way to discover why open() failed.
else:
f.close()
self.fail("no error for invalid mode: %s" % bad_mode)
def testTruncate(self):
f = _FileIO(TESTFN, 'w')
f.write(bytes(bytearray(range(10))))
self.assertEqual(f.tell(), 10)
f.truncate(5)
self.assertEqual(f.tell(), 10)
self.assertEqual(f.seek(0, os.SEEK_END), 5)
f.truncate(15)
self.assertEqual(f.tell(), 5)
self.assertEqual(f.seek(0, os.SEEK_END), 15)
f.close()
def testTruncateOnWindows(self):
def bug801631():
# SF bug <http://www.python.org/sf/801631>
# "file.truncate fault on windows"
f = _FileIO(TESTFN, 'w')
f.write(bytes(range(11)))
f.close()
f = _FileIO(TESTFN,'r+')
data = f.read(5)
if data != bytes(range(5)):
self.fail("Read on file opened for update failed %r" % data)
if f.tell() != 5:
self.fail("File pos after read wrong %d" % f.tell())
f.truncate()
if f.tell() != 5:
self.fail("File pos after ftruncate wrong %d" % f.tell())
f.close()
size = os.path.getsize(TESTFN)
if size != 5:
self.fail("File size after ftruncate wrong %d" % size)
try:
bug801631()
finally:
os.unlink(TESTFN)
def testAppend(self):
try:
f = open(TESTFN, 'wb')
f.write(b'spam')
f.close()
f = open(TESTFN, 'ab')
f.write(b'eggs')
f.close()
f = open(TESTFN, 'rb')
d = f.read()
f.close()
self.assertEqual(d, b'spameggs')
finally:
try:
os.unlink(TESTFN)
except:
pass
def testInvalidInit(self):
self.assertRaises(TypeError, _FileIO, "1", 0, 0)
def testWarnings(self):
with check_warnings(quiet=True) as w:
self.assertEqual(w.warnings, [])
self.assertRaises(TypeError, _FileIO, [])
self.assertEqual(w.warnings, [])
self.assertRaises(ValueError, _FileIO, "/some/invalid/name", "rt")
self.assertEqual(w.warnings, [])
def test_main():
    """Run both file-test suites and unconditionally remove TESTFN.

    Historically these tests have been sloppy about cleaning up TESTFN,
    so it is removed here no matter how the run ends.
    """
    try:
        run_unittest(AutoFileTests, OtherFileTests)
    finally:
        if os.path.exists(TESTFN):
            os.unlink(TESTFN)


if __name__ == '__main__':
    test_main()
|
Skruf90/FriendlyTorrent | refs/heads/master | src/tornado/BitTornado/BT1/Storage.py | 15 | # Written by Bram Cohen
# see LICENSE.txt for license information
from BitTornado.piecebuffer import BufferPool
from threading import Lock
from time import time, strftime, localtime
import os
from os.path import exists, getsize, getmtime, basename
from traceback import print_exc
try:
from os import fsync
except ImportError:
fsync = lambda x: None
from bisect import bisect
try:
True
except:
True = 1
False = 0
DEBUG = False
MAXREADSIZE = 32768
MAXLOCKSIZE = 1000000000L
MAXLOCKRANGE = 3999999999L # only lock first 4 gig of file
_pool = BufferPool()
PieceBuffer = _pool.new
def dummy_status(fractionDone=None, activity=None):
    """No-op status callback used when progress reporting is unwanted."""
    return None
class Storage:
def __init__(self, files, piece_length, doneflag, config,
disabled_files = None):
# can raise IOError and ValueError
self.files = files
self.piece_length = piece_length
self.doneflag = doneflag
self.disabled = [False] * len(files)
self.file_ranges = []
self.disabled_ranges = []
self.working_ranges = []
numfiles = 0
total = 0l
so_far = 0l
self.handles = {}
self.whandles = {}
self.tops = {}
self.sizes = {}
self.mtimes = {}
if config.get('lock_files', True):
self.lock_file, self.unlock_file = self._lock_file, self._unlock_file
else:
self.lock_file, self.unlock_file = lambda x1,x2: None, lambda x1,x2: None
self.lock_while_reading = config.get('lock_while_reading', False)
self.lock = Lock()
if not disabled_files:
disabled_files = [False] * len(files)
for i in xrange(len(files)):
file, length = files[i]
if doneflag.isSet(): # bail out if doneflag is set
return
self.disabled_ranges.append(None)
if length == 0:
self.file_ranges.append(None)
self.working_ranges.append([])
else:
range = (total, total + length, 0, file)
self.file_ranges.append(range)
self.working_ranges.append([range])
numfiles += 1
total += length
if disabled_files[i]:
l = 0
else:
if exists(file):
l = getsize(file)
if l > length:
h = open(file, 'rb+')
h.truncate(length)
h.flush()
h.close()
l = length
else:
l = 0
h = open(file, 'wb+')
h.flush()
h.close()
self.mtimes[file] = getmtime(file)
self.tops[file] = l
self.sizes[file] = length
so_far += l
self.total_length = total
self._reset_ranges()
self.max_files_open = config['max_files_open']
if self.max_files_open > 0 and numfiles > self.max_files_open:
self.handlebuffer = []
else:
self.handlebuffer = None
if os.name == 'nt':
def _lock_file(self, name, f):
import msvcrt
for p in range(0, min(self.sizes[name],MAXLOCKRANGE), MAXLOCKSIZE):
f.seek(p)
msvcrt.locking(f.fileno(), msvcrt.LK_LOCK,
min(MAXLOCKSIZE,self.sizes[name]-p))
def _unlock_file(self, name, f):
import msvcrt
for p in range(0, min(self.sizes[name],MAXLOCKRANGE), MAXLOCKSIZE):
f.seek(p)
msvcrt.locking(f.fileno(), msvcrt.LK_UNLCK,
min(MAXLOCKSIZE,self.sizes[name]-p))
elif os.name == 'posix':
def _lock_file(self, name, f):
import fcntl
fcntl.flock(f.fileno(), fcntl.LOCK_EX)
def _unlock_file(self, name, f):
import fcntl
fcntl.flock(f.fileno(), fcntl.LOCK_UN)
else:
def _lock_file(self, name, f):
pass
def _unlock_file(self, name, f):
pass
def was_preallocated(self, pos, length):
for file, begin, end in self._intervals(pos, length):
if self.tops.get(file, 0) < end:
return False
return True
def _sync(self, file):
self._close(file)
if self.handlebuffer:
self.handlebuffer.remove(file)
def sync(self):
# may raise IOError or OSError
for file in self.whandles.keys():
self._sync(file)
def set_readonly(self, f=None):
if f is None:
self.sync()
return
file = self.files[f][0]
if self.whandles.has_key(file):
self._sync(file)
def get_total_length(self):
return self.total_length
def _open(self, file, mode):
if self.mtimes.has_key(file):
try:
if self.handlebuffer is not None:
assert getsize(file) == self.tops[file]
newmtime = getmtime(file)
oldmtime = self.mtimes[file]
assert newmtime <= oldmtime+1
assert newmtime >= oldmtime-1
except:
if DEBUG:
print ( file+' modified: '
+strftime('(%x %X)',localtime(self.mtimes[file]))
+strftime(' != (%x %X) ?',localtime(getmtime(file))) )
raise IOError('modified during download')
try:
return open(file, mode)
except:
if DEBUG:
print_exc()
raise
def _close(self, file):
f = self.handles[file]
del self.handles[file]
if self.whandles.has_key(file):
del self.whandles[file]
f.flush()
self.unlock_file(file, f)
f.close()
self.tops[file] = getsize(file)
self.mtimes[file] = getmtime(file)
else:
if self.lock_while_reading:
self.unlock_file(file, f)
f.close()
def _close_file(self, file):
if not self.handles.has_key(file):
return
self._close(file)
if self.handlebuffer:
self.handlebuffer.remove(file)
def _get_file_handle(self, file, for_write):
if self.handles.has_key(file):
if for_write and not self.whandles.has_key(file):
self._close(file)
try:
f = self._open(file, 'rb+')
self.handles[file] = f
self.whandles[file] = 1
self.lock_file(file, f)
except (IOError, OSError), e:
if DEBUG:
print_exc()
raise IOError('unable to reopen '+file+': '+str(e))
if self.handlebuffer:
if self.handlebuffer[-1] != file:
self.handlebuffer.remove(file)
self.handlebuffer.append(file)
elif self.handlebuffer is not None:
self.handlebuffer.append(file)
else:
try:
if for_write:
f = self._open(file, 'rb+')
self.handles[file] = f
self.whandles[file] = 1
self.lock_file(file, f)
else:
f = self._open(file, 'rb')
self.handles[file] = f
if self.lock_while_reading:
self.lock_file(file, f)
except (IOError, OSError), e:
if DEBUG:
print_exc()
raise IOError('unable to open '+file+': '+str(e))
if self.handlebuffer is not None:
self.handlebuffer.append(file)
if len(self.handlebuffer) > self.max_files_open:
self._close(self.handlebuffer.pop(0))
return self.handles[file]
def _reset_ranges(self):
self.ranges = []
for l in self.working_ranges:
self.ranges.extend(l)
self.begins = [i[0] for i in self.ranges]
def _intervals(self, pos, amount):
r = []
stop = pos + amount
p = bisect(self.begins, pos) - 1
while p < len(self.ranges):
begin, end, offset, file = self.ranges[p]
if begin >= stop:
break
r.append(( file,
offset + max(pos, begin) - begin,
offset + min(end, stop) - begin ))
p += 1
return r
def read(self, pos, amount, flush_first = False):
r = PieceBuffer()
for file, pos, end in self._intervals(pos, amount):
if DEBUG:
print 'reading '+file+' from '+str(pos)+' to '+str(end)
self.lock.acquire()
h = self._get_file_handle(file, False)
if flush_first and self.whandles.has_key(file):
h.flush()
fsync(h)
h.seek(pos)
while pos < end:
length = min(end-pos, MAXREADSIZE)
data = h.read(length)
if len(data) != length:
raise IOError('error reading data from '+file)
r.append(data)
pos += length
self.lock.release()
return r
def write(self, pos, s):
# might raise an IOError
total = 0
for file, begin, end in self._intervals(pos, len(s)):
if DEBUG:
print 'writing '+file+' from '+str(pos)+' to '+str(end)
self.lock.acquire()
h = self._get_file_handle(file, True)
h.seek(begin)
h.write(s[total: total + end - begin])
self.lock.release()
total += end - begin
def top_off(self):
for begin, end, offset, file in self.ranges:
l = offset + end - begin
if l > self.tops.get(file, 0):
self.lock.acquire()
h = self._get_file_handle(file, True)
h.seek(l-1)
h.write(chr(0xFF))
self.lock.release()
def flush(self):
# may raise IOError or OSError
for file in self.whandles.keys():
self.lock.acquire()
self.handles[file].flush()
self.lock.release()
def close(self):
for file, f in self.handles.items():
try:
self.unlock_file(file, f)
except:
pass
try:
f.close()
except:
pass
self.handles = {}
self.whandles = {}
self.handlebuffer = None
def _get_disabled_ranges(self, f):
if not self.file_ranges[f]:
return ((),(),())
r = self.disabled_ranges[f]
if r:
return r
start, end, offset, file = self.file_ranges[f]
if DEBUG:
print 'calculating disabled range for '+self.files[f][0]
print 'bytes: '+str(start)+'-'+str(end)
print 'file spans pieces '+str(int(start/self.piece_length))+'-'+str(int((end-1)/self.piece_length)+1)
pieces = range( int(start/self.piece_length),
int((end-1)/self.piece_length)+1 )
offset = 0
disabled_files = []
if len(pieces) == 1:
if ( start % self.piece_length == 0
and end % self.piece_length == 0 ): # happens to be a single,
# perfect piece
working_range = [(start, end, offset, file)]
update_pieces = []
else:
midfile = os.path.join(self.bufferdir,str(f))
working_range = [(start, end, 0, midfile)]
disabled_files.append((midfile, start, end))
length = end - start
self.sizes[midfile] = length
piece = pieces[0]
update_pieces = [(piece, start-(piece*self.piece_length), length)]
else:
update_pieces = []
if start % self.piece_length != 0: # doesn't begin on an even piece boundary
end_b = pieces[1]*self.piece_length
startfile = os.path.join(self.bufferdir,str(f)+'b')
working_range_b = [ ( start, end_b, 0, startfile ) ]
disabled_files.append((startfile, start, end_b))
length = end_b - start
self.sizes[startfile] = length
offset = length
piece = pieces.pop(0)
update_pieces.append((piece, start-(piece*self.piece_length), length))
else:
working_range_b = []
if f != len(self.files)-1 and end % self.piece_length != 0:
# doesn't end on an even piece boundary
start_e = pieces[-1] * self.piece_length
endfile = os.path.join(self.bufferdir,str(f)+'e')
working_range_e = [ ( start_e, end, 0, endfile ) ]
disabled_files.append((endfile, start_e, end))
length = end - start_e
self.sizes[endfile] = length
piece = pieces.pop(-1)
update_pieces.append((piece, 0, length))
else:
working_range_e = []
if pieces:
working_range_m = [ ( pieces[0]*self.piece_length,
(pieces[-1]+1)*self.piece_length,
offset, file ) ]
else:
working_range_m = []
working_range = working_range_b + working_range_m + working_range_e
if DEBUG:
print str(working_range)
print str(update_pieces)
r = (tuple(working_range), tuple(update_pieces), tuple(disabled_files))
self.disabled_ranges[f] = r
return r
def set_bufferdir(self, dir):
self.bufferdir = dir
def enable_file(self, f):
if not self.disabled[f]:
return
self.disabled[f] = False
r = self.file_ranges[f]
if not r:
return
file = r[3]
if not exists(file):
h = open(file, 'wb+')
h.flush()
h.close()
if not self.tops.has_key(file):
self.tops[file] = getsize(file)
if not self.mtimes.has_key(file):
self.mtimes[file] = getmtime(file)
self.working_ranges[f] = [r]
def disable_file(self, f):
if self.disabled[f]:
return
self.disabled[f] = True
r = self._get_disabled_ranges(f)
if not r:
return
for file, begin, end in r[2]:
if not os.path.isdir(self.bufferdir):
os.makedirs(self.bufferdir)
if not exists(file):
h = open(file, 'wb+')
h.flush()
h.close()
if not self.tops.has_key(file):
self.tops[file] = getsize(file)
if not self.mtimes.has_key(file):
self.mtimes[file] = getmtime(file)
self.working_ranges[f] = r[0]
reset_file_status = _reset_ranges
def get_piece_update_list(self, f):
return self._get_disabled_ranges(f)[1]
def delete_file(self, f):
try:
os.remove(self.files[f][0])
except:
pass
'''
Pickled data format:
d['files'] = [ file #, size, mtime {, file #, size, mtime...} ]
file # in torrent, and the size and last modification
time for those files. Missing files are either empty
or disabled.
d['partial files'] = [ name, size, mtime... ]
Names, sizes and last modification times of files containing
partial piece data. Filenames go by the following convention:
{file #, 0-based}{nothing, "b" or "e"}
eg: "0e" "3" "4b" "4e"
Where "b" specifies the partial data for the first piece in
the file, "e" the last piece, and no letter signifying that
the file is disabled but is smaller than one piece, and that
all the data is cached inside so adjacent files may be
verified.
'''
def pickle(self):
files = []
pfiles = []
for i in xrange(len(self.files)):
if not self.files[i][1]: # length == 0
continue
if self.disabled[i]:
for file, start, end in self._get_disabled_ranges(i)[2]:
pfiles.extend([basename(file),getsize(file),int(getmtime(file))])
continue
file = self.files[i][0]
files.extend([i,getsize(file),int(getmtime(file))])
return {'files': files, 'partial files': pfiles}
def unpickle(self, data):
# assume all previously-disabled files have already been disabled
try:
files = {}
pfiles = {}
l = data['files']
assert len(l) % 3 == 0
l = [l[x:x+3] for x in xrange(0,len(l),3)]
for f, size, mtime in l:
files[f] = (size, mtime)
l = data.get('partial files',[])
assert len(l) % 3 == 0
l = [l[x:x+3] for x in xrange(0,len(l),3)]
for file, size, mtime in l:
pfiles[file] = (size, mtime)
valid_pieces = {}
for i in xrange(len(self.files)):
if self.disabled[i]:
continue
r = self.file_ranges[i]
if not r:
continue
start, end, offset, file =r
if DEBUG:
print 'adding '+file
for p in xrange( int(start/self.piece_length),
int((end-1)/self.piece_length)+1 ):
valid_pieces[p] = 1
if DEBUG:
print valid_pieces.keys()
def test(old, size, mtime):
oldsize, oldmtime = old
if size != oldsize:
return False
if mtime > oldmtime+1:
return False
if mtime < oldmtime-1:
return False
return True
for i in xrange(len(self.files)):
if self.disabled[i]:
for file, start, end in self._get_disabled_ranges(i)[2]:
f1 = basename(file)
if ( not pfiles.has_key(f1)
or not test(pfiles[f1],getsize(file),getmtime(file)) ):
if DEBUG:
print 'removing '+file
for p in xrange( int(start/self.piece_length),
int((end-1)/self.piece_length)+1 ):
if valid_pieces.has_key(p):
del valid_pieces[p]
continue
file, size = self.files[i]
if not size:
continue
if ( not files.has_key(i)
or not test(files[i],getsize(file),getmtime(file)) ):
start, end, offset, file = self.file_ranges[i]
if DEBUG:
print 'removing '+file
for p in xrange( int(start/self.piece_length),
int((end-1)/self.piece_length)+1 ):
if valid_pieces.has_key(p):
del valid_pieces[p]
except:
if DEBUG:
print_exc()
return []
if DEBUG:
print valid_pieces.keys()
return valid_pieces.keys()
|
sudosurootdev/external_chromium_org | refs/heads/L5 | tools/telemetry/telemetry/value/failure.py | 40 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import traceback
from telemetry import value as value_module
class FailureValue(value_module.Value):
    """A telemetry value representing a failure while running a page."""

    def __init__(self, page, exc_info, description=None):
        """A value representing a failure when running the page.

        Args:
          page: The page where this failure occurs.
          exc_info: The exception info (sys.exc_info()) corresponding to
              this failure.
          description: Optional human-readable description of the failure.
        """
        # The value is named after the exception type; '' is the units and
        # True presumably marks the value important -- TODO confirm against
        # value_module.Value.__init__'s signature.
        exc_type = exc_info[0].__name__
        super(FailureValue, self).__init__(page, exc_type, '', True, description)
        self._exc_info = exc_info

    @classmethod
    def FromMessage(cls, page, message):
        """Creates a failure value for a given string message.

        Args:
          page: The page where this failure occurs.
          message: A string message describing the failure.
        """
        exc_info = cls._GetExcInfoFromMessage(message)
        return FailureValue(page, exc_info)

    @staticmethod
    def _GetExcInfoFromMessage(message):
        # Raise and immediately catch so a real traceback object exists for
        # the synthetic exception built from `message`.
        try:
            raise Exception(message)
        except Exception:
            return sys.exc_info()

    def __repr__(self):
        if self.page:
            page_name = self.page.url
        else:
            page_name = None
        return 'FailureValue(%s, %s)' % (
            page_name, GetStringFromExcInfo(self._exc_info))

    @property
    def exc_info(self):
        # The (type, value, traceback) triple captured at construction.
        return self._exc_info

    def GetBuildbotDataType(self, output_context):
        # Failures contribute no buildbot data point.
        return None

    def GetBuildbotValue(self):
        return None

    def GetChartAndTraceNameForPerPageResult(self):
        return None

    def GetRepresentativeNumber(self):
        return None

    def GetRepresentativeString(self):
        return None

    @staticmethod
    def GetJSONTypeName():
        return 'failure'

    def AsDict(self):
        # Serialize the base fields plus the formatted traceback string.
        d = super(FailureValue, self).AsDict()
        d['value'] = GetStringFromExcInfo(self.exc_info)
        return d

    @staticmethod
    def FromDict(value_dict, page_dict):
        # Rebuild constructor kwargs from the serialized form; name/units/
        # important are derived in __init__, so they must not pass through.
        kwargs = value_module.Value.GetConstructorKwArgs(value_dict, page_dict)
        del kwargs['name']
        del kwargs['units']
        important = kwargs.get('important', None)
        if important != None:
            del kwargs['important']
        kwargs['exc_info'] = FailureValue._GetExcInfoFromMessage(
            value_dict['value'])
        return FailureValue(**kwargs)

    @classmethod
    def MergeLikeValuesFromSamePage(cls, values):
        # Failures are never merged across runs.
        assert False, 'Should not be called.'

    @classmethod
    def MergeLikeValuesFromDifferentPages(cls, values,
                                          group_by_name_suffix=False):
        assert False, 'Should not be called.'
def GetStringFromExcInfo(exc_info):
  """Render a sys.exc_info() triple as the familiar traceback text."""
  formatted_lines = traceback.format_exception(*exc_info)
  return ''.join(formatted_lines)
|
KaranToor/MA450 | refs/heads/master | google-cloud-sdk/lib/googlecloudsdk/command_lib/deployment_manager/flags.py | 2 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper methods for configuring deployment manager command flags."""
from googlecloudsdk.api_lib.deployment_manager import dm_v2_util
from googlecloudsdk.calliope import arg_parsers
def AddDeploymentNameFlag(parser):
  """Add the deployment_name positional argument.

  (Docstring previously said "Add properties flag." -- a copy-paste error.)
  """
  parser.add_argument('deployment_name', help='Deployment name.')
def AddPropertiesFlag(parser):
  """Add the --properties flag (key=value map for direct template deploys)."""
  parser.add_argument(
      '--properties',
      # Typo fix in user-facing help: "seperated" -> "separated".
      help='A comma separated, key=value, map '
      'to be used when deploying a template file directly.',
      type=arg_parsers.ArgDict(operators=dm_v2_util.NewParserDict()),
      dest='properties')
def AddAsyncFlag(parser):
  """Add the --async boolean flag (defaults to False)."""
  async_help = ('Return immediately and print information about the Operation '
                'in progress rather than waiting for the Operation to '
                'complete. (default=False)')
  parser.add_argument(
      '--async',
      help=async_help,
      action='store_true',
      default=False,
      dest='async')
def AddDeletePolicyFlag(parser, request_class):
  """Add the --delete-policy argument.

  Choices are taken from the request class's DeletePolicyValueValuesEnum,
  sorted for a stable help display.
  """
  policy_choices = sorted(
      request_class.DeletePolicyValueValuesEnum.to_dict().keys())
  parser.add_argument(
      '--delete-policy',
      choices=policy_choices,
      default='DELETE',
      help=('Delete policy for resources that will change as part of an update '
            'or delete. DELETE deletes the resource while ABANDON just removes '
            'the resource reference from the deployment.'))
|
testvidya11/ejrf | refs/heads/master | questionnaire/features/questionnaire_steps.py | 1 | from time import sleep
from lettuce import step, world
from questionnaire.features.pages.questionnaires import QuestionnairePage
from questionnaire.models import Questionnaire, Section, SubSection, Question, QuestionGroup, QuestionGroupOrder, QuestionOption
@step(u'And I have a questionnaire with sections and subsections')
def and_i_have_a_questionnaire_with_sections_and_subsections(step):
    # Build a published questionnaire with two sections plus one subsection
    # and stash everything on lettuce's shared ``world`` for later steps.
    questionnaire = Questionnaire.objects.create(
        status=Questionnaire.PUBLISHED,
        name="JRF 2013 Core English",
        description="From dropbox as given by Rouslan")
    world.questionnaire = questionnaire
    world.section_1 = Section.objects.create(
        questionnaire=questionnaire,
        order=1,
        name="Reported Cases",
        title="Reported Cases of Selected Vaccine Preventable Diseases (VPDs)",
        description="some description")
    world.section_2 = Section.objects.create(
        questionnaire=questionnaire, order=2, name="Section2", title="Section 2")
    world.sub_section = SubSection.objects.create(
        section=world.section_1, order=1,
        title="Reported cases for the year 2013")
@step(u'And I have a question group and questions in that group')
def and_i_have_a_question_group_and_questions_in_that_group(step):
    # One MultiChoice question and two Number questions, all attached to a
    # single "Immunization" group that allows repeated answer rows.
    world.question1 = Question.objects.create(
        answer_type='MultiChoice', UID='C00001', text='Disease')
    world.question2 = Question.objects.create(
        answer_type='Number', UID='C00003',
        text='B. Number of cases tested',
        instructions="Enter the total number of cases for which")
    world.question3 = Question.objects.create(
        answer_type='Number', UID='C00004',
        text='C. Number of cases positive',
        instructions="Include only those cases the infectious agent.")
    group = QuestionGroup.objects.create(
        subsection=world.sub_section, order=1, name="Immunization",
        allow_multiples=1)
    world.question_group = group
    group.question.add(world.question1, world.question3, world.question2)
    QuestionOption.objects.create(question=world.question1, text='Option 2')
@step(u'And I set orders for the questions in the group')
def and_i_set_orders_for_the_questions_in_the_group(step):
    # Give the three group questions explicit display orders 1..3.
    QuestionGroupOrder.objects.create(question=world.question1, question_group=world.question_group, order=1)
    QuestionGroupOrder.objects.create(question=world.question2, question_group=world.question_group, order=2)
    QuestionGroupOrder.objects.create(question=world.question3, question_group=world.question_group, order=3)
@step(u'And I visit that questionnaires section page')
def and_i_visit_that_questionnaires_section_page(step):
    # Open the page object for section 1 in the shared browser and navigate to it.
    world.page = QuestionnairePage(world.browser, world.section_1)
    world.page.visit()
@step(u'Then I should see the section title and descriptions')
def then_i_should_see_the_section_title_and_descriptions(step):
    # NOTE(review): is_text_present presumably asserts internally -- confirm
    # in QuestionnairePage; otherwise this step can never fail.
    world.page.is_text_present(world.section_1.title, world.section_1.description)
@step(u'And I should see the questions')
def then_i_should_see_the_questions(step):
    # Check all three question texts appear on the rendered section page.
    # (Function name says "then" while the step text says "And" -- harmless,
    # lettuce matches on the step string.)
    world.page.is_text_present(world.question1.text,world.question2.text,world.question3.text)
@step(u'And I should see the answer fields')
def and_i_should_see_the_answer_fields(step):
    # Delegates answer-widget validation entirely to the page object.
    world.page.validate_fields()
@step(u'And I should see the instructions')
def and_i_should_see_the_instructions(step):
    # question2 is the one created with an ``instructions`` value.
    world.page.validate_instructions(world.question2)
@step(u'And i have a subgroup with questions in that group')
def and_i_have_a_subgroup_with_questions_in_that_group(step):
    # Child group of world.question_group holding a single MultiChoice question.
    world.question_1a = Question.objects.create(text='Disease', UID='C00021', answer_type='MultiChoice')
    world.question_subgroup = QuestionGroup.objects.create(subsection=world.sub_section, order=1,
                                                           parent=world.question_group, name="Immunization subgroup")
    world.question_subgroup.question.add(world.question_1a)
@step(u'And I set question orders for the group and subgroup')
def and_i_set_question_orders_for_the_group_and_subgroup(step):
    # The subgroup's question is ordered within the parent group (slot 4,
    # after the three questions ordered in the earlier step).
    QuestionGroupOrder.objects.create(question=world.question_1a, question_group=world.question_group, order=4)
@step(u'Then I should see the group title and description')
def then_i_should_see_the_group_title_and_description(step):
    # Only the group name is checked; no description is verified despite
    # the step text -- NOTE(review): possibly an incomplete assertion.
    world.page.is_text_present(world.question_group.name)
@step(u'And I should see the subgroup title and description')
def and_i_should_see_the_subgroup_title_and_description(step):
    # Checks only the subgroup name, mirroring the parent-group step above.
    world.page.is_text_present(world.question_subgroup.name)
@step(u'When I click on a different section tab')
def when_i_click_on_a_different_section_tab(step):
    # Section tabs carry DOM ids of the form "section-<pk>".
    world.page.click_by_id("section-%s" % world.section_2.id)
@step(u'Then I should see that section page')
def then_i_should_see_that_section_page(step):
    # Rebind the page object to section 2 and verify the browser URL matches.
    world.page = QuestionnairePage(world.browser, world.section_2)
    world.page.validate_url()
@step(u'Then I should see an Add More button')
def then_i_should_see_an_add_more_button(step):
    # Text-based check for the repeat-group "Add More" control.
    world.page.is_text_present('Add More')
@step(u'When I click the Add More button')
def when_i_click_the_add_more_button(step):
    # The Add More control is located by its CSS class.
    world.page.click_by_css('.add-more')
@step(u'Then I should see a new question group')
def then_i_should_see_a_new_subsection(step):
    # Bug fix: the original ``assert(expr, 2)`` asserted a two-element tuple,
    # which is always truthy, so the step could never fail. Compare the
    # element count explicitly instead.
    assert world.page.number_of_elements("Immunization") == 2
@step(u'When I click the question group delete button')
def when_i_click_the_sub_section_delete_button(step):
    # The per-group delete control is located by its CSS class.
    world.page.click_by_css('.delete-more')
@step(u'Then I should not see that question group')
def then_i_should_not_see_that_question_group(step):
    # Bug fix: ``assert(expr, 1)`` asserted a tuple (always truthy), so this
    # step silently passed regardless of the page state.
    assert world.page.number_of_elements("Immunization") == 1
@step(u'And I have a grid group with all options of the primary question showable')
def and_i_have_a_grid_group_with_all_options_of_the_primary_question_showable(step):
    # grid=True / display_all=True presumably render the group as a table
    # showing every primary-question option -- confirm in QuestionGroup model.
    world.grid_group = QuestionGroup.objects.create(subsection=world.sub_section, order=1, grid=True, display_all=True)
@step(u'And I have 3 questions in that group one of which is primary')
def and_i_have_3_questions_in_that_group_one_of_which_is_primary(step):
    # A primary MultiChoice question plus four Number questions. Only the
    # primary, the totals question and question5 are attached to the grid
    # group, but order rows are registered for question3/question4 as well
    # (mirrors the original fixture exactly).
    primary = Question.objects.create(text='Disease', UID='C00001', answer_type='MultiChoice', is_primary=True)
    totals = Question.objects.create(text='Total Cases', UID='C00002', answer_type='Number',
                                     instructions="Include only those cases found positive for the infectious.")
    world.question3 = Question.objects.create(text='Number of cases tested', UID='C00003', answer_type='Number')
    world.question4 = Question.objects.create(text='Number of cases positive', UID='C00004', answer_type='Number')
    world.question5 = Question.objects.create(text='Number of cases positive', UID='004404', answer_type='Number')
    world.grid_group.question.add(primary, totals, world.question5)
    # Expose the six primary-question options as world.option1..world.option6.
    option_texts = ["Diphteria", "Measles", "Pertussis", "Yellow fever", "Mumps", "Rubella"]
    for index, option_text in enumerate(option_texts, start=1):
        setattr(world, "option%d" % index,
                QuestionOption.objects.create(text=option_text, question=primary))
    ordered_questions = [primary, totals, world.question3, world.question4, world.question5]
    for position, question in enumerate(ordered_questions, start=1):
        QuestionGroupOrder.objects.create(question=question, question_group=world.grid_group, order=position)
@step(u'Then I should see that grid with all the options of the primary question shown')
def then_i_should_see_that_grid_with_all_the_options_of_the_primary_question_shown(step):
    # Fixes: range(1, 5) only covered option1..option4 even though six
    # options exist and the step text says "all the options"; eval() on a
    # built string is replaced with the safe getattr() lookup.
    for i in range(1, 7):
        world.page.is_text_present(getattr(world, "option%d" % i).text)
@step(u'And I have a sub group in that group with two questions')
def and_i_have_a_sub_group_in_that_group_with_two_questions(step):
    # Nested grid group under world.grid_group holding two Number questions.
    # ("Labaratory" spelling is kept byte-for-byte: it is persisted data.)
    nested_group = QuestionGroup.objects.create(
        parent=world.grid_group,
        subsection=world.sub_section,
        order=2,
        grid=True,
        display_all=True,
        name="Labaratory Investigation",
        instructions="Include only those cases found positive.")
    nested_group.question.add(world.question3, world.question4)
goliveirab/odoo | refs/heads/8.0 | openerp/addons/test_inherits/__openerp__.py | 357 | # -*- coding: utf-8 -*-
{
    # Odoo/OpenERP addon manifest: a test-only module exercising the
    # ``_inherits`` delegation-inheritance mechanism.
    'name': 'test-inherits',
    'version': '0.1',
    'category': 'Tests',
    'description': """A module to verify the inheritance using _inherits.""",
    'author': 'Camptocamp',
    'website': 'http://www.camptocamp.com',
    'depends': ['base'],  # only the core base addon is required
    'data': [  # files loaded at module install time
        'ir.model.access.csv',
        'demo_data.xml',
    ],
    'installable': True,
    'auto_install': False,  # never installed implicitly by dependencies
}
|
goodwinnk/intellij-community | refs/heads/master | python/testData/copyPaste/SimpleExpressionPartCaretAtLineEnd.src.py | 37 | <selection>20</selection> |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.