code stringlengths 2 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int32 2 1.05M |
|---|---|---|---|---|---|
import os
import random
import pygame
import sys
from util import utils
from time import sleep
square_l = 10                    # pixel edge length of one maze cell when rendered
pacman_r, pacman_c = None, None  # pacman start cell (row, col); set by readMaze
food_r, food_c = None, None      # food/goal cell (row, col); set by readMaze
r,c = None, None                 # maze dimensions (rows, cols); set by readMaze
visited = []  # cells already queued/expanded (shared with bfs); reset between searches
fPath = []    # full visit order, kept for visualisation/debugging

def dfs(grid, start, end, path=None):
    """Depth-first search from start to end on a character grid.

    '%' cells are walls; the maze border is assumed to be walled, so
    neighbour indexing never leaves the grid.  Uses the module-level
    ``visited`` list, which the caller must reset between independent
    searches.

    Returns the list of (row, col) cells from start to end, or None if
    the goal is unreachable.
    """
    global visited
    global fPath
    # Avoid the shared-mutable-default pitfall of ``path=[]``.
    if path is None:
        path = []
    fPath.append(start)
    path = path + [start]
    if start == end:
        return path
    stack = []
    up = (start[0]-1, start[1])
    left = (start[0], start[1]-1)
    right = (start[0], start[1]+1)
    down = (start[0]+1, start[1])
    # Push unvisited open neighbours in the original up/left/right/down
    # order so the discovered path is identical to the previous version.
    for cell in (up, left, right, down):
        if grid[cell[0]][cell[1]] != '%' and cell not in visited:
            stack.append(cell)
            visited.append(cell)
    while len(stack) > 0:
        node = stack.pop()
        newpath = dfs(grid, node, end, path)
        if newpath:
            return newpath
    return None
def bfs(grid, start, end, path=None):
    """Breadth-first search from start to end on a character grid.

    '%' cells are walls.  Relies on the module-level ``visited`` list,
    which the caller must reset between independent searches.  Returns
    the list of (row, col) cells from start to end, or None.
    """
    global visited
    global fPath
    # BUG FIX: ``path=[]`` was a shared mutable default; because this
    # function mutates ``path`` via insert(), results accumulated across
    # repeated calls.
    if path is None:
        path = []
    queue = [start]
    while len(queue) > 0:
        node = queue.pop(0)  # FIFO pop gives breadth-first order
        fPath.append((node[0], node[1]))
        if (node[0], node[1]) == end:
            # BUG FIX: guard the degenerate start == end case, which used
            # to crash on ``node[2]`` (the start node has no parent link).
            if (node[0], node[1]) == start:
                path.insert(0, start)
                return path
            # Walk the parent links (third tuple element) back to start.
            while True:
                path.insert(0, node)
                node = node[2]
                if ((node[0], node[1]) == start):
                    path.insert(0, start)
                    return path
        up = (node[0]-1, node[1])
        left = (node[0], node[1]-1)
        right = (node[0], node[1]+1)
        down = (node[0]+1, node[1])
        # Enqueue unvisited open neighbours, remembering the parent node.
        for nbr in (up, left, right, down):
            if grid[nbr[0]][nbr[1]] != '%' and nbr not in visited:
                queue.append((nbr[0], nbr[1], node))
                visited.append(nbr)
    return None
def astar(grid, start, end):
    """A*-style search from start to end on a character grid.

    '%' cells are walls; stepping onto a '-' cell costs 1, any other
    open cell costs 0.  Returns the list of (row, col) cells from start
    to end, or None if unreachable.

    NOTE(review): the cost formula adds ``abs(h(child) - h(node))``
    rather than a plain g + h, so this is not textbook-admissible A*;
    the formula is preserved as-is to keep results comparable.
    """
    frontier = list()   # open list of (row, col) nodes
    costs = {}          # node -> current cost estimate
    explored = list()   # closed list
    path = {}           # child -> parent links for path reconstruction
    frontier.append(start)
    costs[start] = manDistance(start, end)
    while len(frontier) > 0:
        # Take the cheapest frontier node (linear scan; a heap would be
        # faster, but the behaviour is identical).
        index = 0
        minv = costs[frontier[index]]
        for i in range(len(frontier)):
            if costs[frontier[i]] < minv:
                minv = costs[frontier[i]]
                index = i
        node = frontier.pop(index)
        if node == end:
            respath = [node]
            while True:
                respath.insert(0, path[node])
                node = path[node]
                if node == start:
                    return respath
        explored.append(node)
        neighbours = []
        up = (node[0]-1, node[1])
        left = (node[0], node[1]-1)
        right = (node[0], node[1]+1)
        down = (node[0]+1, node[1])
        for cell in (up, left, right, down):
            if grid[cell[0]][cell[1]] != '%':
                step = 1 if (grid[cell[0]][cell[1]] == '-') else 0
                neighbours.append((cell, step))
        for child, step in neighbours:
            new_cost = (costs[node] + step +
                        abs(manDistance(child, end) - manDistance(node, end)))
            # BUG FIX: this condition used ``or``, which re-added nodes
            # already in the frontier or closed list (bloating the open
            # list) and made the cost-update ``elif`` below unreachable.
            if child not in explored and child not in frontier:
                path[child] = node
                frontier.append(child)
                costs[child] = new_cost
            elif costs[child] > new_cost:
                # Found a cheaper route to an already-seen node.
                path[child] = node
                costs[child] = new_cost
    return None


def manDistance(node, goal):
    """Manhattan distance between two (row, col) cells."""
    x = abs(node[0] - goal[0])
    y = abs(node[1] - goal[1])
    return x+y
def readMaze(filename):
    """Read a maze file and return its grid rows as a list of strings.

    File layout: line 1 is the pacman start "row col", line 2 the food
    "row col", line 3 the grid size "rows cols", followed by one line
    per grid row.  Populates the module globals pacman_r/c, food_r/c
    and r/c as a side effect.
    """
    global pacman_r, pacman_c
    global food_r, food_c
    global r,c
    filePath = os.path.join(utils.getResourcesPath(), filename)
    # BUG FIX: the file handle was opened but never closed; the context
    # manager guarantees it is released even on a parse error.
    with open(filePath, 'r') as f:
        pacman_r, pacman_c = [int(i) for i in f.readline().strip().split()]
        food_r, food_c = [int(i) for i in f.readline().strip().split()]
        r, c = [int(i) for i in f.readline().strip().split()]
        grid = [f.readline().strip() for _ in range(r)]
    return grid
def renderMaze(window, m, pos):
    """Draw maze ``m`` on a pygame window, highlighting cell ``pos``.

    pos is a (row, col) cell: pos[1] maps to screen x, pos[0] to
    screen y.  Walls ('%') are black squares; ``pos`` is red.
    """
    window.fill((255, 255, 255))
    global r, c
    for y in range(r):
        for x in range(c):
            if m[y][x] == '%':
                # each wall cell is a square_l x square_l black box
                box = pygame.Rect(x*square_l, y*square_l, square_l, square_l)
                pygame.draw.rect(window, (0, 0, 0), box, 0)
    # highlight the given (row, col) cell in red
    box = pygame.Rect(pos[1]*square_l, pos[0]*square_l, square_l, square_l)
    pygame.draw.rect(window, (255, 0, 0), box, 0)
    pygame.display.update()
    pygame.time.delay(10)
def main():
    """Load the maze, run DFS from pacman to the food, print the path."""
    #pygame.init()
    # BUG FIX: the path was 'hackerrank\pacman.txt' -- '\p' is an invalid
    # escape sequence (DeprecationWarning on modern Python) and the
    # backslash separator is Windows-only.
    maze = readMaze(os.path.join('hackerrank', 'pacman.txt'))
    #window = pygame.display.set_mode((c * square_l, r * square_l))
    res = dfs(maze, (pacman_r, pacman_c), (food_r, food_c))
    # BUG FIX: dfs returns None when no path exists; len(None) crashed.
    if res is None:
        print('No path found')
        return
    print(len(res))
    for line in res:
        #renderMaze(window, maze, line)
        print(line[0], line[1])
main() | 5agado/intro-ai | src/test/mazeOfPacman.py | Python | apache-2.0 | 6,740 |
import sys
import time
import numpy as np
try:
import serial
except:
import pip
pip.main(['install','pyserial'])
import serial
from serial.tools import list_ports
class TCLab(object):
    """Serial interface to a Temperature Control Lab Arduino device.

    Exposes the temperature sensors (T1/T2), the two heaters (Q1/Q2)
    and the LED via a simple line-oriented serial protocol.
    """

    def __init__(self, port=None, baud=9600):
        """Open the serial link to the Arduino.

        port: serial port name.  BUG FIX: this argument used to be
              ignored (findPort() was called unconditionally); it is
              now honoured, with auto-detection only when None.
        baud: serial baud rate.
        """
        if port is None:
            port = self.findPort()
        print('Opening connection')
        self.sp = serial.Serial(port=port, baudrate=baud, timeout=2)
        self.sp.flushInput()
        self.sp.flushOutput()
        time.sleep(3)  # give the Arduino time to reset after the port opens
        print('TCLab connected via Arduino on port ' + port)

    def findPort(self):
        """Return the port name of the first recognized Arduino.

        Falls back to asking the user interactively when none is found.
        BUG FIX: the original kept looping after a match, so a later,
        unrelated port tuple could clobber the detected port name.
        """
        signatures = (
            'USB VID:PID=16D0:0613',  # Arduino Uno
            'USB VID:PID=1A86:7523',  # Arduino HDuino
            'USB VID:PID=2341:8036',  # Arduino Leonardo
        )
        port = None
        for candidate in list(list_ports.comports()):
            if candidate[2].startswith(signatures):
                port = candidate[0]
                break
        if port is None:
            print('Arduino COM port not found')
            print('Please ensure that the USB cable is connected')
            print('--- Printing Serial Ports ---')
            for candidate in list(list_ports.comports()):
                print(candidate[0] + ' ' + candidate[1] + ' ' + candidate[2])
            print('For Windows:')
            print(' Open device manager, select "Ports (COM & LPT)"')
            print(' Look for COM port of Arduino such as COM4')
            print('For MacOS:')
            print(' Open terminal and type: ls /dev/*.')
            print(' Search for /dev/tty.usbmodem* or /dev/tty.usbserial*. The port number is *.')
            print('For Linux')
            print(' Open terminal and type: ls /dev/tty*')
            print(' Search for /dev/ttyUSB* or /dev/ttyACM*. The port number is *.')
            print('')
            port = input('Input port: ')
            # or hard-code it here
            #port = 'COM3' # for Windows
            #port = '/dev/tty.wchusbserial1410' # for MacOS
        return port

    def stop(self):
        """Shut the device down (firmware 'X' command)."""
        return self.read('X')

    def version(self):
        """Return the firmware version string."""
        return self.read('VER')

    @property
    def T1(self):
        """Temperature sensor 1 reading (degC)."""
        self._T1 = float(self.read('T1'))
        return self._T1

    @property
    def T2(self):
        """Temperature sensor 2 reading (degC)."""
        self._T2 = float(self.read('T2'))
        return self._T2

    def LED(self, pwm):
        """Set LED brightness; the 0-100 input is clamped then halved."""
        pwm = max(0.0, min(100.0, pwm)) / 2.0
        self.write('LED', pwm)
        return pwm

    def Q1(self, pwm):
        """Set heater 1 output (% PWM, clamped to 0-100)."""
        pwm = max(0.0, min(100.0, pwm))
        self.write('Q1', pwm)
        return pwm

    def Q2(self, pwm):
        """Set heater 2 output (% PWM, clamped to 0-100)."""
        pwm = max(0.0, min(100.0, pwm))
        self.write('Q2', pwm)
        return pwm

    def save_txt(self, t, u1, u2, y1, y2, sp1, sp2):
        """Save data to 'data.txt'.

        t = time; u1,u2 = heaters; y1,y2 = temperatures;
        sp1,sp2 = set points.
        """
        data = np.vstack((t, u1, u2, y1, y2, sp1, sp2))  # vertical stack
        data = data.T  # transpose data
        top = 'Time (sec), Heater 1 (%), Heater 2 (%), ' \
              + 'Temperature 1 (degC), Temperature 2 (degC), ' \
              + 'Set Point 1 (degC), Set Point 2 (degC)'
        np.savetxt('data.txt', data, delimiter=',', header=top, comments='')

    def read(self, cmd):
        """Send a query command and return the device's reply line.

        Returns None when the serial write fails.
        """
        cmd_str = self.build_cmd_str(cmd, '')
        try:
            self.sp.write(cmd_str.encode())
            self.sp.flush()
        except Exception:
            return None
        return self.sp.readline().decode('UTF-8').replace("\r\n", "")

    def write(self, cmd, pwm):
        """Send a command with one numeric argument; return the reply.

        Returns None when the serial write fails.
        """
        cmd_str = self.build_cmd_str(cmd, (pwm,))
        try:
            self.sp.write(cmd_str.encode())
            self.sp.flush()
        except Exception:  # was a bare except; narrowed for hygiene
            return None
        return self.sp.readline().decode('UTF-8').replace("\r\n", "")

    def build_cmd_str(self, cmd, args=None):
        """
        Build a command string that can be sent to the arduino.

        Input:
            cmd (str): the command to send to the arduino, must not
                contain a % character
            args (iterable): the arguments to send to the command
        """
        if args:
            args = ' '.join(map(str, args))
        else:
            args = ''
        return "{cmd} {args}\n".format(cmd=cmd, args=args)

    def close(self):
        """Close the serial port; always returns True."""
        try:
            self.sp.close()
            print('Arduino disconnected successfully')
        except Exception:  # was a bare except; narrowed for hygiene
            print('Problems disconnecting from Arduino.')
            print('Please unplug and reconnect Arduino.')
        return True
| APMonitor/arduino | 0_Test_Device/Python/tclab/tclab.py | Python | apache-2.0 | 4,881 |
class ObjectXEDA:
    """Base class for every editable XEDA object (schematic or PCB).

    The attributes below are class-level defaults; every value is an
    immutable type, so assigning on an instance rebinds per-instance.
    """
    kind= -1          # object type discriminator (-1 = undefined)
    name= ''
    posX= 0           # position of the object's origin
    posY= 0
    centerX= 0        # geometric centre
    centerY= 0
    w= 0              # bounding-box width
    h= 0              # bounding-box height
    angle= 0          # rotation angle
    selected= False   # editor selection state
    mirrored= False
    locked= False
    glow= False       # highlight state
    dragging= False   # True while the object is being dragged
    def is_over(self, x, y):
        '''Return whether the given coordinate lies over this object.
        '''
        pass
    def start_drag(self):
        # stub: begin a drag operation
        pass
    def stop_drag(self):
        # stub: end a drag operation
        pass
class SCHObject(ObjectXEDA):
    '''Schematic-sheet object.  Possible kinds:
    components (described here)
    wires
    junctions
    power symbols
    ports (between sheets)
    net names
    free text
    text boxes
    buses
    bus entries
    graphics (lines, circles, rectangles, arcs, polygons)
    '''
    def __init__(self):
        self.pn= '74hc123'       # part number
        self.ref= 'U12'          # reference designator
        self.comment= '74HC123'  # display comment
        # parts should be a list of the component's parts, each holding the
        # list of that part's pins followed by that part's graphics
        self.parts= 2
        # each pin tuple: (x, y, name, id, angle, clk, dot, show-name, show-id)
        self.pins= [[(0, 0, 'bla', 'id', 'angle', 'clk', 'dot', 'show-name', 'show-id'), (50, 0, 'ble', 'id', 'angle', 'clk', 'dot', 'show-name', 'show-id'), (200, 0, 'bli', 'id', 'angle', 'clk', 'dot', 'show-name', 'show-id')],
        [(0, 100, 'da', 'id', 'angle', 'clk', 'dot', 'show-name', 'show-id'), (50, 150, 'de', 'id', 'angle', 'clk', 'dot', 'show-name', 'show-id'), (200, 300, 'di', 'id', 'angle', 'clk', 'dot', 'show-name', 'show-id'), (250, 50, 'do', 'id', 'angle', 'clk', 'dot', 'show-name', 'show-id')]
        ]
        self.partId= 0           # index of the active part in a multi-part package
        self.drawns= []          # graphic primitives of this part
        self.foots= ['so16', 'so16.3', 'dip16']  # candidate footprints
        self.footId= 1           # index of the selected footprint
    def draw(self):
        # stub: render this schematic object
        pass
class PCBObject(ObjectXEDA):
    '''PCB object.  Possible kinds:
    components (as described here)
    isolated pads
    vias
    lines
    arcs (defined from the centre or the end points)
    text
    '''
    def __init__(self):
        self.foot= 'sop23'    # footprint name
        self.ref= 'U12'       # reference designator
        self.comment= 'teste'
        # each pad tuple: (x, y, id, shape, w, h, layer, hole-diameter,
        #                  net-name, mask)
        self.pads= [(0, 0, 'id', 'quadrado', 'w', 'h', 'top', 'furo-diametro', 'net-name', 'mask'),
        (25, 0, 'id', 'quadrado', 'w', 'h', 'top', 'furo-diametro', 'net-name', 'mask')
        ]
        self.drawns= []       # graphic primitives
    def draw(self):
        # stub: render this PCB object
        pass
| xgvargas/xeda | sw/objects.py | Python | apache-2.0 | 2,224 |
from appkit.api.v0_2_8 import App
# Create the AppKit application bound to this module.
app = App(__name__)
@app.route("/")
def home():
    """Root route: return a link that opens in a new browser tab."""
    return '<a href="#" target="_blank">Clique</a>'
# Start the application's event loop (blocks until the app exits).
app.run()
| diogocs1/facebookexplorer | teste.py | Python | apache-2.0 | 144 |
import sys
import traceback
from catalyst.errors import ZiplineError
def silent_except_hook(exctype, excvalue, exctraceback):
    """Custom excepthook: print a short message for known Catalyst errors.

    For any other exception type, fall through to the default hook so
    the full traceback is still shown.
    """
    silenced = (PricingDataBeforeTradingError, PricingDataNotLoadedError,
                SymbolNotFoundOnExchange, NoDataAvailableOnExchange,
                ExchangeAuthEmpty)
    if exctype not in silenced:
        sys.__excepthook__(exctype, excvalue, exctraceback)
        return
    # Report only the innermost frame (file + line) plus the message.
    last_frame = traceback.extract_tb(exctraceback)[-1]
    fn = last_frame[0]
    ln = last_frame[1]
    print("Error traceback: {1} (line {2})\n"
          "{0.__name__}: {3}".format(exctype, fn, ln, excvalue))
sys.excepthook = silent_except_hook
# --- Exchange request / configuration / order errors -----------------------
# Each class only customises the ZiplineError message template.
# NOTE(review): the '(unknown)' fragments in several messages below look
# like redacted filesystem paths -- confirm against the original source.
class ExchangeRequestError(ZiplineError):
    msg = (
        'Request failed: {error}'
    ).strip()
class ExchangeRequestErrorTooManyAttempts(ZiplineError):
    msg = (
        'Request failed: {error}, giving up after {attempts} attempts'
    ).strip()
class ExchangeBarDataError(ZiplineError):
    msg = (
        'Unable to retrieve bar data: {data_type}, ' +
        'giving up after {attempts} attempts: {error}'
    ).strip()
class ExchangePortfolioDataError(ZiplineError):
    msg = (
        'Unable to retrieve portfolio data: {data_type}, ' +
        'giving up after {attempts} attempts: {error}'
    ).strip()
class ExchangeTransactionError(ZiplineError):
    msg = (
        'Unable to execute transaction: {transaction_type}, ' +
        'giving up after {attempts} attempts: {error}'
    ).strip()
class ExchangeNotFoundError(ZiplineError):
    msg = (
        'Exchange {exchange_name} not found. Please specify exchanges '
        'supported by Catalyst and verify spelling for accuracy.'
    ).strip()
class ExchangeAuthNotFound(ZiplineError):
    msg = (
        'Please create an auth.json file containing the api token and key for '
        'exchange {exchange}. Place the file here: (unknown)'
    ).strip()
class ExchangeAuthEmpty(ZiplineError):
    msg = (
        'Please enter your API token key and secret for exchange {exchange} '
        'in the following file: (unknown)'
    ).strip()
class RemoteAuthEmpty(ZiplineError):
    msg = (
        'Please enter your API token key and secret for the remote server '
        'in the following file: (unknown)'
    ).strip()
class ExchangeSymbolsNotFound(ZiplineError):
    msg = (
        'Unable to download or find a local copy of symbols.json for exchange '
        '{exchange}. The file should be here: (unknown)'
    ).strip()
class AlgoPickleNotFound(ZiplineError):
    msg = (
        'Pickle not found for algo {algo} in path (unknown)'
    ).strip()
class InvalidHistoryFrequencyAlias(ZiplineError):
    msg = (
        'Invalid frequency alias {freq}. Valid suffixes are M (minute) '
        'and D (day). For example, these aliases would be valid '
        '1M, 5M, 1D.'
    ).strip()
class InvalidHistoryFrequencyError(ZiplineError):
    msg = (
        'Frequency {frequency} not supported by the exchange.'
    ).strip()
class UnsupportedHistoryFrequencyError(ZiplineError):
    msg = (
        '{exchange} does not support candle frequency {freq}, please choose '
        'from: {freqs}.'
    ).strip()
class InvalidHistoryTimeframeError(ZiplineError):
    msg = (
        'CCXT timeframe {timeframe} not supported by the exchange.'
    ).strip()
class MismatchingFrequencyError(ZiplineError):
    msg = (
        'Bar aggregate frequency {frequency} not compatible with '
        'data frequency {data_frequency}.'
    ).strip()
class InvalidSymbolError(ZiplineError):
    msg = (
        'Invalid trading pair symbol: {symbol}. '
        'Catalyst symbols must follow this convention: '
        '[Base Currency]_[Quote Currency]. For example: eth_usd, btc_usd, '
        'neo_eth, ubq_btc. Error details: {error}'
    ).strip()
class InvalidOrderStyle(ZiplineError):
    msg = (
        'Order style {style} not supported by exchange {exchange}.'
    ).strip()
class CreateOrderError(ZiplineError):
    msg = (
        'Unable to create order on exchange {exchange} {error}.'
    ).strip()
class OrderNotFound(ZiplineError):
    msg = (
        'Order {order_id} not found on exchange {exchange}.'
    ).strip()
class OrphanOrderError(ZiplineError):
    msg = (
        'Order {order_id} found in exchange {exchange} but not tracked by '
        'the algorithm.'
    ).strip()
class OrphanOrderReverseError(ZiplineError):
    msg = (
        'Order {order_id} tracked by algorithm, but not found in exchange '
        '{exchange}.'
    ).strip()
class OrderCancelError(ZiplineError):
    msg = (
        'Unable to cancel order {order_id} on exchange {exchange} {error}.'
    ).strip()
class SidHashError(ZiplineError):
    msg = (
        'Unable to hash sid from symbol {symbol}.'
    ).strip()
class QuoteCurrencyNotFoundError(ZiplineError):
    msg = (
        'Algorithm quote currency {quote_currency} not found in account '
        'balances on {exchange}: {balances}'
    ).strip()
class MismatchingQuoteCurrencies(ZiplineError):
    msg = (
        'Unable to trade with quote currency {quote_currency} when the '
        'algorithm uses {algo_currency}.'
    ).strip()
class MismatchingQuoteCurrenciesExchanges(ZiplineError):
    """Raised when an exchange's quote currency conflicts with the algo's."""
    # BUG FIX: the message read "users {exchange_currency}" -- typo for "uses".
    msg = (
        'Unable to trade with quote currency {quote_currency} when the '
        'exchange {exchange_name} uses {exchange_currency}.'
    ).strip()
# --- Symbol / data-bundle / balance errors ---------------------------------
class SymbolNotFoundOnExchange(ZiplineError):
    """
    Raised when a symbol() call contains a non-existent symbol.
    """
    msg = ('Symbol {symbol} not found on exchange {exchange}. '
           'Choose from: {supported_symbols}').strip()
class BundleNotFoundError(ZiplineError):
    msg = ('Unable to find bundle data for exchange {exchange} and '
           'data frequency {data_frequency}.'
           'Please ingest some price data.'
           'See `catalyst ingest-exchange --help` for details.').strip()
class TempBundleNotFoundError(ZiplineError):
    msg = ('Temporary bundle not found in: {path}.').strip()
class EmptyValuesInBundleError(ZiplineError):
    msg = ('{name} with end minute {end_minute} has empty rows '
           'in ranges: {dates}').strip()
class PricingDataBeforeTradingError(ZiplineError):
    msg = ('Pricing data for trading pairs {symbols} on exchange {exchange} '
           'starts on {first_trading_day}, but you are either trying to trade '
           'or retrieve pricing data on {dt}. Adjust your dates accordingly.'
           ).strip()
class PricingDataNotLoadedError(ZiplineError):
    msg = ('Missing data for {exchange} {symbols} in date range '
           '[{start_dt} - {end_dt}]'
           '\nPlease run: `catalyst ingest-exchange -x {exchange} -f '
           '{data_frequency} -i {symbol_list}`. See catalyst documentation '
           'for details.').strip()
class PricingDataValueError(ZiplineError):
    msg = ('Unable to retrieve pricing data for {exchange} {symbol} '
           '[{start_dt} - {end_dt}]: {error}').strip()
class DataCorruptionError(ZiplineError):
    msg = (
        'Unable to validate data for {exchange} {symbols} in date range '
        '[{start_dt} - {end_dt}]. The data is either corrupted or '
        'unavailable. Please try deleting this bundle:'
        '\n`catalyst clean-exchange -x {exchange}\n'
        'Then, ingest the data again. Please contact the Catalyst team if '
        'the issue persists.'
    ).strip()
class ApiCandlesError(ZiplineError):
    msg = (
        'Unable to fetch candles from the remote API: {error}.'
    ).strip()
class NoDataAvailableOnExchange(ZiplineError):
    msg = (
        'Requested data for trading pair {symbol} is not available on '
        'exchange {exchange} '
        'in `{data_frequency}` frequency at this time. '
        'Check `http://enigma.co/catalyst/status` for market coverage.'
    ).strip()
class NoValueForField(ZiplineError):
    msg = (
        'Value not found for field: {field}.'
    ).strip()
class OrderTypeNotSupported(ZiplineError):
    msg = (
        'Order type `{order_type}` currently not supported by Catalyst. '
        'Please use `limit` or `market` orders only.'
    ).strip()
class NotEnoughCapitalError(ZiplineError):
    msg = (
        'Not enough capital on exchange {exchange} for trading. Each '
        'exchange should contain at least as much {quote_currency} '
        'as the specified `capital_base`. The current balance {balance} is '
        'lower than the `capital_base`: {capital_base}'
    ).strip()
class NotEnoughCashError(ZiplineError):
    msg = (
        'Total {currency} amount on {exchange} is lower than the cash '
        'reserved for this algo: {total} < {cash}. While trades can be made '
        'on the exchange accounts outside of the algo, exchange must have '
        'enough free {currency} to cover the algo cash.'
    ).strip()
class LastCandleTooEarlyError(ZiplineError):
    msg = (
        'The trade date of the last candle {last_traded} is before the '
        'specified end date minus one candle {end_dt}. Please verify how '
        '{exchange} calculates the start date of OHLCV candles.'
    ).strip()
class TickerNotFoundError(ZiplineError):
    msg = (
        'Unable to fetch ticker for {symbol} on {exchange}.'
    ).strip()
class BalanceNotFoundError(ZiplineError):
    msg = (
        '{currency} not found in account balance on {exchange}: {balances}.'
    ).strip()
class BalanceTooLowError(ZiplineError):
    msg = (
        'Balance for {currency} on {exchange} too low: {free} < {amount}. '
        'Positions have likely been sold outside of this algorithm. Please '
        'add positions to hold a free amount greater than {amount}, or clean '
        'the state of this algo and restart.'
    ).strip()
class NoCandlesReceivedFromExchange(ZiplineError):
    msg = (
        'Although requesting {bar_count} candles until {end_dt} of '
        'asset {asset}, an empty list of candles was received for {exchange}.'
    ).strip()
| enigmampc/catalyst | catalyst/exchange/exchange_errors.py | Python | apache-2.0 | 10,006 |
from gitcd.interface.cli.abstract import BaseCommand
from gitcd.git.branch import Branch
from gitcd.app.clean import Clean as CleanHelper
class Clean(BaseCommand):
updateRemote = True
def run(self, branch: Branch):
helper = CleanHelper()
branchesToDelete = helper.getBranchesToDelete()
self.interface.writeOut('Branches to delete')
if len(branchesToDelete) == 0:
self.interface.ok(' - no branches to delete')
for branchToDelete in branchesToDelete:
self.interface.red(" - %s" % branchToDelete.getName())
self.interface.writeOut('')
if len(branchesToDelete) == 0:
self.interface.ok('Nice, your local repository is clean already.')
return True
delete = self.interface.askFor(
'Do you want me to delete those branches locally?',
['yes', 'no'],
'yes'
)
if delete == 'yes':
helper.deleteBranches(branchesToDelete)
return True
| claudio-walser/gitcd | gitcd/interface/cli/clean.py | Python | apache-2.0 | 1,028 |
# Copyright 2006 James Tauber and contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyjamas import DOM
from pyjamas.ui.ButtonBase import ButtonBase
from pyjamas.ui import Event
_CheckBox_unique_id = 0


class CheckBox(ButtonBase):
    """A check box widget, optionally labelled with plain text or HTML."""

    def __init__(self, label=None, asHTML=False, **kwargs):
        """Create a check box.

        label: optional label; asHTML: treat the label as raw HTML.
        """
        if 'StyleName' not in kwargs:  # was kwargs.has_key(); `in` works on py2 too
            kwargs['StyleName'] = "gwt-CheckBox"
        if label:
            if asHTML:
                kwargs['HTML'] = label
            else:
                kwargs['Text'] = label
        self.initElement(DOM.createInputCheck(), **kwargs)

    def initElement(self, element, **kwargs):
        """Wire up the <input> and <label> pair inside a <span> wrapper."""
        self.inputElem = element
        self.labelElem = DOM.createLabel()
        ButtonBase.__init__(self, DOM.createSpan(), **kwargs)
        self.unsinkEvents(Event.FOCUSEVENTS | Event.ONCLICK)
        DOM.sinkEvents(self.inputElem, Event.FOCUSEVENTS | Event.ONCLICK | DOM.getEventsSunk(self.inputElem))
        DOM.appendChild(self.getElement(), self.inputElem)
        DOM.appendChild(self.getElement(), self.labelElem)
        # tie the label to the input so clicking the label toggles the box
        uid = "check%d" % self.getUniqueID()
        DOM.setAttribute(self.inputElem, "id", uid)
        DOM.setAttribute(self.labelElem, "htmlFor", uid)

    # emulate a static counter shared by all instances
    def getUniqueID(self):
        global _CheckBox_unique_id
        _CheckBox_unique_id += 1
        return _CheckBox_unique_id

    def getHTML(self):
        """Return the label's inner HTML."""
        return DOM.getInnerHTML(self.labelElem)

    def getName(self):
        """Return the form name of the underlying input element."""
        return DOM.getAttribute(self.inputElem, "name")

    def getText(self):
        """Return the label's plain text."""
        return DOM.getInnerText(self.labelElem)

    def setChecked(self, checked):
        """Set both the live and the default checked state."""
        DOM.setBooleanAttribute(self.inputElem, "checked", checked)
        DOM.setBooleanAttribute(self.inputElem, "defaultChecked", checked)

    def isChecked(self):
        # Before attachment the browser only tracks defaultChecked.
        if self.isAttached():
            propName = "checked"
        else:
            propName = "defaultChecked"
        return DOM.getBooleanAttribute(self.inputElem, propName)

    def isEnabled(self):
        return not DOM.getBooleanAttribute(self.inputElem, "disabled")

    def setEnabled(self, enabled):
        DOM.setBooleanAttribute(self.inputElem, "disabled", not enabled)

    def setFocus(self, focused):
        # BUG FIX: ``self`` was missing from the signature, so calling this
        # as an instance method raised a TypeError.
        # NOTE(review): ``Focus`` is not imported in this module -- confirm
        # the pyjamas runtime provides it.
        if focused:
            Focus.focus(self.inputElem)
        else:
            Focus.blur(self.inputElem)

    def setHTML(self, html):
        """Set the label as raw HTML."""
        DOM.setInnerHTML(self.labelElem, html)

    def setName(self, name):
        """Set the form name of the underlying input element."""
        DOM.setAttribute(self.inputElem, "name", name)

    def setTabIndex(self, index):
        Focus.setTabIndex(self.inputElem, index)

    def setText(self, text):
        """Set the label as plain text."""
        DOM.setInnerText(self.labelElem, text)

    def onDetach(self):
        # Persist the live checked state into defaultChecked before detach,
        # since isChecked() reads defaultChecked while detached.
        self.setChecked(self.isChecked())
        ButtonBase.onDetach(self)
| lovelysystems/pyjamas | library/pyjamas/ui/CheckBox.py | Python | apache-2.0 | 3,250 |
"""Support for RESTful API sensors."""
import json
import logging
from xml.parsers.expat import ExpatError
import httpx
from jsonpath import jsonpath
import voluptuous as vol
import xmltodict
from homeassistant.components.sensor import DEVICE_CLASSES_SCHEMA, PLATFORM_SCHEMA
from homeassistant.const import (
CONF_AUTHENTICATION,
CONF_DEVICE_CLASS,
CONF_FORCE_UPDATE,
CONF_HEADERS,
CONF_METHOD,
CONF_NAME,
CONF_PASSWORD,
CONF_PAYLOAD,
CONF_RESOURCE,
CONF_RESOURCE_TEMPLATE,
CONF_TIMEOUT,
CONF_UNIT_OF_MEASUREMENT,
CONF_USERNAME,
CONF_VALUE_TEMPLATE,
CONF_VERIFY_SSL,
HTTP_BASIC_AUTHENTICATION,
HTTP_DIGEST_AUTHENTICATION,
)
from homeassistant.exceptions import PlatformNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.reload import async_setup_reload_service
from . import DOMAIN, PLATFORMS
from .data import DEFAULT_TIMEOUT, RestData
_LOGGER = logging.getLogger(__name__)
# Platform defaults applied when the user omits the option.
DEFAULT_METHOD = "GET"
DEFAULT_NAME = "REST Sensor"
DEFAULT_VERIFY_SSL = True
DEFAULT_FORCE_UPDATE = False
CONF_JSON_ATTRS = "json_attributes"
CONF_JSON_ATTRS_PATH = "json_attributes_path"
METHODS = ["POST", "GET"]
# Voluptuous schema validating the platform's YAML configuration.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        # resource and resource_template are mutually exclusive
        vol.Exclusive(CONF_RESOURCE, CONF_RESOURCE): cv.url,
        vol.Exclusive(CONF_RESOURCE_TEMPLATE, CONF_RESOURCE): cv.template,
        vol.Optional(CONF_AUTHENTICATION): vol.In(
            [HTTP_BASIC_AUTHENTICATION, HTTP_DIGEST_AUTHENTICATION]
        ),
        vol.Optional(CONF_HEADERS): vol.Schema({cv.string: cv.string}),
        vol.Optional(CONF_JSON_ATTRS, default=[]): cv.ensure_list_csv,
        vol.Optional(CONF_METHOD, default=DEFAULT_METHOD): vol.In(METHODS),
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
        vol.Optional(CONF_PASSWORD): cv.string,
        vol.Optional(CONF_PAYLOAD): cv.string,
        vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
        vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
        vol.Optional(CONF_USERNAME): cv.string,
        vol.Optional(CONF_JSON_ATTRS_PATH): cv.string,
        vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
        vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): cv.boolean,
        vol.Optional(CONF_FORCE_UPDATE, default=DEFAULT_FORCE_UPDATE): cv.boolean,
        vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
    }
)
# At least one of resource / resource_template must be present.
PLATFORM_SCHEMA = vol.All(
    cv.has_at_least_one_key(CONF_RESOURCE, CONF_RESOURCE_TEMPLATE), PLATFORM_SCHEMA
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the RESTful sensor."""
    await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
    # Pull every user-supplied option out of the validated config.
    name = config.get(CONF_NAME)
    resource = config.get(CONF_RESOURCE)
    resource_template = config.get(CONF_RESOURCE_TEMPLATE)
    method = config.get(CONF_METHOD)
    payload = config.get(CONF_PAYLOAD)
    verify_ssl = config.get(CONF_VERIFY_SSL)
    username = config.get(CONF_USERNAME)
    password = config.get(CONF_PASSWORD)
    headers = config.get(CONF_HEADERS)
    unit = config.get(CONF_UNIT_OF_MEASUREMENT)
    device_class = config.get(CONF_DEVICE_CLASS)
    value_template = config.get(CONF_VALUE_TEMPLATE)
    json_attrs = config.get(CONF_JSON_ATTRS)
    json_attrs_path = config.get(CONF_JSON_ATTRS_PATH)
    force_update = config.get(CONF_FORCE_UPDATE)
    timeout = config.get(CONF_TIMEOUT)
    if value_template is not None:
        value_template.hass = hass
    if resource_template is not None:
        # Render the templated URL once for the initial fetch; it is
        # re-rendered on every update inside RestSensor.async_update.
        resource_template.hass = hass
        resource = resource_template.render(parse_result=False)
    if username and password:
        # Pick digest vs. basic auth based on configuration.
        if config.get(CONF_AUTHENTICATION) == HTTP_DIGEST_AUTHENTICATION:
            auth = httpx.DigestAuth(username, password)
        else:
            auth = (username, password)
    else:
        auth = None
    rest = RestData(method, resource, auth, headers, payload, verify_ssl, timeout)
    await rest.async_update()
    if rest.data is None:
        # First fetch failed: ask Home Assistant to retry setup later.
        raise PlatformNotReady
    # Must update the sensor now (including fetching the rest resource) to
    # ensure it's updating its state.
    async_add_entities(
        [
            RestSensor(
                hass,
                rest,
                name,
                unit,
                device_class,
                value_template,
                json_attrs,
                force_update,
                resource_template,
                json_attrs_path,
            )
        ],
        True,
    )
class RestSensor(Entity):
    """Implementation of a REST sensor."""
    def __init__(
        self,
        hass,
        rest,
        name,
        unit_of_measurement,
        device_class,
        value_template,
        json_attrs,
        force_update,
        resource_template,
        json_attrs_path,
    ):
        """Initialize the REST sensor."""
        self._hass = hass
        self.rest = rest                 # RestData helper doing the HTTP I/O
        self._name = name
        self._state = None               # last rendered state value
        self._unit_of_measurement = unit_of_measurement
        self._device_class = device_class
        self._value_template = value_template    # template applied to the body
        self._json_attrs = json_attrs            # keys extracted as attributes
        self._attributes = None
        self._force_update = force_update
        self._resource_template = resource_template  # templated URL, if any
        self._json_attrs_path = json_attrs_path      # jsonpath into the body
    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name
    @property
    def unit_of_measurement(self):
        """Return the unit the value is expressed in."""
        return self._unit_of_measurement
    @property
    def device_class(self):
        """Return the class of this sensor."""
        return self._device_class
    @property
    def available(self):
        """Return if the sensor data are available."""
        return self.rest.data is not None
    @property
    def state(self):
        """Return the state of the device."""
        return self._state
    @property
    def force_update(self):
        """Force update."""
        return self._force_update
    async def async_update(self):
        """Get the latest data from REST API and update the state."""
        if self._resource_template is not None:
            # Re-render the templated URL on every poll.
            self.rest.set_url(self._resource_template.render(parse_result=False))
        await self.rest.async_update()
        value = self.rest.data
        _LOGGER.debug("Data fetched from resource: %s", value)
        if self.rest.headers is not None:
            # If the http request failed, headers will be None
            content_type = self.rest.headers.get("content-type")
            # XML bodies are converted to JSON so the attribute-extraction
            # path below can treat both formats uniformly.
            if content_type and (
                content_type.startswith("text/xml")
                or content_type.startswith("application/xml")
            ):
                try:
                    value = json.dumps(xmltodict.parse(value))
                    _LOGGER.debug("JSON converted from XML: %s", value)
                except ExpatError:
                    _LOGGER.warning(
                        "REST xml result could not be parsed and converted to JSON"
                    )
                    _LOGGER.debug("Erroneous XML: %s", value)
        if self._json_attrs:
            self._attributes = {}
            if value:
                try:
                    json_dict = json.loads(value)
                    if self._json_attrs_path is not None:
                        json_dict = jsonpath(json_dict, self._json_attrs_path)
                    # jsonpath will always store the result in json_dict[0]
                    # so the next line happens to work exactly as needed to
                    # find the result
                    if isinstance(json_dict, list):
                        json_dict = json_dict[0]
                    if isinstance(json_dict, dict):
                        # keep only the configured attribute keys
                        attrs = {
                            k: json_dict[k] for k in self._json_attrs if k in json_dict
                        }
                        self._attributes = attrs
                    else:
                        _LOGGER.warning(
                            "JSON result was not a dictionary"
                            " or list with 0th element a dictionary"
                        )
                except ValueError:
                    _LOGGER.warning("REST result could not be parsed as JSON")
                    _LOGGER.debug("Erroneous JSON: %s", value)
            else:
                _LOGGER.warning("Empty reply found when expecting JSON data")
        if value is not None and self._value_template is not None:
            # Apply the user's value_template to derive the state.
            value = self._value_template.async_render_with_possible_json_value(
                value, None
            )
        self._state = value
    async def async_will_remove_from_hass(self):
        """Shutdown the session."""
        await self.rest.async_remove()
    @property
    def device_state_attributes(self):
        """Return the state attributes."""
        return self._attributes
| balloob/home-assistant | homeassistant/components/rest/sensor.py | Python | apache-2.0 | 9,072 |
"""Utility functions for FAUCET."""
# Copyright (C) 2015 Brad Cowie, Christopher Lorier and Joe Stringer.
# Copyright (C) 2015 Research and Education Advanced Network New Zealand Ltd.
# Copyright (C) 2015--2018 The Contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from logging.handlers import WatchedFileHandler
import os
import signal
import sys
from functools import wraps
def kill_on_exception(logname):
    """Decorator: terminate the RYU process if the wrapped call raises.

    Any unhandled exception is logged to *logname*, logging is flushed,
    and SIGTERM is sent to our own pid so the whole controller dies
    rather than limping on in a broken state.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            try:
                func(*args, **kwargs)
            except:  # noqa: E722 -- deliberately catch everything
                log = logging.getLogger(logname)
                log.exception('Unhandled exception, killing RYU')
                logging.shutdown()
                os.kill(os.getpid(), signal.SIGTERM)
        return wrapper
    return decorator
def utf8_decode(msg_str):
    """Best-effort UTF-8 decode; undecodable bytes become U+FFFD."""
    return msg_str.decode(encoding='utf-8', errors='replace')
def get_sys_prefix():
    """Return an extra path prefix for log/config files in a virtualenv.

    virtualenv records the interpreter's original location in
    ``sys.real_prefix`` and pyvenv (3.3+) records it in
    ``sys.base_prefix``.  When either differs from ``sys.prefix`` we are
    running inside a virtual environment and the default file locations
    should live under that prefix; otherwise return ''.
    """
    real = getattr(sys, 'real_prefix', sys.prefix)
    base = getattr(sys, 'base_prefix', sys.prefix)
    in_virtual_env = real != sys.prefix or base != sys.prefix
    return sys.prefix if in_virtual_env else ''
# Resolved once at import time: '' normally, sys.prefix inside a virtualenv.
_PREFIX = get_sys_prefix()
# To specify a boolean-only setting, set the default value to a bool type.
# Colon-separated string values are search paths consumed by
# get_setting(..., path_eval=True): the first existing file wins.
DEFAULTS = {
    'FAUCET_CONFIG': ''.join((
        _PREFIX,
        '/etc/faucet/faucet.yaml',
        ':',
        _PREFIX,
        '/etc/ryu/faucet/faucet.yaml')),
    'FAUCET_CONFIG_STAT_RELOAD': False,
    'FAUCET_LOG_LEVEL': 'INFO',
    'FAUCET_LOG': _PREFIX + '/var/log/faucet/faucet.log',
    'FAUCET_EVENT_SOCK': '', # Special-case, see get_setting().
    'FAUCET_EXCEPTION_LOG': _PREFIX + '/var/log/faucet/faucet_exception.log',
    'FAUCET_PROMETHEUS_PORT': '9302',
    'FAUCET_PROMETHEUS_ADDR': '0.0.0.0',
    'FAUCET_PIPELINE_DIR': _PREFIX + '/etc/faucet' + ':' + _PREFIX + '/etc/ryu/faucet',
    'GAUGE_CONFIG': ''.join((
        _PREFIX,
        '/etc/faucet/gauge.yaml',
        ':',
        _PREFIX,
        '/etc/ryu/faucet/gauge.yaml')),
    'GAUGE_CONFIG_STAT_RELOAD': False,
    'GAUGE_LOG_LEVEL': 'INFO',
    'GAUGE_PROMETHEUS_ADDR': '0.0.0.0',
    'GAUGE_EXCEPTION_LOG': _PREFIX + '/var/log/faucet/gauge_exception.log',
    'GAUGE_LOG': _PREFIX + '/var/log/faucet/gauge.log'
}
def _cast_bool(value):
    """Return True if value parses as a non-zero int, False otherwise."""
    try:
        as_int = int(value)
    except ValueError:
        # Not int-ish (e.g. arbitrary string) -> treated as False.
        return False
    return as_int != 0
def get_setting(name, path_eval=False):
    """Return the value of the configuration setting *name*.

    The value comes from the environment variable of the same name,
    falling back to DEFAULTS.  With ``path_eval=True`` a string value is
    treated as a colon-separated search path and the first entry that is
    an existing file is chosen (first entry if none exist).  Settings
    whose default is a bool are coerced with _cast_bool; FAUCET_EVENT_SOCK
    gets special truthy/'0' handling.
    """
    fallback = DEFAULTS[name]
    result = os.getenv(name, fallback)
    string_default = isinstance(fallback, str) and not isinstance(fallback, bool)
    if path_eval and string_default and isinstance(result, str):
        # split on ':' and find the first suitable path
        candidates = result.split(':')
        result = next(
            (loc for loc in candidates if os.path.isfile(loc)),
            candidates[0])
    # Check for setting that expects a boolean result.
    if isinstance(fallback, bool):
        return _cast_bool(result)
    # Special default for FAUCET_EVENT_SOCK: '0' disables it outright,
    # any other truthy value selects the default socket path.
    if name == 'FAUCET_EVENT_SOCK':
        if result == '0':
            return ''
        if _cast_bool(result):
            return _PREFIX + '/var/run/faucet/faucet.sock'
    return result
def get_logger(logname, logfile, loglevel, propagate):
    """Create and return a configured logger.

    *logfile* may be a filesystem path or one of the literal strings
    'STDOUT' / 'STDERR' to log to the corresponding stream.  Exits the
    process on PermissionError opening the log file.
    """
    streams = {
        'STDOUT': sys.stdout,
        'STDERR': sys.stderr,
    }
    try:
        stream = streams.get(logfile)
        if stream is None:
            # WatchedFileHandler reopens the file if logrotate moves it.
            handler = WatchedFileHandler(logfile)
        else:
            handler = logging.StreamHandler(stream)
    except PermissionError as err:  # pytype: disable=name-error
        print(err)
        sys.exit(-1)
    handler.setFormatter(logging.Formatter(
        '%(asctime)s %(name)-6s %(levelname)-8s %(message)s',
        '%b %d %H:%M:%S'))
    logger = logging.getLogger(logname)
    logger.addHandler(handler)
    logger.propagate = propagate
    logger.setLevel(loglevel)
    return logger
def close_logger(logger):
    """Detach and close every handler on *logger*; None is a no-op."""
    if logger is None:
        return
    # Copy the handler list: we mutate it while iterating.
    attached = list(logger.handlers)
    for log_handler in attached:
        log_handler.close()
        logger.removeHandler(log_handler)
def dpid_log(dpid):
    """Return a DP ID formatted as decimal plus hex, e.g. 'DPID 1 (0x1)'."""
    return 'DPID {0:d} (0x{0:x})'.format(dpid)
def btos(b_str):
    """Return a strict UTF-8 round-trip of *b_str* as a native string."""
    encoded = b_str.encode('utf-8')
    return encoded.decode('utf-8', 'strict')
def stat_config_files(config_hashes):
    """Return {path: (size, mtime, ctime)} for each stat-able config file.

    Paths that cannot be stat-ed (deleted, permission denied) are
    silently omitted from the result.
    """
    stats = {}
    for path in list(config_hashes):
        try:
            info = os.stat(path)
        except OSError:
            continue
        stats[path] = (info.st_size, info.st_mtime, info.st_ctime)
    return stats
| wackerly/faucet | faucet/valve_util.py | Python | apache-2.0 | 6,228 |
# -*- coding:utf-8 -*-
# Modified from https://github.com/tylerneylon/explacy
import io
from collections import defaultdict
from pprint import pprint
from phrasetree.tree import Tree
def make_table(rows, insert_header=False):
    """Render *rows* (header first) as a tab-separated left-aligned table.

    Column widths come from the body rows only; header cells are
    truncated to fit.  With ``insert_header`` a rule of '─' characters is
    inserted under the header.  NOTE: mutates *rows* in place.
    """
    body_columns = zip(*rows[1:])
    widths = [max(len(cell) for cell in column) for column in body_columns]
    # Truncate header cells so they never exceed their column width.
    rows[0] = [cell[:width] for cell, width in zip(rows[0], widths)]
    template = '\t'.join('%%-%ds' % width for width in widths)
    if insert_header:
        rows.insert(1, ['─' * width for width in widths])
    return '\n'.join(template % tuple(row) for row in rows)
def _start_end(arrow):
    """Return (from, to, min, max) endpoints of an arrow dict."""
    src, dst = arrow['from'], arrow['to']
    return src, dst, min(src, dst), max(src, dst)
def pretty_tree_horizontal(arrows, _do_print_debug_info=False):
    """Print the dependency tree horizontally.

    Args:
        arrows: list of dicts with ``from``/``to`` token indices; mutated
            in place (``underset``, ``num_deps``, ``num_deps_left`` and
            ``height`` keys are added during rendering).
        _do_print_debug_info: dump intermediate state to stdout
            (Default value = False)

    Returns:
        list of rendered strings, one per token row (via render_arrows).
    """
    # Set the base height; these may increase to allow room for arrowheads after this.
    # First pass: for each arrow count the arrows nested under it, so
    # inner arrows can be drawn first with smaller heights.
    arrows_with_deps = defaultdict(set)
    for i, arrow in enumerate(arrows):
        arrow['underset'] = set()
        if _do_print_debug_info:
            print('Arrow %d: "%s" -> "%s"' % (i, arrow['from'], arrow['to']))
        num_deps = 0
        start, end, mn, mx = _start_end(arrow)
        for j, other in enumerate(arrows):
            if arrow is other:
                continue
            o_start, o_end, o_mn, o_mx = _start_end(other)
            if ((start == o_start and mn <= o_end <= mx) or
                    (start != o_start and mn <= o_start <= mx)):
                num_deps += 1
                if _do_print_debug_info:
                    print('%d is over %d' % (i, j))
                arrow['underset'].add(j)
        arrow['num_deps_left'] = arrow['num_deps'] = num_deps
        arrows_with_deps[num_deps].add(i)
    if _do_print_debug_info:
        print('')
        print('arrows:')
        pprint(arrows)
        print('')
        print('arrows_with_deps:')
        pprint(arrows_with_deps)
    # Render the arrows in characters. Some heights will be raised to make room for arrowheads.
    sent_len = (max([max(arrow['from'], arrow['to']) for arrow in arrows]) if arrows else 0) + 1
    lines = [[] for i in range(sent_len)]
    num_arrows_left = len(arrows)
    # Second pass: render innermost arrows (no remaining dependencies)
    # first; each rendered arrow decrements its ancestors' counters.
    while num_arrows_left > 0:
        assert len(arrows_with_deps[0])
        arrow_index = arrows_with_deps[0].pop()
        arrow = arrows[arrow_index]
        src, dst, mn, mx = _start_end(arrow)
        # Check the height needed.
        height = 3
        if arrow['underset']:
            height = max(arrows[i]['height'] for i in arrow['underset']) + 1
        height = max(height, 3, len(lines[dst]) + 3)
        arrow['height'] = height
        if _do_print_debug_info:
            print('')
            print('Rendering arrow %d: "%s" -> "%s"' % (arrow_index,
                                                        arrow['from'],
                                                        arrow['to']))
            print(' height = %d' % height)
        goes_up = src > dst
        # Draw the outgoing src line.
        if lines[src] and len(lines[src]) < height:
            lines[src][-1].add('w')
        while len(lines[src]) < height - 1:
            lines[src].append(set(['e', 'w']))
        if len(lines[src]) < height:
            lines[src].append({'e'})
        lines[src][height - 1].add('n' if goes_up else 's')
        # Draw the incoming dst line.
        lines[dst].append(u'►')
        while len(lines[dst]) < height:
            lines[dst].append(set(['e', 'w']))
        lines[dst][-1] = set(['e', 's']) if goes_up else set(['e', 'n'])
        # Draw the adjoining vertical line.
        for i in range(mn + 1, mx):
            while len(lines[i]) < height - 1:
                lines[i].append(' ')
            lines[i].append(set(['n', 's']))
        # Update arrows_with_deps.
        for arr_i, arr in enumerate(arrows):
            if arrow_index in arr['underset']:
                arrows_with_deps[arr['num_deps_left']].remove(arr_i)
                arr['num_deps_left'] -= 1
                arrows_with_deps[arr['num_deps_left']].add(arr_i)
        num_arrows_left -= 1
    return render_arrows(lines)
def render_arrows(lines):
    """Convert per-token direction-set lists into right-aligned strings.

    Each entry of *lines* is a list whose items are either literal
    characters or sets of compass directions ('n', 'e', 's', 'w'); the
    sets are mapped to box-drawing glyphs.  Mutates and returns *lines*.
    """
    glyphs = {'ew': u'─',
              'ns': u'│',
              'en': u'└',
              'es': u'┌',
              'enw': u'┴',
              'ensw': u'┼',
              'ens': u'├',
              'esw': u'┬'}
    width = max(len(row) for row in lines)
    for idx, row in enumerate(lines):
        chars = [glyphs[''.join(sorted(cell))] if type(cell) is set else cell
                 for cell in row]
        rendered = ''.join(reversed(chars))
        # Right-align every row against the widest one.
        lines[idx] = ' ' * (width - len(rendered)) + rendered
    return lines
def render_span(begin, end, unidirectional=False):
    """Draw the bracket connecting token rows [begin, end).

    Returns one string per covered row; the centre row carries the '├►'
    arrowhead.  With ``unidirectional`` the left-pointing arrowheads are
    omitted.
    """
    size = end - begin
    if size == 1:
        return ['───►']
    if size == 2:
        if unidirectional:
            return [
                '──┐',
                '──┴►',
            ]
        return [
            '◄─┐',
            '◄─┴►',
        ]
    first = '──┐' if unidirectional else '◄─┐'
    last = '──┘' if unidirectional else '◄─┘'
    center = begin + size // 2
    rows = []
    for row in range(begin, end):
        if row == center:
            rows.append('  ├►')
        elif row == begin:
            rows.append(first)
        elif row == end - 1:
            rows.append(last)
        else:
            rows.append('  │')
    return rows
def tree_to_list(T):
    """Recursively convert a phrasetree ``Tree`` into [label, children] lists."""
    children = [tree_to_list(child) if isinstance(child, Tree) else child
                for child in T]
    return [T.label(), children]
def list_to_tree(L):
    """Inverse of ``tree_to_list``: rebuild a ``Tree`` from nested lists."""
    if isinstance(L, str):
        # Leaves are plain token strings.
        return L
    label, children = L[0], L[1]
    return Tree(label, [list_to_tree(child) for child in children])
def render_labeled_span(b, e, spans, labels, label, offset, unidirectional=False):
    """Append the drawing of span [b, e) plus its centred *label* in place.

    *spans* and *labels* are padded with '' from *offset* up to the span;
    the brace rows come from ``render_span`` and the label lands on the
    span's centre row.
    """
    spans += [''] * (b - offset)
    spans += render_span(b, e, unidirectional)
    center = b + (e - b) // 2
    labels += [''] * (center - offset)
    labels += [label]
    labels += [''] * (e - center - 1)
def main():
    """Smoke-test entry point: print a sample render_span bracket."""
    # arrows = [{'from': 1, 'to': 0}, {'from': 2, 'to': 1}, {'from': 2, 'to': 4}, {'from': 2, 'to': 5},
    #           {'from': 4, 'to': 3}]
    # lines = pretty_tree_horizontal(arrows)
    # print('\n'.join(lines))
    # print('\n'.join([
    #     '◄─┐',
    #     '  │',
    #     '  ├►',
    #     '  │',
    #     '◄─┘',
    # ]))
    print('\n'.join(render_span(7, 12)))


if __name__ == '__main__':
    main()
# Markdown ruling-line edge characters keyed by alignment spec:
# '<' left-align, '^' centre, '>' right-align (see markdown_table below).
left_rule = {'<': ':', '^': ':', '>': '-'}
right_rule = {'<': '-', '^': ':', '>': ':'}
def evalute_field(record, field_spec):
    """Evalute a field of a record using the type of the field_spec as a guide.

    An int spec indexes the record, a str spec names an attribute of the
    record, and anything else is called with the record.  The result is
    always stringified.

    Args:
        record: the row object being rendered.
        field_spec: int index, str attribute name, or callable.

    Returns:
        str: the extracted cell value.
    """
    spec_type = type(field_spec)
    if spec_type is int:
        value = record[field_spec]
    elif spec_type is str:
        value = getattr(record, field_spec)
    else:
        value = field_spec(record)
    return str(value)
def markdown_table(headings, records, fields=None, alignment=None, file=None):
    """Generate a Doxygen-flavor Markdown table from records.

    See https://stackoverflow.com/questions/13394140/generate-markdown-tables

    file -- Any object with a 'write' method that takes a single string
        parameter.  When None, an io.StringIO is used and the rendered
        table is returned as a string.
    records -- Iterable. Rows will be generated from this.
    fields -- List of fields for each row.  Each entry may be an integer,
        string or a function.  If the entry is an integer, it is assumed to be
        an index of each record.  If the entry is a string, it is assumed to be
        a field of each record.  If the entry is a function, it is called with
        the record and its return value is taken as the value of the field.
    headings -- List of column headings.
    alignment -- List of pairs of alignment characters.  The first of the pair
        specifies the alignment of the header (Doxygen won't respect this, but
        it might look good), the second specifies the alignment of the cells in
        the column.

    Possible alignment characters are:
        '<' = Left align
        '>' = Right align (default for cells)
        '^' = Center (default for column headings)

    Args:
        headings:
        records:
        fields: (Default value = None)
        alignment: (Default value = None)
        file: (Default value = None)

    Returns:
        str when *file* was None, otherwise None (output written to *file*).
    """
    if not file:
        file = io.StringIO()
    num_columns = len(headings)
    if not fields:
        fields = list(range(num_columns))
    assert len(headings) == num_columns
    # Compute the table cell data
    columns = [[] for i in range(num_columns)]
    for record in records:
        for i, field in enumerate(fields):
            columns[i].append(evalute_field(record, field))
    # Fill out any missing alignment characters.
    extended_align = alignment if alignment is not None else [('^', '<')]
    if len(extended_align) > num_columns:
        extended_align = extended_align[0:num_columns]
    elif len(extended_align) < num_columns:
        extended_align += [('^', '>') for i in range(num_columns - len(extended_align))]
    heading_align, cell_align = [x for x in zip(*extended_align)]
    # Column width is the max of the widest cell and the heading (min 2,
    # so the ruling '|-:|' fragments always fit).
    field_widths = [len(max(column, key=len)) if len(column) > 0 else 0
                    for column in columns]
    heading_widths = [max(len(head), 2) for head in headings]
    column_widths = [max(x) for x in zip(field_widths, heading_widths)]
    # Build the three str.format templates: heading row, data row, ruling.
    _ = ' | '.join(['{:' + a + str(w) + '}'
                    for a, w in zip(heading_align, column_widths)])
    heading_template = '| ' + _ + ' |'
    _ = ' | '.join(['{:' + a + str(w) + '}'
                    for a, w in zip(cell_align, column_widths)])
    row_template = '| ' + _ + ' |'
    _ = ' | '.join([left_rule[a] + '-' * (w - 2) + right_rule[a]
                    for a, w in zip(cell_align, column_widths)])
    ruling = '| ' + _ + ' |'
    file.write(heading_template.format(*headings).rstrip() + '\n')
    file.write(ruling.rstrip() + '\n')
    for row in zip(*columns):
        file.write(row_template.format(*row).rstrip() + '\n')
    # Only return the text when we created the buffer ourselves.
    if isinstance(file, io.StringIO):
        text = file.getvalue()
        file.close()
        return text
| hankcs/HanLP | plugins/hanlp_common/hanlp_common/visualization.py | Python | apache-2.0 | 10,303 |
from ..remote import RemoteModel
class SnmpMibRemote(RemoteModel):
    """Remote model describing one SNMP MIB record in NetMRI.

    All attributes are strings supplied by the remote API:

    |  ``id:`` none
    |  ``attribute type:`` string

    |  ``mib:`` none
    |  ``attribute type:`` string

    |  ``version:`` none
    |  ``attribute type:`` string

    |  ``source:`` none
    |  ``attribute type:`` string

    |  ``vendor:`` none
    |  ``attribute type:`` string

    """

    # Attribute names the RemoteModel base exposes for this record type.
    properties = ("id",
                  "mib",
                  "version",
                  "source",
                  "vendor",
                  )
| infobloxopen/infoblox-netmri | infoblox_netmri/api/remote/models/snmp_mib_remote.py | Python | apache-2.0 | 532 |
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""``tornado.web`` provides a simple web framework with asynchronous
features that allow it to scale to large numbers of open connections,
making it ideal for `long polling
<http://en.wikipedia.org/wiki/Push_technology#Long_polling>`_.
Here is a simple "Hello, world" example app:
.. testcode::
import tornado.ioloop
import tornado.web
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write("Hello, world")
if __name__ == "__main__":
application = tornado.web.Application([
(r"/", MainHandler),
])
application.listen(8888)
tornado.ioloop.IOLoop.current().start()
.. testoutput::
:hide:
See the :doc:`guide` for additional information.
Thread-safety notes
-------------------
In general, methods on `RequestHandler` and elsewhere in Tornado are
not thread-safe. In particular, methods such as
`~RequestHandler.write()`, `~RequestHandler.finish()`, and
`~RequestHandler.flush()` must only be called from the main thread. If
you use multiple threads it is important to use `.IOLoop.add_callback`
to transfer control back to the main thread before finishing the
request, or to limit your use of other threads to
`.IOLoop.run_in_executor` and ensure that your callbacks running in
the executor do not refer to Tornado objects.
"""
import base64
import binascii
import datetime
import email.utils
import functools
import gzip
import hashlib
import hmac
import http.cookies
from inspect import isclass
from io import BytesIO
import mimetypes
import numbers
import os.path
import re
import sys
import threading
import time
import tornado
import traceback
import types
import urllib.parse
from urllib.parse import urlencode
from tornado.concurrent import Future, future_set_result_unless_cancelled
from tornado import escape
from tornado import gen
from tornado.httpserver import HTTPServer
from tornado import httputil
from tornado import iostream
import tornado.locale
from tornado import locale
from tornado.log import access_log, app_log, gen_log
from tornado import template
from tornado.escape import utf8, _unicode
from tornado.routing import (
AnyMatches,
DefaultHostMatches,
HostMatches,
ReversibleRouter,
Rule,
ReversibleRuleRouter,
URLSpec,
_RuleList,
)
from tornado.util import ObjectDict, unicode_type, _websocket_mask
url = URLSpec
from typing import (
Dict,
Any,
Union,
Optional,
Awaitable,
Tuple,
List,
Callable,
Iterable,
Generator,
Type,
cast,
overload,
)
from types import TracebackType
import typing
if typing.TYPE_CHECKING:
from typing import Set # noqa: F401
# The following types are accepted by RequestHandler.set_header
# and related methods.
_HeaderTypes = Union[bytes, unicode_type, int, numbers.Integral, datetime.datetime]
_CookieSecretTypes = Union[str, bytes, Dict[int, str], Dict[int, bytes]]
MIN_SUPPORTED_SIGNED_VALUE_VERSION = 1
"""The oldest signed value version supported by this version of Tornado.
Signed values older than this version cannot be decoded.
.. versionadded:: 3.2.1
"""
MAX_SUPPORTED_SIGNED_VALUE_VERSION = 2
"""The newest signed value version supported by this version of Tornado.
Signed values newer than this version cannot be decoded.
.. versionadded:: 3.2.1
"""
DEFAULT_SIGNED_VALUE_VERSION = 2
"""The signed value version produced by `.RequestHandler.create_signed_value`.
May be overridden by passing a ``version`` keyword argument.
.. versionadded:: 3.2.1
"""
DEFAULT_SIGNED_VALUE_MIN_VERSION = 1
"""The oldest signed value accepted by `.RequestHandler.get_secure_cookie`.
May be overridden by passing a ``min_version`` keyword argument.
.. versionadded:: 3.2.1
"""
class _ArgDefaultMarker:
    # Sentinel type: the singleton _ARG_DEFAULT below marks "no default
    # supplied" so that an explicit default of None stays distinguishable
    # in get_argument() and friends.
    pass


_ARG_DEFAULT = _ArgDefaultMarker()
class RequestHandler(object):
"""Base class for HTTP request handlers.
Subclasses must define at least one of the methods defined in the
"Entry points" section below.
Applications should not construct `RequestHandler` objects
directly and subclasses should not override ``__init__`` (override
`~RequestHandler.initialize` instead).
"""
SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PATCH", "PUT", "OPTIONS")
_template_loaders = {} # type: Dict[str, template.BaseLoader]
_template_loader_lock = threading.Lock()
_remove_control_chars_regex = re.compile(r"[\x00-\x08\x0e-\x1f]")
_stream_request_body = False
# Will be set in _execute.
_transforms = None # type: List[OutputTransform]
path_args = None # type: List[str]
path_kwargs = None # type: Dict[str, str]
    def __init__(
        self,
        application: "Application",
        request: httputil.HTTPServerRequest,
        **kwargs: Any
    ) -> None:
        # Subclasses must not override __init__; per-request setup belongs
        # in initialize(), which receives **kwargs from the URLSpec.
        super(RequestHandler, self).__init__()

        self.application = application
        self.request = request
        self._headers_written = False
        self._finished = False
        self._auto_finish = True
        self._prepared_future = None
        # Application ui_methods exposed to templates, bound to this handler.
        self.ui = ObjectDict(
            (n, self._ui_method(m)) for n, m in application.ui_methods.items()
        )
        # UIModules are available as both `modules` and `_tt_modules` in the
        # template namespace.  Historically only `modules` was available
        # but could be clobbered by user additions to the namespace.
        # The template {% module %} directive looks in `_tt_modules` to avoid
        # possible conflicts.
        self.ui["_tt_modules"] = _UIModuleNamespace(self, application.ui_modules)
        self.ui["modules"] = self.ui["_tt_modules"]
        self.clear()  # establish baseline headers/status/write buffer
        assert self.request.connection is not None
        # TODO: need to add set_close_callback to HTTPConnection interface
        self.request.connection.set_close_callback(  # type: ignore
            self.on_connection_close
        )
        self.initialize(**kwargs)  # type: ignore
    def _initialize(self) -> None:
        # Default no-op implementation; see the `initialize` alias and its
        # documentation string below.
        pass

    initialize = _initialize  # type: Callable[..., None]
    """Hook for subclass initialization. Called for each request.

    A dictionary passed as the third argument of a ``URLSpec`` will be
    supplied as keyword arguments to ``initialize()``.

    Example::

        class ProfileHandler(RequestHandler):
            def initialize(self, database):
                self.database = database

            def get(self, username):
                ...

        app = Application([
            (r'/user/(.*)', ProfileHandler, dict(database=database)),
        ])
    """
    @property
    def settings(self) -> Dict[str, Any]:
        """An alias for `self.application.settings <Application.settings>`."""
        # Convenience accessor shared by all handlers of this application.
        return self.application.settings
    def _unimplemented_method(self, *args: str, **kwargs: str) -> None:
        # Default verb handler: 405 Method Not Allowed until a subclass
        # overrides the corresponding entry point below.
        raise HTTPError(405)

    head = _unimplemented_method  # type: Callable[..., Optional[Awaitable[None]]]
    get = _unimplemented_method  # type: Callable[..., Optional[Awaitable[None]]]
    post = _unimplemented_method  # type: Callable[..., Optional[Awaitable[None]]]
    delete = _unimplemented_method  # type: Callable[..., Optional[Awaitable[None]]]
    patch = _unimplemented_method  # type: Callable[..., Optional[Awaitable[None]]]
    put = _unimplemented_method  # type: Callable[..., Optional[Awaitable[None]]]
    options = _unimplemented_method  # type: Callable[..., Optional[Awaitable[None]]]
    def prepare(self) -> Optional[Awaitable[None]]:
        """Called at the beginning of a request before `get`/`post`/etc.

        Override this method to perform common initialization regardless
        of the request method.

        Asynchronous support: Use ``async def`` or decorate this method with
        `.gen.coroutine` to make it asynchronous.
        If this method returns an ``Awaitable`` execution will not proceed
        until the ``Awaitable`` is done.

        .. versionadded:: 3.1
           Asynchronous support.
        """
        pass  # default: no-op
    def on_finish(self) -> None:
        """Called after the end of a request.

        Override this method to perform cleanup, logging, etc.
        This method is a counterpart to `prepare`.  ``on_finish`` may
        not produce any output, as it is called after the response
        has been sent to the client.
        """
        pass  # default: no-op
    def on_connection_close(self) -> None:
        """Called in async handlers if the client closed the connection.

        Override this to clean up resources associated with
        long-lived connections.  Note that this method is called only if
        the connection was closed during asynchronous processing; if you
        need to do cleanup after every request override `on_finish`
        instead.

        Proxies may keep a connection open for a time (perhaps
        indefinitely) after the client has gone away, so this method
        may not be called promptly after the end user closes their
        connection.
        """
        if _has_stream_request_body(self.__class__):
            # Unblock a pending streaming-body read so the handler sees
            # the disconnect instead of waiting forever.
            if not self.request._body_future.done():
                self.request._body_future.set_exception(iostream.StreamClosedError())
                # Retrieve the exception immediately so it is not later
                # reported as never-retrieved.
                self.request._body_future.exception()
    def clear(self) -> None:
        """Resets all headers and content for this response."""
        # Baseline headers for every response; subclasses extend them in
        # set_default_headers() below.
        self._headers = httputil.HTTPHeaders(
            {
                "Server": "TornadoServer/%s" % tornado.version,
                "Content-Type": "text/html; charset=UTF-8",
                "Date": httputil.format_timestamp(time.time()),
            }
        )
        self.set_default_headers()
        self._write_buffer = []  # type: List[bytes]
        self._status_code = 200
        self._reason = httputil.responses[200]
    def set_default_headers(self) -> None:
        """Override this to set HTTP headers at the beginning of the request.

        For example, this is the place to set a custom ``Server`` header.
        Note that setting such headers in the normal flow of request
        processing may not do what you want, since headers may be reset
        during error handling.
        """
        pass  # default: no additional headers
def set_status(self, status_code: int, reason: str = None) -> None:
"""Sets the status code for our response.
:arg int status_code: Response status code.
:arg str reason: Human-readable reason phrase describing the status
code. If ``None``, it will be filled in from
`http.client.responses` or "Unknown".
.. versionchanged:: 5.0
No longer validates that the response code is in
`http.client.responses`.
"""
self._status_code = status_code
if reason is not None:
self._reason = escape.native_str(reason)
else:
self._reason = httputil.responses.get(status_code, "Unknown")
    def get_status(self) -> int:
        """Returns the status code for our response.

        This is the value most recently passed to `set_status` (200 from
        `clear` until then).
        """
        return self._status_code
def set_header(self, name: str, value: _HeaderTypes) -> None:
"""Sets the given response header name and value.
All header values are converted to strings (`datetime` objects
are formatted according to the HTTP specification for the
``Date`` header).
"""
self._headers[name] = self._convert_header_value(value)
def add_header(self, name: str, value: _HeaderTypes) -> None:
"""Adds the given response header and value.
Unlike `set_header`, `add_header` may be called multiple times
to return multiple values for the same header.
"""
self._headers.add(name, self._convert_header_value(value))
def clear_header(self, name: str) -> None:
"""Clears an outgoing header, undoing a previous `set_header` call.
Note that this method does not apply to multi-valued headers
set by `add_header`.
"""
if name in self._headers:
del self._headers[name]
_INVALID_HEADER_CHAR_RE = re.compile(r"[\x00-\x1f]")
def _convert_header_value(self, value: _HeaderTypes) -> str:
# Convert the input value to a str. This type check is a bit
# subtle: The bytes case only executes on python 3, and the
# unicode case only executes on python 2, because the other
# cases are covered by the first match for str.
if isinstance(value, str):
retval = value
elif isinstance(value, bytes): # py3
# Non-ascii characters in headers are not well supported,
# but if you pass bytes, use latin1 so they pass through as-is.
retval = value.decode("latin1")
elif isinstance(value, unicode_type): # py2
# TODO: This is inconsistent with the use of latin1 above,
# but it's been that way for a long time. Should it change?
retval = escape.utf8(value)
elif isinstance(value, numbers.Integral):
# return immediately since we know the converted value will be safe
return str(value)
elif isinstance(value, datetime.datetime):
return httputil.format_timestamp(value)
else:
raise TypeError("Unsupported header value %r" % value)
# If \n is allowed into the header, it is possible to inject
# additional headers or split the request.
if RequestHandler._INVALID_HEADER_CHAR_RE.search(retval):
raise ValueError("Unsafe header value %r", retval)
return retval
    # Typing overloads: the return is Optional[str] only when an explicit
    # default of None is supplied; a missing argument with no default
    # raises rather than returning None.
    @overload
    def get_argument(self, name: str, default: str, strip: bool = True) -> str:
        pass

    @overload  # noqa: F811
    def get_argument(
        self, name: str, default: _ArgDefaultMarker = _ARG_DEFAULT, strip: bool = True
    ) -> str:
        pass

    @overload  # noqa: F811
    def get_argument(
        self, name: str, default: None, strip: bool = True
    ) -> Optional[str]:
        pass

    def get_argument(  # noqa: F811
        self,
        name: str,
        default: Union[None, str, _ArgDefaultMarker] = _ARG_DEFAULT,
        strip: bool = True,
    ) -> Optional[str]:
        """Returns the value of the argument with the given name.

        If default is not provided, the argument is considered to be
        required, and we raise a `MissingArgumentError` if it is missing.

        If the argument appears in the request more than once, we return the
        last value.

        This method searches both the query and body arguments.
        """
        return self._get_argument(name, default, self.request.arguments, strip)
    def get_arguments(self, name: str, strip: bool = True) -> List[str]:
        """Returns a list of the arguments with the given name.

        If the argument is not present, returns an empty list.

        This method searches both the query and body arguments.
        """
        # Make sure `get_arguments` isn't accidentally being called with a
        # positional argument that's assumed to be a default (like in
        # `get_argument`.)
        assert isinstance(strip, bool)

        return self._get_arguments(name, self.request.arguments, strip)
    def get_body_argument(
        self,
        name: str,
        default: Union[None, str, _ArgDefaultMarker] = _ARG_DEFAULT,
        strip: bool = True,
    ) -> Optional[str]:
        """Returns the value of the argument with the given name
        from the request body.

        If default is not provided, the argument is considered to be
        required, and we raise a `MissingArgumentError` if it is missing.

        If the argument appears in the url more than once, we return the
        last value.

        .. versionadded:: 3.2
        """
        return self._get_argument(name, default, self.request.body_arguments, strip)
    def get_body_arguments(self, name: str, strip: bool = True) -> List[str]:
        """Returns a list of the body arguments with the given name.

        If the argument is not present, returns an empty list.

        .. versionadded:: 3.2
        """
        return self._get_arguments(name, self.request.body_arguments, strip)
    def get_query_argument(
        self,
        name: str,
        default: Union[None, str, _ArgDefaultMarker] = _ARG_DEFAULT,
        strip: bool = True,
    ) -> Optional[str]:
        """Returns the value of the argument with the given name
        from the request query string.

        If default is not provided, the argument is considered to be
        required, and we raise a `MissingArgumentError` if it is missing.

        If the argument appears in the url more than once, we return the
        last value.

        .. versionadded:: 3.2
        """
        return self._get_argument(name, default, self.request.query_arguments, strip)
    def get_query_arguments(self, name: str, strip: bool = True) -> List[str]:
        """Returns a list of the query arguments with the given name.

        If the argument is not present, returns an empty list.

        .. versionadded:: 3.2
        """
        return self._get_arguments(name, self.request.query_arguments, strip)
def _get_argument(
self,
name: str,
default: Union[None, str, _ArgDefaultMarker],
source: Dict[str, List[bytes]],
strip: bool = True,
) -> Optional[str]:
args = self._get_arguments(name, source, strip=strip)
if not args:
if isinstance(default, _ArgDefaultMarker):
raise MissingArgumentError(name)
return default
return args[-1]
def _get_arguments(
self, name: str, source: Dict[str, List[bytes]], strip: bool = True
) -> List[str]:
values = []
for v in source.get(name, []):
s = self.decode_argument(v, name=name)
if isinstance(s, unicode_type):
# Get rid of any weird control chars (unless decoding gave
# us bytes, in which case leave it alone)
s = RequestHandler._remove_control_chars_regex.sub(" ", s)
if strip:
s = s.strip()
values.append(s)
return values
    def decode_argument(self, value: bytes, name: str = None) -> str:
        """Decodes an argument from the request.

        The argument has been percent-decoded and is now a byte string.
        By default, this method decodes the argument as utf-8 and returns
        a unicode string, but this may be overridden in subclasses.

        This method is used as a filter for both `get_argument()` and for
        values extracted from the url and passed to `get()`/`post()`/etc.

        The name of the argument is provided if known, but may be None
        (e.g. for unnamed groups in the url regex).
        """
        try:
            return _unicode(value)
        except UnicodeDecodeError:
            # Malformed encoding is the client's fault: reply 400 with a
            # short prefix of the offending bytes for diagnosis.
            raise HTTPError(
                400, "Invalid unicode in %s: %r" % (name or "url", value[:40])
            )
    @property
    def cookies(self) -> Dict[str, http.cookies.Morsel]:
        """An alias for
        `self.request.cookies <.httputil.HTTPServerRequest.cookies>`."""
        # Cookies sent by the client only; outgoing cookies queued via
        # set_cookie live on self._new_cookie instead.
        return self.request.cookies
def get_cookie(self, name: str, default: str = None) -> Optional[str]:
"""Returns the value of the request cookie with the given name.
If the named cookie is not present, returns ``default``.
This method only returns cookies that were present in the request.
It does not see the outgoing cookies set by `set_cookie` in this
handler.
"""
if self.request.cookies is not None and name in self.request.cookies:
return self.request.cookies[name].value
return default
    def set_cookie(
        self,
        name: str,
        value: Union[str, bytes],
        domain: str = None,
        expires: Union[float, Tuple, datetime.datetime] = None,
        path: str = "/",
        expires_days: int = None,
        **kwargs: Any
    ) -> None:
        """Sets an outgoing cookie name/value with the given options.

        Newly-set cookies are not immediately visible via `get_cookie`;
        they are not present until the next request.

        expires may be a numeric timestamp as returned by `time.time`,
        a time tuple as returned by `time.gmtime`, or a
        `datetime.datetime` object.

        Additional keyword arguments are set on the cookies.Morsel
        directly.
        See https://docs.python.org/3/library/http.cookies.html#http.cookies.Morsel
        for available attributes.
        """
        # The cookie library only accepts type str, in both python 2 and 3
        name = escape.native_str(name)
        value = escape.native_str(value)
        if re.search(r"[\x00-\x20]", name + value):
            # Don't let us accidentally inject bad stuff
            raise ValueError("Invalid cookie %r: %r" % (name, value))
        # Outgoing cookies accumulate on self._new_cookie, created lazily.
        if not hasattr(self, "_new_cookie"):
            self._new_cookie = http.cookies.SimpleCookie()
        if name in self._new_cookie:
            del self._new_cookie[name]
        self._new_cookie[name] = value
        morsel = self._new_cookie[name]
        if domain:
            morsel["domain"] = domain
        # expires_days is only a fallback when no explicit expires given.
        if expires_days is not None and not expires:
            expires = datetime.datetime.utcnow() + datetime.timedelta(days=expires_days)
        if expires:
            morsel["expires"] = httputil.format_timestamp(expires)
        if path:
            morsel["path"] = path
        for k, v in kwargs.items():
            if k == "max_age":
                # Accept pythonic max_age as an alias for 'max-age'.
                k = "max-age"

            # skip falsy values for httponly and secure flags because
            # SimpleCookie sets them regardless
            if k in ["httponly", "secure"] and not v:
                continue

            morsel[k] = v
def clear_cookie(self, name: str, path: str = "/", domain: str = None) -> None:
    """Deletes the cookie with the given name.

    The cookie protocol offers no way for the server to discover the
    path/domain a cookie was originally set with, so callers must pass
    the same ``path`` and ``domain`` that were used in `set_cookie`.
    As with `set_cookie`, the deletion only takes effect on the
    following request.
    """
    # Setting an expiry far in the past is the standard way to make the
    # browser discard a cookie.
    long_ago = datetime.datetime.utcnow() - datetime.timedelta(days=365)
    self.set_cookie(name, value="", path=path, expires=long_ago, domain=domain)
def clear_all_cookies(self, path: str = "/", domain: str = None) -> None:
    """Deletes every cookie the client sent with this request.

    See `clear_cookie` for the meaning of the ``path`` and ``domain``
    arguments; like `set_cookie`, the effect is only visible on the
    following request.

    .. versionchanged:: 3.2

       Added the ``path`` and ``domain`` parameters.
    """
    # Only cookies present on *this* request can be cleared; cookies
    # set with a different path/domain will survive.
    for cookie_name in self.request.cookies:
        self.clear_cookie(cookie_name, path=path, domain=domain)
def set_secure_cookie(
    self,
    name: str,
    value: Union[str, bytes],
    expires_days: int = 30,
    version: int = None,
    **kwargs: Any
) -> None:
    """Signs and timestamps a cookie so it cannot be forged.

    Requires the ``cookie_secret`` Application setting: a long, random
    sequence of bytes used as the HMAC secret.  Read such cookies back
    with `get_secure_cookie()`.

    ``expires_days`` controls the lifetime of the cookie in the
    browser only; it is independent of ``max_age_days`` in
    `get_secure_cookie`.  Values may be arbitrary bytes, not just
    unicode strings.  As with `set_cookie`, the new cookie is not
    visible until the following request.

    .. versionchanged:: 3.2.1

       Added the ``version`` argument.  Introduced cookie version 2
       and made it the default.
    """
    # Sign first, then delegate storage to the plain-cookie machinery.
    signed = self.create_signed_value(name, value, version=version)
    self.set_cookie(name, signed, expires_days=expires_days, **kwargs)
def create_signed_value(
    self, name: str, value: Union[str, bytes], version: int = None
) -> bytes:
    """Signs and timestamps a string so it cannot be forged.

    Normally used via set_secure_cookie, but provided as a separate
    method for non-cookie uses.  To decode a value not stored
    as a cookie use the optional value argument to get_secure_cookie.

    .. versionchanged:: 3.2.1

       Added the ``version`` argument.  Introduced cookie version 2
       and made it the default.
    """
    self.require_setting("cookie_secret", "secure cookies")
    secret = self.application.settings["cookie_secret"]
    key_version = None
    # A dict secret enables key rotation: the "key_version" application
    # setting selects which entry signs newly created values.
    if isinstance(secret, dict):
        if self.application.settings.get("key_version") is None:
            raise Exception("key_version setting must be used for secret_key dicts")
        key_version = self.application.settings["key_version"]
    # Delegates to the module-level create_signed_value helper.
    return create_signed_value(
        secret, name, value, version=version, key_version=key_version
    )
def get_secure_cookie(
    self,
    name: str,
    value: str = None,
    max_age_days: int = 31,
    min_version: int = None,
) -> Optional[bytes]:
    """Returns the given signed cookie if it validates, or None.

    The decoded cookie value is returned as a byte string (unlike
    `get_cookie`).

    Similar to `get_cookie`, this method only returns cookies that
    were present in the request. It does not see outgoing cookies set by
    `set_secure_cookie` in this handler.

    .. versionchanged:: 3.2.1

       Added the ``min_version`` argument.  Introduced cookie version 2;
       both versions 1 and 2 are accepted by default.
    """
    self.require_setting("cookie_secret", "secure cookies")
    if value is None:
        value = self.get_cookie(name)
    # decode_signed_value returns None on any validation failure
    # (bad signature, expired per max_age_days, version too old).
    return decode_signed_value(
        self.application.settings["cookie_secret"],
        name,
        value,
        max_age_days=max_age_days,
        min_version=min_version,
    )
def get_secure_cookie_key_version(
    self, name: str, value: str = None
) -> Optional[int]:
    """Returns the signing key version of the secure cookie.

    The version is returned as int.  Returns None when the named cookie
    was not present in the request and no explicit ``value`` was given.
    """
    self.require_setting("cookie_secret", "secure cookies")
    if value is None:
        value = self.get_cookie(name)
    if value is None:
        # No such cookie was sent with this request.
        return None
    return get_signature_key_version(value)
def redirect(self, url: str, permanent: bool = False, status: int = None) -> None:
    """Sends a redirect to the given (optionally relative) URL.

    When ``status`` is supplied it is used as the HTTP status code;
    otherwise 301 (permanent) or 302 (temporary) is chosen based on
    the ``permanent`` argument.  The default is a temporary 302.
    """
    if self._headers_written:
        raise Exception("Cannot redirect after headers have been written")
    if status is not None:
        # A caller-supplied status must be a redirect code.
        assert isinstance(status, int) and 300 <= status <= 399
    else:
        status = 301 if permanent else 302
    self.set_status(status)
    self.set_header("Location", utf8(url))
    self.finish()
def write(self, chunk: Union[str, bytes, dict]) -> None:
    """Writes the given chunk to the output buffer.

    To write the output to the network, use the `flush()` method below.

    If the given chunk is a dictionary, we write it as JSON and set
    the Content-Type of the response to be ``application/json``.
    (if you want to send JSON as a different ``Content-Type``, call
    ``set_header`` *after* calling ``write()``).

    Note that lists are not converted to JSON because of a potential
    cross-site security vulnerability. All JSON output should be
    wrapped in a dictionary. More details at
    http://haacked.com/archive/2009/06/25/json-hijacking.aspx/ and
    https://github.com/facebook/tornado/issues/1009
    """
    if self._finished:
        raise RuntimeError("Cannot write() after finish()")
    if not isinstance(chunk, (bytes, unicode_type, dict)):
        message = "write() only accepts bytes, unicode, and dict objects"
        if isinstance(chunk, list):
            message += (
                ". Lists not accepted for security reasons; see "
                + "http://www.tornadoweb.org/en/stable/web.html#tornado.web.RequestHandler.write"  # noqa: E501
            )
        raise TypeError(message)
    if isinstance(chunk, dict):
        chunk = escape.json_encode(chunk)
        self.set_header("Content-Type", "application/json; charset=UTF-8")
    # Everything is buffered as UTF-8 bytes until flush()/finish().
    chunk = utf8(chunk)
    self._write_buffer.append(chunk)
def render(self, template_name: str, **kwargs: Any) -> "Future[None]":
    """Renders the template with the given arguments as the response.

    ``render()`` calls ``finish()``, so no other output methods can be called
    after it.

    Returns a `.Future` with the same semantics as the one returned by
    `finish`.  Awaiting this `.Future` is optional.

    .. versionchanged:: 5.1

       Now returns a `.Future` instead of ``None``.
    """
    if self._finished:
        raise RuntimeError("Cannot render() after finish()")
    html = self.render_string(template_name, **kwargs)
    # Insert the additional JS and CSS added by the modules on the page
    js_embed = []
    js_files = []
    css_embed = []
    css_files = []
    html_heads = []
    html_bodies = []
    # Collect contributions from every UI module the template used.
    for module in getattr(self, "_active_modules", {}).values():
        embed_part = module.embedded_javascript()
        if embed_part:
            js_embed.append(utf8(embed_part))
        file_part = module.javascript_files()
        if file_part:
            # A module may return a single path or a list of paths.
            if isinstance(file_part, (unicode_type, bytes)):
                js_files.append(_unicode(file_part))
            else:
                js_files.extend(file_part)
        embed_part = module.embedded_css()
        if embed_part:
            css_embed.append(utf8(embed_part))
        file_part = module.css_files()
        if file_part:
            if isinstance(file_part, (unicode_type, bytes)):
                css_files.append(_unicode(file_part))
            else:
                css_files.extend(file_part)
        head_part = module.html_head()
        if head_part:
            html_heads.append(utf8(head_part))
        body_part = module.html_body()
        if body_part:
            html_bodies.append(utf8(body_part))
    # Splice each collected fragment just before </body> (scripts) or
    # </head> (styles).  rindex/index raise if the tag is missing, so a
    # template must contain those tags for modules to be usable.
    if js_files:
        # Maintain order of JavaScript files given by modules
        js = self.render_linked_js(js_files)
        sloc = html.rindex(b"</body>")
        html = html[:sloc] + utf8(js) + b"\n" + html[sloc:]
    if js_embed:
        js_bytes = self.render_embed_js(js_embed)
        sloc = html.rindex(b"</body>")
        html = html[:sloc] + js_bytes + b"\n" + html[sloc:]
    if css_files:
        css = self.render_linked_css(css_files)
        hloc = html.index(b"</head>")
        html = html[:hloc] + utf8(css) + b"\n" + html[hloc:]
    if css_embed:
        css_bytes = self.render_embed_css(css_embed)
        hloc = html.index(b"</head>")
        html = html[:hloc] + css_bytes + b"\n" + html[hloc:]
    if html_heads:
        hloc = html.index(b"</head>")
        html = html[:hloc] + b"".join(html_heads) + b"\n" + html[hloc:]
    if html_bodies:
        hloc = html.index(b"</body>")
        html = html[:hloc] + b"".join(html_bodies) + b"\n" + html[hloc:]
    return self.finish(html)
def render_linked_js(self, js_files: Iterable[str]) -> str:
    """Render the ``<script>`` tags for the page's linked JavaScript.

    Relative paths are routed through `static_url`; duplicate URLs are
    dropped while preserving first-seen order.  Override this method in
    a sub-classed controller to change the output.
    """
    seen = set()  # type: Set[str]
    ordered = []
    for candidate in js_files:
        url = candidate if is_absolute(candidate) else self.static_url(candidate)
        if url not in seen:
            seen.add(url)
            ordered.append(url)
    return "".join(
        '<script src="'
        + escape.xhtml_escape(u)
        + '" type="text/javascript"></script>'
        for u in ordered
    )
def render_embed_js(self, js_embed: Iterable[bytes]) -> bytes:
    """Render the inline-JavaScript block appended to the page body.

    Override this method in a sub-classed controller to change the
    output.
    """
    opening = b'<script type="text/javascript">\n//<![CDATA[\n'
    closing = b"\n//]]>\n</script>"
    return opening + b"\n".join(js_embed) + closing
def render_linked_css(self, css_files: Iterable[str]) -> str:
    """Render the ``<link>`` tags for the page's linked stylesheets.

    Relative paths are routed through `static_url`; duplicate URLs are
    dropped while preserving first-seen order.  Override this method in
    a sub-classed controller to change the output.
    """
    seen = set()  # type: Set[str]
    ordered = []
    for candidate in css_files:
        url = candidate if is_absolute(candidate) else self.static_url(candidate)
        if url not in seen:
            seen.add(url)
            ordered.append(url)
    return "".join(
        '<link href="' + escape.xhtml_escape(u) + '" '
        'type="text/css" rel="stylesheet"/>'
        for u in ordered
    )
def render_embed_css(self, css_embed: Iterable[bytes]) -> bytes:
    """Render the inline-CSS block inserted into the page head.

    Override this method in a sub-classed controller to change the
    output.
    """
    body = b"\n".join(css_embed)
    return b'<style type="text/css">\n' + body + b"\n</style>"
def render_string(self, template_name: str, **kwargs: Any) -> bytes:
    """Generate the given template with the given arguments.

    We return the generated byte string (in utf8). To generate and
    write a template as a response, use render() above.
    """
    # If no template_path is specified, use the path of the calling file
    template_path = self.get_template_path()
    if not template_path:
        # Walk the stack until we leave this module; the first foreign
        # frame is the caller whose directory we load templates from.
        frame = sys._getframe(0)
        web_file = frame.f_code.co_filename
        while frame.f_code.co_filename == web_file:
            frame = frame.f_back
        assert frame.f_code.co_filename is not None
        template_path = os.path.dirname(frame.f_code.co_filename)
    # Template loaders are cached per path, shared across handlers;
    # guard the cache with a class-level lock.
    with RequestHandler._template_loader_lock:
        if template_path not in RequestHandler._template_loaders:
            loader = self.create_template_loader(template_path)
            RequestHandler._template_loaders[template_path] = loader
        else:
            loader = RequestHandler._template_loaders[template_path]
    t = loader.load(template_name)
    namespace = self.get_template_namespace()
    namespace.update(kwargs)
    return t.generate(**namespace)
def get_template_namespace(self) -> Dict[str, Any]:
    """Returns a dictionary to be used as the default template namespace.

    May be overridden by subclasses to add or modify values.  The
    result is combined with additional defaults in `tornado.template`
    and with keyword arguments to `render` or `render_string`.
    """
    namespace = {
        "handler": self,
        "request": self.request,
        "current_user": self.current_user,
        "locale": self.locale,
        "_": self.locale.translate,
        "pgettext": self.locale.pgettext,
        "static_url": self.static_url,
        "xsrf_form_html": self.xsrf_form_html,
        "reverse_url": self.reverse_url,
    }  # type: Dict[str, Any]
    # Application-level ui_methods/ui_modules are layered on top.
    namespace.update(self.ui)
    return namespace
def create_template_loader(self, template_path: str) -> template.BaseLoader:
    """Returns a new template loader for the given path.

    By default builds a directory-based loader on ``template_path``
    honoring the ``autoescape`` and ``template_whitespace`` application
    settings.  A ``template_loader`` application setting, if present,
    is returned as-is instead.  May be overridden by subclasses.
    """
    settings = self.application.settings
    if "template_loader" in settings:
        # An explicitly supplied loader always wins.
        return settings["template_loader"]
    loader_kwargs = {}
    if "autoescape" in settings:
        # autoescape=None means "no escaping", so only forward the
        # setting when the user actually provided one.
        loader_kwargs["autoescape"] = settings["autoescape"]
    if "template_whitespace" in settings:
        loader_kwargs["whitespace"] = settings["template_whitespace"]
    return template.Loader(template_path, **loader_kwargs)
def flush(self, include_footers: bool = False) -> "Future[None]":
    """Flushes the current output buffer to the network.

    .. versionchanged:: 4.0

       Now returns a `.Future` if no callback is given.

    .. versionchanged:: 6.0

       The ``callback`` argument was removed.
    """
    assert self.request.connection is not None
    chunk = b"".join(self._write_buffer)
    self._write_buffer = []
    if not self._headers_written:
        self._headers_written = True
        # First flush: transforms may rewrite status/headers as well as
        # the body (e.g. gzip, chunked encoding).
        for transform in self._transforms:
            assert chunk is not None
            self._status_code, self._headers, chunk = transform.transform_first_chunk(
                self._status_code, self._headers, chunk, include_footers
            )
        # Ignore the chunk and only write the headers for HEAD requests
        if self.request.method == "HEAD":
            chunk = b""
        # Finalize the cookie headers (which have been stored in a side
        # object so an outgoing cookie could be overwritten before it
        # is sent).
        if hasattr(self, "_new_cookie"):
            for cookie in self._new_cookie.values():
                self.add_header("Set-Cookie", cookie.OutputString(None))
        start_line = httputil.ResponseStartLine("", self._status_code, self._reason)
        return self.request.connection.write_headers(
            start_line, self._headers, chunk
        )
    else:
        # Subsequent flushes: headers are fixed; only body transforms run.
        for transform in self._transforms:
            chunk = transform.transform_chunk(chunk, include_footers)
        # Ignore the chunk and only write the headers for HEAD requests
        if self.request.method != "HEAD":
            return self.request.connection.write(chunk)
        else:
            # Nothing to send; return an already-resolved Future so the
            # caller can await unconditionally.
            future = Future()  # type: Future[None]
            future.set_result(None)
            return future
def finish(self, chunk: Union[str, bytes, dict] = None) -> "Future[None]":
    """Finishes this response, ending the HTTP request.

    Passing a ``chunk`` to ``finish()`` is equivalent to passing that
    chunk to ``write()`` and then calling ``finish()`` with no arguments.

    Returns a `.Future` which may optionally be awaited to track the sending
    of the response to the client. This `.Future` resolves when all the response
    data has been sent, and raises an error if the connection is closed before all
    data can be sent.

    .. versionchanged:: 5.1

       Now returns a `.Future` instead of ``None``.
    """
    if self._finished:
        raise RuntimeError("finish() called twice")
    if chunk is not None:
        self.write(chunk)
    # Automatically support ETags and add the Content-Length header if
    # we have not flushed any content yet.
    if not self._headers_written:
        if (
            self._status_code == 200
            and self.request.method in ("GET", "HEAD")
            and "Etag" not in self._headers
        ):
            self.set_etag_header()
            if self.check_etag_header():
                # Client already has this entity; drop the body.
                self._write_buffer = []
                self.set_status(304)
        if self._status_code in (204, 304) or (
            self._status_code >= 100 and self._status_code < 200
        ):
            # These statuses are defined to have no message body.
            assert not self._write_buffer, (
                "Cannot send body with %s" % self._status_code
            )
            self._clear_headers_for_304()
        elif "Content-Length" not in self._headers:
            content_length = sum(len(part) for part in self._write_buffer)
            self.set_header("Content-Length", content_length)
    assert self.request.connection is not None
    # Now that the request is finished, clear the callback we
    # set on the HTTPConnection (which would otherwise prevent the
    # garbage collection of the RequestHandler when there
    # are keepalive connections)
    self.request.connection.set_close_callback(None)  # type: ignore
    future = self.flush(include_footers=True)
    self.request.connection.finish()
    self._log()
    self._finished = True
    self.on_finish()
    self._break_cycles()
    return future
def detach(self) -> iostream.IOStream:
    """Take control of the underlying stream.

    Stops all further HTTP processing and returns the underlying
    `.IOStream`.  Intended for protocols that tunnel over an HTTP
    handshake (e.g. websockets).  Only supported with HTTP/1.1.

    .. versionadded:: 5.1
    """
    # Mark the handler finished so no further output is attempted.
    self._finished = True
    # TODO: add detach to HTTPConnection?
    connection = self.request.connection
    return connection.detach()  # type: ignore
def _break_cycles(self) -> None:
    # Break up a reference cycle between this handler and the
    # _ui_module closures to allow for faster GC on CPython.
    # Called from finish(); the handler must not render after this.
    self.ui = None  # type: ignore
def send_error(self, status_code: int = 500, **kwargs: Any) -> None:
    """Sends the given HTTP error code to the browser.

    If `flush()` has already been called, it is not possible to send
    an error, so this method will simply terminate the response.
    If output has been written but not yet flushed, it will be discarded
    and replaced with the error page.

    Override `write_error()` to customize the error page that is returned.
    Additional keyword arguments are passed through to `write_error`.
    """
    if self._headers_written:
        gen_log.error("Cannot send error response after headers written")
        if not self._finished:
            # If we get an error between writing headers and finishing,
            # we are unlikely to be able to finish due to a
            # Content-Length mismatch. Try anyway to release the
            # socket.
            try:
                self.finish()
            except Exception:
                gen_log.error("Failed to flush partial response", exc_info=True)
        return
    # Discard any buffered output and headers before building the error page.
    self.clear()
    reason = kwargs.get("reason")
    if "exc_info" in kwargs:
        exception = kwargs["exc_info"][1]
        # An HTTPError with an explicit reason overrides the default
        # reason phrase for the status code.
        if isinstance(exception, HTTPError) and exception.reason:
            reason = exception.reason
    self.set_status(status_code, reason=reason)
    try:
        self.write_error(status_code, **kwargs)
    except Exception:
        app_log.error("Uncaught exception in write_error", exc_info=True)
    if not self._finished:
        self.finish()
def write_error(self, status_code: int, **kwargs: Any) -> None:
    """Override to implement custom error pages.

    ``write_error`` may call `write`, `render`, `set_header`, etc
    to produce output as usual.

    If this error was caused by an uncaught exception (including
    HTTPError), an ``exc_info`` triple will be available as
    ``kwargs["exc_info"]``. Note that this exception may not be
    the "current" exception for purposes of methods like
    ``sys.exc_info()`` or ``traceback.format_exc``.
    """
    if self.settings.get("serve_traceback") and "exc_info" in kwargs:
        # in debug mode, try to send a traceback
        self.set_header("Content-Type", "text/plain")
        for line in traceback.format_exception(*kwargs["exc_info"]):
            self.write(line)
        self.finish()
    else:
        # Minimal HTML error page using the status line's reason phrase.
        self.finish(
            "<html><title>%(code)d: %(message)s</title>"
            "<body>%(code)d: %(message)s</body></html>"
            % {"code": status_code, "message": self._reason}
        )
@property
def locale(self) -> tornado.locale.Locale:
    """The locale for the current session.

    Determined by either `get_user_locale`, which you can override to
    set the locale based on, e.g., a user preference stored in a
    database, or `get_browser_locale`, which uses the ``Accept-Language``
    header.

    .. versionchanged: 4.1
       Added a property setter.
    """
    # Cached on first access; a user-provided locale takes precedence
    # over the Accept-Language header.
    if not hasattr(self, "_locale"):
        loc = self.get_user_locale()
        if loc is not None:
            self._locale = loc
        else:
            self._locale = self.get_browser_locale()
            assert self._locale
    return self._locale
@locale.setter
def locale(self, value: tornado.locale.Locale) -> None:
    # Allows the locale to be set explicitly (e.g. from prepare()),
    # bypassing get_user_locale/get_browser_locale.
    self._locale = value
def get_user_locale(self) -> Optional[tornado.locale.Locale]:
    """Override to determine the locale from the authenticated user.

    Returning None falls back to `get_browser_locale()`.
    Implementations should return a `tornado.locale.Locale`, most
    likely obtained via a call like ``tornado.locale.get("en")``.
    """
    # The base class has no user-specific locale information.
    return None
def get_browser_locale(self, default: str = "en_US") -> tornado.locale.Locale:
    """Determines the user's locale from ``Accept-Language`` header.

    See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.4
    """
    if "Accept-Language" in self.request.headers:
        languages = self.request.headers["Accept-Language"].split(",")
        locales = []
        for language in languages:
            # Each entry looks like "en-US" or "en-US;q=0.8".
            parts = language.strip().split(";")
            if len(parts) > 1 and parts[1].startswith("q="):
                try:
                    score = float(parts[1][2:])
                except (ValueError, TypeError):
                    score = 0.0
            else:
                # No explicit q-value means full preference.
                score = 1.0
            locales.append((parts[0], score))
        if locales:
            # Highest q-value first; locale.get picks the first
            # supported code from the ordered list.
            locales.sort(key=lambda pair: pair[1], reverse=True)
            codes = [l[0] for l in locales]
            return locale.get(*codes)
    return locale.get(default)
@property
def current_user(self) -> Any:
    """The authenticated user for this request.

    This is set in one of two ways:

    * A subclass may override `get_current_user()`, which will be called
      automatically the first time ``self.current_user`` is accessed.
      `get_current_user()` will only be called once per request,
      and is cached for future access::

          def get_current_user(self):
              user_cookie = self.get_secure_cookie("user")
              if user_cookie:
                  return json.loads(user_cookie)
              return None

    * It may be set as a normal variable, typically from an overridden
      `prepare()`::

          @gen.coroutine
          def prepare(self):
              user_id_cookie = self.get_secure_cookie("user_id")
              if user_id_cookie:
                  self.current_user = yield load_user(user_id_cookie)

    Note that `prepare()` may be a coroutine while `get_current_user()`
    may not, so the latter form is necessary if loading the user requires
    asynchronous operations.

    The user object may be any type of the application's choosing.
    """
    # Lazily computed and cached; a None result is cached too, so
    # get_current_user() runs at most once per request.
    if not hasattr(self, "_current_user"):
        self._current_user = self.get_current_user()
    return self._current_user
@current_user.setter
def current_user(self, value: Any) -> None:
    # Allows prepare() (possibly a coroutine) to set the user directly,
    # bypassing get_current_user().
    self._current_user = value
def get_current_user(self) -> Any:
    """Override to determine the current user from, e.g., a cookie.

    This method may not be a coroutine; perform asynchronous user
    loading in `prepare` instead.
    """
    # Unauthenticated by default.
    return None
def get_login_url(self) -> str:
    """Override to customize the login URL based on the request.

    By default, we use the ``login_url`` application setting.
    """
    # Fail loudly if the application never configured a login page.
    self.require_setting("login_url", "@tornado.web.authenticated")
    login_url = self.application.settings["login_url"]
    return login_url
def get_template_path(self) -> Optional[str]:
    """Override to customize template path for each handler.

    Defaults to the ``template_path`` application setting; returning
    None loads templates relative to the calling file.
    """
    settings = self.application.settings
    return settings.get("template_path")
@property
def xsrf_token(self) -> bytes:
    """The XSRF-prevention token for the current user/session.

    To prevent cross-site request forgery, we set an '_xsrf' cookie
    and include the same '_xsrf' value as an argument with all POST
    requests. If the two do not match, we reject the form submission
    as a potential forgery.

    See http://en.wikipedia.org/wiki/Cross-site_request_forgery

    This property is of type `bytes`, but it contains only ASCII
    characters. If a character string is required, there is no
    need to base64-encode it; just decode the byte string as
    UTF-8.

    .. versionchanged:: 3.2.2
       The xsrf token will now be have a random mask applied in every
       request, which makes it safe to include the token in pages
       that are compressed. See http://breachattack.com for more
       information on the issue fixed by this change. Old (version 1)
       cookies will be converted to version 2 when this method is called
       unless the ``xsrf_cookie_version`` `Application` setting is
       set to 1.

    .. versionchanged:: 4.3
       The ``xsrf_cookie_kwargs`` `Application` setting may be
       used to supply additional cookie options (which will be
       passed directly to `set_cookie`). For example,
       ``xsrf_cookie_kwargs=dict(httponly=True, secure=True)``
       will set the ``secure`` and ``httponly`` flags on the
       ``_xsrf`` cookie.
    """
    if not hasattr(self, "_xsrf_token"):
        version, token, timestamp = self._get_raw_xsrf_token()
        output_version = self.settings.get("xsrf_cookie_version", 2)
        cookie_kwargs = self.settings.get("xsrf_cookie_kwargs", {})
        if output_version == 1:
            self._xsrf_token = binascii.b2a_hex(token)
        elif output_version == 2:
            # Mask the token with fresh random bytes on every request so
            # that including it in compressed pages is safe (BREACH
            # mitigation; see the versionchanged note above).
            mask = os.urandom(4)
            self._xsrf_token = b"|".join(
                [
                    b"2",
                    binascii.b2a_hex(mask),
                    binascii.b2a_hex(_websocket_mask(mask, token)),
                    utf8(str(int(timestamp))),
                ]
            )
        else:
            # Fix: the version number was previously passed as a second
            # positional argument to ValueError instead of being
            # interpolated into the message.
            raise ValueError("unknown xsrf cookie version %d" % output_version)
        if version is None:
            # No valid cookie came in with the request; set one now.
            if self.current_user and "expires_days" not in cookie_kwargs:
                cookie_kwargs["expires_days"] = 30
            self.set_cookie("_xsrf", self._xsrf_token, **cookie_kwargs)
    return self._xsrf_token
def _get_raw_xsrf_token(self) -> Tuple[Optional[int], bytes, float]:
    """Read or generate the xsrf token in its raw form.

    The raw_xsrf_token is a tuple containing:

    * version: the version of the cookie from which this token was read,
      or None if we generated a new token in this request.
    * token: the raw token data; random (non-ascii) bytes.
    * timestamp: the time this token was generated (will not be accurate
      for version 1 cookies)
    """
    # Cached per request so the cookie is decoded (or generated) once.
    if not hasattr(self, "_raw_xsrf_token"):
        cookie = self.get_cookie("_xsrf")
        if cookie:
            version, token, timestamp = self._decode_xsrf_token(cookie)
        else:
            version, token, timestamp = None, None, None
        if token is None:
            # Missing or undecodable cookie: mint a new random token.
            version = None
            token = os.urandom(16)
            timestamp = time.time()
        assert token is not None
        assert timestamp is not None
        self._raw_xsrf_token = (version, token, timestamp)
    return self._raw_xsrf_token
def _decode_xsrf_token(
    self, cookie: str
) -> Tuple[Optional[int], Optional[bytes], Optional[float]]:
    """Convert a cookie string into a the tuple form returned by
    _get_raw_xsrf_token.
    """
    try:
        # Version-2 cookies start with an explicit "<digits>|" prefix.
        m = _signed_value_version_re.match(utf8(cookie))
        if m:
            version = int(m.group(1))
            if version == 2:
                # Format: 2|<hex mask>|<hex masked token>|<timestamp>
                _, mask_str, masked_token, timestamp_str = cookie.split("|")
                mask = binascii.a2b_hex(utf8(mask_str))
                token = _websocket_mask(mask, binascii.a2b_hex(utf8(masked_token)))
                timestamp = int(timestamp_str)
                return version, token, timestamp
            else:
                # Treat unknown versions as not present instead of failing.
                # (Raising here is caught by the except clause below.)
                raise Exception("Unknown xsrf cookie version")
        else:
            # No version prefix: a legacy version-1 hex token.
            version = 1
            try:
                token = binascii.a2b_hex(utf8(cookie))
            except (binascii.Error, TypeError):
                token = utf8(cookie)
            # We don't have a usable timestamp in older versions.
            timestamp = int(time.time())
            return (version, token, timestamp)
    except Exception:
        # Catch exceptions and return nothing instead of failing.
        gen_log.debug("Uncaught exception in _decode_xsrf_token", exc_info=True)
        return None, None, None
def check_xsrf_cookie(self) -> None:
    """Verifies that the ``_xsrf`` cookie matches the ``_xsrf`` argument.

    To prevent cross-site request forgery, we set an ``_xsrf``
    cookie and include the same value as a non-cookie
    field with all ``POST`` requests. If the two do not match, we
    reject the form submission as a potential forgery.

    The ``_xsrf`` value may be set as either a form field named ``_xsrf``
    or in a custom HTTP header named ``X-XSRFToken`` or ``X-CSRFToken``
    (the latter is accepted for compatibility with Django).

    See http://en.wikipedia.org/wiki/Cross-site_request_forgery

    .. versionchanged:: 3.2.2
       Added support for cookie version 2.  Both versions 1 and 2 are
       supported.
    """
    # Prior to release 1.1.1, this check was ignored if the HTTP header
    # ``X-Requested-With: XMLHTTPRequest`` was present.  This exception
    # has been shown to be insecure and has been removed.  For more
    # information please see
    # http://www.djangoproject.com/weblog/2011/feb/08/security/
    # http://weblog.rubyonrails.org/2011/2/8/csrf-protection-bypass-in-ruby-on-rails
    token = (
        self.get_argument("_xsrf", None)
        or self.request.headers.get("X-Xsrftoken")
        or self.request.headers.get("X-Csrftoken")
    )
    if not token:
        raise HTTPError(403, "'_xsrf' argument missing from POST")
    _, token, _ = self._decode_xsrf_token(token)
    _, expected_token, _ = self._get_raw_xsrf_token()
    if not token:
        raise HTTPError(403, "'_xsrf' argument has invalid format")
    # Constant-time comparison to avoid leaking the token via timing.
    if not hmac.compare_digest(utf8(token), utf8(expected_token)):
        raise HTTPError(403, "XSRF cookie does not match POST argument")
def xsrf_form_html(self) -> str:
    """An HTML ``<input/>`` element to be included with all POST forms.

    It defines the ``_xsrf`` input value, which we check on all POST
    requests to prevent cross-site request forgery.  If you have set
    the ``xsrf_cookies`` application setting, you must include this
    HTML within all of your HTML forms.

    In a template, this method should be called with ``{% module
    xsrf_form_html() %}``

    See `check_xsrf_cookie()` above for more information.
    """
    # Accessing self.xsrf_token may also set the outgoing _xsrf cookie.
    escaped = escape.xhtml_escape(self.xsrf_token)
    return '<input type="hidden" name="_xsrf" value="' + escaped + '"/>'
def static_url(self, path: str, include_host: bool = None, **kwargs: Any) -> str:
    """Returns a static URL for the given relative static file path.

    This method requires you set the ``static_path`` setting in your
    application (which specifies the root directory of your static
    files).

    This method returns a versioned url (by default appending
    ``?v=<signature>``), which allows the static files to be
    cached indefinitely. This can be disabled by passing
    ``include_version=False`` (in the default implementation;
    other static file implementations are not required to support
    this, but they may support other options).

    By default this method returns URLs relative to the current
    host, but if ``include_host`` is true the URL returned will be
    absolute. If this handler has an ``include_host`` attribute,
    that value will be used as the default for all `static_url`
    calls that do not pass ``include_host`` as a keyword argument.
    """
    self.require_setting("static_path", "static_url")
    # URL construction is delegated to the configured static handler
    # class so alternate implementations can version URLs differently.
    get_url = self.settings.get(
        "static_handler_class", StaticFileHandler
    ).make_static_url
    if include_host is None:
        # Fall back to a handler-level default when the caller did not
        # specify include_host explicitly.
        include_host = getattr(self, "include_host", False)
    if include_host:
        base = self.request.protocol + "://" + self.request.host
    else:
        base = ""
    return base + get_url(self.settings, path, **kwargs)
def require_setting(self, name: str, feature: str = "this feature") -> None:
    """Raises an exception if the given app setting is not defined."""
    # Note: a falsy setting value (e.g. empty string) counts as missing.
    if self.application.settings.get(name):
        return
    raise Exception(
        "You must define the '%s' setting in your "
        "application to use %s" % (name, feature)
    )
def reverse_url(self, name: str, *args: Any) -> str:
    """Alias for `Application.reverse_url`."""
    # Pure delegation; kept for template/handler convenience.
    app = self.application
    return app.reverse_url(name, *args)
def compute_etag(self) -> Optional[str]:
    """Computes the etag header to be used for this request.

    Defaults to a SHA-1 hash of all content written so far.  May be
    overridden to provide a custom etag implementation, or may return
    None to disable tornado's default etag support.
    """
    digest = hashlib.sha1()
    for chunk in self._write_buffer:
        digest.update(chunk)
    return '"%s"' % digest.hexdigest()
def set_etag_header(self) -> None:
    """Sets the response's Etag header using ``self.compute_etag()``.

    No header is set when ``compute_etag()`` returns ``None``.  This
    method is called automatically when the request is finished.
    """
    etag_value = self.compute_etag()
    if etag_value is None:
        return
    self.set_header("Etag", etag_value)
def check_etag_header(self) -> bool:
    """Checks the ``Etag`` header against requests's ``If-None-Match``.

    Returns ``True`` if the request's Etag matches and a 304 should be
    returned. For example::

        self.set_etag_header()
        if self.check_etag_header():
            self.set_status(304)
            return

    This method is called automatically when the request is finished,
    but may be called earlier for applications that override
    `compute_etag` and want to do an early check for ``If-None-Match``
    before completing the request.  The ``Etag`` header should be set
    (perhaps with `set_etag_header`) before calling this method.
    """
    computed_etag = utf8(self._headers.get("Etag", ""))
    # Find all weak and strong etag values from If-None-Match header
    # because RFC 7232 allows multiple etag values in a single header.
    etags = re.findall(
        br'\*|(?:W/)?"[^"]*"', utf8(self.request.headers.get("If-None-Match", ""))
    )
    if not computed_etag or not etags:
        return False
    match = False
    if etags[0] == b"*":
        # "*" matches any current representation.
        match = True
    else:
        # Use a weak comparison when comparing entity-tags.
        def val(x: bytes) -> bytes:
            # Strip the weak-validator prefix before comparing.
            return x[2:] if x.startswith(b"W/") else x

        for etag in etags:
            if val(etag) == val(computed_etag):
                match = True
                break
    return match
    async def _execute(
        self, transforms: List["OutputTransform"], *args: bytes, **kwargs: bytes
    ) -> None:
        """Executes this request with the given output transforms.

        This is the main entry point for request processing: it decodes
        path captures, enforces XSRF checks, runs ``prepare()``, waits for
        a streamed body if applicable, dispatches to the HTTP verb method
        and finishes the request.
        """
        self._transforms = transforms
        try:
            if self.request.method not in self.SUPPORTED_METHODS:
                raise HTTPError(405)
            # Path captures arrive as bytes; decode before handing them
            # to the HTTP verb method.
            self.path_args = [self.decode_argument(arg) for arg in args]
            self.path_kwargs = dict(
                (k, self.decode_argument(v, name=k)) for (k, v) in kwargs.items()
            )
            # If XSRF cookies are turned on, reject form submissions without
            # the proper cookie
            if self.request.method not in (
                "GET",
                "HEAD",
                "OPTIONS",
            ) and self.application.settings.get("xsrf_cookies"):
                self.check_xsrf_cookie()
            # prepare() may be synchronous or return an awaitable.
            result = self.prepare()
            if result is not None:
                result = await result
            if self._prepared_future is not None:
                # Tell the Application we've finished with prepare()
                # and are ready for the body to arrive.
                future_set_result_unless_cancelled(self._prepared_future, None)
            if self._finished:
                return
            if _has_stream_request_body(self.__class__):
                # In streaming mode request.body is a Future that signals
                # the body has been completely received. The Future has no
                # result; the data has been passed to self.data_received
                # instead.
                try:
                    await self.request._body_future
                except iostream.StreamClosedError:
                    return
            # Dispatch to get()/post()/etc; these may also return awaitables.
            method = getattr(self, self.request.method.lower())
            result = method(*self.path_args, **self.path_kwargs)
            if result is not None:
                result = await result
            if self._auto_finish and not self._finished:
                self.finish()
        except Exception as e:
            try:
                self._handle_request_exception(e)
            except Exception:
                app_log.error("Exception in exception handler", exc_info=True)
        finally:
            # Unset result to avoid circular references
            result = None
        if self._prepared_future is not None and not self._prepared_future.done():
            # In case we failed before setting _prepared_future, do it
            # now (to unblock the HTTP server). Note that this is not
            # in a finally block to avoid GC issues prior to Python 3.4.
            self._prepared_future.set_result(None)
def data_received(self, chunk: bytes) -> Optional[Awaitable[None]]:
"""Implement this method to handle streamed request data.
Requires the `.stream_request_body` decorator.
May be a coroutine for flow control.
"""
raise NotImplementedError()
def _log(self) -> None:
"""Logs the current request.
Sort of deprecated since this functionality was moved to the
Application, but left in place for the benefit of existing apps
that have overridden this method.
"""
self.application.log_request(self)
def _request_summary(self) -> str:
return "%s %s (%s)" % (
self.request.method,
self.request.uri,
self.request.remote_ip,
)
    def _handle_request_exception(self, e: BaseException) -> None:
        """Central dispatcher for exceptions that escape handler code:
        `Finish` ends the request quietly, `HTTPError` keeps its status
        code, anything else becomes a 500."""
        if isinstance(e, Finish):
            # Not an error; just finish the request without logging.
            if not self._finished:
                self.finish(*e.args)
            return
        try:
            self.log_exception(*sys.exc_info())
        except Exception:
            # An error here should still get a best-effort send_error()
            # to avoid leaking the connection.
            app_log.error("Error in exception logger", exc_info=True)
        if self._finished:
            # Extra errors after the request has been finished should
            # be logged, but there is no reason to continue to try and
            # send a response.
            return
        if isinstance(e, HTTPError):
            self.send_error(e.status_code, exc_info=sys.exc_info())
        else:
            self.send_error(500, exc_info=sys.exc_info())
    def log_exception(
        self,
        typ: "Optional[Type[BaseException]]",
        value: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        """Override to customize logging of uncaught exceptions.

        By default logs instances of `HTTPError` as warnings without
        stack traces (on the ``tornado.general`` logger), and all
        other exceptions as errors with stack traces (on the
        ``tornado.application`` logger).

        .. versionadded:: 3.1
        """
        if isinstance(value, HTTPError):
            if value.log_message:
                # Expected error: warning level, no traceback.
                format = "%d %s: " + value.log_message
                args = [value.status_code, self._request_summary()] + list(value.args)
                gen_log.warning(format, *args)
        else:
            # Unexpected error: full traceback at error level.
            app_log.error(  # type: ignore
                "Uncaught exception %s\n%r",
                self._request_summary(),
                self.request,
                exc_info=(typ, value, tb),
            )
def _ui_module(self, name: str, module: Type["UIModule"]) -> Callable[..., str]:
def render(*args, **kwargs) -> str: # type: ignore
if not hasattr(self, "_active_modules"):
self._active_modules = {} # type: Dict[str, UIModule]
if name not in self._active_modules:
self._active_modules[name] = module(self)
rendered = self._active_modules[name].render(*args, **kwargs)
return rendered
return render
def _ui_method(self, method: Callable[..., str]) -> Callable[..., str]:
return lambda *args, **kwargs: method(self, *args, **kwargs)
def _clear_headers_for_304(self) -> None:
# 304 responses should not contain entity headers (defined in
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec7.html#sec7.1)
# not explicitly allowed by
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5
headers = [
"Allow",
"Content-Encoding",
"Content-Language",
"Content-Length",
"Content-MD5",
"Content-Range",
"Content-Type",
"Last-Modified",
]
for h in headers:
self.clear_header(h)
def stream_request_body(cls: Type[RequestHandler]) -> Type[RequestHandler]:
    """Apply to `RequestHandler` subclasses to enable streaming body support.

    This decorator implies the following changes:

    * `.HTTPServerRequest.body` is undefined, and body arguments will not
      be included in `RequestHandler.get_argument`.
    * `RequestHandler.prepare` is called when the request headers have been
      read instead of after the entire body has been read.
    * The subclass must define a method ``data_received(self, data):``, which
      will be called zero or more times as data is available.  Note that
      if the request has an empty body, ``data_received`` may not be called.
    * ``prepare`` and ``data_received`` may return Futures (such as via
      ``@gen.coroutine``, in which case the next method will not be called
      until those futures have completed.
    * The regular HTTP method (``post``, ``put``, etc) will be called after
      the entire body has been read.

    See the `file receiver demo <https://github.com/tornadoweb/tornado/tree/master/demos/file_upload/>`_
    for example usage.
    """  # noqa: E501
    if not issubclass(cls, RequestHandler):
        # Interpolate eagerly: TypeError does not apply %-formatting to its
        # arguments, so the previous two-argument form left the message
        # unformatted ("... got %r", <cls>) when printed.
        raise TypeError("expected subclass of RequestHandler, got %r" % cls)
    cls._stream_request_body = True
    return cls
def _has_stream_request_body(cls: Type[RequestHandler]) -> bool:
    """Return True if *cls* was decorated with `stream_request_body`."""
    if not issubclass(cls, RequestHandler):
        # Interpolate eagerly: TypeError does not apply %-formatting to its
        # arguments, so the previous two-argument form left the message
        # unformatted when printed.
        raise TypeError("expected subclass of RequestHandler, got %r" % cls)
    return cls._stream_request_body
def removeslash(
    method: Callable[..., Optional[Awaitable[None]]]
) -> Callable[..., Optional[Awaitable[None]]]:
    """Use this decorator to remove trailing slashes from the request path.

    For example, a request to ``/foo/`` would redirect to ``/foo`` with this
    decorator.  Your request handler mapping should use a regular expression
    like ``r'/foo/*'`` in conjunction with using the decorator.
    """

    @functools.wraps(method)
    def wrapper(  # type: ignore
        self: "RequestHandler", *args, **kwargs
    ) -> Optional[Awaitable[None]]:
        path = self.request.path
        if not path.endswith("/"):
            return method(self, *args, **kwargs)
        if self.request.method not in ("GET", "HEAD"):
            raise HTTPError(404)
        uri = path.rstrip("/")
        if not uri:
            # Don't try to redirect '/' to ''.
            return method(self, *args, **kwargs)
        if self.request.query:
            uri = "%s?%s" % (uri, self.request.query)
        self.redirect(uri, permanent=True)
        return None

    return wrapper
def addslash(
    method: Callable[..., Optional[Awaitable[None]]]
) -> Callable[..., Optional[Awaitable[None]]]:
    """Use this decorator to add a missing trailing slash to the request path.

    For example, a request to ``/foo`` would redirect to ``/foo/`` with this
    decorator.  Your request handler mapping should use a regular expression
    like ``r'/foo/?'`` in conjunction with using the decorator.
    """

    @functools.wraps(method)
    def wrapper(  # type: ignore
        self: "RequestHandler", *args, **kwargs
    ) -> Optional[Awaitable[None]]:
        if self.request.path.endswith("/"):
            return method(self, *args, **kwargs)
        if self.request.method not in ("GET", "HEAD"):
            raise HTTPError(404)
        uri = self.request.path + "/"
        if self.request.query:
            uri = "%s?%s" % (uri, self.request.query)
        self.redirect(uri, permanent=True)
        return None

    return wrapper
class _ApplicationRouter(ReversibleRuleRouter):
    """Routing implementation used internally by `Application`.

    Provides a binding between `Application` and `RequestHandler`.
    This implementation extends `~.routing.ReversibleRuleRouter` in two ways:

    * a `RequestHandler` subclass may be used directly as a
      `~.routing.Rule` target, and
    * a list/tuple of rules may be used as a `~.routing.Rule` target, in
      which case ``process_rule`` substitutes a nested
      `_ApplicationRouter` instance for the list.
    """

    def __init__(self, application: "Application", rules: _RuleList = None) -> None:
        assert isinstance(application, Application)
        self.application = application
        super().__init__(rules)

    def process_rule(self, rule: Rule) -> Rule:
        rule = super().process_rule(rule)
        target = rule.target
        if isinstance(target, (list, tuple)):
            # Nested rule lists become nested routers.
            rule.target = _ApplicationRouter(self.application, target)  # type: ignore
        return rule

    def get_target_delegate(
        self, target: Any, request: httputil.HTTPServerRequest, **target_params: Any
    ) -> Optional[httputil.HTTPMessageDelegate]:
        if isclass(target) and issubclass(target, RequestHandler):
            # Handler classes are served through the Application so that
            # handler kwargs and path args are wired up consistently.
            return self.application.get_handler_delegate(
                request, target, **target_params
            )
        return super().get_target_delegate(target, request, **target_params)
class Application(ReversibleRouter):
    r"""A collection of request handlers that make up a web application.

    Instances of this class are callable and can be passed directly to
    HTTPServer to serve the application::

        application = web.Application([
            (r"/", MainPageHandler),
        ])
        http_server = httpserver.HTTPServer(application)
        http_server.listen(8080)
        ioloop.IOLoop.current().start()

    The constructor for this class takes in a list of `~.routing.Rule`
    objects or tuples of values corresponding to the arguments of
    `~.routing.Rule` constructor: ``(matcher, target, [target_kwargs], [name])``,
    the values in square brackets being optional. The default matcher is
    `~.routing.PathMatches`, so ``(regexp, target)`` tuples can also be used
    instead of ``(PathMatches(regexp), target)``.

    A common routing target is a `RequestHandler` subclass, but you can also
    use lists of rules as a target, which create a nested routing
    configuration::

        application = web.Application([
            (HostMatches("example.com"), [
                (r"/", MainPageHandler),
                (r"/feed", FeedHandler),
            ]),
        ])

    In addition to this you can use nested `~.routing.Router` instances,
    `~.httputil.HTTPMessageDelegate` subclasses and callables as routing
    targets (see `~.routing` module docs for more information).

    When we receive requests, we iterate over the list in order and
    instantiate an instance of the first request class whose regexp
    matches the request path. The request class can be specified as
    either a class object or a (fully-qualified) name.

    A dictionary may be passed as the third element (``target_kwargs``)
    of the tuple, which will be used as keyword arguments to the handler's
    constructor and `~RequestHandler.initialize` method. This pattern
    is used for the `StaticFileHandler` in this example (note that a
    `StaticFileHandler` can be installed automatically with the
    static_path setting described below)::

        application = web.Application([
            (r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
        ])

    We support virtual hosts with the `add_handlers` method, which takes in
    a host regular expression as the first argument::

        application.add_handlers(r"www\.myhost\.com", [
            (r"/article/([0-9]+)", ArticleHandler),
        ])

    If there's no match for the current request's host, then ``default_host``
    parameter value is matched against host regular expressions.

    .. warning::

       Applications that do not use TLS may be vulnerable to :ref:`DNS
       rebinding <dnsrebinding>` attacks. This attack is especially
       relevant to applications that only listen on ``127.0.0.1`` or
       other private networks. Appropriate host patterns must be used
       (instead of the default of ``r'.*'``) to prevent this risk. The
       ``default_host`` argument must not be used in applications that
       may be vulnerable to DNS rebinding.

    You can serve static files by sending the ``static_path`` setting
    as a keyword argument. We will serve those files from the
    ``/static/`` URI (this is configurable with the
    ``static_url_prefix`` setting), and we will serve ``/favicon.ico``
    and ``/robots.txt`` from the same directory. A custom subclass of
    `StaticFileHandler` can be specified with the
    ``static_handler_class`` setting.

    .. versionchanged:: 4.5
       Integration with the new `tornado.routing` module.
    """

    def __init__(
        self,
        handlers: _RuleList = None,
        default_host: str = None,
        transforms: List[Type["OutputTransform"]] = None,
        **settings: Any
    ) -> None:
        if transforms is None:
            self.transforms = []  # type: List[Type[OutputTransform]]
            # compress_response (and its deprecated alias gzip) enables the
            # default output transform.
            if settings.get("compress_response") or settings.get("gzip"):
                self.transforms.append(GZipContentEncoding)
        else:
            self.transforms = transforms
        self.default_host = default_host
        self.settings = settings
        # UI modules/methods available to every template by default.
        self.ui_modules = {
            "linkify": _linkify,
            "xsrf_form_html": _xsrf_form_html,
            "Template": TemplateModule,
        }
        self.ui_methods = {}  # type: Dict[str, Callable[..., str]]
        self._load_ui_modules(settings.get("ui_modules", {}))
        self._load_ui_methods(settings.get("ui_methods", {}))
        if self.settings.get("static_path"):
            # Install handlers for static files, /favicon.ico and
            # /robots.txt ahead of user-supplied routes (insert at 0)
            # so they take precedence.
            path = self.settings["static_path"]
            handlers = list(handlers or [])
            static_url_prefix = settings.get("static_url_prefix", "/static/")
            static_handler_class = settings.get(
                "static_handler_class", StaticFileHandler
            )
            static_handler_args = settings.get("static_handler_args", {})
            static_handler_args["path"] = path
            for pattern in [
                re.escape(static_url_prefix) + r"(.*)",
                r"/(favicon\.ico)",
                r"/(robots\.txt)",
            ]:
                handlers.insert(0, (pattern, static_handler_class, static_handler_args))
        if self.settings.get("debug"):
            # Debug mode implies several development-friendly defaults;
            # each may still be overridden individually.
            self.settings.setdefault("autoreload", True)
            self.settings.setdefault("compiled_template_cache", False)
            self.settings.setdefault("static_hash_cache", False)
            self.settings.setdefault("serve_traceback", True)
        # The wildcard router holds the user rules; the default router
        # wraps it so host-specific rules (add_handlers) can be inserted
        # ahead of the catch-all.
        self.wildcard_router = _ApplicationRouter(self, handlers)
        self.default_router = _ApplicationRouter(
            self, [Rule(AnyMatches(), self.wildcard_router)]
        )
        # Automatically reload modified modules
        if self.settings.get("autoreload"):
            from tornado import autoreload
            autoreload.start()

    def listen(self, port: int, address: str = "", **kwargs: Any) -> HTTPServer:
        """Starts an HTTP server for this application on the given port.

        This is a convenience alias for creating an `.HTTPServer`
        object and calling its listen method. Keyword arguments not
        supported by `HTTPServer.listen <.TCPServer.listen>` are passed to the
        `.HTTPServer` constructor. For advanced uses
        (e.g. multi-process mode), do not use this method; create an
        `.HTTPServer` and call its
        `.TCPServer.bind`/`.TCPServer.start` methods directly.

        Note that after calling this method you still need to call
        ``IOLoop.current().start()`` to start the server.

        Returns the `.HTTPServer` object.

        .. versionchanged:: 4.3
           Now returns the `.HTTPServer` object.
        """
        server = HTTPServer(self, **kwargs)
        server.listen(port, address)
        return server

    def add_handlers(self, host_pattern: str, host_handlers: _RuleList) -> None:
        """Appends the given handlers to our handler list.

        Host patterns are processed sequentially in the order they were
        added. All matching patterns will be considered.
        """
        host_matcher = HostMatches(host_pattern)
        rule = Rule(host_matcher, _ApplicationRouter(self, host_handlers))
        # Insert before the last rule so the AnyMatches catch-all stays last.
        self.default_router.rules.insert(-1, rule)
        if self.default_host is not None:
            self.wildcard_router.add_rules(
                [(DefaultHostMatches(self, host_matcher.host_pattern), host_handlers)]
            )

    def add_transform(self, transform_class: Type["OutputTransform"]) -> None:
        """Register an additional output transform applied to all responses."""
        self.transforms.append(transform_class)

    def _load_ui_methods(self, methods: Any) -> None:
        # Accepts a module, a list of containers, or a dict of callables;
        # recurses until it reaches name->callable pairs.
        if isinstance(methods, types.ModuleType):
            self._load_ui_methods(dict((n, getattr(methods, n)) for n in dir(methods)))
        elif isinstance(methods, list):
            for m in methods:
                self._load_ui_methods(m)
        else:
            for name, fn in methods.items():
                # Only public, lowercase-initial callables are exposed.
                if (
                    not name.startswith("_")
                    and hasattr(fn, "__call__")
                    and name[0].lower() == name[0]
                ):
                    self.ui_methods[name] = fn

    def _load_ui_modules(self, modules: Any) -> None:
        # Accepts a module, a list of containers, or a dict of classes;
        # recurses until it reaches name->class pairs.
        if isinstance(modules, types.ModuleType):
            self._load_ui_modules(dict((n, getattr(modules, n)) for n in dir(modules)))
        elif isinstance(modules, list):
            for m in modules:
                self._load_ui_modules(m)
        else:
            assert isinstance(modules, dict)
            for name, cls in modules.items():
                try:
                    if issubclass(cls, UIModule):
                        self.ui_modules[name] = cls
                except TypeError:
                    # issubclass raises TypeError for non-class values;
                    # silently skip them.
                    pass

    def __call__(
        self, request: httputil.HTTPServerRequest
    ) -> Optional[Awaitable[None]]:
        # Legacy HTTPServer interface
        dispatcher = self.find_handler(request)
        return dispatcher.execute()

    def find_handler(
        self, request: httputil.HTTPServerRequest, **kwargs: Any
    ) -> "_HandlerDelegate":
        # Try the configured routes first, then the application-level
        # default handler, and finally a built-in 404.
        route = self.default_router.find_handler(request)
        if route is not None:
            return cast("_HandlerDelegate", route)
        if self.settings.get("default_handler_class"):
            return self.get_handler_delegate(
                request,
                self.settings["default_handler_class"],
                self.settings.get("default_handler_args", {}),
            )
        return self.get_handler_delegate(request, ErrorHandler, {"status_code": 404})

    def get_handler_delegate(
        self,
        request: httputil.HTTPServerRequest,
        target_class: Type[RequestHandler],
        target_kwargs: Dict[str, Any] = None,
        path_args: List[bytes] = None,
        path_kwargs: Dict[str, bytes] = None,
    ) -> "_HandlerDelegate":
        """Returns `~.httputil.HTTPMessageDelegate` that can serve a request
        for application and `RequestHandler` subclass.

        :arg httputil.HTTPServerRequest request: current HTTP request.
        :arg RequestHandler target_class: a `RequestHandler` class.
        :arg dict target_kwargs: keyword arguments for ``target_class``
            constructor.
        :arg list path_args: positional arguments for ``target_class`` HTTP
            method that will be executed while handling a request (``get``,
            ``post`` or any other).
        :arg dict path_kwargs: keyword arguments for ``target_class`` HTTP
            method.
        """
        return _HandlerDelegate(
            self, request, target_class, target_kwargs, path_args, path_kwargs
        )

    def reverse_url(self, name: str, *args: Any) -> str:
        """Returns a URL path for handler named ``name``

        The handler must be added to the application as a named `URLSpec`.

        Args will be substituted for capturing groups in the `URLSpec` regex.
        They will be converted to strings if necessary, encoded as utf8,
        and url-escaped.
        """
        reversed_url = self.default_router.reverse_url(name, *args)
        if reversed_url is not None:
            return reversed_url
        raise KeyError("%s not found in named urls" % name)

    def log_request(self, handler: RequestHandler) -> None:
        """Writes a completed HTTP request to the logs.

        By default writes to the python root logger. To change
        this behavior either subclass Application and override this method,
        or pass a function in the application settings dictionary as
        ``log_function``.
        """
        if "log_function" in self.settings:
            self.settings["log_function"](handler)
            return
        # Log level scales with status class: 2xx/3xx info, 4xx warning,
        # 5xx error.
        if handler.get_status() < 400:
            log_method = access_log.info
        elif handler.get_status() < 500:
            log_method = access_log.warning
        else:
            log_method = access_log.error
        request_time = 1000.0 * handler.request.request_time()
        log_method(
            "%d %s %.2fms",
            handler.get_status(),
            handler._request_summary(),
            request_time,
        )
class _HandlerDelegate(httputil.HTTPMessageDelegate):
    """Adapts a `RequestHandler` class to the `~.httputil.HTTPMessageDelegate`
    interface used by the HTTP server: buffers (or streams) the request body
    and drives `RequestHandler._execute` once the request is complete."""

    def __init__(
        self,
        application: Application,
        request: httputil.HTTPServerRequest,
        handler_class: Type[RequestHandler],
        handler_kwargs: Optional[Dict[str, Any]],
        path_args: Optional[List[bytes]],
        path_kwargs: Optional[Dict[str, bytes]],
    ) -> None:
        self.application = application
        self.connection = request.connection
        self.request = request
        self.handler_class = handler_class
        self.handler_kwargs = handler_kwargs or {}
        self.path_args = path_args or []
        self.path_kwargs = path_kwargs or {}
        # Body chunks are accumulated here unless the handler streams.
        self.chunks = []  # type: List[bytes]
        self.stream_request_body = _has_stream_request_body(self.handler_class)

    def headers_received(
        self,
        start_line: Union[httputil.RequestStartLine, httputil.ResponseStartLine],
        headers: httputil.HTTPHeaders,
    ) -> Optional[Awaitable[None]]:
        if self.stream_request_body:
            # Streaming handlers start executing as soon as headers arrive;
            # _body_future resolves when the body has been fully received.
            self.request._body_future = Future()
            return self.execute()
        return None

    def data_received(self, data: bytes) -> Optional[Awaitable[None]]:
        if self.stream_request_body:
            # Forward each chunk directly to the handler.
            return self.handler.data_received(data)
        else:
            self.chunks.append(data)
            return None

    def finish(self) -> None:
        if self.stream_request_body:
            # Signal the handler that the body is complete.
            future_set_result_unless_cancelled(self.request._body_future, None)
        else:
            # Assemble the buffered body and only now start the handler.
            self.request.body = b"".join(self.chunks)
            self.request._parse_body()
            self.execute()

    def on_connection_close(self) -> None:
        if self.stream_request_body:
            self.handler.on_connection_close()
        else:
            # Drop the partial body buffer.
            self.chunks = None  # type: ignore

    def execute(self) -> Optional[Awaitable[None]]:
        # If template cache is disabled (usually in the debug mode),
        # re-compile templates and reload static files on every
        # request so you don't need to restart to see changes
        if not self.application.settings.get("compiled_template_cache", True):
            with RequestHandler._template_loader_lock:
                for loader in RequestHandler._template_loaders.values():
                    loader.reset()
        if not self.application.settings.get("static_hash_cache", True):
            StaticFileHandler.reset()
        self.handler = self.handler_class(
            self.application, self.request, **self.handler_kwargs
        )
        transforms = [t(self.request) for t in self.application.transforms]
        if self.stream_request_body:
            self.handler._prepared_future = Future()
        # Note that if an exception escapes handler._execute it will be
        # trapped in the Future it returns (which we are ignoring here,
        # leaving it to be logged when the Future is GC'd).
        # However, that shouldn't happen because _execute has a blanket
        # except handler, and we cannot easily access the IOLoop here to
        # call add_future (because of the requirement to remain compatible
        # with WSGI)
        fut = gen.convert_yielded(
            self.handler._execute(transforms, *self.path_args, **self.path_kwargs)
        )
        fut.add_done_callback(lambda f: f.result())
        # If we are streaming the request body, then execute() is finished
        # when the handler has prepared to receive the body. If not,
        # it doesn't matter when execute() finishes (so we return None)
        return self.handler._prepared_future
class HTTPError(Exception):
    """An exception that will turn into an HTTP error response.

    Raising an `HTTPError` is a convenient alternative to calling
    `RequestHandler.send_error` since it automatically ends the
    current function.

    To customize the response sent with an `HTTPError`, override
    `RequestHandler.write_error`.

    :arg int status_code: HTTP status code.  Must be listed in
        `httplib.responses <http.client.responses>` unless the ``reason``
        keyword argument is given.
    :arg str log_message: Message to be written to the log for this error
        (will not be shown to the user unless the `Application` is in debug
        mode).  May contain ``%s``-style placeholders, which will be filled
        in with remaining positional parameters.
    :arg str reason: Keyword-only argument.  The HTTP "reason" phrase
        to pass in the status line along with ``status_code``.  Normally
        determined automatically from ``status_code``, but can be used
        to use a non-standard numeric code.
    """

    def __init__(
        self, status_code: int = 500, log_message: str = None, *args: Any, **kwargs: Any
    ) -> None:
        self.status_code = status_code
        self.log_message = log_message
        self.args = args
        self.reason = kwargs.get("reason", None)
        # With no positional args to interpolate, escape any stray '%'
        # so later %-formatting of the message cannot blow up.
        if log_message and not args:
            self.log_message = log_message.replace("%", "%%")

    def __str__(self) -> str:
        reason = self.reason or httputil.responses.get(self.status_code, "Unknown")
        summary = "HTTP %d: %s" % (self.status_code, reason)
        if not self.log_message:
            return summary
        return "%s (%s)" % (summary, self.log_message % self.args)
class Finish(Exception):
    """An exception that ends the request without producing an error response.

    When `Finish` is raised in a `RequestHandler`, the request will
    end (calling `RequestHandler.finish` if it hasn't already been
    called), but the error-handling methods (including
    `RequestHandler.write_error`) will not be called.

    If `Finish()` was created with no arguments, the pending response
    will be sent as-is. If `Finish()` was given an argument, that
    argument will be passed to `RequestHandler.finish()`.

    This can be a more convenient way to implement custom error pages
    than overriding ``write_error`` (especially in library code)::

        if self.current_user is None:
            self.set_status(401)
            self.set_header('WWW-Authenticate', 'Basic realm="something"')
            raise Finish()

    .. versionchanged:: 4.3
       Arguments passed to ``Finish()`` will be passed on to
       `RequestHandler.finish`.
    """

    pass
class MissingArgumentError(HTTPError):
    """Exception raised by `RequestHandler.get_argument`.

    This is a subclass of `HTTPError`, so if it is uncaught a 400 response
    code will be used instead of 500 (and a stack trace will not be logged).

    .. versionadded:: 3.1
    """

    def __init__(self, arg_name: str) -> None:
        super().__init__(400, "Missing argument %s" % arg_name)
        self.arg_name = arg_name
class ErrorHandler(RequestHandler):
    """Generates an error response with ``status_code`` for all requests."""

    def initialize(self, status_code: int) -> None:
        # Record the configured code; prepare() raises it for every request.
        self.set_status(status_code)

    def prepare(self) -> None:
        raise HTTPError(self._status_code)

    def check_xsrf_cookie(self) -> None:
        # POSTs to an ErrorHandler don't actually have side effects,
        # so we don't need to check the xsrf token. This allows POSTs
        # to the wrong url to return a 404 instead of 403.
        pass
class RedirectHandler(RequestHandler):
    """Redirects the client to the given URL for all GET requests.

    You should provide the keyword argument ``url`` to the handler, e.g.::

        application = web.Application([
            (r"/oldpath", web.RedirectHandler, {"url": "/newpath"}),
        ])

    `RedirectHandler` supports regular expression substitutions.  E.g., to
    swap the first and second parts of a path while preserving the
    remainder::

        application = web.Application([
            (r"/(.*?)/(.*?)/(.*)", web.RedirectHandler, {"url": "/{1}/{0}/{2}"}),
        ])

    The final URL is formatted with `str.format` and the substrings that
    match the capturing groups.  In the above example, a request to "/a/b/c"
    would be formatted like::

        str.format("/{1}/{0}/{2}", "a", "b", "c")  # -> "/b/a/c"

    Use Python's :ref:`format string syntax <formatstrings>` to customize
    how values are substituted.

    .. versionchanged:: 4.5
       Added support for substitutions into the destination URL.

    .. versionchanged:: 5.0
       If any query arguments are present, they will be copied to the
       destination URL.
    """

    def initialize(self, url: str, permanent: bool = True) -> None:
        self._url = url
        self._permanent = permanent

    def get(self, *args: Any) -> None:
        dest = self._url.format(*args)
        query = self.request.query_arguments
        if query:
            # Carry the incoming query string over to the destination.
            # TODO: figure out typing for the next line.
            dest = httputil.url_concat(
                dest,
                list(httputil.qs_to_qsl(query)),  # type: ignore
            )
        self.redirect(dest, permanent=self._permanent)
class StaticFileHandler(RequestHandler):
"""A simple handler that can serve static content from a directory.
A `StaticFileHandler` is configured automatically if you pass the
``static_path`` keyword argument to `Application`. This handler
can be customized with the ``static_url_prefix``, ``static_handler_class``,
and ``static_handler_args`` settings.
To map an additional path to this handler for a static data directory
you would add a line to your application like::
application = web.Application([
(r"/content/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
])
The handler constructor requires a ``path`` argument, which specifies the
local root directory of the content to be served.
Note that a capture group in the regex is required to parse the value for
the ``path`` argument to the get() method (different than the constructor
argument above); see `URLSpec` for details.
To serve a file like ``index.html`` automatically when a directory is
requested, set ``static_handler_args=dict(default_filename="index.html")``
in your application settings, or add ``default_filename`` as an initializer
argument for your ``StaticFileHandler``.
To maximize the effectiveness of browser caching, this class supports
versioned urls (by default using the argument ``?v=``). If a version
is given, we instruct the browser to cache this file indefinitely.
`make_static_url` (also available as `RequestHandler.static_url`) can
be used to construct a versioned url.
This handler is intended primarily for use in development and light-duty
file serving; for heavy traffic it will be more efficient to use
a dedicated static file server (such as nginx or Apache). We support
the HTTP ``Accept-Ranges`` mechanism to return partial content (because
some browsers require this functionality to be present to seek in
HTML5 audio or video).
**Subclassing notes**
This class is designed to be extensible by subclassing, but because
of the way static urls are generated with class methods rather than
instance methods, the inheritance patterns are somewhat unusual.
Be sure to use the ``@classmethod`` decorator when overriding a
class method. Instance methods may use the attributes ``self.path``
``self.absolute_path``, and ``self.modified``.
Subclasses should only override methods discussed in this section;
overriding other methods is error-prone. Overriding
``StaticFileHandler.get`` is particularly problematic due to the
tight coupling with ``compute_etag`` and other methods.
To change the way static urls are generated (e.g. to match the behavior
of another server or CDN), override `make_static_url`, `parse_url_path`,
`get_cache_time`, and/or `get_version`.
To replace all interaction with the filesystem (e.g. to serve
static content from a database), override `get_content`,
`get_content_size`, `get_modified_time`, `get_absolute_path`, and
`validate_absolute_path`.
.. versionchanged:: 3.1
Many of the methods for subclasses were added in Tornado 3.1.
"""
CACHE_MAX_AGE = 86400 * 365 * 10 # 10 years
_static_hashes = {} # type: Dict[str, Optional[str]]
_lock = threading.Lock() # protects _static_hashes
def initialize(self, path: str, default_filename: str = None) -> None:
self.root = path
self.default_filename = default_filename
    @classmethod
    def reset(cls) -> None:
        """Drop the class-wide cache of static-file hashes.

        Called on every request when the ``static_hash_cache`` setting is
        off (e.g. in debug mode) so changed files get fresh versions.
        """
        with cls._lock:
            cls._static_hashes = {}
    def head(self, path: str) -> Awaitable[None]:
        """Serve a HEAD request: identical to GET but with no body written."""
        return self.get(path, include_body=False)
    async def get(self, path: str, include_body: bool = True) -> None:
        """Serve the file named by ``path`` (relative to ``self.root``),
        honoring conditional (304) and ``Range`` (206/416) requests.

        ``include_body=False`` is used by `head` to send headers only.
        """
        # Set up our path instance variables.
        self.path = self.parse_url_path(path)
        del path  # make sure we don't refer to path instead of self.path again
        absolute_path = self.get_absolute_path(self.root, self.path)
        self.absolute_path = self.validate_absolute_path(self.root, absolute_path)
        if self.absolute_path is None:
            # validate_absolute_path has already sent an error response.
            return
        self.modified = self.get_modified_time()
        self.set_headers()
        if self.should_return_304():
            self.set_status(304)
            return
        request_range = None
        range_header = self.request.headers.get("Range")
        if range_header:
            # As per RFC 2616 14.16, if an invalid Range header is specified,
            # the request will be treated as if the header didn't exist.
            request_range = httputil._parse_request_range(range_header)
        size = self.get_content_size()
        if request_range:
            start, end = request_range
            if start is not None and start < 0:
                # Negative start means a suffix range (last N bytes).
                start += size
                if start < 0:
                    start = 0
            if (
                start is not None
                and (start >= size or (end is not None and start >= end))
            ) or end == 0:
                # As per RFC 2616 14.35.1, a range is not satisfiable only: if
                # the first requested byte is equal to or greater than the
                # content, or when a suffix with length 0 is specified.
                # https://tools.ietf.org/html/rfc7233#section-2.1
                # A byte-range-spec is invalid if the last-byte-pos value is
                # present and less than the first-byte-pos.
                self.set_status(416)  # Range Not Satisfiable
                self.set_header("Content-Type", "text/plain")
                self.set_header("Content-Range", "bytes */%s" % (size,))
                return
            if end is not None and end > size:
                # Clients sometimes blindly use a large range to limit their
                # download size; cap the endpoint at the actual file size.
                end = size
            # Note: only return HTTP 206 if less than the entire range has been
            # requested. Not only is this semantically correct, but Chrome
            # refuses to play audio if it gets an HTTP 206 in response to
            # ``Range: bytes=0-``.
            if size != (end or size) - (start or 0):
                self.set_status(206)  # Partial Content
                self.set_header(
                    "Content-Range", httputil._get_content_range(start, end, size)
                )
        else:
            start = end = None
        if start is not None and end is not None:
            content_length = end - start
        elif end is not None:
            content_length = end
        elif start is not None:
            content_length = size - start
        else:
            content_length = size
        self.set_header("Content-Length", content_length)
        if include_body:
            content = self.get_content(self.absolute_path, start, end)
            if isinstance(content, bytes):
                content = [content]
            for chunk in content:
                try:
                    self.write(chunk)
                    # Flush per chunk so large files stream without
                    # buffering entirely in memory.
                    await self.flush()
                except iostream.StreamClosedError:
                    return
        else:
            assert self.request.method == "HEAD"
def compute_etag(self) -> Optional[str]:
    """Compute the ``Etag`` header from the static-url version hash.

    Using the cached file-version hash makes ``If-None-Match`` checks
    cheap and guarantees that a partial (Range) response carries the
    same ``Etag`` as the full file.

    .. versionadded:: 3.1
    """
    assert self.absolute_path is not None
    version_hash = self._get_cached_version(self.absolute_path)
    return '"%s"' % (version_hash,) if version_hash else None
def set_headers(self) -> None:
    """Sets the content and caching headers on the response.

    .. versionadded:: 3.1
    """
    # Advertise byte-range support so clients may resume downloads.
    self.set_header("Accept-Ranges", "bytes")
    self.set_etag_header()
    if self.modified is not None:
        self.set_header("Last-Modified", self.modified)
    content_type = self.get_content_type()
    if content_type:
        self.set_header("Content-Type", content_type)
    cache_time = self.get_cache_time(self.path, self.modified, content_type)
    if cache_time > 0:
        # Emit both Expires (HTTP/1.0 caches) and Cache-Control (HTTP/1.1).
        self.set_header(
            "Expires",
            datetime.datetime.utcnow() + datetime.timedelta(seconds=cache_time),
        )
        self.set_header("Cache-Control", "max-age=" + str(cache_time))
    # Hook for subclasses to append their own headers last.
    self.set_extra_headers(self.path)
def should_return_304(self) -> bool:
    """Return True when the request headers permit a 304 Not Modified reply.

    .. versionadded:: 3.1
    """
    headers = self.request.headers
    # If-None-Match takes precedence; If-Modified-Since is then ignored.
    if headers.get("If-None-Match"):
        return self.check_etag_header()
    # Otherwise fall back to the If-Modified-Since comparison.
    ims_value = headers.get("If-Modified-Since")
    if ims_value is None:
        return False
    date_tuple = email.utils.parsedate(ims_value)
    if date_tuple is None:
        return False
    if_since = datetime.datetime(*date_tuple[:6])
    assert self.modified is not None
    return if_since >= self.modified
@classmethod
def get_absolute_path(cls, root: str, path: str) -> str:
    """Return the absolute location of ``path`` relative to ``root``.

    ``root`` is the path configured for this `StaticFileHandler`
    (in most cases the ``static_path`` `Application` setting).

    This class method may be overridden in subclasses.  By default
    it returns a filesystem path, but other strings may be used
    as long as they are unique and understood by the subclass's
    overridden `get_content`.

    .. versionadded:: 3.1
    """
    return os.path.abspath(os.path.join(root, path))
def validate_absolute_path(self, root: str, absolute_path: str) -> Optional[str]:
    """Validate and return the absolute path.

    ``root`` is the configured path for the `StaticFileHandler`,
    and ``path`` is the result of `get_absolute_path`

    This is an instance method called during request processing,
    so it may raise `HTTPError` or use methods like
    `RequestHandler.redirect` (return None after redirecting to
    halt further processing).  This is where 404 errors for missing files
    are generated.

    This method may modify the path before returning it, but note that
    any such modifications will not be understood by `make_static_url`.

    In instance methods, this method's result is available as
    ``self.absolute_path``.

    .. versionadded:: 3.1
    """
    # os.path.abspath strips a trailing /.
    # We must add it back to `root` so that we only match files
    # in a directory named `root` instead of files starting with
    # that prefix.
    root = os.path.abspath(root)
    if not root.endswith(os.path.sep):
        # abspath always removes a trailing slash, except when
        # root is '/'. This is an unusual case, but several projects
        # have independently discovered this technique to disable
        # Tornado's path validation and (hopefully) do their own,
        # so we need to support it.
        root += os.path.sep
    # The trailing slash also needs to be temporarily added back
    # the requested path so a request to root/ will match.
    # This prefix check is the path-traversal guard: anything that
    # escapes `root` (e.g. via "..") is rejected with 403.
    if not (absolute_path + os.path.sep).startswith(root):
        raise HTTPError(403, "%s is not in root static directory", self.path)
    if os.path.isdir(absolute_path) and self.default_filename is not None:
        # need to look at the request.path here for when path is empty
        # but there is some prefix to the path that was already
        # trimmed by the routing
        if not self.request.path.endswith("/"):
            self.redirect(self.request.path + "/", permanent=True)
            return None
        absolute_path = os.path.join(absolute_path, self.default_filename)
    if not os.path.exists(absolute_path):
        raise HTTPError(404)
    if not os.path.isfile(absolute_path):
        raise HTTPError(403, "%s is not a file", self.path)
    return absolute_path
@classmethod
def get_content(
    cls, abspath: str, start: Optional[int] = None, end: Optional[int] = None
) -> Generator[bytes, None, None]:
    """Retrieve the content of the requested resource which is located
    at the given absolute path.

    This class method may be overridden by subclasses.  Note that its
    signature is different from other overridable class methods
    (no ``settings`` argument); this is deliberate to ensure that
    ``abspath`` is able to stand on its own as a cache key.

    This method should either return a byte string or an iterator
    of byte strings.  The latter is preferred for large files
    as it helps reduce memory fragmentation.

    :param abspath: absolute filesystem path (or cache key) of the file.
    :param start: optional first byte offset (inclusive), for Range requests.
    :param end: optional end byte offset (exclusive), for Range requests.

    .. versionadded:: 3.1
    """
    with open(abspath, "rb") as file:
        if start is not None:
            file.seek(start)
        if end is not None:
            # Number of bytes still owed to the caller; None means
            # "read to EOF".
            remaining = end - (start or 0)  # type: Optional[int]
        else:
            remaining = None
        while True:
            chunk_size = 64 * 1024
            if remaining is not None and remaining < chunk_size:
                # Never read past the end of the requested range.
                chunk_size = remaining
            chunk = file.read(chunk_size)
            if chunk:
                if remaining is not None:
                    remaining -= len(chunk)
                yield chunk
            else:
                if remaining is not None:
                    # EOF must land exactly on the range boundary.
                    assert remaining == 0
                return
@classmethod
def get_content_version(cls, abspath: str) -> str:
    """Return a version string (MD5 hash of the contents) for ``abspath``.

    This class method may be overridden by subclasses; the default
    implementation hashes the file's bytes.

    .. versionadded:: 3.1
    """
    hasher = hashlib.md5()
    content = cls.get_content(abspath)
    if isinstance(content, bytes):
        # Subclasses may return a single byte string rather than an
        # iterator of chunks; normalize to the iterable case.
        content = [content]
    for piece in content:
        hasher.update(piece)
    return hasher.hexdigest()
def _stat(self) -> os.stat_result:
    # Stat the target file once per request and memoize the result.
    assert self.absolute_path is not None
    try:
        return self._stat_result
    except AttributeError:
        self._stat_result = os.stat(self.absolute_path)
    return self._stat_result
def get_content_size(self) -> int:
    """Retrieve the total size of the resource at the given path.

    This method may be overridden by subclasses.

    .. versionadded:: 3.1

    .. versionchanged:: 4.0
        This method is now always called, instead of only when
        partial results are requested.
    """
    # Uses the memoized stat result; st_size is the file size in bytes.
    stat_result = self._stat()
    return stat_result.st_size
def get_modified_time(self) -> Optional[datetime.datetime]:
    """Returns the time that ``self.absolute_path`` was last modified.

    May be overridden in subclasses.  Should return a `~datetime.datetime`
    object or None.

    .. versionadded:: 3.1
    """
    stat_result = self._stat()
    # NOTE: Historically, this used stat_result[stat.ST_MTIME],
    # which truncates the fractional portion of the timestamp. It
    # was changed from that form to stat_result.st_mtime to
    # satisfy mypy (which disallows the bracket operator), but the
    # latter form returns a float instead of an int. For
    # consistency with the past (and because we have a unit test
    # that relies on this), we truncate the float here, although
    # I'm not sure that's the right thing to do.
    modified = datetime.datetime.utcfromtimestamp(int(stat_result.st_mtime))
    return modified
def get_content_type(self) -> str:
    """Return the ``Content-Type`` header value for this request.

    .. versionadded:: 3.1
    """
    assert self.absolute_path is not None
    mime_type, encoding = mimetypes.guess_type(self.absolute_path)
    # per RFC 6713, use the appropriate type for a gzip compressed file
    if encoding == "gzip":
        return "application/gzip"
    # As of 2015-07-21 there is no bzip2 encoding defined at
    # http://www.iana.org/assignments/media-types/media-types.xhtml
    # So for that (and any other encoding), use octet-stream.
    if encoding is not None:
        return "application/octet-stream"
    if mime_type is not None:
        return mime_type
    # if mime_type not detected, use application/octet-stream
    return "application/octet-stream"
def set_extra_headers(self, path: str) -> None:
    """For subclass to add extra headers to the response"""
    # Intentionally a no-op hook; called last from set_headers().
    pass
def get_cache_time(
    self, path: str, modified: Optional[datetime.datetime], mime_type: str
) -> int:
    """Override to customize cache control behavior.

    Return a positive number of seconds to make the result
    cacheable for that amount of time or 0 to mark resource as
    cacheable for an unspecified amount of time (subject to
    browser heuristics).

    By default returns cache expiry of 10 years for resources requested
    with ``v`` argument.
    """
    if "v" in self.request.arguments:
        # Versioned urls are effectively content-addressed, so they can
        # be cached essentially forever.
        return self.CACHE_MAX_AGE
    return 0
@classmethod
def make_static_url(
    cls, settings: Dict[str, Any], path: str, include_version: bool = True
) -> str:
    """Construct a versioned url for the given path.

    This method may be overridden in subclasses (but note that it
    is a class method rather than an instance method).  Subclasses
    are only required to implement the signature
    ``make_static_url(cls, settings, path)``; other keyword
    arguments may be passed through `~RequestHandler.static_url`
    but are not standard.

    ``settings`` is the `Application.settings` dictionary.  ``path``
    is the static path being requested.  The url returned should be
    relative to the current host.

    ``include_version`` determines whether the generated URL should
    include the query string containing the version hash of the
    file corresponding to the given ``path``.
    """
    url = settings.get("static_url_prefix", "/static/") + path
    if include_version:
        version_hash = cls.get_version(settings, path)
        if version_hash:
            return "%s?v=%s" % (url, version_hash)
    # No version requested, or no hash could be computed.
    return url
def parse_url_path(self, url_path: str) -> str:
    """Convert a static URL path into a filesystem path.

    ``url_path`` is the path component of the URL with
    ``static_url_prefix`` removed.  The return value should be a
    filesystem path relative to ``static_path``.

    This is the inverse of `make_static_url`.
    """
    if os.path.sep == "/":
        # URL separators already match the local filesystem.
        return url_path
    return url_path.replace("/", os.path.sep)
@classmethod
def get_version(cls, settings: Dict[str, Any], path: str) -> Optional[str]:
    """Generate the version string to be used in static URLs.

    ``settings`` is the `Application.settings` dictionary and ``path``
    is the relative location of the requested asset on the filesystem.
    The returned value should be a string, or ``None`` if no version
    could be determined.

    .. versionchanged:: 3.1
        This method was previously recommended for subclasses to override;
        `get_content_version` is now preferred as it allows the base
        class to handle caching of the result.
    """
    # Delegate to the per-absolute-path cache shared by the class.
    abs_path = cls.get_absolute_path(settings["static_path"], path)
    return cls._get_cached_version(abs_path)
@classmethod
def _get_cached_version(cls, abs_path: str) -> Optional[str]:
    # Return the (possibly cached) content-version hash for ``abs_path``,
    # or None if the file could not be read.  The class-level lock guards
    # the shared hash cache against concurrent mutation.
    with cls._lock:
        hashes = cls._static_hashes
        if abs_path not in hashes:
            try:
                hashes[abs_path] = cls.get_content_version(abs_path)
            except Exception:
                # Cache the failure as None so an unreadable file is not
                # re-hashed (and re-logged) on every request.
                gen_log.error("Could not open static file %r", abs_path)
                hashes[abs_path] = None
        hsh = hashes.get(abs_path)
        if hsh:
            return hsh
    return None
class FallbackHandler(RequestHandler):
    """A `RequestHandler` that wraps another HTTP server callback.

    The fallback is a callable object that accepts an
    `~.httputil.HTTPServerRequest`, such as an `Application` or
    `tornado.wsgi.WSGIContainer`.  This is most useful to use both
    Tornado ``RequestHandlers`` and WSGI in the same server.  Typical
    usage::

        wsgi_app = tornado.wsgi.WSGIContainer(
            django.core.handlers.wsgi.WSGIHandler())
        application = tornado.web.Application([
            (r"/foo", FooHandler),
            (r".*", FallbackHandler, dict(fallback=wsgi_app)),
        ])
    """

    def initialize(
        self, fallback: Callable[[httputil.HTTPServerRequest], None]
    ) -> None:
        # The wrapped callback; it is expected to produce the entire
        # response for the request on its own.
        self.fallback = fallback

    def prepare(self) -> None:
        # Hand the request to the fallback, then mark this handler as
        # finished so Tornado does not attempt to write its own response
        # on top of the fallback's output.
        self.fallback(self.request)
        self._finished = True
        self.on_finish()
class OutputTransform(object):
    """A transform modifies the result of an HTTP request (e.g., GZip encoding)

    Applications are not expected to create their own OutputTransforms
    or interact with them directly; the framework chooses which transforms
    (if any) to apply.
    """

    def __init__(self, request: httputil.HTTPServerRequest) -> None:
        # The base transform keeps no per-request state.
        pass

    def transform_first_chunk(
        self,
        status_code: int,
        headers: httputil.HTTPHeaders,
        chunk: bytes,
        finishing: bool,
    ) -> Tuple[int, httputil.HTTPHeaders, bytes]:
        # Called once with the status code, response headers, and the first
        # body chunk; the default implementation passes everything through.
        return status_code, headers, chunk

    def transform_chunk(self, chunk: bytes, finishing: bool) -> bytes:
        # Called for each subsequent body chunk; default is pass-through.
        return chunk
class GZipContentEncoding(OutputTransform):
    """Applies the gzip content encoding to the response.

    See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11

    .. versionchanged:: 4.0
        Now compresses all mime types beginning with ``text/``, instead
        of just a whitelist. (the whitelist is still used for certain
        non-text mime types).
    """

    # Whitelist of compressible mime types (in addition to any types
    # beginning with "text/").
    CONTENT_TYPES = set(
        [
            "application/javascript",
            "application/x-javascript",
            "application/xml",
            "application/atom+xml",
            "application/json",
            "application/xhtml+xml",
            "image/svg+xml",
        ]
    )
    # Python's GzipFile defaults to level 9, while most other gzip
    # tools (including gzip itself) default to 6, which is probably a
    # better CPU/size tradeoff.
    GZIP_LEVEL = 6
    # Responses that are too short are unlikely to benefit from gzipping
    # after considering the "Content-Encoding: gzip" header and the header
    # inside the gzip encoding.
    # Note that responses written in multiple chunks will be compressed
    # regardless of size.
    MIN_LENGTH = 1024

    def __init__(self, request: httputil.HTTPServerRequest) -> None:
        # Only compress if the client advertised gzip support.
        self._gzipping = "gzip" in request.headers.get("Accept-Encoding", "")

    def _compressible_type(self, ctype: str) -> bool:
        # All text/* types are compressible, plus the whitelist above.
        return ctype.startswith("text/") or ctype in self.CONTENT_TYPES

    def transform_first_chunk(
        self,
        status_code: int,
        headers: httputil.HTTPHeaders,
        chunk: bytes,
        finishing: bool,
    ) -> Tuple[int, httputil.HTTPHeaders, bytes]:
        # TODO: can/should this type be inherited from the superclass?
        # Tell caches the response varies by Accept-Encoding regardless of
        # whether this particular response ends up compressed.
        if "Vary" in headers:
            headers["Vary"] += ", Accept-Encoding"
        else:
            headers["Vary"] = "Accept-Encoding"
        if self._gzipping:
            ctype = _unicode(headers.get("Content-Type", "")).split(";")[0]
            # Final decision: compressible type, big enough to be worth it
            # (or streamed in multiple chunks), and not already encoded.
            self._gzipping = (
                self._compressible_type(ctype)
                and (not finishing or len(chunk) >= self.MIN_LENGTH)
                and ("Content-Encoding" not in headers)
            )
        if self._gzipping:
            headers["Content-Encoding"] = "gzip"
            self._gzip_value = BytesIO()
            self._gzip_file = gzip.GzipFile(
                mode="w", fileobj=self._gzip_value, compresslevel=self.GZIP_LEVEL
            )
            chunk = self.transform_chunk(chunk, finishing)
            if "Content-Length" in headers:
                # The original content length is no longer correct.
                # If this is the last (and only) chunk, we can set the new
                # content-length; otherwise we remove it and fall back to
                # chunked encoding.
                if finishing:
                    headers["Content-Length"] = str(len(chunk))
                else:
                    del headers["Content-Length"]
        return status_code, headers, chunk

    def transform_chunk(self, chunk: bytes, finishing: bool) -> bytes:
        if self._gzipping:
            self._gzip_file.write(chunk)
            if finishing:
                # close() flushes the final gzip trailer into the buffer.
                self._gzip_file.close()
            else:
                self._gzip_file.flush()
            # Drain whatever the compressor produced and reset the buffer
            # so the next chunk starts from an empty BytesIO.
            chunk = self._gzip_value.getvalue()
            self._gzip_value.truncate(0)
            self._gzip_value.seek(0)
        return chunk
def authenticated(
    method: Callable[..., Optional[Awaitable[None]]]
) -> Callable[..., Optional[Awaitable[None]]]:
    """Decorate methods with this to require that the user be logged in.

    If the user is not logged in, they will be redirected to the configured
    `login url <RequestHandler.get_login_url>`.

    If you configure a login url with a query parameter, Tornado will
    assume you know what you're doing and use it as-is.  If not, it
    will add a `next` parameter so the login page knows where to send
    you once you're logged in.
    """

    @functools.wraps(method)
    def wrapper(  # type: ignore
        self: RequestHandler, *args, **kwargs
    ) -> Optional[Awaitable[None]]:
        # Fast path: an authenticated user runs the wrapped method directly.
        if self.current_user:
            return method(self, *args, **kwargs)
        # Only safe/idempotent requests are redirected to the login page;
        # everything else is rejected outright.
        if self.request.method not in ("GET", "HEAD"):
            raise HTTPError(403)
        url = self.get_login_url()
        if "?" not in url:
            if urllib.parse.urlsplit(url).scheme:
                # if login url is absolute, make next absolute too
                next_url = self.request.full_url()
            else:
                assert self.request.uri is not None
                next_url = self.request.uri
            url += "?" + urlencode(dict(next=next_url))
        self.redirect(url)
        return None

    return wrapper
class UIModule(object):
    """A re-usable, modular UI unit on a page.

    UI modules often execute additional queries, and they can include
    additional CSS and JavaScript that will be included in the output
    page, which is automatically inserted on page render.

    Subclasses of UIModule must override the `render` method.
    """

    def __init__(self, handler: RequestHandler) -> None:
        # Cache commonly-needed attributes off the owning handler.
        self.handler = handler
        self.request = handler.request
        self.ui = handler.ui
        self.locale = handler.locale

    @property
    def current_user(self) -> Any:
        """The user object of the handler this module is bound to."""
        return self.handler.current_user

    def render(self, *args: Any, **kwargs: Any) -> str:
        """Override in subclasses to return this module's output."""
        raise NotImplementedError()

    def embedded_javascript(self) -> Optional[str]:
        """Override to return a JavaScript string to embed in the page."""
        return None

    def javascript_files(self) -> Optional[Iterable[str]]:
        """Override to return JavaScript files needed by this module.

        Relative paths will be passed to `RequestHandler.static_url`;
        other values are used as-is.
        """
        return None

    def embedded_css(self) -> Optional[str]:
        """Override to return a CSS string to embed in the page."""
        return None

    def css_files(self) -> Optional[Iterable[str]]:
        """Override to return CSS files required by this module.

        Relative paths will be passed to `RequestHandler.static_url`;
        other values are used as-is.
        """
        return None

    def html_head(self) -> Optional[str]:
        """Override to return HTML placed in the <head/> element."""
        return None

    def html_body(self) -> Optional[str]:
        """Override to return HTML placed at the end of <body/>."""
        return None

    def render_string(self, path: str, **kwargs: Any) -> bytes:
        """Renders a template and returns it as a string."""
        return self.handler.render_string(path, **kwargs)
class _linkify(UIModule):
    """UI module exposing `escape.linkify` to templates."""

    def render(self, text: str, **kwargs: Any) -> str:  # type: ignore
        # Turn plain-text urls in ``text`` into HTML anchor tags.
        return escape.linkify(text, **kwargs)
class _xsrf_form_html(UIModule):
    """UI module that emits the handler's hidden XSRF form field."""

    def render(self) -> str:  # type: ignore
        return self.handler.xsrf_form_html()
class TemplateModule(UIModule):
    """UIModule that simply renders the given template.

    {% module Template("foo.html") %} is similar to {% include "foo.html" %},
    but the module version gets its own namespace (with kwargs passed to
    Template()) instead of inheriting the outer template's namespace.

    Templates rendered through this module also get access to UIModule's
    automatic javascript/css features.  Simply call set_resources
    inside the template and give it keyword arguments corresponding to
    the methods on UIModule: {{ set_resources(js_files=static_url("my.js")) }}
    Note that these resources are output once per template file, not once
    per instantiation of the template, so they must not depend on
    any arguments to the template.
    """

    def __init__(self, handler: RequestHandler) -> None:
        super(TemplateModule, self).__init__(handler)
        # Resources are kept in a list (to preserve registration order for
        # output) and a dict keyed by template path (to detect conflicting
        # set_resources calls for the same template).
        self._resource_list = []  # type: List[Dict[str, Any]]
        self._resource_dict = {}  # type: Dict[str, Dict[str, Any]]

    def render(self, path: str, **kwargs: Any) -> bytes:  # type: ignore
        def set_resources(**kwargs) -> str:  # type: ignore
            # Register resources once per template path; a second call with
            # different resources for the same template is a usage error.
            if path not in self._resource_dict:
                self._resource_list.append(kwargs)
                self._resource_dict[path] = kwargs
            else:
                if self._resource_dict[path] != kwargs:
                    raise ValueError(
                        "set_resources called with different "
                        "resources for the same template"
                    )
            return ""

        return self.render_string(path, set_resources=set_resources, **kwargs)

    def _get_resources(self, key: str) -> Iterable[str]:
        # Yield the value registered under ``key`` by each template, in order.
        return (r[key] for r in self._resource_list if key in r)

    def _collect_files(self, key: str) -> Iterable[str]:
        # Flatten registered file resources: each entry may be a single
        # path (str/bytes) or an iterable of paths.  Shared by
        # javascript_files and css_files, which previously duplicated
        # this loop.
        result = []
        for f in self._get_resources(key):
            if isinstance(f, (unicode_type, bytes)):
                result.append(f)
            else:
                result.extend(f)
        return result

    def embedded_javascript(self) -> str:
        return "\n".join(self._get_resources("embedded_javascript"))

    def javascript_files(self) -> Iterable[str]:
        return self._collect_files("javascript_files")

    def embedded_css(self) -> str:
        return "\n".join(self._get_resources("embedded_css"))

    def css_files(self) -> Iterable[str]:
        return self._collect_files("css_files")

    def html_head(self) -> str:
        return "".join(self._get_resources("html_head"))

    def html_body(self) -> str:
        return "".join(self._get_resources("html_body"))
class _UIModuleNamespace(object):
"""Lazy namespace which creates UIModule proxies bound to a handler."""
def __init__(
self, handler: RequestHandler, ui_modules: Dict[str, Type[UIModule]]
) -> None:
self.handler = handler
self.ui_modules = ui_modules
def __getitem__(self, key: str) -> Callable[..., str]:
return self.handler._ui_module(key, self.ui_modules[key])
def __getattr__(self, key: str) -> Callable[..., str]:
try:
return self[key]
except KeyError as e:
raise AttributeError(str(e))
def create_signed_value(
    secret: _CookieSecretTypes,
    name: str,
    value: Union[str, bytes],
    version: Optional[int] = None,
    clock: Optional[Callable[[], float]] = None,
    key_version: Optional[int] = None,
) -> bytes:
    """Sign ``value`` with ``secret`` for use as a secure cookie payload.

    :param secret: the signing key, or a dict of ``{key_version: key}``
        when key rotation is in use (v2 only).
    :param name: the cookie/field name; mixed into the signature.
    :param value: the payload; base64-encoded before signing.
    :param version: signed-value format version; defaults to
        DEFAULT_SIGNED_VALUE_VERSION.
    :param clock: callable returning the current unix time; defaults to
        ``time.time`` (overridable for tests).
    :param key_version: which key in a ``secret`` dict to sign with.
    :raises ValueError: if ``version`` is not a supported format version.
    """
    if version is None:
        version = DEFAULT_SIGNED_VALUE_VERSION
    if clock is None:
        clock = time.time

    timestamp = utf8(str(int(clock())))
    value = base64.b64encode(utf8(value))
    if version == 1:
        assert not isinstance(secret, dict)
        signature = _create_signature_v1(secret, name, value, timestamp)
        value = b"|".join([value, timestamp, signature])
        return value
    elif version == 2:
        # The v2 format consists of a version number and a series of
        # length-prefixed fields "%d:%s", the last of which is a
        # signature, all separated by pipes. All numbers are in
        # decimal format with no leading zeros. The signature is an
        # HMAC-SHA256 of the whole string up to that point, including
        # the final pipe.
        #
        # The fields are:
        # - format version (i.e. 2; no length prefix)
        # - key version (integer, default is 0)
        # - timestamp (integer seconds since epoch)
        # - name (not encoded; assumed to be ~alphanumeric)
        # - value (base64-encoded)
        # - signature (hex-encoded; no length prefix)
        def format_field(s: Union[str, bytes]) -> bytes:
            return utf8("%d:" % len(s)) + utf8(s)

        to_sign = b"|".join(
            [
                b"2",
                format_field(str(key_version or 0)),
                format_field(timestamp),
                format_field(name),
                format_field(value),
                b"",  # empty last element ensures the final pipe is signed
            ]
        )
        if isinstance(secret, dict):
            assert (
                key_version is not None
            ), "Key version must be set when sign key dict is used"
            assert version >= 2, "Version must be at least 2 for key version support"
            secret = secret[key_version]

        signature = _create_signature_v2(secret, to_sign)
        return to_sign + signature
    else:
        raise ValueError("Unsupported version %d" % version)
# A leading version number in decimal with no leading zeros, followed by a
# pipe.  Used by _get_version to recognize explicitly-versioned payloads;
# anything that does not match is assumed to be the version-less v1 format.
_signed_value_version_re = re.compile(br"^([1-9][0-9]*)\|(.*)$")
def _get_version(value: bytes) -> int:
# Figures out what version value is. Version 1 did not include an
# explicit version field and started with arbitrary base64 data,
# which makes this tricky.
m = _signed_value_version_re.match(value)
if m is None:
version = 1
else:
try:
version = int(m.group(1))
if version > 999:
# Certain payloads from the version-less v1 format may
# be parsed as valid integers. Due to base64 padding
# restrictions, this can only happen for numbers whose
# length is a multiple of 4, so we can treat all
# numbers up to 999 as versions, and for the rest we
# fall back to v1 format.
version = 1
except ValueError:
version = 1
return version
def decode_signed_value(
    secret: _CookieSecretTypes,
    name: str,
    value: Union[None, str, bytes],
    max_age_days: int = 31,
    clock: Callable[[], float] = None,
    min_version: int = None,
) -> Optional[bytes]:
    """Verify and decode a signed value, dispatching on its format version.

    Returns the decoded payload bytes, or None if the value is missing,
    too old a format, or fails verification.
    """
    clock = time.time if clock is None else clock
    if min_version is None:
        min_version = DEFAULT_SIGNED_VALUE_MIN_VERSION
    if min_version > 2:
        raise ValueError("Unsupported min_version %d" % min_version)
    if not value:
        return None

    raw = utf8(value)
    version = _get_version(raw)
    if version < min_version:
        # Caller has opted out of older (weaker) formats.
        return None
    if version == 1:
        assert not isinstance(secret, dict)
        return _decode_signed_value_v1(secret, name, raw, max_age_days, clock)
    if version == 2:
        return _decode_signed_value_v2(secret, name, raw, max_age_days, clock)
    return None
def _decode_signed_value_v1(
    secret: Union[str, bytes],
    name: str,
    value: bytes,
    max_age_days: int,
    clock: Callable[[], float],
) -> Optional[bytes]:
    """Verify and decode a v1-format signed value.

    Returns the decoded payload bytes, or None if the value is malformed,
    the signature is invalid, or the timestamp is out of range.  The
    checks below are order-sensitive; see the inline comments.
    """
    parts = utf8(value).split(b"|")
    if len(parts) != 3:
        return None
    signature = _create_signature_v1(secret, name, parts[0], parts[1])
    # compare_digest is constant-time, avoiding signature timing attacks.
    if not hmac.compare_digest(parts[2], signature):
        gen_log.warning("Invalid cookie signature %r", value)
        return None
    timestamp = int(parts[1])
    if timestamp < clock() - max_age_days * 86400:
        gen_log.warning("Expired cookie %r", value)
        return None
    if timestamp > clock() + 31 * 86400:
        # _cookie_signature does not hash a delimiter between the
        # parts of the cookie, so an attacker could transfer trailing
        # digits from the payload to the timestamp without altering the
        # signature. For backwards compatibility, sanity-check timestamp
        # here instead of modifying _cookie_signature.
        gen_log.warning("Cookie timestamp in future; possible tampering %r", value)
        return None
    if parts[1].startswith(b"0"):
        # A leading zero would permit the same digit-shifting trick in
        # the other direction.
        gen_log.warning("Tampered cookie %r", value)
        return None
    try:
        return base64.b64decode(parts[0])
    except Exception:
        return None
def _decode_fields_v2(value: bytes) -> Tuple[int, bytes, bytes, bytes, bytes]:
def _consume_field(s: bytes) -> Tuple[bytes, bytes]:
length, _, rest = s.partition(b":")
n = int(length)
field_value = rest[:n]
# In python 3, indexing bytes returns small integers; we must
# use a slice to get a byte string as in python 2.
if rest[n : n + 1] != b"|":
raise ValueError("malformed v2 signed value field")
rest = rest[n + 1 :]
return field_value, rest
rest = value[2:] # remove version number
key_version, rest = _consume_field(rest)
timestamp, rest = _consume_field(rest)
name_field, rest = _consume_field(rest)
value_field, passed_sig = _consume_field(rest)
return int(key_version), timestamp, name_field, value_field, passed_sig
def _decode_signed_value_v2(
    secret: _CookieSecretTypes,
    name: str,
    value: bytes,
    max_age_days: int,
    clock: Callable[[], float],
) -> Optional[bytes]:
    """Verify and decode a v2-format signed value.

    Returns the decoded payload bytes, or None on any parse, key,
    signature, name, or expiry failure (failures are deliberately
    indistinguishable to the caller).
    """
    try:
        key_version, timestamp_bytes, name_field, value_field, passed_sig = _decode_fields_v2(
            value
        )
    except ValueError:
        return None
    # Everything up to (but excluding) the trailing signature is signed.
    signed_string = value[: -len(passed_sig)]

    if isinstance(secret, dict):
        try:
            secret = secret[key_version]
        except KeyError:
            return None

    expected_sig = _create_signature_v2(secret, signed_string)
    # Constant-time comparison to avoid leaking signature prefixes.
    if not hmac.compare_digest(passed_sig, expected_sig):
        return None
    if name_field != utf8(name):
        return None
    timestamp = int(timestamp_bytes)
    if timestamp < clock() - max_age_days * 86400:
        # The signature has expired.
        return None
    try:
        return base64.b64decode(value_field)
    except Exception:
        return None
def get_signature_key_version(value: Union[str, bytes]) -> Optional[int]:
    """Return the key version of a v2-format signed value.

    Returns None for v1 values (which have no key version) and for
    malformed input.
    """
    raw = utf8(value)
    if _get_version(raw) < 2:
        return None
    try:
        return _decode_fields_v2(raw)[0]
    except ValueError:
        return None
def _create_signature_v1(secret: Union[str, bytes], *parts: Union[str, bytes]) -> bytes:
    """Sign ``parts`` with HMAC-SHA1 for the legacy v1 signed-value format.

    Returns the hex digest as bytes.  The parts are hashed with no
    delimiter between them, which is why v1 decoding needs extra
    timestamp sanity checks.
    """
    # Renamed the local from ``hash`` to ``mac``: ``hash`` shadowed the
    # builtin of the same name.
    mac = hmac.new(utf8(secret), digestmod=hashlib.sha1)
    for part in parts:
        mac.update(utf8(part))
    return utf8(mac.hexdigest())
def _create_signature_v2(secret: Union[str, bytes], s: bytes) -> bytes:
    """Sign ``s`` with HMAC-SHA256 (v2 format); returns the hex digest as bytes."""
    # Renamed the local from ``hash`` to ``mac`` to stop shadowing the builtin.
    mac = hmac.new(utf8(secret), digestmod=hashlib.sha256)
    mac.update(utf8(s))
    return utf8(mac.hexdigest())
def is_absolute(path: str) -> bool:
    """Return True if ``path`` is host-relative ("/...") or a full http(s) url."""
    # startswith accepts a tuple of prefixes: one C-level call instead of
    # a generator + any() over three separate checks.
    return path.startswith(("/", "http:", "https:"))
| NoyaInRain/tornado | tornado/web.py | Python | apache-2.0 | 138,158 |
# Copyright 2014-2020 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Oliver J. Backhouse <olbackhouse@gmail.com>
# George H. Booth <george.booth@kcl.ac.uk>
#
import unittest
import numpy as np
from pyscf.agf2 import aux, _agf2
class KnownValues(unittest.TestCase):
    """Check that the in-core and out-of-core C implementations of the
    AGF2 self-energy moment builders produce identical results on random
    (but reproducibly seeded) inputs.
    """

    @classmethod
    def setUpClass(cls):
        # First parameter renamed from ``self`` to ``cls``: these are
        # classmethods and receive the class object, not an instance.
        cls.nmo = 100   # number of molecular orbitals
        cls.nocc = 20   # number of occupied orbitals
        cls.nvir = 80   # number of virtual orbitals
        cls.naux = 400  # auxiliary (density-fitting) basis size
        # Fixed seed so the random test tensors are reproducible.
        np.random.seed(1)

    @classmethod
    def tearDownClass(cls):
        del cls.nmo, cls.nocc, cls.nvir, cls.naux
        # Re-seed from entropy so later tests don't inherit the fixed seed.
        np.random.seed()

    def test_c_ragf2(self):
        # Restricted AGF2, dense-ERI path.
        xija = np.random.random((self.nmo, self.nocc, self.nocc, self.nvir))
        gf_occ = aux.GreensFunction(np.random.random(self.nocc), np.eye(self.nmo, self.nocc))
        gf_vir = aux.GreensFunction(np.random.random(self.nvir), np.eye(self.nmo, self.nvir))
        vv1, vev1 = _agf2.build_mats_ragf2_outcore(xija, gf_occ.energy, gf_vir.energy)
        vv2, vev2 = _agf2.build_mats_ragf2_incore(xija, gf_occ.energy, gf_vir.energy)
        self.assertAlmostEqual(np.max(np.absolute(vv1-vv2)), 0.0, 10)
        self.assertAlmostEqual(np.max(np.absolute(vev1-vev2)), 0.0, 10)

    def test_c_dfragf2(self):
        # Restricted AGF2, density-fitted path.
        qxi = np.random.random((self.naux, self.nmo*self.nocc)) / self.naux
        qja = np.random.random((self.naux, self.nocc*self.nvir)) / self.naux
        gf_occ = aux.GreensFunction(np.random.random(self.nocc), np.eye(self.nmo, self.nocc))
        gf_vir = aux.GreensFunction(np.random.random(self.nvir), np.eye(self.nmo, self.nvir))
        vv1, vev1 = _agf2.build_mats_dfragf2_outcore(qxi, qja, gf_occ.energy, gf_vir.energy)
        vv2, vev2 = _agf2.build_mats_dfragf2_incore(qxi, qja, gf_occ.energy, gf_vir.energy)
        self.assertAlmostEqual(np.max(np.absolute(vv1-vv2)), 0.0, 10)
        self.assertAlmostEqual(np.max(np.absolute(vev1-vev2)), 0.0, 10)

    def test_c_uagf2(self):
        # Unrestricted AGF2, dense-ERI path (alpha/beta spin channels).
        xija = np.random.random((2, self.nmo, self.nocc, self.nocc, self.nvir))
        gf_occ = (aux.GreensFunction(np.random.random(self.nocc), np.eye(self.nmo, self.nocc)),
                  aux.GreensFunction(np.random.random(self.nocc), np.eye(self.nmo, self.nocc)))
        gf_vir = (aux.GreensFunction(np.random.random(self.nvir), np.eye(self.nmo, self.nvir)),
                  aux.GreensFunction(np.random.random(self.nvir), np.eye(self.nmo, self.nvir)))
        vv1, vev1 = _agf2.build_mats_uagf2_outcore(xija, (gf_occ[0].energy, gf_occ[1].energy), (gf_vir[0].energy, gf_vir[1].energy))
        vv2, vev2 = _agf2.build_mats_uagf2_incore(xija, (gf_occ[0].energy, gf_occ[1].energy), (gf_vir[0].energy, gf_vir[1].energy))
        self.assertAlmostEqual(np.max(np.absolute(vv1-vv2)), 0.0, 10)
        self.assertAlmostEqual(np.max(np.absolute(vev1-vev2)), 0.0, 10)

    def test_c_dfuagf2(self):
        # Unrestricted AGF2, density-fitted path.
        qxi = np.random.random((2, self.naux, self.nmo*self.nocc)) / self.naux
        qja = np.random.random((2, self.naux, self.nocc*self.nvir)) / self.naux
        gf_occ = (aux.GreensFunction(np.random.random(self.nocc), np.eye(self.nmo, self.nocc)),
                  aux.GreensFunction(np.random.random(self.nocc), np.eye(self.nmo, self.nocc)))
        gf_vir = (aux.GreensFunction(np.random.random(self.nvir), np.eye(self.nmo, self.nvir)),
                  aux.GreensFunction(np.random.random(self.nvir), np.eye(self.nmo, self.nvir)))
        vv1, vev1 = _agf2.build_mats_dfuagf2_outcore(qxi, qja, (gf_occ[0].energy, gf_occ[1].energy), (gf_vir[0].energy, gf_vir[1].energy))
        vv2, vev2 = _agf2.build_mats_dfuagf2_incore(qxi, qja, (gf_occ[0].energy, gf_occ[1].energy), (gf_vir[0].energy, gf_vir[1].energy))
        self.assertAlmostEqual(np.max(np.absolute(vv1-vv2)), 0.0, 10)
        self.assertAlmostEqual(np.max(np.absolute(vev1-vev2)), 0.0, 10)
if __name__ == '__main__':
    # Allow running this test module directly: ``python test_c_agf2.py``.
    print('AGF2 C implementations')
    unittest.main()
| sunqm/pyscf | pyscf/agf2/test/test_c_agf2.py | Python | apache-2.0 | 4,413 |
import requests
from PIL import Image
from course import *
from utils import is_on_time, get_week
from io import BytesIO
# API endpoints: course timetable, student info lookup, and photo lookup.
url_course = 'https://wx.idsbllp.cn/redapi2/api/kebiao'
url_stu_info = 'https://we.cqu.pt/api/others/student.php?key='
url_photo = 'https://we.cqu.pt/api/others/photos.php?id='
# NOTE(review): module-level mutable dict; -1 are placeholder values that
# get overwritten before each request. Shared state like this is not
# thread-safe — prefer building the payload per call.
data = {
    'stuNum': -1,
    'week': -1
}
# Default request headers; presumably required by the upstream API — verify.
header = {
    'API_APP': 'android',
    'Content-Type': 'application/x-www-form-urlencoded'
}
def get_courses(stu_num, week=0, offset=0):
    """Return the formatted course list for a student.

    Args:
        stu_num: the student number to query.
        week: week number to request (0 lets the API pick the current week).
        offset: day offset forwarded to get_week/is_on_time.

    Returns:
        A single string, the concatenation of ``Course.get_course()`` for
        every course matching the current week and day.
    """
    # Build the payload locally instead of mutating the module-level `data`
    # dict: the old in-place mutation leaked state between calls and was not
    # thread-safe.
    payload = {'stuNum': stu_num, 'week': week}
    response = requests.post(url_course, data=payload).json()
    now_week = get_week(response['nowWeek'], offset)
    courses = response['data']
    # Keep only courses scheduled for this week and on time today.
    courses = filter(lambda x: now_week in x['week'] and is_on_time(x['hash_day'], offset), courses)
    this_week_course = list(map(lambda x: Course(x['course'], x['teacher'], x['classroom'], x['lesson']), courses))
    return ''.join(i.get_course() for i in this_week_course)
def get_name_by_stu_num(stu_num):
    """Look up a student's name by student number.

    Returns the name on a unique match, otherwise a friendly error string.
    """
    payload = requests.get(url_stu_info + str(stu_num)).json()
    if payload['data']['total'] == 1:
        return payload['data']['rows'][0]['xm']
    return '亲你学号输错啦'
def get_stu_infos_by_info(info):
    """Query the student-info endpoint and return the raw result rows."""
    response = requests.get(url_stu_info + info)
    return response.json()['data']['rows']
def get_photo(stu_num_photo):
    """Download a student's photo and return it as a JPEG in a BytesIO.

    Args:
        stu_num_photo: identifier passed to the photo-lookup endpoint.

    Returns:
        A seek(0)'d BytesIO named 'image.jpeg' containing the JPEG data.
    """
    # Reuse the module-level endpoint constant instead of duplicating the URL.
    photo_url = requests.get(url_photo + str(stu_num_photo)).json()['data']
    photo = requests.get(photo_url).content
    image = Image.open(BytesIO(photo))
    # BUG FIX: the buffer must start empty. Saving into BytesIO(photo) wrote
    # the JPEG over the original download, leaving trailing garbage bytes
    # whenever the re-encoded image was smaller than the original.
    bio = BytesIO()
    bio.name = 'image.jpeg'
    image.save(bio, 'JPEG')
    bio.seek(0)
    return bio
| simonla/cyxbs_bot | network.py | Python | apache-2.0 | 1,669 |
''' Computes feature representations '''
from __future__ import division, print_function
from hscom import __common__
(print, print_, print_on, print_off,
rrr, profile) = __common__.init(__name__, '[fc2]')
# scientific
import numpy as np
# python
from os.path import join
# hotspotter
from hscom import helpers as util
from hscom import params
from hscom import fileio as io
from hscom.Parallelize import parallel_compute
import extern_feat
def whiten_features(desc_list):
    """Whiten all descriptors jointly and write them back per-chip, in place.

    Stacks every chip's descriptors into one array, whitens and rescales to
    byte range, then slices the result back into `desc_list` (mutated and
    not returned).
    """
    # Local import: algos is only needed when whitening is enabled.
    import algos
    print('[fc2] * Whitening features')
    ax2_desc = np.vstack(desc_list)
    ax2_desc_white = algos.scale_to_byte(algos.whiten(ax2_desc))
    index = 0
    offset = 0
    # Walk the stacked array, handing each chip back its own rows.
    for cx in xrange(len(desc_list)):
        old_desc = desc_list[cx]
        print ('[fc2] * ' + util.info(old_desc, 'old_desc'))
        offset = len(old_desc)
        new_desc = ax2_desc_white[index:(index + offset)]
        desc_list[cx] = new_desc
        index += offset
# =======================================
# Main Script
# =======================================
@profile
def bigcache_feat_save(cache_dir, uid, ext, kpts_list, desc_list):
    """Write both feature lists to the big cache under the given uid/ext."""
    print('[fc2] Caching desc_list and kpts_list')
    # Save keypoints first, then descriptors (same order as before).
    for label, payload in (('kpts_list', kpts_list), ('desc_list', desc_list)):
        io.smart_save(payload, cache_dir, label, uid, ext)
@profile
def bigcache_feat_load(cache_dir, uid, ext):
    """Load kpts/desc lists from the big cache; None on a cache miss."""
    kpts_list = io.smart_load(cache_dir, 'kpts_list', uid, ext, can_fail=True)
    desc_list = io.smart_load(cache_dir, 'desc_list', uid, ext, can_fail=True)
    # Either array missing means the cache entry is unusable.
    if kpts_list is None or desc_list is None:
        return None
    # Arrays were saved from lists; convert back for downstream code.
    desc_list = desc_list.tolist()
    kpts_list = kpts_list.tolist()
    print('[fc2] Loaded kpts_list and desc_list from big cache')
    return kpts_list, desc_list
@profile
def sequential_feat_load(feat_cfg, feat_fpath_list):
    """Load precomputed feature .npz files one at a time.

    Returns (kpts_list, desc_list); optionally whitens descriptors when
    feat_cfg.whiten is set. Raises on IOError/MemoryError after printing
    diagnostics.
    """
    kpts_list = []
    desc_list = []
    # Debug loading (seems to use lots of memory)
    print('\n')
    try:
        nFeats = len(feat_fpath_list)
        prog_label = '[fc2] Loading feature: '
        mark_progress, end_progress = util.progress_func(nFeats, prog_label)
        for count, feat_path in enumerate(feat_fpath_list):
            try:
                npz = np.load(feat_path, mmap_mode=None)
            except IOError:
                # Print which path failed before re-raising.
                print('\n')
                util.checkpath(feat_path, verbose=True)
                print('IOError on feat_path=%r' % feat_path)
                raise
            # arr_0/arr_1 are the positional arrays written at save time.
            kpts = npz['arr_0']
            desc = npz['arr_1']
            npz.close()
            kpts_list.append(kpts)
            desc_list.append(desc)
            mark_progress(count)
        end_progress()
        print('[fc2] Finished load of individual kpts and desc')
    except MemoryError:
        # Report progress so the failing file can be identified.
        print('\n------------')
        print('[fc2] Out of memory')
        print('[fc2] Trying to read: %r' % feat_path)
        print('[fc2] len(kpts_list) = %d' % len(kpts_list))
        print('[fc2] len(desc_list) = %d' % len(desc_list))
        raise
    if feat_cfg.whiten:
        desc_list = whiten_features(desc_list)
    return kpts_list, desc_list
# Maps a preference string into a function
# (feature-type uid -> precompute implementation in extern_feat)
feat_type2_precompute = {
    'hesaff+sift': extern_feat.precompute_hesaff,
}
@profile
def _load_features_individualy(hs, cx_list):
    """Compute (if needed) and load per-chip features for `cx_list`.

    Features are computed in parallel to one .npz per chip id, then loaded
    sequentially. Returns (kpts_list, desc_list) aligned with cx_list.
    (Name typo 'individualy' kept: external callers use it.)
    """
    use_cache = not params.args.nocache_feats
    feat_cfg = hs.prefs.feat_cfg
    feat_dir = hs.dirs.feat_dir
    feat_uid = feat_cfg.get_uid()
    print('[fc2] Loading ' + feat_uid + ' individually')
    # Build feature paths
    rchip_fpath_list = [hs.cpaths.cx2_rchip_path[cx] for cx in iter(cx_list)]
    cid_list = hs.tables.cx2_cid[cx_list]
    # Filenames look like: cid<NN><feat_uid>.npz
    feat_fname_fmt = ''.join(('cid%d', feat_uid, '.npz'))
    feat_fpath_fmt = join(feat_dir, feat_fname_fmt)
    feat_fpath_list = [feat_fpath_fmt % cid for cid in cid_list]
    #feat_fname_list = [feat_fname_fmt % cid for cid in cid_list]
    # Compute features in parallel, saving them to disk
    kwargs_list = [feat_cfg.get_dict_args()] * len(rchip_fpath_list)
    pfc_kwargs = {
        'func': feat_type2_precompute[feat_cfg.feat_type],
        'arg_list': [rchip_fpath_list, feat_fpath_list, kwargs_list],
        'num_procs': params.args.num_procs,
        # lazy=True skips chips whose output file already exists.
        'lazy': use_cache,
    }
    parallel_compute(**pfc_kwargs)
    # Load precomputed features sequentially
    kpts_list, desc_list = sequential_feat_load(feat_cfg, feat_fpath_list)
    return kpts_list, desc_list
@profile
def _load_features_bigcache(hs, cx_list):
    """Load features for `cx_list` via the aggregated big cache.

    On a cache miss, falls back to per-chip loading and then populates the
    big cache. Returns (kpts_list, desc_list).
    """
    # args for smart load/save
    feat_cfg = hs.prefs.feat_cfg
    feat_uid = feat_cfg.get_uid()
    cache_dir = hs.dirs.cache_dir
    # Cache key also hashes the requested chip ids.
    sample_uid = util.hashstr_arr(cx_list, 'cids')
    bigcache_uid = '_'.join((feat_uid, sample_uid))
    ext = '.npy'
    loaded = bigcache_feat_load(cache_dir, bigcache_uid, ext)
    if loaded is not None:  # Cache Hit
        kpts_list, desc_list = loaded
    else:  # Cache Miss
        kpts_list, desc_list = _load_features_individualy(hs, cx_list)
        # Cache all the features
        bigcache_feat_save(cache_dir, bigcache_uid, ext, kpts_list, desc_list)
    return kpts_list, desc_list
@profile
@util.indent_decor('[fc2]')
def load_features(hs, cx_list=None, **kwargs):
    """Precompute and load features into hs.feats for the given chips.

    If cx_list is None all valid chips are loaded (and the big cache may be
    used). Fills hs.feats.cx2_kpts / cx2_desc in place and records feat_uid.
    """
    # TODO: There needs to be a fast way to ensure that everything is
    # already loaded. Same for cc2.
    print('=============================')
    print('[fc2] Precomputing and loading features: %r' % hs.get_db_name())
    #----------------
    # COMPUTE SETUP
    #----------------
    use_cache = not params.args.nocache_feats
    use_big_cache = use_cache and cx_list is None
    feat_cfg = hs.prefs.feat_cfg
    feat_uid = feat_cfg.get_uid()
    # If the feature configuration changed, previously loaded chip data is
    # stale and must be discarded before reloading.
    if hs.feats.feat_uid != '' and hs.feats.feat_uid != feat_uid:
        print('[fc2] Disagreement: OLD_feat_uid = %r' % hs.feats.feat_uid)
        print('[fc2] Disagreement: NEW_feat_uid = %r' % feat_uid)
        print('[fc2] Unloading all chip information')
        hs.unload_all()
        hs.load_chips(cx_list=cx_list)
    print('[fc2] feat_uid = %r' % feat_uid)
    # Get the list of chip features to load
    cx_list = hs.get_valid_cxs() if cx_list is None else cx_list
    if not np.iterable(cx_list):
        cx_list = [cx_list]
    print('[cc2] len(cx_list) = %r' % len(cx_list))
    if len(cx_list) == 0:
        return  # HACK
    cx_list = np.array(cx_list)  # HACK
    if use_big_cache:  # use only if all descriptors requested
        kpts_list, desc_list = _load_features_bigcache(hs, cx_list)
    else:
        kpts_list, desc_list = _load_features_individualy(hs, cx_list)
    # Extend the datastructure if needed
    list_size = max(cx_list) + 1
    util.ensure_list_size(hs.feats.cx2_kpts, list_size)
    util.ensure_list_size(hs.feats.cx2_desc, list_size)
    # Copy the values into the ChipPaths object
    for lx, cx in enumerate(cx_list):
        hs.feats.cx2_kpts[cx] = kpts_list[lx]
    for lx, cx in enumerate(cx_list):
        hs.feats.cx2_desc[cx] = desc_list[lx]
    hs.feats.feat_uid = feat_uid
    print('[fc2]=============================')
def clear_feature_cache(hs):
    """Remove every cached feature file matching the current feat_uid.

    Clears both the per-chip feature directory and the big-cache directory.
    """
    feat_cfg = hs.prefs.feat_cfg
    feat_dir = hs.dirs.feat_dir
    cache_dir = hs.dirs.cache_dir
    feat_uid = feat_cfg.get_uid()
    print('[fc2] clearing feature cache: %r' % feat_dir)
    util.remove_files_in_dir(feat_dir, '*' + feat_uid + '*', verbose=True, dryrun=False)
    util.remove_files_in_dir(cache_dir, '*' + feat_uid + '*', verbose=True, dryrun=False)
    # (dead trailing `pass` removed)
| Erotemic/hotspotter | hotspotter/feature_compute2.py | Python | apache-2.0 | 7,452 |
##############
# Standard #
##############
import io
import logging
import tempfile
##############
# External #
##############
import pytest
##############
# Module #
##############
import powermate
#Enable the logging level to be set from the command line
def pytest_addoption(parser):
    """Register the --log command line option for selecting the log level."""
    log_option = {'action': 'store',
                  'default': 'INFO',
                  'help': 'Set the level of the log'}
    parser.addoption('--log', **log_option)
#Fixture to automatically instantiate logging setup
@pytest.fixture(scope='session', autouse=True)
def set_level(pytestconfig):
    """Session fixture: configure root logging from the --log option.

    Raises ValueError if the supplied name is not a logging level.
    """
    # Read user input logging level via the injected pytestconfig fixture.
    # The previous code read the deprecated global `pytest.config`, which is
    # removed in modern pytest, and left the fixture argument unused.
    log_level = getattr(logging, pytestconfig.getoption('--log'), None)
    # Report invalid logging level
    if not isinstance(log_level, int):
        raise ValueError("Invalid log level : {}".format(log_level))
    # Create basic configuration
    logging.basicConfig(level=log_level, format='%(message)s')
@pytest.fixture(scope='module')
def pseudo_socket():
    """Yield a powermate Socket backed by in-memory buffers.

    A temporary file provides a real path for Socket(), then the device
    streams are swapped for BytesIO objects so tests never touch hardware.
    """
    with tempfile.NamedTemporaryFile() as tmp:
        s = powermate.event.Socket(tmp.name)
        # Replace the real input/output streams with in-memory buffers.
        s._input = io.BytesIO()
        s._output = io.BytesIO()
        yield s
| teddyrendahl/powermate | tests/conftest.py | Python | apache-2.0 | 1,119 |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# Lint as: python3
"""Simple scrambling test generator."""
import copy
import random
from typing import List, Text, Optional
from lit_nlp.api import components as lit_components
from lit_nlp.api import dataset as lit_dataset
from lit_nlp.api import model as lit_model
from lit_nlp.api import types
from lit_nlp.lib import utils
# Short alias for the LIT JSON-dict type used throughout this module.
JsonDict = types.JsonDict
# Config key under which the UI passes the list of fields to scramble.
FIELDS_TO_SCRAMBLE_KEY = 'Fields to scramble'
class Scrambler(lit_components.Generator):
    """Scramble all words in an example to generate a new example."""

    @staticmethod
    def scramble(val: Text) -> Text:
        """Return `val` with its space-separated words in random order."""
        tokens = val.split(' ')
        random.shuffle(tokens)
        return ' '.join(tokens)

    def config_spec(self) -> types.Spec:
        """Expose a field selector so the user can pick which text fields to scramble."""
        field_matcher = types.MultiFieldMatcher(
            spec='input',
            types=['TextSegment'],
            select_all=True)
        return {FIELDS_TO_SCRAMBLE_KEY: field_matcher}

    def generate(self,
                 example: JsonDict,
                 model: lit_model.Model,
                 dataset: lit_dataset.Dataset,
                 config: Optional[JsonDict] = None) -> List[JsonDict]:
        """Naively scramble all words in an example.

        Note: even if more than one field is to be scrambled, only a single
        example is produced, unlike other generators which emit one example
        per field.

        Args:
          example: the example used as the basis of the generated example.
          model: the model (unused).
          dataset: the dataset.
          config: user-provided config properties.

        Returns:
          a list containing the single generated example (possibly empty).
        """
        del model  # Unused.
        config = config or {}
        # No fields selected (or config key missing): nothing to generate.
        fields_to_scramble = list(config.get(FIELDS_TO_SCRAMBLE_KEY, []))
        if not fields_to_scramble:
            return []
        # TODO(lit-dev): move this to generate_all(), so we read the spec once
        # instead of on every example.
        text_keys = utils.find_spec_keys(dataset.spec(), types.TextSegment)
        if not text_keys:
            return []
        selected_keys = [key for key in text_keys if key in fields_to_scramble]
        scrambled = copy.deepcopy(example)
        for key in selected_keys:
            scrambled[key] = self.scramble(example[key])
        return [scrambled]
| PAIR-code/lit | lit_nlp/components/scrambler.py | Python | apache-2.0 | 2,899 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# SOCKS5 UDP Request
# +----+------+------+----------+----------+----------+
# |RSV | FRAG | ATYP | DST.ADDR | DST.PORT | DATA |
# +----+------+------+----------+----------+----------+
# | 2 | 1 | 1 | Variable | 2 | Variable |
# +----+------+------+----------+----------+----------+
# SOCKS5 UDP Response
# +----+------+------+----------+----------+----------+
# |RSV | FRAG | ATYP | DST.ADDR | DST.PORT | DATA |
# +----+------+------+----------+----------+----------+
# | 2 | 1 | 1 | Variable | 2 | Variable |
# +----+------+------+----------+----------+----------+
# shadowsocks UDP Request (before encrypted)
# +------+----------+----------+----------+
# | ATYP | DST.ADDR | DST.PORT | DATA |
# +------+----------+----------+----------+
# | 1 | Variable | 2 | Variable |
# +------+----------+----------+----------+
# shadowsocks UDP Response (before encrypted)
# +------+----------+----------+----------+
# | ATYP | DST.ADDR | DST.PORT | DATA |
# +------+----------+----------+----------+
# | 1 | Variable | 2 | Variable |
# +------+----------+----------+----------+
# shadowsocks UDP Request and Response (after encrypted)
# +-------+--------------+
# | IV | PAYLOAD |
# +-------+--------------+
# | Fixed | Variable |
# +-------+--------------+
# HOW TO NAME THINGS
# ------------------
# `dest` means destination server, which is from DST fields in the SOCKS5
# request
# `local` means local server of shadowsocks
# `remote` means remote server of shadowsocks
# `client` means UDP clients that connects to other servers
# `server` means the UDP server that handles user requests
from __future__ import absolute_import, division, print_function, \
with_statement
import time
import socket
import logging
import struct
import errno
import random
import binascii
import traceback
import threading
from shadowsocks import encrypt, obfs, eventloop, lru_cache, common, shell
from shadowsocks.common import pre_parse_header, parse_header, pack_addr
# for each handler, we have 2 stream directions:
# upstream: from client to server direction
# read local and write to remote
# downstream: from server to client direction
# read remote and write to local
# Stream direction identifiers (see the comment block above).
STREAM_UP = 0
STREAM_DOWN = 1
# for each stream, it's waiting for reading, or writing, or both
WAIT_STATUS_INIT = 0
WAIT_STATUS_READING = 1
WAIT_STATUS_WRITING = 2
WAIT_STATUS_READWRITING = WAIT_STATUS_READING | WAIT_STATUS_WRITING
# Maximum UDP datagram size we ever read.
BUF_SIZE = 65536
DOUBLE_SEND_BEG_IDS = 16
POST_MTU_MIN = 500
POST_MTU_MAX = 1400
SENDING_WINDOW_SIZE = 8192
# Connection life-cycle stages.
STAGE_INIT = 0
STAGE_RSP_ID = 1
STAGE_DNS = 2
STAGE_CONNECTING = 3
STAGE_STREAM = 4
STAGE_DESTROYED = -1
# Protocol command codes.
CMD_CONNECT = 0
CMD_RSP_CONNECT = 1
CMD_CONNECT_REMOTE = 2
CMD_RSP_CONNECT_REMOTE = 3
CMD_POST = 4
CMD_SYN_STATUS = 5
CMD_POST_64 = 6
CMD_SYN_STATUS_64 = 7
CMD_DISCONNECT = 8
CMD_VER_STR = b"\x08"
# Response state bytes returned to peers.
RSP_STATE_EMPTY = b""
RSP_STATE_REJECT = b"\x00"
RSP_STATE_CONNECTED = b"\x01"
RSP_STATE_CONNECTEDREMOTE = b"\x02"
RSP_STATE_ERROR = b"\x03"
RSP_STATE_DISCONNECT = b"\x04"
RSP_STATE_REDIRECT = b"\x05"
def client_key(source_addr, server_af):
    """Build the cache key for a client: 'ip:port:af'.

    Notice this is the *server* address family, not the destination's.
    """
    ip, port = source_addr[0], source_addr[1]
    return '{0}:{1}:{2:d}'.format(ip, port, server_af)
class UDPRelay(object):
    """UDP relay server/client for shadowsocks(R).

    In local mode it accepts SOCKS5 UDP requests and forwards them encrypted
    to the remote server; in server mode it decrypts requests and relays them
    to their destinations, tracking per-user transfer statistics.
    """

    def __init__(self, config, dns_resolver, is_local, stat_callback=None, stat_counter=None):
        self._config = config
        if config.get('connect_verbose_info', 0) > 0:
            common.connect_log = logging.info
        # Local mode listens on the local address and forwards to the server;
        # server mode listens on the server address directly.
        if is_local:
            self._listen_addr = config['local_address']
            self._listen_port = config['local_port']
            self._remote_addr = config['server']
            self._remote_port = config['server_port']
        else:
            self._listen_addr = config['server']
            self._listen_port = config['server_port']
            self._remote_addr = None
            self._remote_port = None
        self._dns_resolver = dns_resolver
        self._password = common.to_bytes(config['password'])
        self._method = config['method']
        self._timeout = config['timeout']
        self._is_local = is_local
        self._udp_cache_size = config['udp_cache']
        # LRU caches of upstream client sockets, keyed by client_key().
        self._cache = lru_cache.LRUCache(timeout=config['udp_timeout'],
                                         close_callback=self._close_client_pair)
        # Short-lived cache for DNS-query sockets (closed after one response).
        self._cache_dns_client = lru_cache.LRUCache(timeout=10,
                                                    close_callback=self._close_client_pair)
        self._client_fd_to_server_addr = {}
        #self._dns_cache = lru_cache.LRUCache(timeout=1800)
        self._eventloop = None
        self._closed = False
        # Aggregate and per-user transfer counters (bytes).
        self.server_transfer_ul = 0
        self.server_transfer_dl = 0
        self.server_users = {}
        self.server_user_transfer_ul = {}
        self.server_user_transfer_dl = {}
        # Multi-user protocols parse users out of protocol_param.
        if common.to_str(config['protocol']) in obfs.mu_protocol():
            self._update_users(None, None)
        self.protocol_data = obfs.obfs(config['protocol']).init_data()
        self._protocol = obfs.obfs(config['protocol'])
        server_info = obfs.server_info(self.protocol_data)
        server_info.host = self._listen_addr
        server_info.port = self._listen_port
        server_info.users = self.server_users
        server_info.protocol_param = config['protocol_param']
        server_info.obfs_param = ''
        server_info.iv = b''
        server_info.recv_iv = b''
        server_info.key_str = common.to_bytes(config['password'])
        server_info.key = encrypt.encrypt_key(self._password, self._method)
        server_info.head_len = 30
        server_info.tcp_mss = 1452
        server_info.buffer_size = BUF_SIZE
        server_info.overhead = 0
        self._protocol.set_server_info(server_info)
        self._sockets = set()
        self._fd_to_handlers = {}
        self._reqid_to_hd = {}
        self._data_to_write_to_server_socket = []
        self._timeout_cache = lru_cache.LRUCache(timeout=self._timeout,
                                                 close_callback=self._close_tcp_client)
        # Optional outgoing bind addresses (IPv4/IPv6) with an ignore list.
        self._bind = config.get('out_bind', '')
        self._bindv6 = config.get('out_bindv6', '')
        self._ignore_bind_list = config.get('ignore_bind', [])
        if 'forbidden_ip' in config:
            self._forbidden_iplist = config['forbidden_ip']
        else:
            self._forbidden_iplist = None
        if 'forbidden_port' in config:
            self._forbidden_portset = config['forbidden_port']
        else:
            self._forbidden_portset = None
        addrs = socket.getaddrinfo(self._listen_addr, self._listen_port, 0,
                                   socket.SOCK_DGRAM, socket.SOL_UDP)
        if len(addrs) == 0:
            raise Exception("can't get addrinfo for %s:%d" %
                            (self._listen_addr, self._listen_port))
        af, socktype, proto, canonname, sa = addrs[0]
        server_socket = socket.socket(af, socktype, proto)
        server_socket.bind((self._listen_addr, self._listen_port))
        server_socket.setblocking(False)
        # Enlarge kernel buffers to tolerate bursts of datagrams.
        server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1024 * 1024)
        server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 1024 * 1024)
        self._server_socket = server_socket
        self._stat_callback = stat_callback

    def _get_a_server(self):
        """Pick a (server, port) pair, choosing randomly from lists."""
        server = self._config['server']
        server_port = self._config['server_port']
        if type(server_port) == list:
            server_port = random.choice(server_port)
        if type(server) == list:
            server = random.choice(server)
        logging.debug('chosen server: %s:%d', server, server_port)
        return server, server_port

    def get_ud(self):
        """Return (upload, download) aggregate byte counters."""
        return (self.server_transfer_ul, self.server_transfer_dl)

    def get_users_ud(self):
        """Return copies of the per-user (upload, download) counter dicts."""
        ret = (self.server_user_transfer_ul.copy(), self.server_user_transfer_dl.copy())
        return ret

    def _update_users(self, protocol_param, acl):
        """Parse 'param#id1:pw1,id2:pw2' style protocol_param into users.

        Users present in the param but missing from `acl` (when given) are
        removed instead of added.
        """
        if protocol_param is None:
            protocol_param = self._config['protocol_param']
        param = common.to_bytes(protocol_param).split(b'#')
        if len(param) == 2:
            user_list = param[1].split(b',')
            if user_list:
                for user in user_list:
                    items = user.split(b':')
                    if len(items) == 2:
                        user_int_id = int(items[0])
                        # uid is the little-endian 4-byte packed user id.
                        uid = struct.pack('<I', user_int_id)
                        if acl is not None and user_int_id not in acl:
                            self.del_user(uid)
                        else:
                            passwd = items[1]
                            self.add_user(uid, {'password':passwd})

    def _update_user(self, id, passwd):
        """Add/replace a single user by integer id."""
        uid = struct.pack('<I', id)
        self.add_user(uid, passwd)

    def update_users(self, users):
        """Replace the user table with `users` ({int_id: cfg})."""
        # Drop users no longer present, then (re)add the current set.
        for uid in list(self.server_users.keys()):
            id = struct.unpack('<I', uid)[0]
            if id not in users:
                self.del_user(uid)
        for id in users:
            uid = struct.pack('<I', id)
            self.add_user(uid, users[id])

    def add_user(self, uid, cfg):  # user: binstr[4], passwd: str
        passwd = cfg['password']
        self.server_users[uid] = common.to_bytes(passwd)

    def del_user(self, uid):
        if uid in self.server_users:
            del self.server_users[uid]

    def add_transfer_u(self, user, transfer):
        """Account `transfer` uploaded bytes to `user` (None = aggregate).

        When a user is known, any pending aggregate bytes are folded into
        that user's counter as well.
        """
        if user is None:
            self.server_transfer_ul += transfer
        else:
            if user not in self.server_user_transfer_ul:
                self.server_user_transfer_ul[user] = 0
            self.server_user_transfer_ul[user] += transfer + self.server_transfer_ul
            self.server_transfer_ul = 0

    def add_transfer_d(self, user, transfer):
        """Account `transfer` downloaded bytes to `user` (None = aggregate)."""
        if user is None:
            self.server_transfer_dl += transfer
        else:
            if user not in self.server_user_transfer_dl:
                self.server_user_transfer_dl[user] = 0
            self.server_user_transfer_dl[user] += transfer + self.server_transfer_dl
            self.server_transfer_dl = 0

    def _close_client_pair(self, client_pair):
        """LRU close callback: unpack (socket, uid) and close the socket."""
        client, uid = client_pair
        self._close_client(client)

    def _close_client(self, client):
        """Close an upstream client socket and unregister it from the loop."""
        if hasattr(client, 'close'):
            if not self._is_local:
                if client.fileno() in self._client_fd_to_server_addr:
                    logging.debug('close_client: %s' %
                                  (self._client_fd_to_server_addr[client.fileno()],))
                else:
                    client.info('close_client')
            self._sockets.remove(client.fileno())
            self._eventloop.remove(client)
            del self._client_fd_to_server_addr[client.fileno()]
            client.close()
        else:
            # just an address
            client.info('close_client pass %s' % client)
            pass

    def _handel_protocol_error(self, client_address, ogn_data):
        """Log an unparsable datagram (name typo kept for compatibility)."""
        #raise Exception('can not parse header')
        logging.warn("Protocol ERROR, UDP ogn data %s from %s:%d" % (binascii.hexlify(ogn_data), client_address[0], client_address[1]))

    def _socket_bind_addr(self, sock, af):
        """Bind an outgoing socket to the configured out_bind address, if any."""
        bind_addr = ''
        if self._bind and af == socket.AF_INET:
            bind_addr = self._bind
        elif self._bindv6 and af == socket.AF_INET6:
            bind_addr = self._bindv6
        bind_addr = bind_addr.replace("::ffff:", "")
        if bind_addr in self._ignore_bind_list:
            bind_addr = None
        if bind_addr:
            local_addrs = socket.getaddrinfo(bind_addr, 0, 0, socket.SOCK_DGRAM, socket.SOL_UDP)
            if local_addrs[0][0] == af:
                logging.debug("bind %s" % (bind_addr,))
                try:
                    sock.bind((bind_addr, 0))
                except Exception as e:
                    # Best-effort: fall back to the default source address.
                    logging.warn("bind %s fail" % (bind_addr,))

    def _handle_server(self):
        """Read one datagram from the listening socket and dispatch it.

        Local mode strips the 3-byte SOCKS5 UDP prefix; server mode decrypts
        and runs protocol post-decrypt, extracting an optional user id.
        """
        server = self._server_socket
        data, r_addr = server.recvfrom(BUF_SIZE)
        ogn_data = data
        if not data:
            logging.debug('UDP handle_server: data is empty')
        if self._stat_callback:
            self._stat_callback(self._listen_port, len(data))
        uid = None
        if self._is_local:
            # SOCKS5 UDP: RSV(2) FRAG(1) then the request; fragments unsupported.
            frag = common.ord(data[2])
            if frag != 0:
                logging.warn('drop a message since frag is not 0')
                return
            else:
                data = data[3:]
        else:
            ref_iv = [0]
            data = encrypt.encrypt_all_iv(self._protocol.obfs.server_info.key, self._method, 0, data, ref_iv)
            # decrypt data
            if not data:
                logging.debug('UDP handle_server: data is empty after decrypt')
                return
            self._protocol.obfs.server_info.recv_iv = ref_iv[0]
            data, uid = self._protocol.server_udp_post_decrypt(data)
        #logging.info("UDP data %s" % (binascii.hexlify(data),))
        if not self._is_local:
            data = pre_parse_header(data)
            if data is None:
                return
        try:
            header_result = parse_header(data)
        except:
            self._handel_protocol_error(r_addr, ogn_data)
            return
        if header_result is None:
            self._handel_protocol_error(r_addr, ogn_data)
            return
        connecttype, addrtype, dest_addr, dest_port, header_length = header_result
        if self._is_local:
            # Local mode always forwards to a shadowsocks server by hostname.
            addrtype = 3
            server_addr, server_port = self._get_a_server()
        else:
            server_addr, server_port = dest_addr, dest_port
        if (addrtype & 7) == 3:
            # Hostname address type: resolve asynchronously unless it is
            # already a literal IP.
            af = common.is_ip(server_addr)
            if af == False:
                handler = common.UDPAsyncDNSHandler((data, r_addr, uid, header_length))
                handler.resolve(self._dns_resolver, (server_addr, server_port), self._handle_server_dns_resolved)
            else:
                self._handle_server_dns_resolved("", (server_addr, server_port), server_addr, (data, r_addr, uid, header_length))
        else:
            self._handle_server_dns_resolved("", (server_addr, server_port), server_addr, (data, r_addr, uid, header_length))

    def _handle_server_dns_resolved(self, error, remote_addr, server_addr, params):
        """Continue _handle_server once the destination is an IP address.

        Creates (or reuses) an upstream UDP socket per client, applies the
        forbidden ip/port lists, encrypts (local) or strips the header
        (server), and forwards the payload.
        """
        if error:
            return
        data, r_addr, uid, header_length = params
        user_id = self._listen_port
        try:
            server_port = remote_addr[1]
            addrs = socket.getaddrinfo(server_addr, server_port, 0,
                                       socket.SOCK_DGRAM, socket.SOL_UDP)
            if not addrs:  # drop
                return
            af, socktype, proto, canonname, sa = addrs[0]
            server_addr = sa[0]
            key = client_key(r_addr, af)
            client_pair = self._cache.get(key, None)
            if client_pair is None:
                client_pair = self._cache_dns_client.get(key, None)
            if client_pair is None:
                # No cached socket for this client: create one.
                if self._forbidden_iplist:
                    if common.to_str(sa[0]) in self._forbidden_iplist:
                        logging.debug('IP %s is in forbidden list, drop' % common.to_str(sa[0]))
                        # drop
                        return
                if self._forbidden_portset:
                    if sa[1] in self._forbidden_portset:
                        logging.debug('Port %d is in forbidden list, reject' % sa[1])
                        # drop
                        return
                client = socket.socket(af, socktype, proto)
                client_uid = uid
                client.setblocking(False)
                self._socket_bind_addr(client, af)
                is_dns = False
                # Heuristic: payload bytes match a standard DNS query header.
                if len(data) > header_length + 13 and data[header_length + 4 : header_length + 12] == b"\x00\x01\x00\x00\x00\x00\x00\x00":
                    is_dns = True
                else:
                    pass
                if sa[1] == 53 and is_dns: #DNS
                    logging.debug("DNS query %s from %s:%d" % (common.to_str(sa[0]), r_addr[0], r_addr[1]))
                    self._cache_dns_client[key] = (client, uid)
                else:
                    self._cache[key] = (client, uid)
                self._client_fd_to_server_addr[client.fileno()] = (r_addr, af)
                self._sockets.add(client.fileno())
                self._eventloop.add(client, eventloop.POLL_IN, self)
                logging.debug('UDP port %5d sockets %d' % (self._listen_port, len(self._sockets)))
                if uid is not None:
                    user_id = struct.unpack('<I', client_uid)[0]
            else:
                client, client_uid = client_pair
            # Evict stale entries beyond the configured cache sizes.
            self._cache.clear(self._udp_cache_size)
            self._cache_dns_client.clear(16)
            if self._is_local:
                # Local mode: protocol pre-encrypt, then encrypt with a new IV.
                ref_iv = [encrypt.encrypt_new_iv(self._method)]
                self._protocol.obfs.server_info.iv = ref_iv[0]
                data = self._protocol.client_udp_pre_encrypt(data)
                #logging.debug("%s" % (binascii.hexlify(data),))
                data = encrypt.encrypt_all_iv(self._protocol.obfs.server_info.key, self._method, 1, data, ref_iv)
                if not data:
                    return
            else:
                # Server mode: forward only the payload after the header.
                data = data[header_length:]
                if not data:
                    return
        except Exception as e:
            shell.print_exception(e)
            logging.error("exception from user %d" % (user_id,))
        try:
            client.sendto(data, (server_addr, server_port))
            self.add_transfer_u(client_uid, len(data))
            if client_pair is None:  # new request
                addr, port = client.getsockname()[:2]
                common.connect_log('UDP data to %s(%s):%d from %s:%d by user %d' %
                                   (common.to_str(remote_addr[0]), common.to_str(server_addr), server_port, addr, port, user_id))
        except IOError as e:
            err = eventloop.errno_from_exception(e)
            logging.warning('IOError sendto %s:%d by user %d' % (server_addr, server_port, user_id))
            if err in (errno.EINPROGRESS, errno.EAGAIN):
                pass
            else:
                shell.print_exception(e)

    def _handle_client(self, sock):
        """Handle a response datagram arriving on an upstream client socket.

        Server mode prepends the origin address and encrypts; local mode
        decrypts and prepends the 3-byte SOCKS5 UDP header. The result is
        written back to the original requester.
        """
        data, r_addr = sock.recvfrom(BUF_SIZE)
        if not data:
            logging.debug('UDP handle_client: data is empty')
            return
        if self._stat_callback:
            self._stat_callback(self._listen_port, len(data))
        client_addr = self._client_fd_to_server_addr.get(sock.fileno())
        client_uid = None
        if client_addr:
            key = client_key(client_addr[0], client_addr[1])
            client_pair = self._cache.get(key, None)
            client_dns_pair = self._cache_dns_client.get(key, None)
            if client_pair:
                client, client_uid = client_pair
            elif client_dns_pair:
                client, client_uid = client_dns_pair
        if not self._is_local:
            addrlen = len(r_addr[0])
            if addrlen > 255:
                # drop
                return
            data = pack_addr(r_addr[0]) + struct.pack('>H', r_addr[1]) + data
            ref_iv = [encrypt.encrypt_new_iv(self._method)]
            self._protocol.obfs.server_info.iv = ref_iv[0]
            data = self._protocol.server_udp_pre_encrypt(data, client_uid)
            response = encrypt.encrypt_all_iv(self._protocol.obfs.server_info.key, self._method, 1,
                                              data, ref_iv)
            if not response:
                return
        else:
            ref_iv = [0]
            data = encrypt.encrypt_all_iv(self._protocol.obfs.server_info.key, self._method, 0,
                                          data, ref_iv)
            if not data:
                return
            self._protocol.obfs.server_info.recv_iv = ref_iv[0]
            data = self._protocol.client_udp_post_decrypt(data)
            header_result = parse_header(data)
            if header_result is None:
                return
            #connecttype, dest_addr, dest_port, header_length = header_result
            #logging.debug('UDP handle_client %s:%d to %s:%d' % (common.to_str(r_addr[0]), r_addr[1], dest_addr, dest_port))
            response = b'\x00\x00\x00' + data
        if client_addr:
            if client_uid:
                self.add_transfer_d(client_uid, len(response))
            else:
                self.server_transfer_dl += len(response)
            self.write_to_server_socket(response, client_addr[0])
            if client_dns_pair:
                # DNS sockets are one-shot: close after the first response.
                logging.debug("remove dns client %s:%d" % (client_addr[0][0], client_addr[0][1]))
                del self._cache_dns_client[key]
                self._close_client(client_dns_pair[0])
        else:
            # this packet is from somewhere else we know
            # simply drop that packet
            pass

    def write_to_server_socket(self, data, addr):
        """Send `data` to `addr` on the listening socket, draining any queue."""
        uncomplete = False
        retry = 0
        try:
            self._server_socket.sendto(data, addr)
            data = None
            while self._data_to_write_to_server_socket:
                data_buf = self._data_to_write_to_server_socket[0]
                retry = data_buf[1] + 1
                del self._data_to_write_to_server_socket[0]
                data, addr = data_buf[0]
                self._server_socket.sendto(data, addr)
        except (OSError, IOError) as e:
            error_no = eventloop.errno_from_exception(e)
            uncomplete = True
            if error_no in (errno.EWOULDBLOCK,):
                pass
            else:
                shell.print_exception(e)
                return False
        # Retry queueing is intentionally disabled (kept for reference):
        #if uncomplete and data is not None and retry < 3:
        #    self._data_to_write_to_server_socket.append([(data, addr), retry])
        #'''

    def add_to_loop(self, loop):
        """Register the listening socket and periodic handler with the loop."""
        if self._eventloop:
            raise Exception('already add to loop')
        if self._closed:
            raise Exception('already closed')
        self._eventloop = loop
        server_socket = self._server_socket
        self._eventloop.add(server_socket,
                            eventloop.POLL_IN | eventloop.POLL_ERR, self)
        loop.add_periodic(self.handle_periodic)

    def remove_handler(self, client):
        if hash(client) in self._timeout_cache:
            del self._timeout_cache[hash(client)]

    def update_activity(self, client):
        # Touching the LRU entry refreshes the client's timeout.
        self._timeout_cache[hash(client)] = client

    def _sweep_timeout(self):
        self._timeout_cache.sweep()

    def _close_tcp_client(self, client):
        """Timeout callback for handler objects tracked in _timeout_cache."""
        if client.remote_address:
            logging.debug('timed out: %s:%d' %
                          client.remote_address)
        else:
            logging.debug('timed out')
        client.destroy()
        client.destroy_local()

    def handle_event(self, sock, fd, event):
        """Event-loop callback: route readiness events to the right handler."""
        if sock == self._server_socket:
            if event & eventloop.POLL_ERR:
                logging.error('UDP server_socket err')
            try:
                self._handle_server()
            except Exception as e:
                shell.print_exception(e)
                if self._config['verbose']:
                    traceback.print_exc()
        elif sock and (fd in self._sockets):
            if event & eventloop.POLL_ERR:
                logging.error('UDP client_socket err')
            try:
                self._handle_client(sock)
            except Exception as e:
                shell.print_exception(e)
                if self._config['verbose']:
                    traceback.print_exc()
        else:
            if sock:
                handler = self._fd_to_handlers.get(fd, None)
                if handler:
                    handler.handle_event(sock, event)
            else:
                logging.warn('poll removed fd')

    def handle_periodic(self):
        """Periodic maintenance: sweep caches, or finish shutdown if closed."""
        if self._closed:
            self._cache.clear(0)
            self._cache_dns_client.clear(0)
            if self._eventloop:
                self._eventloop.remove_periodic(self.handle_periodic)
                self._eventloop.remove(self._server_socket)
            if self._server_socket:
                self._server_socket.close()
                self._server_socket = None
                logging.info('closed UDP port %d', self._listen_port)
        else:
            before_sweep_size = len(self._sockets)
            self._cache.sweep()
            self._cache_dns_client.sweep()
            if before_sweep_size != len(self._sockets):
                logging.debug('UDP port %5d sockets %d' % (self._listen_port, len(self._sockets)))
            self._sweep_timeout()

    def close(self, next_tick=False):
        """Shut down the relay; with next_tick=True cleanup is deferred to
        handle_periodic."""
        logging.debug('UDP close')
        self._closed = True
        if not next_tick:
            if self._eventloop:
                self._eventloop.remove_periodic(self.handle_periodic)
                self._eventloop.remove(self._server_socket)
            self._server_socket.close()
            self._cache.clear(0)
            self._cache_dns_client.clear(0)
| kaneawk/shadowsocksr | shadowsocks/udprelay.py | Python | apache-2.0 | 25,937 |
import logging
from ..directives import directives_by_section
logger = logging.getLogger(__name__)
class Stanza(object):
    """
    Subclass for config file stanzas.

    In an HAProxy config file, a stanza is in the form of::

      stanza header
          directive
          directive
          directive

    Stanza instances have a `header` attribute for the header and a list of
    `lines`, one for each directive line.
    """

    def __init__(self, section_name):
        self.section_name = section_name
        self.header = section_name
        self.lines = []

    def add_lines(self, lines):
        """
        Simple helper method for adding multiple lines at once.
        """
        for line in lines:
            self.add_line(line)

    def add_line(self, line):
        """
        Adds a given line string to the list of lines, validating the line
        first.  Invalid lines are logged and silently dropped.
        """
        if not self.is_valid_line(line):
            # Logger.warn() is a deprecated alias; use warning() instead.
            logger.warning(
                "Invalid line for %s section: '%s'",
                self.section_name, line
            )
            return
        self.lines.append(line)

    def is_valid_line(self, line):
        """
        Validates a given line against the associated "section" (e.g. 'global'
        or 'frontend', etc.) of a stanza.

        If a line represents a directive that shouldn't be within the stanza
        it is rejected. See the `directives.json` file for a condensed look
        at valid directives based on section.
        """
        adjusted_line = line.strip().lower()
        # Generator expression avoids materializing an intermediate list.
        return any(
            adjusted_line.startswith(directive)
            for directive in directives_by_section[self.section_name]
        )

    def __str__(self):
        """
        Returns the string representation of a Stanza, meant for use in
        config file content.

        If no lines are defined an empty string is returned.
        """
        if not self.lines:
            return ""
        return self.header + "\n" + "\n".join(
            "\t" + line for line in self.lines
        )
| wglass/lighthouse | lighthouse/haproxy/stanzas/stanza.py | Python | apache-2.0 | 2,087 |
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import logging
from pants.backend.jvm.targets.exportable_jvm_library import ExportableJvmLibrary
from pants.base.payload import Payload
from pants.base.payload_field import PrimitiveField
logger = logging.getLogger(__name__)
class JavaWireLibrary(ExportableJvmLibrary):
  """Generates a stub Java library from protobuf IDL files."""

  def __init__(self,
               payload=None,
               service_writer=None,
               service_writer_options=None,
               roots=None,
               registry_class=None,
               enum_options=None,
               no_options=None,
               **kwargs):
    """
    :param string service_writer: the name of the class to pass as the --service_writer option to
      the Wire compiler.
    :param list service_writer_options: A list of options to pass to the service writer
    :param list roots: passed through to the --roots option of the Wire compiler
    :param string registry_class: fully qualified class name of RegistryClass to create. If in
      doubt, specify com.squareup.wire.SimpleServiceWriter
    :param list enum_options: list of enums to pass to as the --enum-enum_options option, # optional
    :param boolean no_options: boolean that determines if --no_options flag is passed
    """
    # Record all wire-compiler knobs as payload fields so they participate
    # in fingerprinting/invalidation.
    payload = payload or Payload()
    payload.add_fields({
      'service_writer': PrimitiveField(service_writer or None),
      'service_writer_options': PrimitiveField(service_writer_options or []),
      'roots': PrimitiveField(roots or []),
      'registry_class': PrimitiveField(registry_class or None),
      'enum_options': PrimitiveField(enum_options or []),
      'no_options': PrimitiveField(no_options or False),
    })
    if service_writer_options:
      # Logger.warn() is a deprecated alias; use warning() instead.
      logger.warning('The service_writer_options flag is ignored.')
    super(JavaWireLibrary, self).__init__(payload=payload, **kwargs)
    self.add_labels('codegen')
| digwanderlust/pants | src/python/pants/backend/codegen/targets/java_wire_library.py | Python | apache-2.0 | 2,200 |
""" a custom layer for 'power', maybe we should implement this in standard way.
more info can be found here: http://caffe.berkeleyvision.org/tutorial/layers/power.html
"""
from .register import register
def power_shape(input_shape, shape=None):
    """Infer the output shape of a caffe 'Power' layer.

    Power is an elementwise operation, so the output shape is exactly the
    input shape.

    Args:
        @input_shape (list of num): a list of number which represents the input shape
        @shape: unused; kept for interface compatibility

    Returns:
        @output_shape (list of num): a list of numbers represent the output shape
    """
    return input_shape
def power_layer(input, name, power=1.0, scale=1.0, shift=0.0):
    """Build a layer of type 'Power' using fluid: (scale * x + shift) ** power.

    Args:
        @input (variables): input fluid variable for this layer
        @name (str): name for this layer
        @power (float): parameter from caffe's Power layer
        @scale (float): parameter from caffe's Power layer
        @shift (float): parameter from caffe's Power layer

    Returns:
        output (variable): output variable for this layer
    """
    import paddle.fluid as fluid

    # Apply the affine part first (scale * x, then + shift), then exponentiate.
    scaled = fluid.layers.scale(
        input, scale=scale, bias=shift, bias_after_scale=True)
    return fluid.layers.pow(scaled, factor=power)
# Expose this implementation to the converter under caffe's layer kind 'Power'.
register(kind='Power', shape=power_shape, layer=power_layer)
| kuke/models | fluid/PaddleCV/caffe2fluid/kaffe/custom_layers/power.py | Python | apache-2.0 | 1,292 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import datetime
import hashlib
from girder import events
from girder.api import access
from girder.api.describe import Description, autoDescribeRoute
from girder.api.rest import boundHandler, RestException
from girder.api.v1.collection import Collection
from girder.constants import AccessType, TokenScope
from girder.models.model_base import ModelImporter
@access.user(scope=TokenScope.DATA_READ)
@boundHandler
@autoDescribeRoute(
    Description('Accept a collection\'s Terms of Use for the current user.')
    .modelParam('id', model='collection', level=AccessType.READ)
    .param('termsHash', 'The SHA-256 hash of this collection\'s terms, encoded in hexadecimal.')
)
def acceptCollectionTerms(self, collection, termsHash):
    # Record the current user's acceptance of a collection's Terms of Use.
    # The client must echo back the SHA-256 of the terms it displayed, which
    # guards against accepting terms that changed since they were shown.
    if not collection.get('terms'):
        raise RestException('This collection currently has no terms.')
    # termsHash should be encoded to a bytes object, but storing bytes into MongoDB behaves
    # differently in Python 2 vs 3. Additionally, serializing a bytes to JSON behaves differently
    # in Python 2 vs 3. So, just keep it as a unicode (or ordinary Python 2 str).
    realTermsHash = hashlib.sha256(collection['terms'].encode('utf-8')).hexdigest()
    if termsHash != realTermsHash:
        # This "proves" that the client has at least accessed the terms
        raise RestException(
            'The submitted "termsHash" does not correspond to the collection\'s current terms.')
    # Store acceptance under the user document, keyed by collection id.
    ModelImporter.model('user').update(
        {'_id': self.getCurrentUser()['_id']},
        {'$set': {
            'terms.collection.%s' % collection['_id']: {
                'hash': termsHash,
                # NOTE(review): naive local timestamp; presumably UTC is
                # intended — confirm against the rest of the codebase.
                'accepted': datetime.datetime.now()
            }
        }}
    )
def afterPostPutCollection(event):
    """Post-save hook for collection create/update: persist a 'terms' param.

    Only fires after the wrapped route succeeded (no access / invalid-id
    exceptions were thrown), so the response document is known to exist.
    """
    params = event.info['params']
    if 'terms' not in params:
        return
    terms = params['terms']
    response = event.info['returnVal']
    ModelImporter.model('collection').update(
        {'_id': response['_id']},
        {'$set': {'terms': terms}}
    )
    response['terms'] = terms
    event.addResponse(response)
def load(info):
    # Plugin entry point: wire the 'terms' feature into girder's REST API.
    # Augment the collection creation and edit routes to accept a terms field
    events.bind('rest.post.collection.after', 'terms', afterPostPutCollection)
    events.bind('rest.put.collection/:id.after', 'terms', afterPostPutCollection)
    for handler in [
        Collection.createCollection,
        Collection.updateCollection
    ]:
        handler.description.param('terms', 'The Terms of Use for the collection.', required=False)
    # Expose the terms field on all collections
    ModelImporter.model('collection').exposeFields(level=AccessType.READ, fields={'terms'})
    # Add endpoint for registered users to accept terms
    info['apiRoot'].collection.route('POST', (':id', 'acceptTerms'), acceptCollectionTerms)
    # Expose the terms field on all users
    ModelImporter.model('user').exposeFields(level=AccessType.ADMIN, fields={'terms'})
| adsorensen/girder | plugins/terms/server/__init__.py | Python | apache-2.0 | 3,937 |
import abc
from typing import Dict, List
class State(metaclass=abc.ABCMeta):
    """Base class for inheritable state objects, such as would represent an
    input element's attributes."""

    # abc.abstractproperty is deprecated since Python 3.3; the supported
    # spelling is @property stacked over @abc.abstractmethod.
    @property
    @abc.abstractmethod
    def replacements(self) -> Dict[str, str]:
        """Return a dictionary mapping template replacements."""

    @property
    @abc.abstractmethod
    def ref(self) -> str:
        """Return a string uniquely identifying this element."""

    @property
    @abc.abstractmethod
    def keys(self) -> List[str]:
        """Return a list of attributes in this State class that will inherit
        from a parent object."""

    def inherit(self, other: 'State') -> None:
        """Inherit properties from a parent State instance.

        Attributes listed in ``other.keys`` that are still ``None`` on this
        instance are copied from ``other``; replacement mappings are merged
        in without overwriting entries already present on this instance.
        """
        for key in [k for k in other.keys if getattr(self, k) is None]:
            setattr(self, key, getattr(other, key))
        for src, dest in other.replacements.items():
            if src not in self.replacements:
                self.replacements[src] = dest

    def __str__(self) -> str:
        """Return a dict-style rendering of the inheritable attributes."""
        return str({k: getattr(self, k) for k in self.keys})
| jeff-allen-mongo/mut | mut/state.py | Python | apache-2.0 | 1,173 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
from horizon_bsn.bsndashboard import dashboard
class Networktemplate(horizon.Panel):
    # Horizon panel exposing Big Switch network-template management.
    name = _("Network Templates")
    slug = "networktemplate"


# Make the panel discoverable on the BSN dashboard.
dashboard.Bsndashboard.register(Networktemplate)
| wolverineav/horizon-bsn | horizon_bsn/bsndashboard/networktemplate/panel.py | Python | apache-2.0 | 817 |
"""
Copyright 2020 Google LLC
Copyright 2020 PerfectVIPs Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
"""
from pygen_src.riscv_defines import DEFINE_FP_INSTR
from pygen_src.riscv_instr_pkg import (riscv_instr_name_t, riscv_instr_format_t,
riscv_instr_category_t, riscv_instr_group_t)
# Register the RV64F float/64-bit-integer conversion instructions with the
# generator: each DEFINE_FP_INSTR call synthesizes an instruction class and
# injects it into this module's namespace via g=globals().
DEFINE_FP_INSTR(riscv_instr_name_t.FCVT_L_S, riscv_instr_format_t.I_FORMAT,
                riscv_instr_category_t.ARITHMETIC, riscv_instr_group_t.RV64F, g=globals())
DEFINE_FP_INSTR(riscv_instr_name_t.FCVT_LU_S, riscv_instr_format_t.I_FORMAT,
                riscv_instr_category_t.ARITHMETIC, riscv_instr_group_t.RV64F, g=globals())
DEFINE_FP_INSTR(riscv_instr_name_t.FCVT_S_L, riscv_instr_format_t.I_FORMAT,
                riscv_instr_category_t.ARITHMETIC, riscv_instr_group_t.RV64F, g=globals())
DEFINE_FP_INSTR(riscv_instr_name_t.FCVT_S_LU, riscv_instr_format_t.I_FORMAT,
                riscv_instr_category_t.ARITHMETIC, riscv_instr_group_t.RV64F, g=globals())
| google/riscv-dv | pygen/pygen_src/isa/rv64f_instr.py | Python | apache-2.0 | 1,363 |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1GroupVersionForDiscovery(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    # swagger_types maps attribute name -> attribute type;
    # attribute_map maps attribute name -> JSON key in the API definition.
    swagger_types = {
        'group_version': 'str',
        'version': 'str'
    }

    attribute_map = {
        'group_version': 'groupVersion',
        'version': 'version'
    }

    def __init__(self, group_version=None, version=None):
        """
        V1GroupVersionForDiscovery - a model defined in Swagger
        """
        self._group_version = None
        self._version = None
        self.discriminator = None
        # Assign through the property setters so the not-None checks run.
        self.group_version = group_version
        self.version = version

    @property
    def group_version(self):
        """
        Gets the group_version of this V1GroupVersionForDiscovery.
        groupVersion specifies the API group and version in the form \"group/version\"

        :return: The group_version of this V1GroupVersionForDiscovery.
        :rtype: str
        """
        return self._group_version

    @group_version.setter
    def group_version(self, group_version):
        """
        Sets the group_version of this V1GroupVersionForDiscovery.
        groupVersion specifies the API group and version in the form \"group/version\"

        :param group_version: The group_version of this V1GroupVersionForDiscovery.
        :type: str
        """
        if group_version is None:
            raise ValueError("Invalid value for `group_version`, must not be `None`")
        self._group_version = group_version

    @property
    def version(self):
        """
        Gets the version of this V1GroupVersionForDiscovery.
        version specifies the version in the form of \"version\". This is to save the clients the trouble of splitting the GroupVersion.

        :return: The version of this V1GroupVersionForDiscovery.
        :rtype: str
        """
        return self._version

    @version.setter
    def version(self, version):
        """
        Sets the version of this V1GroupVersionForDiscovery.
        version specifies the version in the form of \"version\". This is to save the clients the trouble of splitting the GroupVersion.

        :param version: The version of this V1GroupVersionForDiscovery.
        :type: str
        """
        if version is None:
            raise ValueError("Invalid value for `version`, must not be `None`")
        self._version = version

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    k: (v.to_dict() if hasattr(v, "to_dict") else v)
                    for k, v in value.items()
                }
            else:
                result[attr] = value
        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        return (isinstance(other, V1GroupVersionForDiscovery) and
                self.__dict__ == other.__dict__)

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
| mbohlool/client-python | kubernetes/client/models/v1_group_version_for_discovery.py | Python | apache-2.0 | 4,487 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from neutron.common import config as neutron_config # noqa
from gbpservice.neutron.services.grouppolicy import config
from gbpservice.neutron.tests.unit import common as cm
from gbpservice.neutron.tests.unit.services.grouppolicy import (
extensions as test_ext)
from gbpservice.neutron.tests.unit.services.grouppolicy import (
test_grouppolicy_plugin as test_plugin)
class ExtensionDriverTestBase(test_plugin.GroupPolicyPluginTestCase):
    # Extension driver(s) and the extension API path wired into the group
    # policy plugin for every test derived from this base.
    _extension_drivers = ['test']
    _extension_path = os.path.dirname(os.path.abspath(test_ext.__file__))

    def setUp(self, policy_drivers=None, core_plugin=None,
              l3_plugin=None, ml2_options=None,
              sc_plugin=None, qos_plugin=None, trunk_plugin=None):
        # Register the test extension driver before the plugin loads.
        # NOTE(review): policy_drivers is accepted but not forwarded to the
        # superclass — looks unused; confirm whether that is intentional.
        config.cfg.CONF.set_override('extension_drivers',
                                     self._extension_drivers,
                                     group='group_policy')
        if self._extension_path:
            config.cfg.CONF.set_override(
                'api_extensions_path', self._extension_path)
        super(ExtensionDriverTestBase, self).setUp(
            core_plugin=core_plugin, l3_plugin=l3_plugin,
            ml2_options=ml2_options, sc_plugin=sc_plugin,
            qos_plugin=qos_plugin, trunk_plugin=trunk_plugin)
class ExtensionDriverTestCase(ExtensionDriverTestBase):
    """Exercises the extension driver API for every GBP resource.

    Each test verifies the same contract for the driver-added
    ``<acronym>_extension`` attribute of one resource: default value on
    create, visibility in show and list, explicit value on create, and
    update round-trips.  The previously copy-pasted per-resource bodies
    are deduplicated into :meth:`_check_extension_attr`.
    """

    def _check_extension_attr(self, type, acronim=None, create_kwargs=None):
        """Run the common create/show/list/update checks for one resource.

        :param type: singular resource name, e.g. 'policy_target'.
        :param acronim: extension-attribute prefix; defaults to the first
            letter of each underscore-separated word of ``type``.
        :param create_kwargs: extra arguments required to create the
            resource (e.g. a classifier id for policy rules).
        """
        acronim = acronim or _acronim(type)
        plural = cm.get_resource_plural(type)
        attr = acronim + '_extension'
        create = getattr(self, 'create_%s' % type)
        create_kwargs = dict(create_kwargs or {})

        # Test create with default value.
        obj = create(**create_kwargs)
        id = obj[type]['id']
        self.assertIsNone(obj[type][attr])
        req = self.new_show_request(plural, id)
        res = self.deserialize(self.fmt, req.get_response(self.ext_api))
        self.assertIsNone(res[type][attr])

        # Test list.
        res = self._list(plural)
        self.assertIsNone(res[plural][0][attr])

        # Test create with explicit value.
        obj = create(**dict(create_kwargs, **{attr: "abc"}))
        id = obj[type]['id']
        self.assertEqual("abc", obj[type][attr])
        req = self.new_show_request(plural, id)
        res = self.deserialize(self.fmt, req.get_response(self.ext_api))
        self.assertEqual("abc", res[type][attr])

        # Test update.
        data = {type: {attr: "def"}}
        req = self.new_update_request(plural, data, id)
        res = self.deserialize(self.fmt, req.get_response(self.ext_api))
        self.assertEqual("def", res[type][attr])
        req = self.new_show_request(plural, id)
        res = self.deserialize(self.fmt, req.get_response(self.ext_api))
        self.assertEqual("def", res[type][attr])

    def test_pt_attr(self):
        self._check_extension_attr('policy_target')

    def test_ptg_attr(self):
        self._check_extension_attr('policy_target_group')

    def test_l2p_attr(self):
        # 'l2p' is not derivable from 'l2_policy' by first letters.
        self._check_extension_attr('l2_policy', acronim='l2p')

    def test_l3p_attr(self):
        self._check_extension_attr('l3_policy', acronim='l3p')

    def test_pc_attr(self):
        self._check_extension_attr('policy_classifier')

    def test_pa_attr(self):
        self._check_extension_attr('policy_action')

    def test_pr_attr(self):
        # Policy rules require an existing classifier.
        classifier = self.create_policy_classifier(
            name="class1", protocol="tcp", direction="out",
            port_range="50:100")
        self._check_extension_attr(
            'policy_rule',
            create_kwargs={
                'policy_classifier_id': classifier['policy_classifier']['id']})

    def test_prs_attr(self):
        self._check_extension_attr('policy_rule_set',
                                   create_kwargs={'policy_rules': []})

    def test_nsp_attr(self):
        self._check_extension_attr('network_service_policy')

    def test_es_attr(self):
        self._test_attr('external_segment')

    def test_ep_attr(self):
        self._test_attr('external_policy')

    def test_np_attr(self):
        self._test_attr('nat_pool')

    def _test_attr(self, type):
        # Kept for backward compatibility; delegates to the generic checker,
        # which performs the identical sequence of assertions.
        self._check_extension_attr(type)
def _acronim(type):
return ''.join([x[0] for x in type.split('_')])
| noironetworks/group-based-policy | gbpservice/neutron/tests/unit/services/grouppolicy/test_extension_driver_api.py | Python | apache-2.0 | 19,823 |
''' get_snmp_name_desc.py

Poll a fixed set of routers over SNMPv2c and print each router's
sysName and sysDescr values.
'''
from snmp_helper import snmp_get_oid, snmp_extract

PORT = 161            # standard SNMP agent UDP port
COMMUNITY = 'galileo'  # SNMPv2c community string

# Routers to poll (name -> management IP) and the OIDs to fetch from each.
rtrs = {'pynet-rtr1': '184.105.247.70', 'pynet-rtr2': '184.105.247.71'}
oids = {'sysName': '1.3.6.1.2.1.1.5.0', 'sysDescr': '1.3.6.1.2.1.1.1.0'}

# Parenthesized single-argument print works identically on Python 2 and 3;
# iterating .items() avoids the redundant per-key dict lookups.
for rtr, ip in rtrs.items():
    print(rtr)
    for oid_name, oid in oids.items():
        # snmp_get_oid returns the raw SNMP response; snmp_extract pulls
        # out the value as a string.
        value = snmp_extract(snmp_get_oid((ip, COMMUNITY, PORT), oid))
        print(" " + oid_name + " = " + value)
| dnsbob/pynet_testz | SNMP/get_snmp_name_desc.py | Python | apache-2.0 | 418 |
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# Compatibility shim: expose the PY2 names (`__builtin__`, `long`) on PY3 so
# the generated code below can reference them unconditionally.
if six.PY3:
    import builtins as __builtin__

    # PY3 has no separate `long` type; alias it to `int`.
    long = int
elif six.PY2:
    import __builtin__
class config(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/bgp/peer-groups/peer-group/afi-safis/afi-safi/l2vpn-evpn/prefix-limit/config. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: Configuration parameters relating to the prefix
    limit for the AFI-SAFI
    """

    # NOTE: generated code -- do not edit by hand; regenerate from the YANG
    # schema if the model changes.
    __slots__ = (
        "_path_helper",
        "_extmethods",
        "__max_prefixes",
        "__prevent_teardown",
        "__shutdown_threshold_pct",
        "__restart_timer",
    )
    _yang_name = "config"
    _pybind_generated_by = "container"

    def __init__(self, *args, **kwargs):
        self._path_helper = False
        self._extmethods = False
        # Leaf: max-prefixes (uint32), no default value.
        self.__max_prefixes = YANGDynClass(
            base=RestrictedClassType(
                base_type=long,
                restriction_dict={"range": ["0..4294967295"]},
                int_size=32,
            ),
            is_leaf=True,
            yang_name="max-prefixes",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="uint32",
            is_config=True,
        )
        # Leaf: prevent-teardown (boolean), defaults to false.
        self.__prevent_teardown = YANGDynClass(
            base=YANGBool,
            default=YANGBool("false"),
            is_leaf=True,
            yang_name="prevent-teardown",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=True,
        )
        # Leaf: shutdown-threshold-pct (percentage, restricted to 0..100).
        self.__shutdown_threshold_pct = YANGDynClass(
            base=RestrictedClassType(
                base_type=RestrictedClassType(
                    base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
                ),
                restriction_dict={"range": ["0..100"]},
            ),
            is_leaf=True,
            yang_name="shutdown-threshold-pct",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="oc-types:percentage",
            is_config=True,
        )
        # Leaf: restart-timer (decimal64 with 2 fraction digits).
        self.__restart_timer = YANGDynClass(
            base=RestrictedPrecisionDecimalType(precision=2),
            is_leaf=True,
            yang_name="restart-timer",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="decimal64",
            is_config=True,
        )
        load = kwargs.pop("load", None)
        # Optional copy-constructor: a single positional argument must expose
        # all of this container's elements; changed values are copied across.
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # Absolute YANG path of this container; delegates to the parent when
        # attached, otherwise returns the full static path.
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "protocols",
                "protocol",
                "bgp",
                "peer-groups",
                "peer-group",
                "afi-safis",
                "afi-safi",
                "l2vpn-evpn",
                "prefix-limit",
                "config",
            ]

    def _get_max_prefixes(self):
        """
        Getter method for max_prefixes, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/l2vpn_evpn/prefix_limit/config/max_prefixes (uint32)

        YANG Description: Maximum number of prefixes that will be accepted
        from the neighbour
        """
        return self.__max_prefixes

    def _set_max_prefixes(self, v, load=False):
        """
        Setter method for max_prefixes, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/l2vpn_evpn/prefix_limit/config/max_prefixes (uint32)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_max_prefixes is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_max_prefixes() directly.

        YANG Description: Maximum number of prefixes that will be accepted
        from the neighbour
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=long,
                    restriction_dict={"range": ["0..4294967295"]},
                    int_size=32,
                ),
                is_leaf=True,
                yang_name="max-prefixes",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="uint32",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """max_prefixes must be of a type compatible with uint32""",
                    "defined-type": "uint32",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-prefixes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=True)""",
                }
            )

        self.__max_prefixes = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_max_prefixes(self):
        self.__max_prefixes = YANGDynClass(
            base=RestrictedClassType(
                base_type=long,
                restriction_dict={"range": ["0..4294967295"]},
                int_size=32,
            ),
            is_leaf=True,
            yang_name="max-prefixes",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="uint32",
            is_config=True,
        )

    def _get_prevent_teardown(self):
        """
        Getter method for prevent_teardown, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/l2vpn_evpn/prefix_limit/config/prevent_teardown (boolean)

        YANG Description: Do not tear down the BGP session when the maximum
        prefix limit is exceeded, but rather only log a
        warning. The default of this leaf is false, such
        that when it is not specified, the session is torn
        down.
        """
        return self.__prevent_teardown

    def _set_prevent_teardown(self, v, load=False):
        """
        Setter method for prevent_teardown, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/l2vpn_evpn/prefix_limit/config/prevent_teardown (boolean)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_prevent_teardown is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_prevent_teardown() directly.

        YANG Description: Do not tear down the BGP session when the maximum
        prefix limit is exceeded, but rather only log a
        warning. The default of this leaf is false, such
        that when it is not specified, the session is torn
        down.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=YANGBool,
                default=YANGBool("false"),
                is_leaf=True,
                yang_name="prevent-teardown",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="boolean",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """prevent_teardown must be of a type compatible with boolean""",
                    "defined-type": "boolean",
                    "generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="prevent-teardown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=True)""",
                }
            )

        self.__prevent_teardown = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_prevent_teardown(self):
        self.__prevent_teardown = YANGDynClass(
            base=YANGBool,
            default=YANGBool("false"),
            is_leaf=True,
            yang_name="prevent-teardown",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=True,
        )

    def _get_shutdown_threshold_pct(self):
        """
        Getter method for shutdown_threshold_pct, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/l2vpn_evpn/prefix_limit/config/shutdown_threshold_pct (oc-types:percentage)

        YANG Description: Threshold on number of prefixes that can be received
        from a neighbour before generation of warning messages
        or log entries. Expressed as a percentage of
        max-prefixes
        """
        return self.__shutdown_threshold_pct

    def _set_shutdown_threshold_pct(self, v, load=False):
        """
        Setter method for shutdown_threshold_pct, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/l2vpn_evpn/prefix_limit/config/shutdown_threshold_pct (oc-types:percentage)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_shutdown_threshold_pct is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_shutdown_threshold_pct() directly.

        YANG Description: Threshold on number of prefixes that can be received
        from a neighbour before generation of warning messages
        or log entries. Expressed as a percentage of
        max-prefixes
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=RestrictedClassType(
                        base_type=int,
                        restriction_dict={"range": ["0..255"]},
                        int_size=8,
                    ),
                    restriction_dict={"range": ["0..100"]},
                ),
                is_leaf=True,
                yang_name="shutdown-threshold-pct",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="oc-types:percentage",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """shutdown_threshold_pct must be of a type compatible with oc-types:percentage""",
                    "defined-type": "oc-types:percentage",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..100']}), is_leaf=True, yang_name="shutdown-threshold-pct", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-types:percentage', is_config=True)""",
                }
            )

        self.__shutdown_threshold_pct = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_shutdown_threshold_pct(self):
        self.__shutdown_threshold_pct = YANGDynClass(
            base=RestrictedClassType(
                base_type=RestrictedClassType(
                    base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
                ),
                restriction_dict={"range": ["0..100"]},
            ),
            is_leaf=True,
            yang_name="shutdown-threshold-pct",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="oc-types:percentage",
            is_config=True,
        )

    def _get_restart_timer(self):
        """
        Getter method for restart_timer, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/l2vpn_evpn/prefix_limit/config/restart_timer (decimal64)

        YANG Description: Time interval in seconds after which the BGP session
        is re-established after being torn down due to exceeding
        the max-prefix limit.
        """
        return self.__restart_timer

    def _set_restart_timer(self, v, load=False):
        """
        Setter method for restart_timer, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/l2vpn_evpn/prefix_limit/config/restart_timer (decimal64)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_restart_timer is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_restart_timer() directly.

        YANG Description: Time interval in seconds after which the BGP session
        is re-established after being torn down due to exceeding
        the max-prefix limit.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=RestrictedPrecisionDecimalType(precision=2),
                is_leaf=True,
                yang_name="restart-timer",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="decimal64",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """restart_timer must be of a type compatible with decimal64""",
                    "defined-type": "decimal64",
                    "generated-type": """YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="restart-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='decimal64', is_config=True)""",
                }
            )

        self.__restart_timer = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_restart_timer(self):
        self.__restart_timer = YANGDynClass(
            base=RestrictedPrecisionDecimalType(precision=2),
            is_leaf=True,
            yang_name="restart-timer",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="decimal64",
            is_config=True,
        )

    # Public properties exposing the leaves via their getter/setter pairs.
    max_prefixes = __builtin__.property(_get_max_prefixes, _set_max_prefixes)
    prevent_teardown = __builtin__.property(
        _get_prevent_teardown, _set_prevent_teardown
    )
    shutdown_threshold_pct = __builtin__.property(
        _get_shutdown_threshold_pct, _set_shutdown_threshold_pct
    )
    restart_timer = __builtin__.property(_get_restart_timer, _set_restart_timer)

    # Ordered registry of this container's elements, used by PybindBase.
    _pyangbind_elements = OrderedDict(
        [
            ("max_prefixes", max_prefixes),
            ("prevent_teardown", prevent_teardown),
            ("shutdown_threshold_pct", shutdown_threshold_pct),
            ("restart_timer", restart_timer),
        ]
    )
class config(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/bgp/peer-groups/peer-group/afi-safis/afi-safi/l2vpn-evpn/prefix-limit/config. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: Configuration parameters relating to the prefix
    limit for the AFI-SAFI
    """

    # NOTE: generated code -- do not edit by hand; regenerate from the YANG
    # schema if the model changes.  This second definition (from the -l2
    # augmentation module) rebinds the module-level name `config`.
    __slots__ = (
        "_path_helper",
        "_extmethods",
        "__max_prefixes",
        "__prevent_teardown",
        "__shutdown_threshold_pct",
        "__restart_timer",
    )
    _yang_name = "config"
    _pybind_generated_by = "container"

    def __init__(self, *args, **kwargs):
        self._path_helper = False
        self._extmethods = False
        # Leaf: max-prefixes (uint32), no default value.
        self.__max_prefixes = YANGDynClass(
            base=RestrictedClassType(
                base_type=long,
                restriction_dict={"range": ["0..4294967295"]},
                int_size=32,
            ),
            is_leaf=True,
            yang_name="max-prefixes",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="uint32",
            is_config=True,
        )
        # Leaf: prevent-teardown (boolean), defaults to false.
        self.__prevent_teardown = YANGDynClass(
            base=YANGBool,
            default=YANGBool("false"),
            is_leaf=True,
            yang_name="prevent-teardown",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=True,
        )
        # Leaf: shutdown-threshold-pct (percentage, restricted to 0..100).
        self.__shutdown_threshold_pct = YANGDynClass(
            base=RestrictedClassType(
                base_type=RestrictedClassType(
                    base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
                ),
                restriction_dict={"range": ["0..100"]},
            ),
            is_leaf=True,
            yang_name="shutdown-threshold-pct",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="oc-types:percentage",
            is_config=True,
        )
        # Leaf: restart-timer (decimal64 with 2 fraction digits).
        self.__restart_timer = YANGDynClass(
            base=RestrictedPrecisionDecimalType(precision=2),
            is_leaf=True,
            yang_name="restart-timer",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="decimal64",
            is_config=True,
        )
        load = kwargs.pop("load", None)
        # Optional copy-constructor: a single positional argument must expose
        # all of this container's elements; changed values are copied across.
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # Absolute YANG path of this container; delegates to the parent when
        # attached, otherwise returns the full static path.
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "protocols",
                "protocol",
                "bgp",
                "peer-groups",
                "peer-group",
                "afi-safis",
                "afi-safi",
                "l2vpn-evpn",
                "prefix-limit",
                "config",
            ]

    def _get_max_prefixes(self):
        """
        Getter method for max_prefixes, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/l2vpn_evpn/prefix_limit/config/max_prefixes (uint32)

        YANG Description: Maximum number of prefixes that will be accepted
        from the neighbour
        """
        return self.__max_prefixes

    def _set_max_prefixes(self, v, load=False):
        """
        Setter method for max_prefixes, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/l2vpn_evpn/prefix_limit/config/max_prefixes (uint32)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_max_prefixes is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_max_prefixes() directly.

        YANG Description: Maximum number of prefixes that will be accepted
        from the neighbour
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=long,
                    restriction_dict={"range": ["0..4294967295"]},
                    int_size=32,
                ),
                is_leaf=True,
                yang_name="max-prefixes",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="uint32",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """max_prefixes must be of a type compatible with uint32""",
                    "defined-type": "uint32",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="max-prefixes", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=True)""",
                }
            )

        self.__max_prefixes = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_max_prefixes(self):
        self.__max_prefixes = YANGDynClass(
            base=RestrictedClassType(
                base_type=long,
                restriction_dict={"range": ["0..4294967295"]},
                int_size=32,
            ),
            is_leaf=True,
            yang_name="max-prefixes",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="uint32",
            is_config=True,
        )

    def _get_prevent_teardown(self):
        """
        Getter method for prevent_teardown, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/l2vpn_evpn/prefix_limit/config/prevent_teardown (boolean)

        YANG Description: Do not tear down the BGP session when the maximum
        prefix limit is exceeded, but rather only log a
        warning. The default of this leaf is false, such
        that when it is not specified, the session is torn
        down.
        """
        return self.__prevent_teardown

    def _set_prevent_teardown(self, v, load=False):
        """
        Setter method for prevent_teardown, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/l2vpn_evpn/prefix_limit/config/prevent_teardown (boolean)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_prevent_teardown is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_prevent_teardown() directly.

        YANG Description: Do not tear down the BGP session when the maximum
        prefix limit is exceeded, but rather only log a
        warning. The default of this leaf is false, such
        that when it is not specified, the session is torn
        down.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=YANGBool,
                default=YANGBool("false"),
                is_leaf=True,
                yang_name="prevent-teardown",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="boolean",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """prevent_teardown must be of a type compatible with boolean""",
                    "defined-type": "boolean",
                    "generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="prevent-teardown", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=True)""",
                }
            )

        self.__prevent_teardown = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_prevent_teardown(self):
        self.__prevent_teardown = YANGDynClass(
            base=YANGBool,
            default=YANGBool("false"),
            is_leaf=True,
            yang_name="prevent-teardown",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=True,
        )

    def _get_shutdown_threshold_pct(self):
        """
        Getter method for shutdown_threshold_pct, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/l2vpn_evpn/prefix_limit/config/shutdown_threshold_pct (oc-types:percentage)

        YANG Description: Threshold on number of prefixes that can be received
        from a neighbour before generation of warning messages
        or log entries. Expressed as a percentage of
        max-prefixes
        """
        return self.__shutdown_threshold_pct

    def _set_shutdown_threshold_pct(self, v, load=False):
        """
        Setter method for shutdown_threshold_pct, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/l2vpn_evpn/prefix_limit/config/shutdown_threshold_pct (oc-types:percentage)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_shutdown_threshold_pct is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_shutdown_threshold_pct() directly.

        YANG Description: Threshold on number of prefixes that can be received
        from a neighbour before generation of warning messages
        or log entries. Expressed as a percentage of
        max-prefixes
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=RestrictedClassType(
                        base_type=int,
                        restriction_dict={"range": ["0..255"]},
                        int_size=8,
                    ),
                    restriction_dict={"range": ["0..100"]},
                ),
                is_leaf=True,
                yang_name="shutdown-threshold-pct",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="oc-types:percentage",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """shutdown_threshold_pct must be of a type compatible with oc-types:percentage""",
                    "defined-type": "oc-types:percentage",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['0..100']}), is_leaf=True, yang_name="shutdown-threshold-pct", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-types:percentage', is_config=True)""",
                }
            )

        self.__shutdown_threshold_pct = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_shutdown_threshold_pct(self):
        self.__shutdown_threshold_pct = YANGDynClass(
            base=RestrictedClassType(
                base_type=RestrictedClassType(
                    base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
                ),
                restriction_dict={"range": ["0..100"]},
            ),
            is_leaf=True,
            yang_name="shutdown-threshold-pct",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="oc-types:percentage",
            is_config=True,
        )

    def _get_restart_timer(self):
        """
        Getter method for restart_timer, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/l2vpn_evpn/prefix_limit/config/restart_timer (decimal64)

        YANG Description: Time interval in seconds after which the BGP session
        is re-established after being torn down due to exceeding
        the max-prefix limit.
        """
        return self.__restart_timer

    def _set_restart_timer(self, v, load=False):
        """
        Setter method for restart_timer, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/l2vpn_evpn/prefix_limit/config/restart_timer (decimal64)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_restart_timer is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_restart_timer() directly.

        YANG Description: Time interval in seconds after which the BGP session
        is re-established after being torn down due to exceeding
        the max-prefix limit.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=RestrictedPrecisionDecimalType(precision=2),
                is_leaf=True,
                yang_name="restart-timer",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="decimal64",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """restart_timer must be of a type compatible with decimal64""",
                    "defined-type": "decimal64",
                    "generated-type": """YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="restart-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='decimal64', is_config=True)""",
                }
            )

        self.__restart_timer = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_restart_timer(self):
        self.__restart_timer = YANGDynClass(
            base=RestrictedPrecisionDecimalType(precision=2),
            is_leaf=True,
            yang_name="restart-timer",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="decimal64",
            is_config=True,
        )

    # Public properties exposing the leaves via their getter/setter pairs.
    max_prefixes = __builtin__.property(_get_max_prefixes, _set_max_prefixes)
    prevent_teardown = __builtin__.property(
        _get_prevent_teardown, _set_prevent_teardown
    )
    shutdown_threshold_pct = __builtin__.property(
        _get_shutdown_threshold_pct, _set_shutdown_threshold_pct
    )
    restart_timer = __builtin__.property(_get_restart_timer, _set_restart_timer)

    # Ordered registry of this container's elements, used by PybindBase.
    _pyangbind_elements = OrderedDict(
        [
            ("max_prefixes", max_prefixes),
            ("prevent_teardown", prevent_teardown),
            ("shutdown_threshold_pct", shutdown_threshold_pct),
            ("restart_timer", restart_timer),
        ]
    )
| napalm-automation/napalm-yang | napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/bgp/peer_groups/peer_group/afi_safis/afi_safi/l2vpn_evpn/prefix_limit/config/__init__.py | Python | apache-2.0 | 38,585 |
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# Compatibility shim: expose the PY2 names (`__builtin__`, `long`) on PY3 so
# the generated code below can reference them unconditionally.
if six.PY3:
    import builtins as __builtin__

    # PY3 has no separate `long` type; alias it to `int`.
    long = int
elif six.PY2:
    import __builtin__
class state(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/is-reachability/neighbors/neighbors/delay-metric/state. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.
    YANG Description: State parameters of delay-metric.
    """
    # NOTE(review): auto-generated pyangbind container -- regenerate from the
    # YANG model rather than editing by hand.
    # __slots__ limits instances to the path-helper plumbing plus the two
    # leaves of this container ("metric" and "flags").
    __slots__ = ("_path_helper", "_extmethods", "__metric", "__flags")
    _yang_name = "state"
    _pybind_generated_by = "container"
    def __init__(self, *args, **kwargs):
        """Build default leaf instances; optionally copy changed values from a compatible object."""
        self._path_helper = False
        self._extmethods = False
        # Default "metric" leaf: oc-isis-types:narrow-metric, a uint8 further
        # restricted to the range 1..63.
        self.__metric = YANGDynClass(
            base=RestrictedClassType(
                base_type=RestrictedClassType(
                    base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
                ),
                restriction_dict={"range": ["1..63"]},
            ),
            is_leaf=True,
            yang_name="metric",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="oc-isis-types:narrow-metric",
            is_config=False,
        )
        # Default "flags" leaf-list: enumeration limited to INTERNAL / UNSUPPORTED.
        self.__flags = YANGDynClass(
            base=TypedListType(
                allowed_type=RestrictedClassType(
                    base_type=six.text_type,
                    restriction_type="dict_key",
                    restriction_arg={"INTERNAL": {}, "UNSUPPORTED": {}},
                )
            ),
            is_leaf=False,
            yang_name="flags",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="isis-metric-flags",
            is_config=False,
        )
        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            # Copy construction: the source object must expose every element...
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            # ...and only elements that were actually changed are copied across.
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)
    def _path(self):
        # Prefer the dynamic path from the parent container; fall back to the
        # static schema path when this container is unparented.
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "protocols",
                "protocol",
                "isis",
                "levels",
                "level",
                "link-state-database",
                "lsp",
                "tlvs",
                "tlv",
                "is-reachability",
                "neighbors",
                "neighbors",
                "delay-metric",
                "state",
            ]
    def _get_metric(self):
        """
        Getter method for metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/is_reachability/neighbors/neighbors/delay_metric/state/metric (oc-isis-types:narrow-metric)
        YANG Description: ISIS delay metric value. This metric measures the transit delay of
        the associated circuit. It is an optional metric, which if assigned
        to a circuit shall have a positive integral value. Higher values
        indicate a longer transit delay.
        """
        return self.__metric
    def _set_metric(self, v, load=False):
        """
        Setter method for metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/is_reachability/neighbors/neighbors/delay_metric/state/metric (oc-isis-types:narrow-metric)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_metric is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_metric() directly.
        YANG Description: ISIS delay metric value. This metric measures the transit delay of
        the associated circuit. It is an optional metric, which if assigned
        to a circuit shall have a positive integral value. Higher values
        indicate a longer transit delay.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        # Re-wrap the supplied value in the leaf's YANG type; a TypeError or
        # ValueError here means the value violates the type's restrictions.
        try:
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=RestrictedClassType(
                        base_type=int,
                        restriction_dict={"range": ["0..255"]},
                        int_size=8,
                    ),
                    restriction_dict={"range": ["1..63"]},
                ),
                is_leaf=True,
                yang_name="metric",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="oc-isis-types:narrow-metric",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """metric must be of a type compatible with oc-isis-types:narrow-metric""",
                    "defined-type": "oc-isis-types:narrow-metric",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['1..63']}), is_leaf=True, yang_name="metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-isis-types:narrow-metric', is_config=False)""",
                }
            )
        self.__metric = t
        if hasattr(self, "_set"):
            self._set()
    def _unset_metric(self):
        # Restore the leaf to a fresh default instance.
        self.__metric = YANGDynClass(
            base=RestrictedClassType(
                base_type=RestrictedClassType(
                    base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
                ),
                restriction_dict={"range": ["1..63"]},
            ),
            is_leaf=True,
            yang_name="metric",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="oc-isis-types:narrow-metric",
            is_config=False,
        )
    def _get_flags(self):
        """
        Getter method for flags, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/is_reachability/neighbors/neighbors/delay_metric/state/flags (isis-metric-flags)
        YANG Description: ISIS Delay Metric Flags.
        """
        return self.__flags
    def _set_flags(self, v, load=False):
        """
        Setter method for flags, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/is_reachability/neighbors/neighbors/delay_metric/state/flags (isis-metric-flags)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_flags is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_flags() directly.
        YANG Description: ISIS Delay Metric Flags.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        # Re-wrap the supplied value in the leaf-list's YANG type; failure
        # means the value is not a valid list of the allowed flag strings.
        try:
            t = YANGDynClass(
                v,
                base=TypedListType(
                    allowed_type=RestrictedClassType(
                        base_type=six.text_type,
                        restriction_type="dict_key",
                        restriction_arg={"INTERNAL": {}, "UNSUPPORTED": {}},
                    )
                ),
                is_leaf=False,
                yang_name="flags",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="isis-metric-flags",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """flags must be of a type compatible with isis-metric-flags""",
                    "defined-type": "openconfig-network-instance:isis-metric-flags",
                    "generated-type": """YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'INTERNAL': {}, 'UNSUPPORTED': {}},)), is_leaf=False, yang_name="flags", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='isis-metric-flags', is_config=False)""",
                }
            )
        self.__flags = t
        if hasattr(self, "_set"):
            self._set()
    def _unset_flags(self):
        # Restore the leaf-list to a fresh default instance.
        self.__flags = YANGDynClass(
            base=TypedListType(
                allowed_type=RestrictedClassType(
                    base_type=six.text_type,
                    restriction_type="dict_key",
                    restriction_arg={"INTERNAL": {}, "UNSUPPORTED": {}},
                )
            ),
            is_leaf=False,
            yang_name="flags",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="isis-metric-flags",
            is_config=False,
        )
    # This container is config false, so the public properties expose only
    # the getters (no setters).
    metric = __builtin__.property(_get_metric)
    flags = __builtin__.property(_get_flags)
    _pyangbind_elements = OrderedDict([("metric", metric), ("flags", flags)])
class state(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/is-reachability/neighbors/neighbors/delay-metric/state. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.
    YANG Description: State parameters of delay-metric.
    """
    # NOTE(review): auto-generated pyangbind container (duplicate binding
    # emitted for the openconfig-network-instance-l2 module) -- regenerate
    # from the YANG model rather than editing by hand.
    __slots__ = ("_path_helper", "_extmethods", "__metric", "__flags")
    _yang_name = "state"
    _pybind_generated_by = "container"
    def __init__(self, *args, **kwargs):
        """Build default leaf instances; optionally copy changed values from a compatible object."""
        self._path_helper = False
        self._extmethods = False
        # Default "metric" leaf: oc-isis-types:narrow-metric, a uint8 further
        # restricted to the range 1..63.
        self.__metric = YANGDynClass(
            base=RestrictedClassType(
                base_type=RestrictedClassType(
                    base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
                ),
                restriction_dict={"range": ["1..63"]},
            ),
            is_leaf=True,
            yang_name="metric",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="oc-isis-types:narrow-metric",
            is_config=False,
        )
        # Default "flags" leaf-list: enumeration limited to INTERNAL / UNSUPPORTED.
        self.__flags = YANGDynClass(
            base=TypedListType(
                allowed_type=RestrictedClassType(
                    base_type=six.text_type,
                    restriction_type="dict_key",
                    restriction_arg={"INTERNAL": {}, "UNSUPPORTED": {}},
                )
            ),
            is_leaf=False,
            yang_name="flags",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="isis-metric-flags",
            is_config=False,
        )
        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            # Copy construction: the source object must expose every element...
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            # ...and only elements that were actually changed are copied across.
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)
    def _path(self):
        # Prefer the dynamic path from the parent container; fall back to the
        # static schema path when this container is unparented.
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "protocols",
                "protocol",
                "isis",
                "levels",
                "level",
                "link-state-database",
                "lsp",
                "tlvs",
                "tlv",
                "is-reachability",
                "neighbors",
                "neighbors",
                "delay-metric",
                "state",
            ]
    def _get_metric(self):
        """
        Getter method for metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/is_reachability/neighbors/neighbors/delay_metric/state/metric (oc-isis-types:narrow-metric)
        YANG Description: ISIS delay metric value. This metric measures the transit delay of
        the associated circuit. It is an optional metric, which if assigned
        to a circuit shall have a positive integral value. Higher values
        indicate a longer transit delay.
        """
        return self.__metric
    def _set_metric(self, v, load=False):
        """
        Setter method for metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/is_reachability/neighbors/neighbors/delay_metric/state/metric (oc-isis-types:narrow-metric)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_metric is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_metric() directly.
        YANG Description: ISIS delay metric value. This metric measures the transit delay of
        the associated circuit. It is an optional metric, which if assigned
        to a circuit shall have a positive integral value. Higher values
        indicate a longer transit delay.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        # Re-wrap the supplied value in the leaf's YANG type; a TypeError or
        # ValueError here means the value violates the type's restrictions.
        try:
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=RestrictedClassType(
                        base_type=int,
                        restriction_dict={"range": ["0..255"]},
                        int_size=8,
                    ),
                    restriction_dict={"range": ["1..63"]},
                ),
                is_leaf=True,
                yang_name="metric",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="oc-isis-types:narrow-metric",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """metric must be of a type compatible with oc-isis-types:narrow-metric""",
                    "defined-type": "oc-isis-types:narrow-metric",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': ['1..63']}), is_leaf=True, yang_name="metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-isis-types:narrow-metric', is_config=False)""",
                }
            )
        self.__metric = t
        if hasattr(self, "_set"):
            self._set()
    def _unset_metric(self):
        # Restore the leaf to a fresh default instance.
        self.__metric = YANGDynClass(
            base=RestrictedClassType(
                base_type=RestrictedClassType(
                    base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
                ),
                restriction_dict={"range": ["1..63"]},
            ),
            is_leaf=True,
            yang_name="metric",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="oc-isis-types:narrow-metric",
            is_config=False,
        )
    def _get_flags(self):
        """
        Getter method for flags, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/is_reachability/neighbors/neighbors/delay_metric/state/flags (isis-metric-flags)
        YANG Description: ISIS Delay Metric Flags.
        """
        return self.__flags
    def _set_flags(self, v, load=False):
        """
        Setter method for flags, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/is_reachability/neighbors/neighbors/delay_metric/state/flags (isis-metric-flags)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_flags is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_flags() directly.
        YANG Description: ISIS Delay Metric Flags.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        # Re-wrap the supplied value in the leaf-list's YANG type; failure
        # means the value is not a valid list of the allowed flag strings.
        try:
            t = YANGDynClass(
                v,
                base=TypedListType(
                    allowed_type=RestrictedClassType(
                        base_type=six.text_type,
                        restriction_type="dict_key",
                        restriction_arg={"INTERNAL": {}, "UNSUPPORTED": {}},
                    )
                ),
                is_leaf=False,
                yang_name="flags",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="isis-metric-flags",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """flags must be of a type compatible with isis-metric-flags""",
                    "defined-type": "openconfig-network-instance:isis-metric-flags",
                    "generated-type": """YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'INTERNAL': {}, 'UNSUPPORTED': {}},)), is_leaf=False, yang_name="flags", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='isis-metric-flags', is_config=False)""",
                }
            )
        self.__flags = t
        if hasattr(self, "_set"):
            self._set()
    def _unset_flags(self):
        # Restore the leaf-list to a fresh default instance.
        self.__flags = YANGDynClass(
            base=TypedListType(
                allowed_type=RestrictedClassType(
                    base_type=six.text_type,
                    restriction_type="dict_key",
                    restriction_arg={"INTERNAL": {}, "UNSUPPORTED": {}},
                )
            ),
            is_leaf=False,
            yang_name="flags",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="isis-metric-flags",
            is_config=False,
        )
    # This container is config false, so the public properties expose only
    # the getters (no setters).
    metric = __builtin__.property(_get_metric)
    flags = __builtin__.property(_get_flags)
    _pyangbind_elements = OrderedDict([("metric", metric), ("flags", flags)])
| napalm-automation/napalm-yang | napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/is_reachability/neighbors/neighbors_/delay_metric/state/__init__.py | Python | apache-2.0 | 23,483 |
from .helper import *
import open_cp.gui.predictors.sepp2 as sepp2
import open_cp.gui.predictors.predictor
#import datetime
@pytest.fixture
def analysis_model2(analysis_model):
    """Fixture: the shared analysis model with a fixed one-hour training window applied."""
    window_start = datetime.datetime(2017, 5, 21, 12, 30)
    window_end = datetime.datetime(2017, 5, 21, 13, 30)
    analysis_model.time_range = (window_start, window_end, None, None)
    return analysis_model
@mock.patch("open_cp.seppexp")
def test_SEPP(seppmock, model, project_task, analysis_model2, grid_task):
    """The provider reports the expected name and empty settings, and survives the standard workflow."""
    provider = sepp2.SEPP(model)
    assert provider.settings_string is None
    assert provider.name == "Grid based SEPP"
    standard_calls(provider, project_task, analysis_model2, grid_task)
def test_serialise(model, project_task, analysis_model2, grid_task):
    """The provider's settings round-trip through the serialisation helper."""
    provider = sepp2.SEPP(model)
    serialise(provider)
def test_no_data(model, project_task, analysis_model, grid_task):
    """A training window containing no events must raise PredictionError."""
    empty_window = (datetime.datetime(2017, 5, 20, 12, 30),
                    datetime.datetime(2017, 5, 20, 13, 30), None, None)
    analysis_model.time_range = empty_window
    provider = sepp2.SEPP(model)
    with pytest.raises(open_cp.gui.predictors.predictor.PredictionError):
        standard_calls(provider, project_task, analysis_model, grid_task)
@mock.patch("open_cp.seppexp")
def test_training_usage(seppmock, model, project_task, analysis_model2, grid_task):
    # Verifies the provider's wiring into open_cp.seppexp: trainer
    # construction, the training data handed over, the train() arguments,
    # and the prediction call made for a given date.
    provider = sepp2.SEPP(model)
    subtask = standard_calls(provider, project_task, analysis_model2, grid_task)
    # The trainer must be built on the grid produced by the grid task.
    seppmock.SEPPTrainer.assert_called_with(grid=grid_task.return_value)
    trainer_mock = seppmock.SEPPTrainer.return_value
    # Training data: the two events that fall inside the fixture's window.
    np.testing.assert_allclose(trainer_mock.data.xcoords, [0, 10])
    np.testing.assert_allclose(trainer_mock.data.ycoords, [10, 20])
    # Compare timestamps via millisecond differences so allclose applies.
    time_diffs = ( (trainer_mock.data.timestamps -
        [np.datetime64("2017-05-21T12:30"), np.datetime64("2017-05-21T13:00")])
        / np.timedelta64(1,"ms") )
    np.testing.assert_allclose(time_diffs, [0,0])
    trainer_mock.train.assert_called_with(iterations=40, use_corrected=True)
    # The trained predictor receives the full data set before predicting.
    pred = trainer_mock.train.return_value
    np.testing.assert_allclose(pred.data.xcoords, [0, 10, 20])
    np.testing.assert_allclose(pred.data.ycoords, [10, 20, 0])
    train_date = datetime.datetime(2017,5,22,5,35)
    prediction = subtask(train_date)
    assert prediction == pred.predict.return_value
    pred.predict.assert_called_with(train_date)
| QuantCrimAtLeeds/PredictCode | tests/gui/predictors/sepp2_test.py | Python | artistic-2.0 | 2,313 |
# TempConv.py
# Celsius to Fahrenheit
def Fahreinheit(temp):
    """Convert a temperature in degrees Celsius to degrees Fahrenheit.

    ``temp`` may be anything accepted by ``float`` (int, float, or a
    numeric string); the result is always a float.
    """
    celsius = float(temp)
    return celsius * 9 / 5 + 32
# Fahrenheit to Celsius
def Celcius(temp):
    """Convert a temperature in degrees Fahrenheit to degrees Celsius.

    ``temp`` may be anything accepted by ``float`` (int, float, or a
    numeric string); the result is always a float.
    """
    fahrenheit = float(temp)
    return (fahrenheit - 32) * 5 / 9
| megatharun/basic-python-for-researcher | TempConv.py | Python | artistic-2.0 | 243 |
import os
import logging
import claripy
from ..errors import (
AngrSimOSError,
TracerEnvironmentError,
SimSegfaultException,
SimUnsupportedError,
SimZeroDivisionException,
)
from .. import sim_options as o
from ..tablespecs import StringTableSpec
from ..procedures import SIM_LIBRARIES as L
from .simos import SimOS
_l = logging.getLogger('angr.simos.windows')
class SimWindows(SimOS):
"""
Environemnt for the Windows Win32 subsystem. Does not support syscalls currently.
"""
def __init__(self, project):
super(SimWindows, self).__init__(project, name='Win32')
self._exception_handler = None
self.fmode_ptr = None
self.commode_ptr = None
self.acmdln_ptr = None
self.wcmdln_ptr = None
    def configure_project(self):
        """Install mandatory hooks and resolve the symbol addresses the Windows model relies on."""
        super(SimWindows, self).configure_project()
        # here are some symbols which we MUST hook, regardless of what the user wants
        self._weak_hook_symbol('GetProcAddress', L['kernel32.dll'].get('GetProcAddress', self.arch))
        self._weak_hook_symbol('LoadLibraryA', L['kernel32.dll'].get('LoadLibraryA', self.arch))
        self._weak_hook_symbol('LoadLibraryExW', L['kernel32.dll'].get('LoadLibraryExW', self.arch))
        # Exception delivery (see handle_exception) funnels through this address.
        self._exception_handler = self._find_or_make('KiUserExceptionDispatcher')
        self.project.hook(self._exception_handler,
                          L['ntdll.dll'].get('KiUserExceptionDispatcher', self.arch),
                          replace=True)
        # CRT global variables; fabricated extern addresses if the binary lacks them.
        self.fmode_ptr = self._find_or_make('_fmode')
        self.commode_ptr = self._find_or_make('_commode')
        self.acmdln_ptr = self._find_or_make('_acmdln')
        self.wcmdln_ptr = self._find_or_make('_wcmdln')
def _find_or_make(self, name):
sym = self.project.loader.find_symbol(name)
if sym is None:
return self.project.loader.extern_object.get_pseudo_addr(name)
else:
return sym.rebased_addr
    # pylint: disable=arguments-differ
    def state_entry(self, args=None, env=None, argc=None, **kwargs):
        """Build an entry state: argv/env table, Windows command-line globals, argc on the stack."""
        state = super(SimWindows, self).state_entry(**kwargs)
        if args is None: args = []
        if env is None: env = {}
        # Prepare argc
        if argc is None:
            argc = claripy.BVV(len(args), state.arch.bits)
        elif type(argc) in (int, long): # pylint: disable=unidiomatic-typecheck
            argc = claripy.BVV(argc, state.arch.bits)
        # Make string table for args and env
        table = StringTableSpec()
        table.append_args(args)
        table.append_env(env)
        # calculate full command line, since this is windows and that's how everything works
        cmdline = claripy.BVV(0, 0)
        for arg in args:
            if cmdline.length != 0:
                cmdline = cmdline.concat(claripy.BVV(' '))
            if type(arg) is str:
                # Each argument is wrapped in quotes, so embedded quotes/nulls
                # cannot be represented.
                if '"' in arg or '\0' in arg:
                    raise AngrSimOSError("Can't handle windows args with quotes or nulls in them")
                arg = claripy.BVV(arg)
            elif isinstance(arg, claripy.ast.BV):
                # Symbolic argument: constrain every byte away from quote/null instead.
                for byte in arg.chop(8):
                    state.solver.add(byte != claripy.BVV('"'))
                    state.solver.add(byte != claripy.BVV(0, 8))
            else:
                raise TypeError("Argument must be str or bitvector")
            cmdline = cmdline.concat(claripy.BVV('"'), arg, claripy.BVV('"'))
        cmdline = cmdline.concat(claripy.BVV(0, 8))
        # Naive UTF-16 widening: each byte followed by a zero byte.
        wcmdline = claripy.Concat(*(x.concat(0, 8) for x in cmdline.chop(8)))
        if not state.satisfiable():
            raise AngrSimOSError("Can't handle windows args with quotes or nulls in them")
        # Dump the table onto the stack, calculate pointers to args, env
        stack_ptr = state.regs.sp
        stack_ptr -= 16
        state.memory.store(stack_ptr, claripy.BVV(0, 8*16))
        # Narrow and wide command lines live on the stack; the CRT globals
        # _acmdln/_wcmdln point at them.
        stack_ptr -= cmdline.length / 8
        state.memory.store(stack_ptr, cmdline)
        state.mem[self.acmdln_ptr].long = stack_ptr
        stack_ptr -= wcmdline.length / 8
        state.memory.store(stack_ptr, wcmdline)
        state.mem[self.wcmdln_ptr].long = stack_ptr
        argv = table.dump(state, stack_ptr)
        envp = argv + ((len(args) + 1) * state.arch.bytes)
        # Put argc on stack and fix the stack pointer
        newsp = argv - state.arch.bytes
        state.memory.store(newsp, argc, endness=state.arch.memory_endness)
        state.regs.sp = newsp
        # store argc argv envp in the posix plugin
        state.posix.argv = argv
        state.posix.argc = argc
        state.posix.environ = envp
        state.regs.sp = state.regs.sp - 0x80 # give us some stack space to work with
        # fake return address from entry point
        return_addr = self.return_deadend
        kernel32 = self.project.loader.shared_objects.get('kernel32.dll', None)
        if kernel32:
            # some programs will use the return address from start to find the kernel32 base
            return_addr = kernel32.get_symbol('ExitProcess').rebased_addr
        if state.arch.name == 'X86':
            state.mem[state.regs.sp].dword = return_addr
            # first argument appears to be PEB
            tib_addr = state.regs.fs.concat(state.solver.BVV(0, 16))
            peb_addr = state.mem[tib_addr + 0x30].dword.resolved
            state.mem[state.regs.sp + 4].dword = peb_addr
        return state
    def state_blank(self, **kwargs):
        """Create a blank state with NX honored and, on x86, a minimal TIB/PEB/LDR laid out in memory."""
        if self.project.loader.main_object.supports_nx:
            add_options = kwargs.get('add_options', set())
            add_options.add(o.ENABLE_NX)
            kwargs['add_options'] = add_options
        state = super(SimWindows, self).state_blank(**kwargs)
        # yikes!!!
        # Carve a 64KiB-aligned region out of the mmap area; TIB goes in the
        # first page, PEB in the second.
        fun_stuff_addr = state.libc.mmap_base
        if fun_stuff_addr & 0xffff != 0:
            fun_stuff_addr += 0x10000 - (fun_stuff_addr & 0xffff)
        state.memory.map_region(fun_stuff_addr, 0x2000, claripy.BVV(3, 3))
        TIB_addr = fun_stuff_addr
        PEB_addr = fun_stuff_addr + 0x1000
        if state.arch.name == 'X86':
            LDR_addr = fun_stuff_addr + 0x2000
            state.mem[TIB_addr + 0].dword = -1 # Initial SEH frame
            state.mem[TIB_addr + 4].dword = state.regs.sp # stack base (high addr)
            state.mem[TIB_addr + 8].dword = state.regs.sp - 0x100000 # stack limit (low addr)
            state.mem[TIB_addr + 0x18].dword = TIB_addr # myself!
            state.mem[TIB_addr + 0x24].dword = 0xbad76ead # thread id
            if self.project.loader.tls_object is not None:
                state.mem[TIB_addr + 0x2c].dword = self.project.loader.tls_object.user_thread_pointer # tls array pointer
            state.mem[TIB_addr + 0x30].dword = PEB_addr # PEB addr, of course
            state.regs.fs = TIB_addr >> 16
            state.mem[PEB_addr + 0xc].dword = LDR_addr
            # OKAY IT'S TIME TO SUFFER
            # http://sandsprite.com/CodeStuff/Understanding_the_Peb_Loader_Data_List.html
            # One LDR_MODULE thunk per PE object (slot 0 is the list head),
            # followed by a string pool holding each module's path in UTF-16.
            THUNK_SIZE = 0x100
            num_pe_objects = len(self.project.loader.all_pe_objects)
            thunk_alloc_size = THUNK_SIZE * (num_pe_objects + 1)
            string_alloc_size = sum(len(obj.binary)*2 + 2 for obj in self.project.loader.all_pe_objects)
            total_alloc_size = thunk_alloc_size + string_alloc_size
            if total_alloc_size & 0xfff != 0:
                total_alloc_size += 0x1000 - (total_alloc_size & 0xfff)
            state.memory.map_region(LDR_addr, total_alloc_size, claripy.BVV(3, 3))
            state.libc.mmap_base = LDR_addr + total_alloc_size
            string_area = LDR_addr + thunk_alloc_size
            for i, obj in enumerate(self.project.loader.all_pe_objects):
                # Create a LDR_MODULE, we'll handle the links later...
                obj.module_id = i+1 # HACK HACK HACK HACK
                addr = LDR_addr + (i+1) * THUNK_SIZE
                state.mem[addr+0x18].dword = obj.mapped_base
                state.mem[addr+0x1C].dword = obj.entry
                # Allocate some space from the same region to store the paths
                path = obj.binary # we're in trouble if this is None
                string_size = len(path) * 2
                tail_size = len(os.path.basename(path)) * 2
                state.mem[addr+0x24].short = string_size
                state.mem[addr+0x26].short = string_size
                state.mem[addr+0x28].dword = string_area
                state.mem[addr+0x2C].short = tail_size
                state.mem[addr+0x2E].short = tail_size
                state.mem[addr+0x30].dword = string_area + string_size - tail_size
                # Widen the path to UTF-16 one character at a time.
                for j, c in enumerate(path):
                    # if this segfaults, increase the allocation size
                    state.mem[string_area + j*2].short = ord(c)
                state.mem[string_area + string_size].short = 0
                string_area += string_size + 2
            # handle the links. we construct a python list in the correct order for each, and then, uh,
            mem_order = sorted(self.project.loader.all_pe_objects, key=lambda x: x.mapped_base)
            init_order = []
            partially_loaded = set()
            # Depth-first walk of the dependency graph to compute init order.
            def fuck_load(x):
                if x.provides in partially_loaded:
                    return
                partially_loaded.add(x.provides)
                for dep in x.deps:
                    if dep in self.project.loader.shared_objects:
                        depo = self.project.loader.shared_objects[dep]
                        fuck_load(depo)
                        if depo not in init_order:
                            init_order.append(depo)
            fuck_load(self.project.loader.main_object)
            load_order = [self.project.loader.main_object] + init_order
            # Doubly-link a pair of LIST_ENTRY fields.
            def link(a, b):
                state.mem[a].dword = b
                state.mem[b+4].dword = a
            # I have genuinely never felt so dead in my life as I feel writing this code
            # Thread the modules into a circular doubly-linked list anchored at
            # the list head (LDR_addr + 12), at the given LIST_ENTRY offset.
            def link_list(mods, offset):
                if mods:
                    addr_a = LDR_addr + 12
                    addr_b = LDR_addr + THUNK_SIZE * mods[0].module_id
                    link(addr_a + offset, addr_b + offset)
                    for mod_a, mod_b in zip(mods[:-1], mods[1:]):
                        addr_a = LDR_addr + THUNK_SIZE * mod_a.module_id
                        addr_b = LDR_addr + THUNK_SIZE * mod_b.module_id
                        link(addr_a + offset, addr_b + offset)
                    addr_a = LDR_addr + THUNK_SIZE * mods[-1].module_id
                    addr_b = LDR_addr + 12
                    link(addr_a + offset, addr_b + offset)
                else:
                    link(LDR_addr + 12, LDR_addr + 12)
            _l.debug("Load order: %s", load_order)
            _l.debug("In-memory order: %s", mem_order)
            _l.debug("Initialization order: %s", init_order)
            link_list(load_order, 0)
            link_list(mem_order, 8)
            link_list(init_order, 16)
        return state
def state_tracer(self, input_content=None, magic_content=None, preconstrain_input=True,
preconstrain_flag=True, constrained_addrs=None, **kwargs):
raise TracerEnvironmentError("Tracer currently only supports CGC and Unix.")
    def handle_exception(self, successors, engine, exc_type, exc_value, exc_traceback):
        """Convert a guest fault into an SEH dispatch: wind up to the faulting
        instruction, write EXCEPTION_RECORD/CONTEXT to the stack, and add a
        successor at KiUserExceptionDispatcher. Re-raises anything it cannot model."""
        # don't bother handling non-vex exceptions
        if engine is not self.project.factory.default_engine:
            raise exc_type, exc_value, exc_traceback
        # don't bother handling symbolic-address exceptions
        if exc_type is SimSegfaultException:
            if exc_value.original_addr is not None and exc_value.original_addr.symbolic:
                raise exc_type, exc_value, exc_traceback
        _l.debug("Handling exception from block at %#x: %r", successors.addr, exc_value)
        # If our state was just living out the rest of an unsatisfiable guard, discard it
        # it's possible this is incomplete because of implicit constraints added by memory or ccalls...
        if not successors.initial_state.satisfiable(extra_constraints=(exc_value.guard,)):
            _l.debug("... NOT handling unreachable exception")
            successors.processed = True
            return
        # we'll need to wind up to the exception to get the correct state to resume from...
        # exc will be a SimError, for sure
        # executed_instruction_count is incremented when we see an imark BUT it starts at -1, so this is the correct val
        num_inst = exc_value.executed_instruction_count
        if num_inst >= 1:
            # scary...
            try:
                # Re-execute just the instructions before the fault to obtain
                # the machine state at the faulting instruction.
                r = self.project.factory.default_engine.process(successors.initial_state, num_inst=num_inst)
                if len(r.flat_successors) != 1:
                    if exc_value.guard.is_true():
                        _l.error("Got %d successors while re-executing %d instructions at %#x "
                                 "for unconditional exception windup", num_inst, successors.initial_state.addr)
                        raise exc_type, exc_value, exc_traceback
                    # Try to figure out which successor is ours...
                    _, _, canon_guard = exc_value.guard.canonicalize()
                    for possible_succ in r.flat_successors:
                        _, _, possible_guard = possible_succ.recent_events[-1].constraint.canonicalize()
                        if canon_guard is possible_guard:
                            exc_state = possible_succ
                            break
                    else:
                        _l.error("None of the %d successors while re-executing %d instructions at %#x "
                                 "for conditional exception windup matched guard",
                                 num_inst, successors.initial_state.addr
                                 )
                        raise exc_type, exc_value, exc_traceback
                else:
                    exc_state = r.flat_successors[0]
            except:
                # lol no
                _l.error("Got some weirdo error while re-executing %d instructions at %#x "
                         "for exception windup", num_inst, successors.initial_state.addr)
                raise exc_type, exc_value, exc_traceback
        else:
            # Fault on the very first instruction: no windup needed.
            # duplicate the history-cycle code here...
            exc_state = successors.initial_state.copy()
            exc_state.register_plugin('history', successors.initial_state.history.make_child())
            exc_state.history.recent_bbl_addrs.append(successors.initial_state.addr)
        _l.debug("... wound up state to %#x", exc_state.addr)
        # first check that we actually have an exception handler
        # we check is_true since if it's symbolic this is exploitable maybe?
        tib_addr = exc_state.regs._fs.concat(exc_state.solver.BVV(0, 16))
        if exc_state.solver.is_true(exc_state.mem[tib_addr].long.resolved == -1):
            _l.debug("... no handlers register")
            exc_value.args = ('Unhandled exception: %r' % exc_value,)
            raise exc_type, exc_value, exc_traceback
        # catch nested exceptions here with magic value
        if exc_state.solver.is_true(exc_state.mem[tib_addr].long.resolved == 0xBADFACE):
            _l.debug("... nested exception")
            exc_value.args = ('Unhandled exception: %r' % exc_value,)
            raise exc_type, exc_value, exc_traceback
        # serialize the thread context and set up the exception record...
        self._dump_regs(exc_state, exc_state.regs._esp - 0x300)
        exc_state.regs.esp -= 0x400
        record = exc_state.regs._esp + 0x20
        context = exc_state.regs._esp + 0x100
        # https://msdn.microsoft.com/en-us/library/windows/desktop/aa363082(v=vs.85).aspx
        exc_state.mem[record + 0x4].uint32_t = 0 # flags = continuable
        exc_state.mem[record + 0x8].uint32_t = 0 # FUCK chained exceptions
        exc_state.mem[record + 0xc].uint32_t = exc_state.regs._eip # exceptionaddress
        for i in xrange(16): # zero out the arg count and args array
            exc_state.mem[record + 0x10 + 4*i].uint32_t = 0
        # TOTAL SIZE: 0x50
        # the rest of the parameters have to be set per-exception type
        # https://msdn.microsoft.com/en-us/library/cc704588.aspx
        if exc_type is SimSegfaultException:
            exc_state.mem[record].uint32_t = 0xc0000005 # STATUS_ACCESS_VIOLATION
            exc_state.mem[record + 0x10].uint32_t = 2
            exc_state.mem[record + 0x14].uint32_t = 1 if exc_value.reason.startswith('write-') else 0
            exc_state.mem[record + 0x18].uint32_t = exc_value.addr
        elif exc_type is SimZeroDivisionException:
            exc_state.mem[record].uint32_t = 0xC0000094 # STATUS_INTEGER_DIVIDE_BY_ZERO
            exc_state.mem[record + 0x10].uint32_t = 0
        # set up parameters to userland dispatcher
        exc_state.mem[exc_state.regs._esp].uint32_t = 0xBADC0DE # god help us if we return from this func
        exc_state.mem[exc_state.regs._esp + 4].uint32_t = record
        exc_state.mem[exc_state.regs._esp + 8].uint32_t = context
        # let's go let's go!
        # we want to use a true guard here. if it's not true, then it's already been added in windup.
        successors.add_successor(exc_state, self._exception_handler, exc_state.solver.true, 'Ijk_Exception')
        successors.processed = True
# these two methods load and store register state from a struct CONTEXT
# https://www.nirsoft.net/kernel_struct/vista/CONTEXT.html
@staticmethod
def _dump_regs(state, addr):
    """Serialize the i386 register state into a struct CONTEXT at *addr*.

    Layout follows https://www.nirsoft.net/kernel_struct/vista/CONTEXT.html.
    Floating point / extended registers are deliberately not written.
    Raises SimUnsupportedError when the state is not X86.
    """
    if state.arch.name != 'X86':
        raise SimUnsupportedError("I don't know how to work with struct CONTEXT outside of i386")
    # I decline to load and store the floating point/extended registers
    state.mem[addr + 0].uint32_t = 0x07 # contextflags = control | integer | segments
    # dr0 - dr7 are at 0x4-0x18
    # fp state is at 0x1c: 8 ulongs plus a char[80] gives it size 0x70
    # Segment selectors occupy the high 16 bits of their dword slots.
    state.mem[addr + 0x8c].uint32_t = state.regs.gs.concat(state.solver.BVV(0, 16))
    state.mem[addr + 0x90].uint32_t = state.regs.fs.concat(state.solver.BVV(0, 16))
    state.mem[addr + 0x94].uint32_t = 0 # es
    state.mem[addr + 0x98].uint32_t = 0 # ds
    state.mem[addr + 0x9c].uint32_t = state.regs.edi
    state.mem[addr + 0xa0].uint32_t = state.regs.esi
    state.mem[addr + 0xa4].uint32_t = state.regs.ebx
    state.mem[addr + 0xa8].uint32_t = state.regs.edx
    state.mem[addr + 0xac].uint32_t = state.regs.ecx
    state.mem[addr + 0xb0].uint32_t = state.regs.eax
    state.mem[addr + 0xb4].uint32_t = state.regs.ebp
    state.mem[addr + 0xb8].uint32_t = state.regs.eip
    state.mem[addr + 0xbc].uint32_t = 0 # cs
    state.mem[addr + 0xc0].uint32_t = state.regs.eflags
    state.mem[addr + 0xc4].uint32_t = state.regs.esp
    state.mem[addr + 0xc8].uint32_t = 0 # ss
    # and then 512 bytes of extended registers
    # TOTAL SIZE: 0x2cc
@staticmethod
def _load_regs(state, addr):
    """Deserialize a struct CONTEXT at *addr* back into the register file.

    Mirror of _dump_regs; segment selectors come from the top 16 bits of
    their slots. Raises SimUnsupportedError when the state is not X86.
    """
    if state.arch.name != 'X86':
        raise SimUnsupportedError("I don't know how to work with struct CONTEXT outside of i386")
    # TODO: check contextflags to see what parts to deserialize
    state.regs.gs = state.mem[addr + 0x8c].uint32_t.resolved[31:16]
    state.regs.fs = state.mem[addr + 0x90].uint32_t.resolved[31:16]
    state.regs.edi = state.mem[addr + 0x9c].uint32_t.resolved
    state.regs.esi = state.mem[addr + 0xa0].uint32_t.resolved
    state.regs.ebx = state.mem[addr + 0xa4].uint32_t.resolved
    state.regs.edx = state.mem[addr + 0xa8].uint32_t.resolved
    state.regs.ecx = state.mem[addr + 0xac].uint32_t.resolved
    state.regs.eax = state.mem[addr + 0xb0].uint32_t.resolved
    state.regs.ebp = state.mem[addr + 0xb4].uint32_t.resolved
    state.regs.eip = state.mem[addr + 0xb8].uint32_t.resolved
    state.regs.eflags = state.mem[addr + 0xc0].uint32_t.resolved
| f-prettyland/angr | angr/simos/windows.py | Python | bsd-2-clause | 20,273 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Point Django at the project settings before anything Django-related
    # is imported; setdefault leaves an explicit environment override intact.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sleeptomusicweb.settings")

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
| beddit/sleep-musicalization-web | manage.py | Python | bsd-2-clause | 258 |
# Copyright (c) 2012-2021, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from typing import Optional

from .aws import Action as BaseAction
from .aws import BaseARN
service_name = "AWS Budget Service"
prefix = "budgets"
class Action(BaseAction):
    """An IAM policy action in the ``budgets`` service namespace."""

    # FIX: the parameter defaults to None, so the annotation must be
    # Optional[str], not the bare str the original declared.
    def __init__(self, action: Optional[str] = None) -> None:
        super().__init__(prefix, action)
class ARN(BaseARN):
    """Builder for budgets-service ARNs."""

    def __init__(self, resource: str = "", region: str = "", account: str = "") -> None:
        # All fields are forwarded by keyword; the service is always "budgets".
        super().__init__(
            service=prefix, account=account, region=region, resource=resource
        )
# Pre-defined IAM actions exposed by the budgets service namespace.
CreateBudgetAction = Action("CreateBudgetAction")
DeleteBudgetAction = Action("DeleteBudgetAction")
DescribeBudgetAction = Action("DescribeBudgetAction")
DescribeBudgetActionHistories = Action("DescribeBudgetActionHistories")
DescribeBudgetActionsForAccount = Action("DescribeBudgetActionsForAccount")
DescribeBudgetActionsForBudget = Action("DescribeBudgetActionsForBudget")
ExecuteBudgetAction = Action("ExecuteBudgetAction")
ModifyBudget = Action("ModifyBudget")
UpdateBudgetAction = Action("UpdateBudgetAction")
ViewBudget = Action("ViewBudget")
| cloudtools/awacs | awacs/budgets.py | Python | bsd-2-clause | 1,133 |
from tests import tests
def test_toggle():
    """Toggling every seat in the 3x3 grid should leave all seats set to 1."""
    # FIX: the original final line had dataset-dump metadata fused onto the
    # assert statement, making the file unparseable; reconstructed cleanly.
    temporary = tests.toggled_seats
    assert temporary == [[1, 1, 1], [1, 1, 1], [1, 1, 1]]
# Copyright (c) 2013-2021, Freja Nordsiek
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os.path
import random
import tempfile
import h5py
import pytest
import hdf5storage
from asserts import assert_equal
from make_randoms import random_numpy, random_numpy_shape, \
max_array_axis_length, dtypes, random_name
# Seed the RNG from system entropy so each test run exercises fresh inputs.
random.seed()
@pytest.mark.parametrize(
    'compression,shuffle,fletcher32,gzip_level',
    [(compression, shuffle, fletcher32, level)
     for compression in ('gzip', 'lzf')
     for shuffle in (True, False)
     for fletcher32 in (True, False)
     for level in range(10)])
def test_read_filtered_data(compression, shuffle, fletcher32,
                            gzip_level):
    """Write filtered data with raw h5py and verify hdf5storage reads it back."""
    # Make the filters dict.
    filts = {'compression': compression,
             'shuffle': shuffle,
             'fletcher32': fletcher32}
    # compression_opts (the gzip level) is only valid for gzip.
    if compression == 'gzip':
        filts['compression_opts'] = gzip_level

    # Make some random data.
    dims = random.randint(1, 4)
    data = random_numpy(shape=random_numpy_shape(dims,
                                                 max_array_axis_length),
                        dtype=random.choice(tuple(
                            set(dtypes) - set(['U']))))
    # Make a random name.
    name = random_name()

    # Write the data to the file with the given name with the provided
    # filters and read it back.
    with tempfile.TemporaryDirectory() as folder:
        filename = os.path.join(folder, 'data.h5')
        with h5py.File(filename, mode='w') as f:
            f.create_dataset(name, data=data, chunks=True, **filts)
        out = hdf5storage.read(path=name, filename=filename,
                               matlab_compatible=False)

    # Compare
    assert_equal(out, data)
@pytest.mark.parametrize(
    'compression,shuffle,fletcher32,gzip_level',
    [(compression, shuffle, fletcher32, level)
     for compression in ('gzip', 'lzf')
     for shuffle in (True, False)
     for fletcher32 in (True, False)
     for level in range(10)])
def test_write_filtered_data(compression, shuffle, fletcher32,
                             gzip_level):
    """Write through hdf5storage with filters and verify them with raw h5py."""
    # Make some random data. The dtype must be restricted so that it can
    # be read back reliably.
    dims = random.randint(1, 4)
    dts = tuple(set(dtypes) - set(['U', 'S', 'bool', 'complex64', \
        'complex128']))

    data = random_numpy(shape=random_numpy_shape(dims,
                                                 max_array_axis_length),
                        dtype=random.choice(dts))

    # Make a random name.
    name = random_name()

    # Write the data to the file with the given name with the provided
    # filters and read it back.
    with tempfile.TemporaryDirectory() as folder:
        filename = os.path.join(folder, 'data.h5')
        # compress_size_threshold=0 forces the filter pipeline on.
        hdf5storage.write(data, path=name, filename=filename,
                          store_python_metadata=False,
                          matlab_compatible=False,
                          compress=True, compress_size_threshold=0,
                          compression_algorithm=compression,
                          gzip_compression_level=gzip_level,
                          shuffle_filter=shuffle,
                          compressed_fletcher32_filter=fletcher32)

        with h5py.File(filename, mode='r') as f:
            d = f[name]
            filts = {'fletcher32': d.fletcher32,
                     'shuffle': d.shuffle,
                     'compression': d.compression,
                     'gzip_level': d.compression_opts}
            out = d[...]

    # Check the filters
    assert fletcher32 == filts['fletcher32']
    assert shuffle == filts['shuffle']
    assert compression == filts['compression']
    # The gzip level only round-trips for gzip compression.
    if compression == 'gzip':
        assert gzip_level == filts['gzip_level']

    # Compare
    assert_equal(out, data)
@pytest.mark.parametrize(
    'method,uncompressed_fletcher32_filter,compression,shuffle,'
    'fletcher32,gzip_level',
    [(method, uf, compression, shuffle, fletcher32, level)
     for method in ('compression_disabled', 'data_too_small')
     for uf in (True, False)
     for compression in ('gzip', 'lzf')
     for shuffle in (True, False)
     for fletcher32 in (True, False)
     for level in range(10)])
def test_uncompressed_write_filtered_data(
        method, uncompressed_fletcher32_filter, compression, shuffle,
        fletcher32, gzip_level):
    """Verify no compression is applied (disabled or below size threshold)
    while the uncompressed-fletcher32 setting is still honored."""
    # Make the filters dict.
    filts = {'compression': compression,
             'shuffle': shuffle,
             'fletcher32': fletcher32,
             'gzip_level': gzip_level}

    # Make some random data. The dtype must be restricted so that it can
    # be read back reliably.
    dims = random.randint(1, 4)
    dts = tuple(set(dtypes) - set(['U', 'S', 'bool', 'complex64', \
        'complex128']))

    data = random_numpy(shape=random_numpy_shape(dims,
                                                 max_array_axis_length),
                        dtype=random.choice(dts))

    # Make a random name.
    name = random_name()

    # Make the options to disable compression by the method specified,
    # which is either that it is outright disabled or that the data is
    # smaller than the compression threshold.
    if method == 'compression_disabled':
        opts = {'compress': False, 'compress_size_threshold': 0}
    else:
        opts = {'compress': True,
                'compress_size_threshold': data.nbytes + 1}

    # Write the data to the file with the given name with the provided
    # filters and read it back.
    with tempfile.TemporaryDirectory() as folder:
        filename = os.path.join(folder, 'data.h5')
        hdf5storage.write(data, path=name, filename=filename, \
            store_python_metadata=False, matlab_compatible=False, \
            compression_algorithm=filts['compression'], \
            gzip_compression_level=filts['gzip_level'], \
            shuffle_filter=filts['shuffle'], \
            compressed_fletcher32_filter=filts['fletcher32'], \
            uncompressed_fletcher32_filter= \
            uncompressed_fletcher32_filter, \
            **opts)

        with h5py.File(filename, mode='r') as f:
            d = f[name]
            # Parameters are rebound to what h5py actually stored.
            fletcher32 = d.fletcher32
            shuffle = d.shuffle
            compression = d.compression
            gzip_level = d.compression_opts
            out = d[...]

    # Check the filters
    assert compression is None
    assert shuffle is False
    assert fletcher32 == uncompressed_fletcher32_filter

    # Compare
    assert_equal(out, data)
| frejanordsiek/hdf5storage | tests/test_hdf5_filters.py | Python | bsd-2-clause | 7,723 |
'''The script reads in a list of genes from an output from DAVID and reads in
expression data from a text file containing corresponding genes and report
an expression level of each gene in DAVID list to standard output.
Both gene expression and DAVID files should be in a comma-delimited format.
'''
import sys
david_file = sys.argv[1]
expr_file = sys.argv[2]
genes = {}
print >> sys.stderr, 'Reading %s...', david_file
for line in open(expr_file):
cols = line.strip().split(',')
geneid = cols[0]
exprs = '\t'.join(cols[1:5])
if geneid not in genes:
genes[geneid] = exprs
else:
raise KeyError('duplicated gene ID')
print >> sys.stderr, 'Reading %s...', expr_file
for line in open(david_file):
geneid = line.strip().split(',')[0]
if geneid in genes:
print '%s\t%s' % (geneid, genes[geneid])
| likit/BioUtils | david2expr.py | Python | bsd-2-clause | 851 |
# -*- coding: utf-8 -*-
# Copyright (c) 2014, 2015, Stephan Rave
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import, division, print_function
import sys
PY2 = sys.version_info.major == 2
try:
from cPickle import dumps, load
except ImportError:
from pickle import dumps, load
import fnmatch
import itertools
import logging
import os
import shutil
import textwrap
import numpy as np
import yaml
logger = logging.getLogger('simdb')
class DataLoader(object):
    """Callable placeholder that unpickles its file from disk on demand."""

    def __init__(self, filename):
        self.filename = filename

    def __call__(self):
        if PY2:
            with open(self.filename, 'rb') as f:
                return load(f)
        try:
            with open(self.filename, 'rb') as f:
                return load(f)
        except UnicodeDecodeError:
            # Raised for numpy arrays pickled under Python 2; retry with
            # latin-1 so the byte payload decodes losslessly.
            with open(self.filename, 'rb') as f:
                return load(f, encoding='latin-1')
class Dataset(object):
    """A single simulation run stored in a directory on disk.

    Metadata is read from the YAML files INFO / FINISHED / FAILED in the
    dataset directory. Pickled payload data (DATA) and timing data (TIMES)
    are loaded lazily through the ``d`` and ``t`` properties. After
    construction only ``comment`` and ``protected`` may be reassigned.
    """

    class DataDict(object):
        # Attribute-style wrapper around a plain dict: entries are exposed
        # both as attributes and via item access.
        def __getitem__(self, name):
            return getattr(self, name)

        def __setitem__(self, name, value):
            setattr(self, name, value)

        def __iter__(self):
            return iter(self.__dict__)

        @property
        def dict(self):
            return self.__dict__

        def __repr__(self):
            return ', '.join(str(k) + ': ' + str(v) for k, v in sorted(self.dict.items()))

    def __init__(self, path):
        self.path = path
        # NOTE(review): yaml.load without an explicit Loader is deprecated in
        # PyYAML >= 5.1 and unsafe on untrusted input -- confirm dataset
        # directories are trusted or switch to yaml.safe_load.
        with open(os.path.join(path, 'INFO'), 'rt') as f:
            info = yaml.load(f)
        self._params = info.pop('parameters')
        self.p = self.DataDict()
        self.p.__dict__ = self._params
        self.tags = frozenset(info.pop('tags'))
        # Remaining INFO keys (experiment, started, comment, protected, ...)
        # become plain attributes.
        self.__dict__.update(info)
        self.failed = False
        if os.path.exists(os.path.join(path, 'FINISHED')):
            with open(os.path.join(path, 'FINISHED'), 'rt') as f:
                self.finished = yaml.load(f)
        else:
            self.finished = False
        if os.path.exists(os.path.join(path, 'FAILED')):
            with open(os.path.join(path, 'FAILED'), 'rt') as f:
                self.failed = yaml.load(f)
        # From here on __setattr__ enforces the locked attribute policy.
        self._locked = True
        self._deleted = False

    @property
    def name(self):
        # The directory name doubles as the dataset name.
        return os.path.basename(self.path)

    @property
    def d(self):
        """Lazily loaded pickled payload data as a DataDict."""
        if self._deleted:
            raise ValueError('Dataset has been deleted')
        if hasattr(self, '_d'):
            return self._d
        if not self.finished:
            if self.failed:
                logger.warn('Loading data of failed dataset {}.'.format(self.name))
            else:
                if os.path.exists(os.path.join(self.path, 'DATA')):
                    logger.warn('Loading data of unfinished dataset {}.'.format(self.name))
                else:
                    raise ValueError('No data has been written to unfinished dataset {}.'.format(self.name))
        if PY2:
            with open(os.path.join(self.path, 'DATA'), 'rb') as f:
                self._data = load(f)
        else:
            try:
                with open(os.path.join(self.path, 'DATA'), 'rb') as f:
                    self._data = load(f)
            except UnicodeDecodeError:
                # this happens when loading numpy arrays which have been
                # pickled with Python 2
                with open(os.path.join(self.path, 'DATA'), 'rb') as f:
                    self._data = load(f, encoding='latin-1')
        # DataLoader placeholders store paths relative to the dataset
        # directory; make them absolute so they resolve when called.
        for v in self._data.values():
            if isinstance(v, DataLoader):
                v.filename = os.path.join(self.path, v.filename)
            elif isinstance(v, list):
                for vv in v:
                    if isinstance(vv, DataLoader):
                        vv.filename = os.path.join(self.path, vv.filename)
        self._d = self.DataDict()
        self._d.__dict__ = self._data
        return self._d

    @property
    def t(self):
        """Lazily loaded timing information (numpy arrays) as a DataDict."""
        if self._deleted:
            raise ValueError('Dataset has been deleted')
        if hasattr(self, '_t'):
            return self._t
        if not self.finished:
            if self.failed:
                logger.warn('Loading data of failed dataset {}.'.format(self.name))
            else:
                if os.path.exists(os.path.join(self.path, 'TIMES')):
                    logger.warn('Loading data of unfinished dataset {}.'.format(self.name))
                else:
                    raise ValueError('No data has been written to unfinished dataset {}.'.format(self.name))
        with open(os.path.join(self.path, 'TIMES'), 'rt') as f:
            self._times = {k: np.array(v)
                           for k, v in yaml.load(f)['duration'].items()}
        self._t = self.DataDict()
        self._t.__dict__ = self._times
        return self._t

    @property
    def host(self):
        # Dataset names end with '-<hostname>'.
        return self.name.split('-')[-1]

    def unload_data(self):
        # Drop the cached payload/timing data; properties reload on access.
        if hasattr(self, '_d'):
            del self._d
            del self._data
        if hasattr(self, '_t'):
            del self._t
            del self._times

    def delete(self):
        """Remove the dataset directory from disk (idempotent)."""
        if self._deleted:
            return
        shutil.rmtree(self.path)
        self._deleted = True

    def tag(self, *args):
        """Add the given tags and persist them to INFO."""
        if self._deleted:
            raise ValueError('Dataset has been deleted')
        self.__dict__['tags'] = frozenset(self.tags.union(args))
        self._update_info()

    def untag(self, *args):
        """Remove the given tags and persist the change to INFO."""
        if self._deleted:
            raise ValueError('Dataset has been deleted')
        self.__dict__['tags'] = frozenset(self.tags - set(args))
        self._update_info()

    def _update_info(self):
        # Rewrite the INFO file from the current in-memory metadata.
        data = {k: getattr(self, k) for k in ['experiment', 'started', 'comment', 'protected']}
        data['parameters'] = self.p.dict
        data['tags'] = list(sorted(self.tags))
        with open(os.path.join(self.path, 'INFO'), 'wt') as f:
            yaml.dump(data, f)

    def __setattr__(self, k, v):
        # Before _locked exists (i.e. during __init__) attributes are set
        # freely; afterwards only comment/protected (and _private) change.
        if not hasattr(self, '_locked'):
            super(Dataset, self).__setattr__(k, v)
        else:
            if getattr(self, '_deleted', False):
                raise ValueError('Dataset has been deleted')
            if k not in ['comment', 'protected'] and not k.startswith('_'):
                raise AttributeError('Cannot change attribute ' + k)
            if k == 'protected' and not isinstance(v, bool):
                raise AttributeError('protected must be bool')
            if k == 'comment' and not isinstance(v, str):
                raise AttributeError('comment must be string')
            super(Dataset, self).__setattr__(k, v)
            # Public attribute changes are persisted immediately.
            if not k.startswith('_'):
                self._update_info()

    def __str__(self):
        params = '\n'.join(textwrap.wrap(', '.join('{}={}'.format(k, v) for k, v in sorted(self.p.dict.items())),
                                         initial_indent=' '*10, subsequent_indent=' '*10, width=100))
        s = '{} {}'.format(self.name,
                           '✓' if self.finished else '✗' if self.failed else '?')
        if self._deleted:
            s += ' ***DELETED***'
        s += '\n' + params
        return s

    __repr__ = __str__
class DatasetCollection(object):
    """An ordered list of Dataset objects with query helpers."""

    def __init__(self, datasets):
        self.datasets = datasets

    def dir(self):
        # Print a human-readable listing of all contained datasets.
        print(str(self))

    def select(self, *args, **kwargs):
        """Filter by parameter equality (kwargs), predicates (args) and the
        special ``failed`` keyword; returns a new DatasetCollection."""
        failed = kwargs.pop('failed', None)

        def selector(ds):
            if failed is not None and bool(ds.failed) != failed:
                return False
            try:
                for k, v in kwargs.items():
                    if ds.p.dict[k] != v:
                        return False
                return all(f(ds) for f in args)
            except:
                # Missing parameters or raising predicates exclude a dataset.
                return False

        return DatasetCollection(list(filter(selector, self.datasets)))

    def select_unique(self, *args, **kwargs):
        """Like select, but require exactly one match and return it."""
        ds = self.select(*args, **kwargs)
        if len(ds) == 0:
            raise ValueError('No matching datasets found!')
        elif len(ds) > 1:
            raise ValueError('More than one ({}) datasetes found!'.format(len(ds)))
        else:
            return ds[0]

    def select_last(self, *args, **kwargs):
        """Like select, but return the last (most recent) match."""
        ds = self.select(*args, **kwargs)
        if len(ds) == 0:
            raise ValueError('No matching datasets found!')
        else:
            return ds[-1]

    def delete(self):
        # Delete every contained dataset from disk.
        for ds in self.datasets:
            ds.delete()

    def duplicates(self, params=None):
        """Group datasets that share an experiment and (a subset of) pickled
        parameter values; returns a printable list of duplicate groups."""
        if params:
            params = sorted(params)
            # Key on the pickled values of just the requested parameters;
            # datasets lacking any of them are skipped.
            keyfunc = lambda ds: (ds.experiment,
                                  list(dumps(ds.p[k], protocol=-1) for k in params))
            groups = itertools.groupby(sorted((ds for ds in self.datasets if set(ds.p.dict.keys()) >= set(params)),
                                              key=keyfunc),
                                       key=keyfunc)
        else:
            # Key on all parameters (pickled for hashable comparison).
            keyfunc = lambda ds: (ds.experiment,
                                  list(sorted((k, dumps(v, protocol=-1)) for k, v in ds.p.dict.items())))
            groups = itertools.groupby(sorted(self.datasets, key=keyfunc), key=keyfunc)

        def get_duplicates(groups):
            # Yield [info, datasets] for every group with more than one entry.
            for k, v in groups:
                datasets = list(v)
                if len(datasets) > 1:
                    p = datasets[0].p.dict
                    info = {'experiment': datasets[0].experiment,
                            'params': p if not params else {k: p[k] for k in params}}
                    yield [info, datasets]

        class DuplicatesList(list):
            # list of [info, datasets] pairs with pretty printing and a
            # helper that keeps only the newest dataset of each group.
            def __str__(self):
                formatter = lambda i, ds: ('experiment: ' + i['experiment'] + '\n' +
                                           'params: ' +
                                           '\n'.join(textwrap.wrap(', '.join('{}={}'.format(k, v)
                                                                             for k, v in sorted(i['params'].items())),
                                                                   initial_indent='',
                                                                   subsequent_indent=' ' * len('experiment: '),
                                                                   width=100)) + '\n' +
                                           'count: ' + str(len(ds)) + '\n')
                return '\n'.join(formatter(i, ds) for i, ds in self)

            def delete_old(self):
                # Keep the last dataset of each duplicate group, delete the rest.
                for i, datasets in self:
                    for ds in datasets[:-1]:
                        ds.delete()

            __repr__ = __str__

        return DuplicatesList(get_duplicates(groups))

    def __getitem__(self, n):
        return self.datasets[n]

    def __len__(self):
        return len(self.datasets)

    def __repr__(self):
        return 'DatasetCollection([' + ',\n'.join(repr(ds) for ds in self.datasets) + '])'

    def __str__(self):
        if not self.datasets:
            return 'None!'
        return '\n\n'.join(map(str, self.datasets))
class SimulationDatabase(object):
    """Entry point to a simdb on-disk database.

    The database lives at *db_path* (default: $SIMDB_PATH) with all dataset
    directories under its DATA subdirectory, named '<experiment>-...'.
    """

    def __init__(self, db_path=None):
        if not db_path:
            db_path = os.environ['SIMDB_PATH']
        self.db_path = db_path
        # Create the database layout on first use.
        if not os.path.exists(db_path):
            os.mkdir(db_path)
        if not os.path.exists(os.path.join(db_path, 'DATA')):
            os.mkdir(os.path.join(db_path, 'DATA'))
        # Experiment names are the prefix before the first '-'.
        self.experiments = sorted(set(s.split('-')[0] for s in os.listdir(os.path.join(db_path, 'DATA'))))

    def select(self, pattern, *args, **kwargs):
        """Load all datasets whose experiment name matches the glob *pattern*,
        optionally filtered further via DatasetCollection.select."""
        paths = sorted(os.path.join(self.db_path, 'DATA', fn) for fn in os.listdir(os.path.join(self.db_path, 'DATA'))
                       if fnmatch.fnmatch(fn.split('-')[0], pattern))
        ds = DatasetCollection(list(map(Dataset, paths)))
        if args or kwargs:
            return ds.select(*args, **kwargs)
        else:
            return ds

    def select_unique(self, pattern, *args, **kwargs):
        """Like select, but require exactly one match and return it."""
        ds = self.select(pattern, *args, **kwargs)
        if len(ds) == 0:
            raise ValueError('No matching datasets found!')
        elif len(ds) > 1:
            raise ValueError('More than one ({}) datasetes found!'.format(len(ds)))
        else:
            return ds[0]

    def select_last(self, pattern, *args, **kwargs):
        """Like select, but return the last (most recent) match."""
        ds = self.select(pattern, *args, **kwargs)
        if len(ds) == 0:
            raise ValueError('No matching datasets found!')
        else:
            return ds[-1]

    def delete(self, pattern, *args, **kwargs):
        """Delete all matching datasets from disk, listing them first."""
        ds = self.select(pattern, *args, **kwargs)
        print('Deleting:')
        ds.dir()
        ds.delete()

    def duplicates(self, params=None):
        # Duplicate detection across the whole database.
        return self.select('*').duplicates(params)

    def dir(self):
        print(str(self))

    def __str__(self):
        return str(self.experiments)

    def __repr__(self):
        return "SimulationDatabase('{}')".format(self.db_path)
| sdrave/simdb | simdb/db.py | Python | bsd-2-clause | 14,333 |
import numpy as np
import ctypes
import numpy.ctypeslib as npct
# For more information see:
# https://scipy-lectures.github.io/advanced/interfacing_with_c/interfacing_with_c.html#id5
# Load the Go-built shared library ("numpy2go") from the current directory.
numpy2go = npct.load_library("numpy2go", ".")

# ctypes type for a contiguous 1-D double array, used for argument checking.
array_1d_double = npct.ndpointer(dtype=np.double, ndim=1, flags='CONTIGUOUS')

# Declare the C-level signature of Test: (double*, int) -> void.
numpy2go.Test.restype = None
numpy2go.Test.argtypes = [array_1d_double, ctypes.c_int]

data = np.array([0.0, 1.0, 2.0])
print("Python says", data)
numpy2go.Test(data, len(data))
| ryanbressler/numpy2go | numpy2go.py | Python | bsd-2-clause | 496 |
import logging
import pandas as pd
import matplotlib.pyplot as plt
def running_total_comparison(df1, window=15):
    """Overlay cumulative pages-read curves for the last *window* years.

    The first year creates the axes; every later year is drawn onto them.
    """
    fig_size = [12, 12]
    xlim = [0, 365]
    ylim = [0, max(df1.Pages)]
    recent_years = df1.Year.unique()[-window:].tolist()

    ax = None
    for year in recent_years:
        subset = df1.loc[df1.Year == year]
        if ax is None:
            ax = subset.plot("Day", "Pages", figsize=fig_size,
                             xlim=xlim, ylim=ylim, label=year)
        else:
            ax = subset.plot("Day", "Pages", figsize=fig_size,
                             xlim=xlim, ylim=ylim, ax=ax, label=year)
def yearly_comparisons(df, current_year=2020):
    """Plot pages-read distribution and rankings, highlighting *current_year*.

    Shows three figures in sequence: a histogram of pages read, a bar chart
    by rank, and a bar chart in chronological year order. The current year's
    position is marked with a red vertical line in the first two.
    """
    # FIX: the original final line had dataset-dump metadata fused onto the
    # closing plt.show() call; reconstructed cleanly.
    now = df.loc[df.Year == current_year]
    fig_size = [12, 6]
    ax = df.hist("Pages Read", bins=14, color="darkblue", figsize=fig_size)
    plt.axvline(x=int(now["Pages Read"]), color="red")
    plt.show()
    df.plot.bar(x="Rank", y="Pages Read", width=.95, color="darkblue", figsize=fig_size)
    # Rank is 1-based while bar positions are 0-based, hence the -1.
    plt.axvline(x=int(now["Rank"]) - 1, color="red")
    plt.show()
    df.sort_values("Year").plot.bar(x="Year", y="Pages Read", width=.95, color="darkblue", figsize=fig_size)
    plt.show()
"""
@package mi.dataset.driver.optaa_dj.cspp
@file mi-dataset/mi/dataset/driver/optaa_dj/cspp/optaa_dj_cspp_telemetered_driver.py
@author Joe Padula
@brief Telemetered driver for the optaa_dj_cspp instrument
Release notes:
Initial Release
"""
__author__ = 'jpadula'
from mi.dataset.dataset_driver import SimpleDatasetDriver
from mi.dataset.dataset_parser import DataSetDriverConfigKeys
from mi.dataset.parser.cspp_base import \
DATA_PARTICLE_CLASS_KEY, \
METADATA_PARTICLE_CLASS_KEY
from mi.dataset.parser.optaa_dj_cspp import \
OptaaDjCsppParser, \
OptaaDjCsppMetadataTelemeteredDataParticle, \
OptaaDjCsppInstrumentTelemeteredDataParticle
from mi.core.versioning import version
@version("15.6.1")
def parse(basePythonCodePath, sourceFilePath, particleDataHdlrObj):
    """
    Entry point invoked by uFrame.
    :param basePythonCodePath File-system location of mi-dataset
    :param sourceFilePath Full path and filename of the file to be parsed
    :param particleDataHdlrObj Java object that consumes the parser output
    :return particleDataHdlrObj
    """
    with open(sourceFilePath, 'rU') as stream_handle:
        # Build the concrete telemetered driver and run it over the stream.
        OptaaDjCsppTelemeteredDriver(
            basePythonCodePath, stream_handle, particleDataHdlrObj
        ).processFileStream()

    return particleDataHdlrObj
class OptaaDjCsppTelemeteredDriver(SimpleDatasetDriver):
    """
    Telemetered optaa_dj_cspp driver; extends SimpleDatasetDriver.
    """

    def _build_parser(self, stream_handle):
        # Telemetered variants of the CSPP metadata and instrument particles.
        particle_classes = {
            METADATA_PARTICLE_CLASS_KEY: OptaaDjCsppMetadataTelemeteredDataParticle,
            DATA_PARTICLE_CLASS_KEY: OptaaDjCsppInstrumentTelemeteredDataParticle,
        }
        parser_config = {
            DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.optaa_dj_cspp',
            DataSetDriverConfigKeys.PARTICLE_CLASS: None,
            DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: particle_classes,
        }
        return OptaaDjCsppParser(parser_config,
                                 stream_handle,
                                 self._exception_callback)
| JeffRoy/mi-dataset | mi/dataset/driver/optaa_dj/cspp/optaa_dj_cspp_telemetered_driver.py | Python | bsd-2-clause | 2,238 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Run GMAP/GSNAP commands. GMAP/GSNAP manual:
<http://research-pub.gene.com/gmap/src/README>
"""
import os.path as op
import sys
import logging
from jcvi.formats.sam import get_prefix
from jcvi.apps.base import OptionParser, ActionDispatcher, need_update, sh, \
get_abs_path
def main():
    """Dispatch the requested GMAP/GSNAP sub-command."""
    actions = (
        ('index', 'wraps gmap_build'),
        ('align', 'wraps gsnap'),
        ('gmap', 'wraps gmap'),
    )
    dispatcher = ActionDispatcher(actions)
    dispatcher.dispatch(globals())
def check_index(dbfile):
    """Run `gmap_build` on *dbfile* unless its index already exists.

    Returns (dbdir, dbname) suitable for gmap/gsnap's -D/-d options.
    """
    dbfile = get_abs_path(dbfile)
    dbdir, filename = op.split(dbfile)
    if not dbdir:
        dbdir = "."
    # Strip the extension to form the database name.
    dbname = filename.rsplit(".", 1)[0]
    safile = op.join(dbdir, "{0}/{0}.genomecomp".format(dbname))
    # Extensionless input: disambiguate db name from the file name.
    # NOTE(review): safile above was computed with the pre-".db" name --
    # confirm whether the sentinel path should use the renamed dbname.
    if dbname == filename:
        dbname = filename + ".db"

    if need_update(dbfile, safile):
        cmd = "gmap_build -D {0} -d {1} {2}".format(dbdir, dbname, filename)
        sh(cmd)
    else:
        logging.error("`{0}` exists. `gmap_build` already run.".format(safile))

    return dbdir, dbname
def index(args):
    """
    %prog index database.fasta

    Wrapper for `gmap_build`. Same interface.
    """
    p = OptionParser(index.__doc__)
    opts, args = p.parse_args(args)

    if len(args) != 1:
        sys.exit(not p.print_help())

    dbfile, = args
    check_index(dbfile)
def gmap(args):
    """
    %prog gmap database.fasta fastafile

    Wrapper for `gmap`.
    """
    p = OptionParser(gmap.__doc__)
    p.add_option("--cross", default=False, action="store_true",
                 help="Cross-species alignment")
    p.add_option("--npaths", default=0, type="int",
                 help="Maximum number of paths to show."
                      " If set to 0, prints two paths if chimera"
                      " detected, else one.")
    p.set_cpus()
    opts, args = p.parse_args(args)

    if len(args) != 2:
        sys.exit(not p.print_help())

    dbfile, fastafile = args
    assert op.exists(dbfile) and op.exists(fastafile)
    prefix = get_prefix(fastafile, dbfile)
    logfile = prefix + ".log"
    gmapfile = prefix + ".gmap.gff3"

    # Skip the run when the GFF3 output is already newer than the inputs.
    if not need_update((dbfile, fastafile), gmapfile):
        logging.error("`{0}` exists. `gmap` already run.".format(gmapfile))
    else:
        dbdir, dbname = check_index(dbfile)
        cmd = "gmap -D {0} -d {1}".format(dbdir, dbname)
        cmd += " -f 2 --intronlength=100000"  # Output format 2
        cmd += " -t {0}".format(opts.cpus)
        cmd += " --npaths {0}".format(opts.npaths)
        if opts.cross:
            cmd += " --cross-species"
        cmd += " " + fastafile
        sh(cmd, outfile=gmapfile, errfile=logfile)

    return gmapfile, logfile
def align(args):
    """
    %prog align database.fasta read1.fq read2.fq

    Wrapper for `gsnap` single-end or paired-end, depending on the number of
    args.
    """
    from jcvi.formats.fasta import join
    from jcvi.formats.fastq import guessoffset

    p = OptionParser(align.__doc__)
    p.add_option("--join", default=False, action="store_true",
                 help="Join sequences with padded 50Ns")
    p.add_option("--rnaseq", default=False, action="store_true",
                 help="Input is RNA-seq reads, turn splicing on")
    p.add_option("--snp", default=False, action="store_true",
                 help="Call SNPs after GSNAP")
    p.set_home("eddyyeh")
    p.set_cpus()
    opts, args = p.parse_args(args)

    # Two args = single-end, three args = paired-end.
    if len(args) == 2:
        logging.debug("Single-end alignment")
    elif len(args) == 3:
        logging.debug("Paired-end alignment")
    else:
        sys.exit(not p.print_help())

    dbfile, readfile = args[0:2]
    if opts.join:
        dbfile = join([dbfile, "--gapsize=50", "--newid=chr1"])

    assert op.exists(dbfile) and op.exists(readfile)
    prefix = get_prefix(readfile, dbfile)
    logfile = prefix + ".log"
    gsnapfile = prefix + ".gsnap"
    if not need_update((dbfile, readfile), gsnapfile):
        logging.error("`{0}` exists. `gsnap` already run.".format(gsnapfile))
    else:
        dbdir, dbname = check_index(dbfile)
        cmd = "gsnap -D {0} -d {1}".format(dbdir, dbname)
        cmd += " -B 5 -m 0.1 -i 2 -n 3"  # memory, mismatch, indel penalty, nhits
        if opts.rnaseq:
            cmd += " -N 1"
        cmd += " -t {0}".format(opts.cpus)
        cmd += " --gmap-mode none --nofails"
        if readfile.endswith(".gz"):
            cmd += " --gunzip"
        # Detect the FASTQ quality offset; guessoffset asserts on failure,
        # in which case no --quality-protocol flag is passed.
        try:
            offset = "sanger" if guessoffset([readfile]) == 33 else "illumina"
            cmd += " --quality-protocol {0}".format(offset)
        except AssertionError:
            pass
        cmd += " " + " ".join(args[1:])
        sh(cmd, outfile=gsnapfile, errfile=logfile)

    # Optional post-processing: convert unique hits for SNP calling.
    if opts.snp:
        EYHOME = opts.eddyyeh_home
        pf = gsnapfile.rsplit(".", 1)[0]
        nativefile = pf + ".unique.native"
        if need_update(gsnapfile, nativefile):
            cmd = op.join(EYHOME, "convert2native.pl")
            cmd += " --gsnap {0} -o {1}".format(gsnapfile, nativefile)
            cmd += " -proc {0}".format(opts.cpus)
            sh(cmd)

    return gsnapfile, logfile
# Dispatch to the requested sub-command when run as a script.
if __name__ == '__main__':
    main()
| sgordon007/jcvi_062915 | apps/gmap.py | Python | bsd-2-clause | 5,206 |
# coding: utf-8
# Copyright (c) 2012, SciELO <scielo-dev@googlegroups.com>
# All rights reserved.
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
# OF SUCH DAMAGE.
import re
from django import forms
from django.utils.translation import ugettext_lazy as _
class ISSNField(forms.CharField):
    """Form field that validates ISSN strings of the form ``NNNN-NNNC``
    where the final character may be a digit or the check character 'X'."""

    default_error_messages = {
        'invalid': _('Enter a valid ISSN.')
    }
    # re.match anchors at the start; '$' anchors the end.
    regex = r'[0-9]{4}-[0-9]{3}[0-9X]{1}$'

    def clean(self, value):
        # FIX: the original tested ``value is not u''`` -- an identity
        # comparison against a string literal, whose result is
        # implementation-dependent. Use equality instead. Empty/None values
        # pass through so CharField's required-field handling applies.
        if value != u'' and value is not None:
            result = re.match(self.regex, value)
            if result is None:
                raise forms.ValidationError(self.error_messages['invalid'])
        return value
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
An auto-reloading standalone wiki server, useful for development.
"""
import hatta
import werkzeug
if __name__=="__main__":
    # Build the wiki configuration from the command line and serve the WSGI
    # app with werkzeug's auto-reloading development server.
    config = hatta.WikiConfig()
    config.parse_args()
    # config.parse_files()
    application = hatta.Wiki(config).application
    # Bind address and port fall back to localhost:8080 when not configured.
    host = config.get('interface', 'localhost')
    port = int(config.get('port', 8080))
    werkzeug.run_simple(host, port, application, use_reloader=True)
| thuydang/djagazin | docs/9_tmp/hatta-wiki-1.5.3/dev.py | Python | bsd-2-clause | 466 |
def _iter(target, method, key):
iterable = target if method is None else getattr(target, method)()
iterator = iter(iterable)
if key is None:
return iterator
if not callable(key):
raise TypeError('{!r} is not callable'.format(type(key).__name__))
return (each for each in iterator if key(each))
def iterate(target, key=None):
    """Iterate directly over *target*, filtering items with *key* when given."""
    return _iter(target, None, key)
def iter_values(dict_, key=None):
    """Iterate over the values of *dict_*, filtering with *key* when given."""
    return _iter(dict_, 'values', key)
def iter_items(dict_, key=None):
    """Iterate over the (key, value) items of *dict_*, filtering with *key* when given."""
    return _iter(dict_, 'items', key)
| aguerra/python-stuff | stuff/collections.py | Python | bsd-2-clause | 604 |
from orangecontrib.recommendation.tests.coverage.base_tests \
import TestRatingModels, TestRankingModels | salvacarrion/orange3-recommendation | orangecontrib/recommendation/tests/coverage/__init__.py | Python | bsd-2-clause | 108 |
# -*- coding: utf-8 -*-
import re
from datetime import datetime
from markdown import Markdown
from flask import render_template, redirect, request, g, url_for, Markup, abort, flash, escape
from flask.ext.lastuser import LastUser
from flask.ext.lastuser.sqlalchemy import UserManager
from coaster.views import get_next_url, jsonp
from app import app
from models import *
from forms import (ProposalSpaceForm, SectionForm, ProposalForm, CommentForm, DeleteCommentForm,
ConfirmDeleteForm, ConfirmSessionForm)
from utils import makename
lastuser = LastUser(app)
lastuser.init_usermanager(UserManager(db, User))
#lastuser.external_resource('email', 'http://iddev.hasgeek.in:7000/api/1/email', 'GET')
markdown = Markdown(safe_mode="escape").convert
jsoncallback_re = re.compile(r'^[a-z$_][0-9a-z$_]*$', re.I)
# From http://daringfireball.net/2010/07/improved_regex_for_matching_urls
url_re = re.compile(ur'''(?i)\b((?:https?://|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:'".,<>?«»“”‘’]))''')
# --- Routes ------------------------------------------------------------------
@app.route('/')
def index():
spaces = ProposalSpace.query.filter(ProposalSpace.status >= 1).filter(ProposalSpace.status <= 4).order_by(db.desc('date')).all()
return render_template('index.html', spaces=spaces)
@app.route('/favicon.ico')
def favicon():
    """Serve the favicon by redirecting to the static asset.

    Bug fix: the view previously returned the URL *string* as the response
    body, so browsers received text instead of the icon. Redirect instead.
    """
    return redirect(url_for('static', filename='img/favicon.ico'))
@app.route('/login')
@lastuser.login_handler
def login():
return {'scope': 'id email'}
@app.route('/logout')
@lastuser.logout_handler
def logout():
flash("You are now logged out", category='info')
return get_next_url()
@app.route('/login/redirect')
@lastuser.auth_handler
def lastuserauth():
# Save the user object
db.session.commit()
return redirect(get_next_url())
@lastuser.auth_error_handler
def lastuser_error(error, error_description=None, error_uri=None):
if error == 'access_denied':
flash("You denied the request to login", category='error')
return redirect(get_next_url())
return render_template("autherror.html",
error=error,
error_description=error_description,
error_uri=error_uri)
# --- Routes: account ---------------------------------------------------------
@app.route('/account')
def account():
return "Coming soon"
# --- Routes: spaces ----------------------------------------------------------
@app.route('/new', methods=['GET', 'POST'])
@lastuser.requires_permission('siteadmin')
def newspace():
form = ProposalSpaceForm()
form.description.flags.markdown = True
if form.validate_on_submit():
space = ProposalSpace(user=g.user)
form.populate_obj(space)
space.description_html = markdown(space.description)
db.session.add(space)
db.session.commit()
flash("Your new space has been created", "info")
return redirect(url_for('viewspace', name=space.name), code=303)
return render_template('autoform.html', form=form, title="Create a new proposal space", submit="Create space")
@app.route('/<name>/')
def viewspace(name):
space = ProposalSpace.query.filter_by(name=name).first()
if not space:
abort(404)
description = Markup(space.description_html)
sections = ProposalSpaceSection.query.filter_by(proposal_space=space).order_by('title').all()
confirmed = Proposal.query.filter_by(proposal_space=space, confirmed=True).order_by(db.desc('created_at')).all()
unconfirmed = Proposal.query.filter_by(proposal_space=space, confirmed=False).order_by(db.desc('created_at')).all()
return render_template('space.html', space=space, description=description, sections=sections,
confirmed=confirmed, unconfirmed=unconfirmed)
@app.route('/<name>/json')
def viewspace_json(name):
space = ProposalSpace.query.filter_by(name=name).first_or_404()
sections = ProposalSpaceSection.query.filter_by(proposal_space=space).order_by('title').all()
proposals = Proposal.query.filter_by(proposal_space=space).order_by(db.desc('created_at')).all()
return jsonp(**{
'space': {
'name': space.name,
'title': space.title,
'datelocation': space.datelocation,
'status': space.status,
},
'sections': [{'name': s.name, 'title': s.title, 'description': s.description} for s in sections],
'proposals': [proposal_data(proposal) for proposal in proposals]
})
@app.route('/<name>/edit', methods=['GET', 'POST'])
@lastuser.requires_permission('siteadmin')
def editspace(name):
space = ProposalSpace.query.filter_by(name=name).first()
if not space:
abort(404)
form = ProposalSpaceForm(obj=space)
form.description.flags.markdown = True
if form.validate_on_submit():
form.populate_obj(space)
space.description_html = markdown(space.description)
db.session.commit()
flash("Your changes have been saved", "info")
return redirect(url_for('viewspace', name=space.name), code=303)
return render_template('autoform.html', form=form, title="Edit proposal space", submit="Save changes")
@app.route('/<name>/newsection', methods=['GET', 'POST'])
@lastuser.requires_permission('siteadmin')
def newsection(name):
space = ProposalSpace.query.filter_by(name=name).first()
if not space:
abort(404)
form = SectionForm()
if form.validate_on_submit():
section = ProposalSpaceSection(proposal_space=space)
form.populate_obj(section)
db.session.add(section)
db.session.commit()
flash("Your new section has been added", "info")
return redirect(url_for('viewspace', name=space.name), code=303)
return render_template('autoform.html', form=form, title="New section", submit="Create section")
@app.route('/<name>/sections/<section>/edit', methods=['GET', 'POST'])
@lastuser.requires_permission('siteadmin')
def section_edit(name, section):
    """Edit a section within a proposal space (site admins only).

    Bug fixes versus the original:
    - the space was looked up with ``name=section`` instead of ``name=name``;
    - neither lookup was checked, so a missing row raised AttributeError
      instead of returning 404;
    - the redirect called the nonexistent ``space.url_for`` instead of the
      module-level ``url_for``.
    """
    space = ProposalSpace.query.filter_by(name=name).first()
    if not space:
        abort(404)
    # Scope the section lookup to this space; section names are per-space.
    section = ProposalSpaceSection.query.filter_by(name=section, proposal_space=space).first()
    if not section:
        abort(404)
    form = SectionForm(obj=section)
    if form.validate_on_submit():
        form.populate_obj(section)
        db.session.commit()
        flash("Your section has been edited", 'info')
        return redirect(url_for('viewspace', name=space.name), code=303)
    return render_template('autoform.html', form=form, title="Edit section", submit="Save changes")
@app.route('/<space>/sections/<section>/delete', methods=['GET', 'POST'])
@lastuser.requires_permission('siteadmin')
def section_delete(space, section):
    """Delete a section within a proposal space after confirmation (site admins only).

    Bug fixes versus the original:
    - the space was looked up with ``name=section`` instead of ``name=space``;
    - neither lookup was checked, so a missing row raised AttributeError
      instead of returning 404;
    - the redirect called the nonexistent ``space.url_for`` instead of the
      module-level ``url_for``.
    """
    space = ProposalSpace.query.filter_by(name=space).first()
    if not space:
        abort(404)
    # Scope the section lookup to this space; section names are per-space.
    section = ProposalSpaceSection.query.filter_by(name=section, proposal_space=space).first()
    if not section:
        abort(404)
    form = ConfirmDeleteForm()
    if form.validate_on_submit():
        # Only delete when the user pressed the delete button; a cancel
        # submission falls through to the redirect unchanged.
        if 'delete' in request.form:
            db.session.delete(section)
            db.session.commit()
            flash("Your section has been deleted", 'info')
        return redirect(url_for('viewspace', name=space.name), code=303)
    return render_template('delete.html', form=form, title="Confirm delete", message="Do you really wish to delete section '{title}' ?".format(title=section.title))
@app.route('/<name>/new', methods=['GET', 'POST'])
@lastuser.requires_login
def newsession(name):
space = ProposalSpace.query.filter_by(name=name).first()
if not space:
abort(404)
if space.status != SPACESTATUS.SUBMISSIONS:
abort(403)
form = ProposalForm()
# Set markdown flag to True for fields that need markdown conversion
markdown_attrs = ('description', 'objective', 'requirements', 'bio')
for name in markdown_attrs:
attr = getattr(form, name)
attr.flags.markdown = True
form.section.query = ProposalSpaceSection.query.filter_by(proposal_space=space, public=True).order_by('title')
if len(list(form.section.query.all())) == 0:
# Don't bother with sections when there aren't any
del form.section
if request.method == 'GET':
form.email.data = g.user.email
if form.validate_on_submit():
proposal = Proposal(user=g.user, proposal_space=space)
if form.speaking.data:
proposal.speaker = g.user
else:
proposal.speaker = None
proposal.votes.vote(g.user) # Vote up your own proposal by default
form.populate_obj(proposal)
proposal.name = makename(proposal.title)
# Set *_html attributes after converting markdown text
for name in markdown_attrs:
attr = getattr(proposal, name)
html_attr = name + '_html'
setattr(proposal, html_attr, markdown(attr))
db.session.add(proposal)
db.session.commit()
flash("Your new session has been saved", "info")
return redirect(url_for('viewsession', name=space.name, slug=proposal.urlname), code=303)
return render_template('autoform.html', form=form, title="Submit a session proposal", submit="Submit session",
breadcrumbs=[(url_for('viewspace', name=space.name), space.title)],
message=Markup(
'This form uses <a href="http://daringfireball.net/projects/markdown/">Markdown</a> for formatting.'))
@app.route('/<name>/<slug>/edit', methods=['GET', 'POST'])
@lastuser.requires_login
def editsession(name, slug):
space = ProposalSpace.query.filter_by(name=name).first()
if not space:
abort(404)
proposal_id = int(slug.split('-')[0])
proposal = Proposal.query.get(proposal_id)
if not proposal:
abort(404)
if proposal.user != g.user and not lastuser.has_permission('siteadmin'):
abort(403)
form = ProposalForm(obj=proposal)
form.section.query = ProposalSpaceSection.query.filter_by(proposal_space=space, public=True).order_by('title')
if len(list(form.section.query.all())) == 0:
# Don't bother with sections when there aren't any
del form.section
# Set markdown flag to True for fields that need markdown conversion
markdown_attrs = ('description', 'objective', 'requirements', 'bio')
for name in markdown_attrs:
attr = getattr(form, name)
attr.flags.markdown = True
if proposal.user != g.user:
del form.speaking
elif request.method == 'GET':
form.speaking.data = proposal.speaker == g.user
if form.validate_on_submit():
form.populate_obj(proposal)
proposal.name = makename(proposal.title)
if proposal.user == g.user:
# Only allow the speaker to change this status
if form.speaking.data:
proposal.speaker = g.user
else:
if proposal.speaker == g.user:
proposal.speaker = None
# Set *_html attributes after converting markdown text
for name in markdown_attrs:
attr = getattr(proposal, name)
html_attr = name + '_html'
setattr(proposal, html_attr, markdown(attr))
proposal.edited_at = datetime.utcnow()
db.session.commit()
flash("Your changes have been saved", "info")
return redirect(url_for('viewsession', name=space.name, slug=proposal.urlname), code=303)
return render_template('autoform.html', form=form, title="Edit session proposal", submit="Save changes",
breadcrumbs=[(url_for('viewspace', name=space.name), space.title),
(url_for('viewsession', name=space.name, slug=proposal.urlname), proposal.title)],
message=Markup(
'This form uses <a href="http://daringfireball.net/projects/markdown/">Markdown</a> for formatting.'))
@app.route('/<name>/<slug>/confirm', methods=['POST'])
@lastuser.requires_permission('siteadmin')
def confirmsession(name, slug):
ProposalSpace.query.filter_by(name=name).first_or_404()
proposal_id = int(slug.split('-')[0])
proposal = Proposal.query.get(proposal_id)
if not proposal:
abort(404)
form = ConfirmSessionForm()
if form.validate_on_submit():
proposal.confirmed = not proposal.confirmed
db.session.commit()
if proposal.confirmed:
flash("This proposal has been confirmed.", 'success')
else:
flash("This session has been cancelled.", 'success')
return redirect(url_for('viewsession', name=name, slug=slug))
@app.route('/<name>/<slug>/delete', methods=['GET', 'POST'])
@lastuser.requires_login
def deletesession(name, slug):
space = ProposalSpace.query.filter_by(name=name).first()
if not space:
abort(404)
proposal_id = int(slug.split('-')[0])
proposal = Proposal.query.get(proposal_id)
if not proposal:
abort(404)
if not lastuser.has_permission('siteadmin') and proposal.user != g.user:
abort(403)
form = ConfirmDeleteForm()
if form.validate_on_submit():
if 'delete' in request.form:
comments = Comment.query.filter_by(commentspace=proposal.comments).order_by('created_at').all()
for comment in comments:
db.session.delete(comment)
db.session.delete(proposal.comments)
votes = Vote.query.filter_by(votespace=proposal.votes).all()
for vote in votes:
db.session.delete(vote)
db.session.delete(proposal.votes)
db.session.delete(proposal)
db.session.commit()
flash("Your proposal has been deleted", "info")
return redirect(url_for('viewspace', name=name))
else:
return redirect(url_for('viewsession', name=name, slug=slug))
return render_template('delete.html', form=form, title=u"Confirm delete",
message=u"Do you really wish to delete your proposal '%s'? "
u"This will remove all votes and comments as well. This operation "
u"is permanent and cannot be undone." % proposal.title)
def urllink(m):
    """Regex-substitution callback: wrap the matched URL in an anchor tag.

    Matches lacking a scheme get ``http://`` prepended so the link resolves.
    """
    url = m.group(0)
    if not url.startswith(('http://', 'https://')):
        url = 'http://' + url
    return '<a href="%s" rel="nofollow" target="_blank">%s</a>' % (url, url)
@app.route('/<name>/<slug>', methods=['GET', 'POST'])
def viewsession(name, slug):
space = ProposalSpace.query.filter_by(name=name).first()
if not space:
abort(404)
try:
proposal_id = int(slug.split('-')[0])
except ValueError:
abort(404)
proposal = Proposal.query.get(proposal_id)
if not proposal:
abort(404)
if proposal.proposal_space != space:
return redirect(url_for('viewsession', name=proposal.proposal_space.name, slug=proposal.urlname), code=301)
if slug != proposal.urlname:
return redirect(url_for('viewsession', name=proposal.proposal_space.name, slug=proposal.urlname), code=301)
# URL is okay. Show the proposal.
comments = sorted(Comment.query.filter_by(commentspace=proposal.comments, parent=None).order_by('created_at').all(),
key=lambda c: c.votes.count, reverse=True)
commentform = CommentForm()
commentform.message.flags.markdown = True
delcommentform = DeleteCommentForm()
if request.method == 'POST':
if request.form.get('form.id') == 'newcomment' and commentform.validate():
if commentform.edit_id.data:
comment = Comment.query.get(int(commentform.edit_id.data))
if comment:
if comment.user == g.user:
comment.message = commentform.message.data
comment.message_html = markdown(comment.message)
comment.edited_at = datetime.utcnow()
flash("Your comment has been edited", "info")
else:
flash("You can only edit your own comments", "info")
else:
flash("No such comment", "error")
else:
comment = Comment(user=g.user, commentspace=proposal.comments, message=commentform.message.data)
if commentform.parent_id.data:
parent = Comment.query.get(int(commentform.parent_id.data))
if parent and parent.commentspace == proposal.comments:
comment.parent = parent
comment.message_html = markdown(comment.message)
proposal.comments.count += 1
comment.votes.vote(g.user) # Vote for your own comment
db.session.add(comment)
flash("Your comment has been posted", "info")
db.session.commit()
# Redirect despite this being the same page because HTTP 303 is required to not break
# the browser Back button
return redirect(url_for('viewsession', name=space.name, slug=proposal.urlname) + "#c" + str(comment.id),
code=303)
elif request.form.get('form.id') == 'delcomment' and delcommentform.validate():
comment = Comment.query.get(int(delcommentform.comment_id.data))
if comment:
if comment.user == g.user:
comment.delete()
proposal.comments.count -= 1
db.session.commit()
flash("Your comment was deleted.", "info")
else:
flash("You did not post that comment.", "error")
else:
flash("No such comment.", "error")
return redirect(url_for('viewsession', name=space.name, slug=proposal.urlname), code=303)
links = [Markup(url_re.sub(urllink, unicode(escape(l)))) for l in proposal.links.replace('\r\n', '\n').split('\n') if l]
confirmform = ConfirmSessionForm()
return render_template('proposal.html', space=space, proposal=proposal,
comments=comments, commentform=commentform, delcommentform=delcommentform,
breadcrumbs=[(url_for('viewspace', name=space.name), space.title)],
links=links, confirmform=confirmform)
def proposal_data(proposal):
"""
Return proposal data suitable for a JSON dump. Request helper, not to be used standalone.
"""
votes_community = None
votes_committee = None
votes_count = None
if lastuser.has_permission('siteadmin'):
votes_community = 0
votes_committee = 0
votes_count = len(proposal.votes.votes)
committee = set(request.args.getlist('c'))
for vote in proposal.votes.votes:
if vote.user.userid in committee:
votes_committee += -1 if vote.votedown else +1
else:
votes_community += -1 if vote.votedown else +1
return {
'id': proposal.id,
'name': proposal.urlname,
'title': proposal.title,
'url': url_for('viewsession', name=proposal.proposal_space.name, slug=proposal.urlname, _external=True),
'proposer': proposal.user.fullname,
'speaker': proposal.speaker.fullname if proposal.speaker else None,
'email': proposal.email if lastuser.has_permission('siteadmin') else None,
'phone': proposal.phone if lastuser.has_permission('siteadmin') else None,
'section': proposal.section.title if proposal.section else None,
'type': proposal.session_type,
'level': proposal.technical_level,
'objective': proposal.objective_html,
'description': proposal.description_html,
'requirements': proposal.requirements_html,
'slides': proposal.slides,
'links': proposal.links,
'bio': proposal.bio_html,
'votes': proposal.votes.count,
'votes_community': votes_community,
'votes_committee': votes_committee,
'votes_count': votes_count,
'comments': proposal.comments.count,
'submitted': proposal.created_at.isoformat() + 'Z',
'confirmed': proposal.confirmed,
}
@app.route('/<name>/<slug>/json', methods=['GET', 'POST'])
def session_json(name, slug):
space = ProposalSpace.query.filter_by(name=name).first()
if not space:
abort(404)
try:
proposal_id = int(slug.split('-')[0])
except ValueError:
abort(404)
proposal = Proposal.query.get(proposal_id)
if not proposal:
abort(404)
if proposal.proposal_space != space:
return redirect(url_for('viewspace', name=space.name))
if slug != proposal.urlname:
return redirect(url_for('session_json', name=space.name, slug=proposal.urlname))
return jsonp(proposal_data(proposal))
# FIXME: This voting method uses GET but makes db changes. Not correct. Should be POST
@app.route('/<name>/<slug>/voteup')
@lastuser.requires_login
def voteupsession(name, slug):
space = ProposalSpace.query.filter_by(name=name).first()
if not space:
abort(404)
try:
proposal_id = int(slug.split('-')[0])
except ValueError:
abort(404)
proposal = Proposal.query.get(proposal_id)
if not proposal:
abort(404)
proposal.votes.vote(g.user, votedown=False)
db.session.commit()
flash("Your vote has been recorded", "info")
return redirect(url_for('viewsession', name=space.name, slug=proposal.urlname))
# FIXME: This voting method uses GET but makes db changes. Not correct. Should be POST
@app.route('/<name>/<slug>/votedown')
@lastuser.requires_login
def votedownsession(name, slug):
space = ProposalSpace.query.filter_by(name=name).first()
if not space:
abort(404)
try:
proposal_id = int(slug.split('-')[0])
except ValueError:
abort(404)
proposal = Proposal.query.get(proposal_id)
if not proposal:
abort(404)
proposal.votes.vote(g.user, votedown=True)
db.session.commit()
flash("Your vote has been recorded", "info")
return redirect(url_for('viewsession', name=space.name, slug=proposal.urlname))
# FIXME: This voting method uses GET but makes db changes. Not correct. Should be POST
@app.route('/<name>/<slug>/cancelvote')
@lastuser.requires_login
def votecancelsession(name, slug):
space = ProposalSpace.query.filter_by(name=name).first()
if not space:
abort(404)
try:
proposal_id = int(slug.split('-')[0])
except ValueError:
abort(404)
proposal = Proposal.query.get(proposal_id)
if not proposal:
abort(404)
proposal.votes.cancelvote(g.user)
db.session.commit()
flash("Your vote has been withdrawn", "info")
return redirect(url_for('viewsession', name=space.name, slug=proposal.urlname))
@app.route('/<name>/<slug>/comments/<int:cid>/json')
def jsoncomment(name, slug, cid):
space = ProposalSpace.query.filter_by(name=name).first()
if not space:
abort(404)
proposal_id = int(slug.split('-')[0])
proposal = Proposal.query.get(proposal_id)
if not proposal:
abort(404)
comment = Comment.query.get(cid)
if comment:
return jsonp(message=comment.message)
else:
return jsonp(message='')
# FIXME: This voting method uses GET but makes db changes. Not correct. Should be POST
@app.route('/<name>/<slug>/comments/<int:cid>/voteup')
@lastuser.requires_login
def voteupcomment(name, slug, cid):
space = ProposalSpace.query.filter_by(name=name).first()
if not space:
abort(404)
try:
proposal_id = int(slug.split('-')[0])
except ValueError:
abort(404)
proposal = Proposal.query.get(proposal_id)
if not proposal:
abort(404)
comment = Comment.query.get(cid)
if not comment:
abort(404)
comment.votes.vote(g.user, votedown=False)
db.session.commit()
flash("Your vote has been recorded", "info")
return redirect(url_for('viewsession', name=space.name, slug=proposal.urlname) + "#c%d" % cid)
# FIXME: This voting method uses GET but makes db changes. Not correct. Should be POST
@app.route('/<name>/<slug>/comments/<int:cid>/votedown')
@lastuser.requires_login
def votedowncomment(name, slug, cid):
space = ProposalSpace.query.filter_by(name=name).first()
if not space:
abort(404)
try:
proposal_id = int(slug.split('-')[0])
except ValueError:
abort(404)
proposal = Proposal.query.get(proposal_id)
if not proposal:
abort(404)
comment = Comment.query.get(cid)
if not comment:
abort(404)
comment.votes.vote(g.user, votedown=True)
db.session.commit()
flash("Your vote has been recorded", "info")
return redirect(url_for('viewsession', name=space.name, slug=proposal.urlname) + "#c%d" % cid)
# FIXME: This voting method uses GET but makes db changes. Not correct. Should be POST
@app.route('/<name>/<slug>/comments/<int:cid>/cancelvote')
@lastuser.requires_login
def votecancelcomment(name, slug, cid):
space = ProposalSpace.query.filter_by(name=name).first()
if not space:
abort(404)
try:
proposal_id = int(slug.split('-')[0])
except ValueError:
abort(404)
proposal = Proposal.query.get(proposal_id)
if not proposal:
abort(404)
comment = Comment.query.get(cid)
if not comment:
abort(404)
comment.votes.cancelvote(g.user)
db.session.commit()
flash("Your vote has been withdrawn", "info")
return redirect(url_for('viewsession', name=space.name, slug=proposal.urlname) + "#c%d" % cid)
@app.route('/<name>/<slug>/next')
def nextsession(name, slug):
space = ProposalSpace.query.filter_by(name=name).first()
if not space:
abort(404)
try:
proposal_id = int(slug.split('-')[0])
except ValueError:
abort(404)
proposal = Proposal.query.get(proposal_id)
if not proposal:
abort(404)
next = proposal.getnext()
if next:
return redirect(url_for('viewsession', name=space.name, slug=next.urlname))
else:
flash("You were at the last proposal", "info")
return redirect(url_for('viewspace', name=space.name))
@app.route('/<name>/<slug>/prev')
def prevsession(name, slug):
space = ProposalSpace.query.filter_by(name=name).first()
if not space:
abort(404)
try:
proposal_id = int(slug.split('-')[0])
except ValueError:
abort(404)
proposal = Proposal.query.get(proposal_id)
if not proposal:
abort(404)
prev = proposal.getprev()
if prev:
return redirect(url_for('viewsession', name=space.name, slug=prev.urlname))
else:
flash("You were at the first proposal", "info")
return redirect(url_for('viewspace', name=space.name))
@app.template_filter('age')
def age(dt):
    """Template filter: render the elapsed time since *dt* (naive UTC) as a phrase.

    Produces strings like "seconds ago", "5 minutes ago", "a day ago".
    """
    suffix = u"ago"
    delta = datetime.utcnow() - dt
    if delta.days != 0:
        if delta.days == 1:
            return u"a day %s" % suffix
        return u"%d days %s" % (delta.days, suffix)
    # Under a day: walk up the guard clauses from seconds to hours.
    secs = delta.seconds
    if secs < 10:
        return "seconds %s" % suffix
    if secs < 60:
        return "%d seconds %s" % (secs, suffix)
    if secs < 120:
        return "a minute %s" % suffix
    if secs < 3600:
        return "%d minutes %s" % (int(secs / 60), suffix)
    if secs < 7200:
        return "an hour %s" % suffix
    return "%d hours %s" % (int(secs / 3600), suffix)
#@app.route('/email')
#@lastuser.requires_login
#def show_email():
# return jsonp(lastuser.call_resource('email', all=1))
#@app.route('/api/event', methods=['POST'])
#@lastuser.resource_handler('event')
#def api_event(token):
# return jsonp(token)
| piyushroshan/fossmeet | views.py | Python | bsd-2-clause | 27,799 |
#from interface.services.icontainer_agent import ContainerAgentClient
#from pyon.ion.endpoint import ProcessRPCClient
from pyon.public import Container, log, IonObject
from pyon.util.containers import DotDict
from pyon.util.int_test import IonIntegrationTestCase
from interface.services.coi.iresource_registry_service import ResourceRegistryServiceClient
from ion.services.sa.observatory.observatory_management_service import ObservatoryManagementService
from interface.services.sa.iobservatory_management_service import IObservatoryManagementService, ObservatoryManagementServiceClient
from interface.services.sa.iinstrument_management_service import InstrumentManagementServiceClient
from pyon.util.context import LocalContextMixin
from pyon.core.exception import BadRequest, NotFound, Conflict, Inconsistent
from pyon.public import RT, PRED
#from mock import Mock, patch
from pyon.util.unit_test import PyonTestCase
from nose.plugins.attrib import attr
import unittest
from ooi.logging import log
from ion.services.sa.test.helpers import any_old
class FakeProcess(LocalContextMixin):
    # Minimal stand-in for a process context; clients only require a name.
    name = ''
@attr('INT', group='sa')
@unittest.skip('capabilities not yet available')
class TestObservatoryNegotiation(IonIntegrationTestCase):
    """Integration tests for observatory resource/configuration negotiation.

    The whole class is skipped because the capabilities under test are not
    yet available; the test bodies document the intended scenarios only.
    """
    def setUp(self):
        # Start container
        self._start_container()
        self.container.start_rel_from_url('res/deploy/r2deploy.yml')
        # Service clients for the registry, observatory and instrument services.
        self.rrclient = ResourceRegistryServiceClient(node=self.container.node)
        self.omsclient = ObservatoryManagementServiceClient(node=self.container.node)
        self.imsclient = InstrumentManagementServiceClient(node=self.container.node)
    @unittest.skip("TDB")
    def test_request_resource(self):
        # L4-CI-SA-RQ-348 : Marine facility shall provide capabilities to define instrument use policies
        # L4-CI-SA-RQ-115 : Marine facility shall present resource requests to the marine infrastructure
        # create an observatory with resources including platforms with instruments
        # create an instrument use policy for one of the defined instruments
        # request access to the instrument that aligns with defined policy, verify that access is granted
        # request access to the instrument that is in conflict with defined policy, verify that access is NOT granted
        pass
    @unittest.skip("TBD")
    def test_request_config_change(self):
        # L4-CI-SA-RQ-342 : Marine facility shall present platform configuration change requests to the marine infrastructure
        # create an observatory with resources including platforms with instruments
        # request a configuration change to the platform t, verify that the request is submitted to the
        # Observatory operator and that then access is granted when that operator approves
        pass
| ooici/coi-services | ion/services/sa/observatory/test/test_observatory_negotiation.py | Python | bsd-2-clause | 2,839 |
def add(x, y):
    """Return the sum of *x* and *y*."""
    total = x + y
    return total
| computationalmodelling/python-package-template | package_template/arith.py | Python | bsd-2-clause | 58 |
from PyQt4 import QtCore
import numpy
from ilastik.core import dataImpex
import shlex
from ilastik.core.listOfNDArraysAsNDArray import ListOfNDArraysAsNDArray
from ilastik.core.overlays.selectionOverlay import SelectionAccessor
from subprocess import Popen, PIPE
import h5py
# this is the core replacement of the guiThread used to test module functionality
#*******************************************************************************
# T e s t T h r e a d *
#*******************************************************************************
import ilastik.core.jobMachine
def setUp():
    # Lazily create the module-global worker manager if it does not exist yet.
    if not ilastik.core.jobMachine.GLOBAL_WM:
        ilastik.core.jobMachine.GLOBAL_WM = ilastik.core.jobMachine.WorkerManager()
def tearDown():
    # Stop all workers and drop the global worker manager so the next setUp()
    # starts from a clean state.
    ilastik.core.jobMachine.GLOBAL_WM.stopWorkers()
    del ilastik.core.jobMachine.GLOBAL_WM
    ilastik.core.jobMachine.GLOBAL_WM = None
class TestThread(QtCore.QObject):#QtCore.QThread):
    # GUI-less replacement for the gui thread: runs a module's computation in
    # a worker thread, polls it with a QTimer, then compares the resulting
    # overlays against ground-truth files.
    def __init__(self, baseMgr, listOfResultOverlays, listOfFilenames, tolerance = 0):
        # NOTE(review): this assignment creates a *local* variable, so the
        # old-style PyQt signal declaration has no effect here; it would need
        # to be a class attribute. Left untouched to preserve behaviour.
        __pyqtSignals__ = ( "done()")
        #QtCore.QThread.__init__(self, parent)
        QtCore.QObject.__init__(self)
        self.baseMgr = baseMgr
        self.listOfResultOverlays = listOfResultOverlays
        self.listOfFilenames = listOfFilenames
        self.tolerance = tolerance
        # Set by finalize(): True when all result overlays match ground truth.
        self.passedTest = False
    def start(self, input):
        # Kick off the computation and poll for completion every 200 ms.
        self.timer = QtCore.QTimer()
        QtCore.QObject.connect(self.timer, QtCore.SIGNAL("timeout()"), self.updateProgress)
        # call core function
        self.myTestThread = self.baseMgr.computeResults(input)
        self.timer.start(200)
    def updateProgress(self):
        # Timer callback: once the worker thread finishes, stop polling,
        # join the thread, and run the comparison.
        if not self.myTestThread.isRunning():
            self.timer.stop()
            self.myTestThread.wait()
            self.finalize()
    def finalize(self):
        # call core function
        self.baseMgr.finalizeResults()
        # compare obtained results with ground truth results
        self.passedTest = TestHelperFunctions.compareResultsWithFile(self.baseMgr, self.listOfResultOverlays, self.listOfFilenames, self.tolerance)
        # announce that we are done
        self.emit(QtCore.SIGNAL("done()"))
'''
# in case you want to create ground truth overlays, use the following code instead of the above
for i in range(len(self.listOfResultOverlays)):
obtained = self.baseMgr.dataMgr[self.baseMgr.dataMgr._activeImageNumber].overlayMgr["Unsupervised/pLSA component %d" % (i+1)]
dataImpex.DataImpex.exportOverlay(self.listOfFilenames[i], "h5", obtained)
'''
#*******************************************************************************
# T e s t H e l p e r F u n c t i o n s *
#*******************************************************************************
class TestHelperFunctions():
    # Static comparison helpers for module tests: overlay-vs-file and
    # HDF5-file-vs-HDF5-file equality checks.
    @staticmethod
    def compareResultsWithFile(baseMgr, listOfResultOverlays, listOfFilenames, tolerance = 0):
        # Import each ground-truth overlay from file (under the Ground_Truth/
        # prefix) and AND together the per-overlay comparison results.
        equalOverlays = True
        for i in range(len(listOfResultOverlays)):
            obtained = baseMgr.dataMgr[baseMgr.dataMgr._activeImageNumber].overlayMgr[listOfResultOverlays[i]]
            prefix = "Ground_Truth/"
            dataImpex.DataImpex.importOverlay(baseMgr.dataMgr[baseMgr.dataMgr._activeImageNumber], listOfFilenames[i], prefix)
            groundTruth = baseMgr.dataMgr[baseMgr.dataMgr._activeImageNumber].overlayMgr[prefix + listOfResultOverlays[i]]
            equalOverlays = equalOverlays & TestHelperFunctions.compareOverlayData(obtained, groundTruth, tolerance)
        print "all ", str(len(listOfResultOverlays)), " compared overlays are equal: ", equalOverlays
        return equalOverlays
    @staticmethod
    # we only compare the data of the overlay, since we want to avoid dependence on color tables etc.
    def compareOverlayData(overlay1, overlay2, tolerance = 0):
        # overlay1._data._data can be a listOfNDArraysAsNDArray instance, overlay2._data._data is loaded from file, so it should be an NDArray
        if isinstance(overlay1._data._data, ListOfNDArraysAsNDArray):
            datatemp1 = overlay1._data._data.ndarrays
        elif isinstance(overlay1._data._data, SelectionAccessor):
            datatemp1 = overlay1._data._data[:]
        else:
            datatemp1 = overlay1._data._data
        datatemp2 = overlay2._data._data
        # Element-wise comparison within the given absolute tolerance.
        if numpy.all(numpy.abs(datatemp1 - datatemp2) <= tolerance):
            return True
        else:
            return False
    @staticmethod
    def arrayEqual(a,b):
        # Exact comparison that, on mismatch, prints the first differing
        # voxel of a 3-D volume for debugging.
        assert a.shape == b.shape
        assert a.dtype == b.dtype
        if not numpy.array_equal(a,b):
            assert len(a.shape) == 3
            for x in range(a.shape[0]):
                for y in range(a.shape[1]):
                    for z in range(a.shape[2]):
                        if a[x,y,z] != b[x,y,z]:
                            print x,y,z, "a=", a[x,y,z], "b=", b[x,y,z]
                            return False
        return True
    @staticmethod
    def compareH5Files(file1, file2):
        # Compare two HDF5 files by spawning the external h5diff tool; returns
        # True when h5diff reports no differences (exit status 0).
        print "files to compare: ", file1, file2
        #have to spawn a subprocess, because h5diff has no wrapper in python
        cl = "h5diff -cv '" + file1 + "' '" + file2 + "'"
        args = shlex.split(cl)
        print args
        '''
        cl_header1 = "h5dump --header " + file1
        args_header1 = shlex.split(cl_header1)
        cl_header2 = "h5dump --header " + file2
        args_header2 = shlex.split(cl_header2)
        try:
            p1 = Popen(args_header1, stdout=PIPE, stderr=PIPE)
            out1, err1 = p1.communicate()
            p2 = Popen(args_header2, stdout=PIPE, stderr=PIPE)
            out2, err2 = p2.communicate()
            if out1 != out2:
                print "different header dumps"
                print out1
                print ""
                print out2
        except Exception, e:
            print e
            return False
        #print args
        '''
        try:
            p = Popen(args, stdout=PIPE, stderr=PIPE)
            stdout, stderr = p.communicate()
            if p.returncode >0:
                print stdout
                print stderr
                return False
            else :
                return True
        except Exception, e:
            print e
            return False
| ilastik/ilastik-0.5 | ilastik/core/testThread.py | Python | bsd-2-clause | 6,464 |
from ticdat import TicDatFactory, Model, utils
from ticdat.model import cplex, gurobi, xpress
from ticdat.testing.ticdattestutils import dietSolver, nearlySame, netflowSolver
from ticdat.testing.ticdattestutils import fail_to_debugger, flagged_as_run_alone
import unittest
import os
import inspect
def _codeFile():
    """Absolute, symlink-resolved path of this source file."""
    source_path = inspect.getsourcefile(_codeFile)
    return os.path.realpath(os.path.abspath(source_path))

# Resolve once at import time; _codeDir reuses the cached value.
__codeFile = _codeFile()

def _codeDir():
    """Directory containing this source file (used to locate test data)."""
    return os.path.dirname(__codeFile)
# issue 1104 deals with Jenkins / OCP support of Model and the three horseman of MIPocalypse
#@fail_to_debugger
class TestModel(unittest.TestCase):
    """Exercises ticdat.Model against each supported MIP engine.

    Each engine-specific test first checks that the engine module imported
    cleanly (stringish would mean an import-error message instead of a
    module), then runs the shared model checks below.
    """
    def _testDiet(self, modelType):
        # Classic diet LP with a known optimal cost.
        solution, objective = dietSolver(modelType)
        self.assertTrue(solution)
        self.assertTrue(nearlySame(objective, 11.8289))
    def _testNetflow(self, modelType):
        solution, objective = netflowSolver(modelType)
        self.assertTrue(solution)
        self.assertTrue(nearlySame(objective, 5500.0))
    def _testFantop(self, modelType):
        # The bare _testFantop below resolves to the module-level function
        # (class scope is not searched from inside methods).
        for data_file, expected_yield in (
                ("sample_data.sql", 2988.61),
                ("sample_tweaked_most_importants.sql", 2947.677),
                ("flex_constraint.sql", 2952.252)):
            solution, draft_yield = _testFantop(modelType, data_file)
            self.assertTrue(solution and nearlySame(draft_yield, expected_yield))
    def _testParameters(self, modelType):
        # Only checks that engine parameters can be set without error.
        mdl = Model(modelType, "parameters")
        mdl.set_parameters(MIP_Gap=0.01)
    def testCplex(self):
        self.assertFalse(utils.stringish(cplex))
        for check in (self._testDiet, self._testNetflow,
                      self._testFantop, self._testParameters):
            check("cplex")
    def testGurobi(self):
        self.assertFalse(utils.stringish(gurobi))
        for check in (self._testDiet, self._testNetflow,
                      self._testFantop, self._testParameters):
            check("gurobi")
    def testXpress(self):
        self.assertFalse(utils.stringish(xpress))
        # parameters check noted upstream as not yet working for xpress
        for check in (self._testDiet, self._testNetflow,
                      self._testFantop, self._testParameters):
            check("xpress")
def _testFantop(modelType, sqlFile):
    # Builds and solves the "fantasy football draft" MIP with the given engine
    # and input SQLite file. Returns (solution TicDat, draft_yield objective)
    # on success, or None (implicitly) when the model is infeasible.
    dataFactory = TicDatFactory (
        parameters = [["Key"],["Value"]],
        players = [['Player Name'],
                   ['Position', 'Average Draft Position', 'Expected Points', 'Draft Status']],
        roster_requirements = [['Position'],
                               ['Min Num Starters', 'Max Num Starters', 'Min Num Reserve', 'Max Num Reserve',
                                'Flex Status']],
        my_draft_positions = [['Draft Position'],[]]
    )
    # add foreign key constraints (optional, but helps with preventing garbage-in, garbage-out)
    dataFactory.add_foreign_key("players", "roster_requirements", ['Position', 'Position'])
    # set data types (optional, but helps with preventing garbage-in, garbage-out)
    dataFactory.set_data_type("parameters", "Key", number_allowed = False,
                              strings_allowed = ["Starter Weight", "Reserve Weight",
                                                 "Maximum Number of Flex Starters"])
    dataFactory.set_data_type("parameters", "Value", min=0, max=float("inf"),
                              inclusive_min = True, inclusive_max = False)
    dataFactory.set_data_type("players", "Average Draft Position", min=0, max=float("inf"),
                              inclusive_min = False, inclusive_max = False)
    dataFactory.set_data_type("players", "Expected Points", min=-float("inf"), max=float("inf"),
                              inclusive_min = False, inclusive_max = False)
    dataFactory.set_data_type("players", "Draft Status",
                              strings_allowed = ["Un-drafted", "Drafted By Me", "Drafted By Someone Else"])
    for fld in ("Min Num Starters", "Min Num Reserve", "Max Num Reserve"):
        dataFactory.set_data_type("roster_requirements", fld, min=0, max=float("inf"),
                                  inclusive_min = True, inclusive_max = False, must_be_int = True)
    dataFactory.set_data_type("roster_requirements", "Max Num Starters", min=0, max=float("inf"),
                              inclusive_min = False, inclusive_max = True, must_be_int = True)
    dataFactory.set_data_type("roster_requirements", "Flex Status", number_allowed = False,
                              strings_allowed = ["Flex Eligible", "Flex Ineligible"])
    dataFactory.set_data_type("my_draft_positions", "Draft Position", min=0, max=float("inf"),
                              inclusive_min = False, inclusive_max = False, must_be_int = True)
    # Solution schema: one row per drafted player.
    solutionFactory = TicDatFactory(
        my_draft = [['Player Name'], ['Draft Position', 'Position', 'Planned Or Actual',
                                      'Starter Or Reserve']])
    # Load and sanity-check the input data from the SQL file next to this module.
    dat = dataFactory.sql.create_tic_dat_from_sql(os.path.join(_codeDir(), sqlFile), freeze_it=True)
    assert dataFactory.good_tic_dat_object(dat)
    assert not dataFactory.find_foreign_key_failures(dat)
    assert not dataFactory.find_data_type_failures(dat)
    expected_draft_position = {}
    # for our purposes, it's fine to assume all those drafted by someone else are drafted
    # prior to any players drafted by me
    for player_name in sorted(dat.players,
                              key=lambda _p: {"Un-drafted":dat.players[_p]["Average Draft Position"],
                                              "Drafted By Me":-1,
                                              "Drafted By Someone Else":-2}[dat.players[_p]["Draft Status"]]):
        expected_draft_position[player_name] = len(expected_draft_position) + 1
    # expected positions must be a 1..N permutation
    assert max(expected_draft_position.values()) == len(set(expected_draft_position.values())) == len(dat.players)
    assert min(expected_draft_position.values()) == 1
    already_drafted_by_me = {player_name for player_name,row in dat.players.items() if
                             row["Draft Status"] == "Drafted By Me"}
    can_be_drafted_by_me = {player_name for player_name,row in dat.players.items() if
                            row["Draft Status"] != "Drafted By Someone Else"}
    m = Model(modelType, 'fantop')
    # one binary per (player, role): picked as starter / picked as reserve
    my_starters = {player_name:m.add_var(type="binary",name="starter_%s"%player_name)
                   for player_name in can_be_drafted_by_me}
    my_reserves = {player_name:m.add_var(type="binary",name="reserve_%s"%player_name)
                   for player_name in can_be_drafted_by_me}
    for player_name in can_be_drafted_by_me:
        if player_name in already_drafted_by_me:
            # players I already drafted must fill exactly one role
            m.add_constraint(my_starters[player_name] + my_reserves[player_name] == 1,
                             name="already_drafted_%s"%player_name)
        else:
            # everyone else can fill at most one role
            m.add_constraint(my_starters[player_name] + my_reserves[player_name] <= 1,
                             name="cant_draft_twice_%s"%player_name)
    # at my i-th draft position, at most i picks can come from players
    # expected to be gone before that position
    for i,draft_position in enumerate(sorted(dat.my_draft_positions)):
        m.add_constraint(m.sum(my_starters[player_name] + my_reserves[player_name]
                               for player_name in can_be_drafted_by_me
                               if expected_draft_position[player_name] < draft_position) <= i,
                         name = "at_most_%s_can_be_ahead_of_%s"%(i,draft_position))
    my_draft_size = m.sum(my_starters[player_name] + my_reserves[player_name]
                          for player_name in can_be_drafted_by_me)
    m.add_constraint(my_draft_size >= len(already_drafted_by_me) + 1,
                     name = "need_to_extend_by_at_least_one")
    m.add_constraint(my_draft_size <= len(dat.my_draft_positions), name = "cant_exceed_draft_total")
    # per-position roster bounds for starters and reserves
    for position, row in dat.roster_requirements.items():
        players = {player_name for player_name in can_be_drafted_by_me
                   if dat.players[player_name]["Position"] == position}
        starters = m.sum(my_starters[player_name] for player_name in players)
        reserves = m.sum(my_reserves[player_name] for player_name in players)
        m.add_constraint(starters >= row["Min Num Starters"], name = "min_starters_%s"%position)
        m.add_constraint(starters <= row["Max Num Starters"], name = "max_starters_%s"%position)
        m.add_constraint(reserves >= row["Min Num Reserve"], name = "min_reserve_%s"%position)
        m.add_constraint(reserves <= row["Max Num Reserve"], name = "max_reserve_%s"%position)
    # optional cap on flex-eligible starters
    if "Maximum Number of Flex Starters" in dat.parameters:
        players = {player_name for player_name in can_be_drafted_by_me if
                   dat.roster_requirements[dat.players[player_name]["Position"]]["Flex Status"] == "Flex Eligible"}
        m.add_constraint(m.sum(my_starters[player_name] for player_name in players)
                         <= dat.parameters["Maximum Number of Flex Starters"]["Value"],
                         name = "max_flex")
    # objective: weighted expected points (starters and reserves can weigh differently)
    starter_weight = dat.parameters["Starter Weight"]["Value"] if "Starter Weight" in dat.parameters else 1
    reserve_weight = dat.parameters["Reserve Weight"]["Value"] if "Reserve Weight" in dat.parameters else 1
    m.set_objective(m.sum(dat.players[player_name]["Expected Points"] *
                          (my_starters[player_name] * starter_weight + my_reserves[player_name] * reserve_weight)
                          for player_name in can_be_drafted_by_me),
                    sense="maximize")
    if not m.optimize():
        return
    # Extract the picks (binary vars close enough to 1) in expected-draft order.
    sln = solutionFactory.TicDat()
    def almostone(x):
        return abs(m.get_solution_value(x) -1) < 0.0001
    picked = sorted([player_name for player_name in can_be_drafted_by_me
                     if almostone(my_starters[player_name]) or almostone(my_reserves[player_name])],
                    key=lambda _p: expected_draft_position[_p])
    assert len(picked) <= len(dat.my_draft_positions)
    if len(picked) < len(dat.my_draft_positions):
        print("Your model is over-constrained, and thus only a partial draft was possible")
    draft_yield = 0
    for player_name, draft_position in zip(picked, sorted(dat.my_draft_positions)):
        draft_yield += dat.players[player_name]["Expected Points"] * \
                       (starter_weight if almostone(my_starters[player_name]) else reserve_weight)
        assert draft_position <= expected_draft_position[player_name]
        sln.my_draft[player_name]["Draft Position"] = draft_position
        sln.my_draft[player_name]["Position"] = dat.players[player_name]["Position"]
        sln.my_draft[player_name]["Planned Or Actual"] = "Actual" if player_name in already_drafted_by_me else "Planned"
        sln.my_draft[player_name]["Starter Or Reserve"] = \
            "Starter" if almostone(my_starters[player_name]) else "Reserve"
    return sln, draft_yield
# Scratch-directory name derived from the test class name.
_scratchDir = TestModel.__name__ + "_scratch"
# Run the tests.
if __name__ == "__main__":
    unittest.main()
from __future__ import absolute_import, print_function, division
import numpy as np
from matplotlib.colors import LinearSegmentedColormap,ListedColormap
import sys
__author__ = "Juhyeong Kang "
__email__ = "jhkang@astro.snu.ac.kr"
def create_cdict(r, g, b):
    """Build a LinearSegmentedColormap cdict from 8-bit channel tables.

    Each channel becomes a list of (x, y0, y1) tuples with x evenly spaced
    on [0, 1] and y0 == y1 (no discontinuities), values scaled from 0-255.
    """
    x = np.linspace(0, 1, 256)
    return {
        name: list(zip(x, channel / 255.0, channel / 255.0))
        for name, channel in (('red', r), ('green', g), ('blue', b))
    }
def hac(r=False):
    """Colormap factory; presumably the H-alpha map for FISS imagery -- TODO confirm.

    Pass r=True for the reversed variant. The 256-entry tables below are
    fixed 8-bit channel ramps; do not edit by hand.
    """
    # red channel, 0-255
    hr = np.array([0, 0, 1, 2, 3, 4, 4, 6, 6, 7, 8, 9, 10, 10, 12, 12, 13, 14, 15,
                   16, 16, 18, 18, 19, 20, 21, 22, 23, 24, 25, 25, 26, 27, 28, 29,
                   30, 31, 31, 33, 33, 34, 35, 36, 37, 37, 39, 39, 40, 41, 42, 43,
                   43, 45, 45, 46, 47, 48, 49, 50, 51, 51, 52, 53, 54, 55, 56, 57,
                   58, 58, 59, 60, 61, 62, 63, 64, 64, 66, 66, 67, 68, 69, 70, 70,
                   72, 72, 73, 74, 75, 76, 76, 78, 78, 79, 80, 81, 82, 83, 84, 84,
                   86, 87, 88, 89, 91, 92, 93, 94, 96, 97, 98, 99, 100, 102, 102,
                   104, 105, 106, 107, 108, 110, 111, 112, 113, 115, 116, 117, 118,
                   120, 121, 121, 123, 124, 125, 126, 128, 129, 130, 131, 132, 134,
                   135, 136, 137, 139, 139, 141, 142, 143, 144, 145, 147, 148, 149,
                   150, 152, 153, 154, 155, 156, 158, 158, 160, 161, 162, 163, 165,
                   166, 167, 168, 169, 171, 172, 173, 174, 176, 176, 178, 178, 179,
                   179, 179, 180, 180, 180, 181, 181, 181, 182, 182, 182, 183, 183,
                   183, 184, 186, 187, 188, 189, 190, 191, 192, 193, 195, 196, 197,
                   198, 199, 200, 201, 202, 204, 205, 206, 207, 208, 209, 210, 212,
                   213, 214, 215, 216, 217, 218, 219, 221, 222, 223, 224, 225, 226,
                   227, 228, 230, 231, 232, 233, 234, 235, 237, 238, 239, 240, 241,
                   242, 243, 244, 245, 247, 248, 249, 250, 251, 252, 253, 255])
    # green channel, 0-255
    hg = np.array([0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 4, 4,
                   4, 4, 4, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 8, 8, 8, 8,
                   8, 9, 9, 9, 9, 9, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 12, 12,
                   12, 12, 12, 13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 15, 15, 15,
                   15, 15, 16, 16, 16, 16, 16, 17, 17, 17, 17, 17, 18, 18, 18, 18,
                   18, 19, 19, 19, 20, 20, 21, 22, 22, 23, 23, 24, 24, 25, 25, 26,
                   26, 27, 27, 28, 28, 29, 29, 30, 30, 31, 31, 33, 34, 36, 38, 39,
                   41, 43, 44, 46, 47, 49, 51, 53, 54, 56, 58, 59, 61, 62, 64, 66,
                   67, 69, 71, 73, 74, 76, 77, 79, 81, 82, 84, 86, 88, 89, 91, 92,
                   94, 96, 97, 99, 101, 102, 104, 106, 107, 109, 110, 112, 114, 116,
                   117, 119, 121, 122, 124, 125, 127, 129, 130, 132, 134, 136, 137,
                   138, 140, 142, 144, 145, 147, 149, 150, 152, 153, 155, 157, 158,
                   160, 162, 164, 165, 166, 168, 170, 172, 173, 175, 177, 179, 180,
                   181, 183, 185, 187, 188, 190, 192, 193, 195, 196, 198, 200, 201,
                   203, 205, 207, 208, 210, 211, 213, 215, 216, 218, 220, 221, 223,
                   225, 226, 228, 229, 231, 233, 235, 236, 238, 240, 241, 243, 244,
                   246, 248, 250, 251, 253, 255])
    # blue channel, 0-255
    hb = np.array([0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5,
                   5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 9, 9, 9, 9, 10, 10, 10, 10,
                   11, 11, 11, 12, 12, 12, 12, 13, 13, 13, 13, 14, 14, 14, 14, 15,
                   15, 15, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 19, 19, 19,
                   19, 20, 20, 20, 20, 21, 21, 21, 22, 22, 22, 22, 23, 23, 23, 23,
                   24, 24, 24, 25, 25, 25, 25, 26, 26, 26, 26, 27, 27, 27, 28, 28,
                   28, 28, 29, 29, 29, 29, 30, 30, 30, 30, 31, 31, 31, 32, 32, 32,
                   32, 33, 33, 33, 33, 34, 34, 34, 35, 35, 35, 35, 36, 36, 36, 36,
                   37, 37, 37, 38, 38, 38, 38, 39, 39, 39, 39, 40, 40, 40, 41, 41,
                   41, 41, 42, 42, 42, 42, 43, 43, 43, 44, 44, 44, 44, 45, 45, 45,
                   45, 46, 46, 46, 47, 47, 47, 47, 48, 48, 48, 48, 49, 50, 51, 52,
                   53, 54, 55, 56, 57, 59, 62, 65, 68, 71, 74, 78, 81, 84, 87, 90,
                   93, 96, 99, 102, 105, 108, 111, 114, 117, 120, 123, 126, 130,
                   133, 136, 138, 141, 144, 148, 151, 154, 157, 160, 163, 166, 169,
                   172, 175, 178, 181, 184, 187, 190, 193, 196, 199, 203, 206, 209,
                   212, 215, 217, 221, 224, 227, 230, 233, 236, 239, 242, 245, 248,
                   251, 255])
    # forward and reversed cdicts
    hadic = create_cdict(hr, hg, hb)
    hardic = create_cdict(hr[::-1], hg[::-1], hb[::-1])
    if r:
        return LinearSegmentedColormap('mytables', hardic)
    else:
        return LinearSegmentedColormap('mytables', hadic)
def cac(r=False):
    """Colormap factory; presumably the Ca II line map for FISS imagery -- TODO confirm.

    Pass r=True for the reversed variant. The 256-entry tables below are
    fixed 8-bit channel ramps; do not edit by hand.
    """
    # red channel, 0-255
    cr = np.array([0, 0, 0, 0, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5,
                   6, 6, 6, 6, 7, 7, 7, 8, 8, 8, 8, 9, 9, 9, 10, 10, 10, 11, 11, 11,
                   11, 12, 12, 12, 13, 13, 13, 13, 14, 14, 14, 15, 15, 15, 15, 16,
                   16, 16, 17, 17, 17, 17, 18, 18, 18, 19, 19, 19, 19, 20, 20, 20,
                   21, 21, 21, 22, 22, 22, 22, 23, 23, 23, 24, 24, 24, 24, 25, 25,
                   25, 26, 26, 26, 26, 27, 27, 27, 28, 28, 30, 31, 32, 34, 35, 36,
                   37, 39, 40, 41, 42, 43, 45, 46, 47, 49, 50, 51, 53, 53, 55, 56,
                   57, 59, 60, 61, 63, 64, 65, 67, 67, 69, 70, 71, 73, 74, 75, 76,
                   78, 79, 80, 81, 83, 84, 85, 86, 88, 89, 90, 92, 92, 94, 95, 96,
                   98, 99, 100, 102, 103, 104, 106, 106, 108, 109, 110, 112, 113,
                   114, 115, 117, 118, 119, 120, 122, 123, 124, 125, 127, 128, 129,
                   130, 130, 132, 133, 133, 135, 136, 136, 138, 138, 139, 141, 141,
                   142, 144, 146, 148, 149, 151, 153, 155, 157, 158, 160, 162, 164,
                   166, 167, 169, 171, 172, 174, 176, 178, 180, 181, 183, 185, 187,
                   189, 190, 192, 194, 196, 198, 199, 201, 203, 204, 206, 208, 210,
                   212, 213, 215, 217, 219, 221, 222, 224, 226, 228, 230, 232, 233,
                   235, 236, 238, 240, 242, 244, 245, 247, 249, 251, 253, 255])
    # green channel, 0-255
    cg = np.array([0, 0, 1, 1, 2, 3, 3, 4, 4, 5, 6, 6, 7, 7, 8, 9, 9, 10, 10, 11,
                   12, 12, 13, 13, 14, 15, 15, 16, 16, 17, 18, 18, 19, 19, 20, 21,
                   21, 22, 22, 23, 24, 24, 25, 25, 26, 27, 27, 28, 28, 29, 30, 30,
                   31, 31, 32, 33, 33, 34, 34, 35, 36, 36, 37, 37, 38, 39, 39, 40,
                   40, 41, 42, 42, 43, 43, 44, 45, 45, 46, 46, 47, 48, 48, 49, 49,
                   50, 51, 51, 52, 52, 53, 54, 54, 55, 55, 56, 57, 57, 58, 59, 60,
                   61, 62, 63, 64, 65, 66, 67, 67, 68, 69, 70, 71, 72, 73, 74, 75,
                   76, 77, 78, 78, 79, 81, 82, 83, 84, 86, 87, 88, 90, 91, 92, 94,
                   95, 96, 97, 99, 100, 101, 103, 104, 105, 107, 108, 109, 110, 112,
                   113, 114, 116, 117, 118, 120, 121, 122, 124, 125, 126, 127, 129,
                   130, 131, 133, 134, 135, 137, 138, 139, 140, 142, 143, 144, 146,
                   147, 148, 150, 151, 152, 153, 155, 156, 157, 159, 160, 161, 162,
                   164, 165, 166, 168, 169, 170, 172, 173, 174, 175, 177, 178, 179,
                   181, 182, 183, 185, 186, 187, 188, 190, 191, 192, 194, 195, 196,
                   197, 199, 200, 201, 203, 204, 205, 207, 208, 209, 210, 212, 213,
                   214, 216, 217, 218, 220, 221, 222, 223, 225, 226, 227, 229, 230,
                   231, 232, 234, 235, 236, 238, 239, 240, 242, 243, 244, 245, 247,
                   248, 249, 251, 252, 253, 255])
    # blue channel, 0-255
    cb = np.array([0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5,
                   5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 9, 9, 9, 9, 10, 10, 10,
                   10, 11, 11, 11, 12, 12, 12, 12, 13, 13, 13, 13, 14, 14, 14, 14,
                   15, 15, 15, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 19, 19,
                   19, 19, 20, 20, 20, 20, 21, 21, 21, 22, 22, 22, 22, 23, 23, 23,
                   23, 24, 24, 24, 25, 25, 25, 25, 26, 26, 26, 26, 27, 27, 27, 28,
                   28, 28, 28, 29, 29, 29, 29, 30, 30, 30, 30, 31, 31, 31, 32, 32,
                   32, 32, 33, 33, 33, 33, 34, 34, 34, 35, 35, 35, 35, 36, 36, 36,
                   36, 37, 37, 37, 38, 38, 38, 38, 39, 39, 39, 39, 40, 40, 40, 41,
                   41, 41, 41, 42, 42, 42, 42, 43, 43, 43, 44, 44, 44, 44, 45, 45,
                   45, 45, 46, 46, 46, 47, 47, 47, 47, 48, 48, 48, 48, 50, 53, 55,
                   57, 60, 62, 65, 67, 69, 72, 74, 77, 80, 83, 86, 89, 92, 95, 97,
                   100, 103, 106, 109, 111, 115, 117, 120, 123, 126, 129, 131, 135,
                   137, 140, 143, 146, 149, 151, 154, 157, 160, 163, 166, 169, 172,
                   174, 177, 180, 183, 186, 188, 192, 194, 197, 200, 203, 206, 208,
                   212, 214, 217, 220, 223, 226, 228, 231, 234, 237, 240, 243, 246,
                   249, 251, 255])
    # forward and reversed cdicts
    cadic = create_cdict(cr, cg, cb)
    cardic = create_cdict(cr[::-1], cg[::-1], cb[::-1])
    if r:
        return LinearSegmentedColormap('mytables', cardic)
    else:
        return LinearSegmentedColormap('mytables', cadic)
def nac(r= False):
    """Colormap factory; presumably the Na line map for FISS imagery -- TODO confirm.

    Pass r=True for the reversed variant. The 256-entry tables below are
    fixed 8-bit channel ramps; do not edit by hand.
    """
    # red channel, 0-255
    nr = np.array([0, 0, 0, 1, 2, 3, 3, 4, 4, 5, 6, 6, 7, 8, 9,
                   9, 10, 10, 11, 12, 12, 13, 13, 14, 15, 16,
                   16, 17, 18, 18, 19, 19, 20, 21, 22, 22, 23,
                   23, 24, 25, 25, 26, 27, 27, 28, 29, 29, 30,
                   31, 31, 32, 32, 33, 34, 35, 35, 36, 36, 37,
                   38, 38, 39, 40, 40, 41, 42, 42, 43, 44, 44,
                   45, 45, 46, 47, 48, 48, 49, 49, 50, 51, 51,
                   52, 53, 54, 54, 55, 55, 56, 57, 57, 58, 58,
                   59, 60, 61, 61, 62, 63, 63, 65, 66, 68, 69,
                   71, 72, 73, 75, 76, 77, 79, 80, 81, 83, 84,
                   86, 87, 89, 90, 91, 93, 94, 95, 97, 98, 99,
                   101, 102, 104, 105, 106, 108, 109, 111, 112,
                   113, 115, 116, 117, 119, 120, 121, 123, 124,
                   126, 127, 129, 130, 131, 133, 134, 135, 137,
                   138, 139, 141, 142, 144, 145, 146, 148, 149,
                   151, 152, 153, 155, 156, 157, 158, 160, 162,
                   163, 164, 166, 167, 169, 170, 171, 172, 173,
                   174, 174, 175, 176, 176, 177, 178, 179, 180,
                   180, 181, 182, 182, 183, 184, 185, 187, 187,
                   189, 190, 191, 192, 193, 194, 196, 197, 198,
                   199, 200, 201, 202, 203, 204, 206, 207, 208,
                   209, 210, 211, 213, 213, 215, 216, 217, 218,
                   219, 220, 222, 222, 224, 225, 226, 227, 228,
                   229, 231, 232, 233, 234, 235, 236, 237, 239,
                   240, 241, 242, 243, 244, 245, 246, 248, 248,
                   250, 251, 252, 253, 255])
    # green channel, 0-255
    ng = np.array([0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 4, 5, 5, 6,
                   6, 7, 7, 8, 8, 9, 9, 9, 10, 10, 11, 11, 12,
                   12, 13, 13, 13, 14, 14, 15, 15, 16, 16, 17,
                   17, 18, 18, 18, 19, 19, 20, 20, 21, 21, 22,
                   22, 22, 23, 23, 24, 24, 25, 25, 26, 26, 27,
                   27, 27, 28, 28, 29, 29, 30, 30, 31, 31, 31,
                   32, 32, 33, 33, 34, 34, 35, 35, 36, 36, 36,
                   37, 37, 38, 38, 39, 39, 40, 40, 40, 41, 41,
                   42, 42, 43, 43, 44, 45, 45, 46, 46, 47, 48,
                   48, 49, 49, 50, 51, 51, 52, 53, 53, 54, 54,
                   55, 56, 56, 57, 57, 59, 60, 62, 63, 65, 66,
                   68, 69, 71, 72, 73, 75, 76, 78, 79, 81, 82,
                   84, 85, 86, 88, 89, 91, 92, 94, 95, 97, 98,
                   99, 101, 102, 104, 105, 107, 108, 110, 111,
                   113, 114, 116, 117, 119, 120, 122, 123, 124,
                   126, 127, 129, 130, 132, 133, 135, 136, 137,
                   139, 140, 142, 143, 145, 146, 148, 149, 150,
                   152, 154, 155, 157, 158, 160, 161, 163, 164,
                   165, 167, 168, 170, 171, 173, 174, 176, 177,
                   178, 180, 181, 183, 184, 186, 187, 189, 190,
                   191, 193, 194, 196, 197, 199, 200, 202, 203,
                   205, 206, 208, 209, 211, 212, 214, 215, 216,
                   218, 219, 221, 222, 224, 225, 227, 228, 229,
                   231, 232, 234, 235, 237, 238, 240, 241, 242,
                   244, 246, 247, 249, 250, 252, 253, 255])
    # blue channel, 0-255
    nb = np.array([0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1,
                   2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3,
                   4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5,
                   6, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7, 8,
                   8, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 10,
                   10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11,
                   11, 11, 12, 12, 12, 12, 12, 12, 12, 13, 13, 13,
                   13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 14, 14,
                   15, 15, 15, 15, 15, 15, 15, 16, 16, 16, 16, 16,
                   16, 16, 16, 17, 17, 17, 17, 17, 17, 17, 18, 18,
                   18, 18, 18, 18, 18, 18, 19, 19, 19, 19, 19, 19,
                   19, 20, 20, 20, 20, 20, 20, 20, 21, 21, 21, 21,
                   21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 22, 23,
                   23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 25,
                   27, 29, 30, 32, 34, 36, 37, 39, 42, 45, 48, 52,
                   55, 58, 62, 65, 69, 72, 75, 79, 82, 85, 88, 92,
                   95, 98, 102, 105, 108, 111, 115, 118, 122, 125,
                   128, 131, 134, 138, 141, 145, 148, 151, 155, 158,
                   161, 165, 168, 171, 174, 178, 181, 184, 188, 191,
                   194, 198, 201, 205, 208, 211, 214, 218, 221, 224,
                   228, 231, 234, 237, 241, 244, 248, 251, 255])
    # forward and reversed cdicts
    nadic = create_cdict(nr, ng, nb)
    nardic = create_cdict(nr[::-1], ng[::-1], nb[::-1])
    if r:
        return LinearSegmentedColormap('mytables', nardic)
    else:
        return LinearSegmentedColormap('mytables', nadic)
def fec(r= False):
    """Colormap factory; presumably the Fe line map for FISS imagery -- TODO confirm.

    Pass r=True for the reversed variant. NOTE(review): the tables are named
    fr/fg/fb but are passed to create_cdict as (fb, fg, fr), i.e. the "red"
    table feeds the blue slot and vice versa -- this looks deliberate (it
    produces the blue-tinted variant) but should be confirmed upstream.
    """
    fr = np.array([0, 0, 0, 0, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5,
                   6, 6, 6, 6, 7, 7, 7, 8, 8, 8, 8, 9, 9, 9, 10, 10, 10, 11, 11, 11,
                   11, 12, 12, 12, 13, 13, 13, 13, 14, 14, 14, 15, 15, 15, 15, 16,
                   16, 16, 17, 17, 17, 17, 18, 18, 18, 19, 19, 19, 19, 20, 20, 20,
                   21, 21, 21, 22, 22, 22, 22, 23, 23, 23, 24, 24, 24, 24, 25, 25,
                   25, 26, 26, 26, 26, 27, 27, 27, 28, 28, 30, 31, 32, 34, 35, 36,
                   37, 39, 40, 41, 42, 43, 45, 46, 47, 49, 50, 51, 53, 53, 55, 56,
                   57, 59, 60, 61, 63, 64, 65, 67, 67, 69, 70, 71, 73, 74, 75, 76,
                   78, 79, 80, 81, 83, 84, 85, 86, 88, 89, 90, 92, 92, 94, 95, 96,
                   98, 99, 100, 102, 103, 104, 106, 106, 108, 109, 110, 112, 113,
                   114, 115, 117, 118, 119, 120, 122, 123, 124, 125, 127, 128, 129,
                   130, 130, 132, 133, 133, 135, 136, 136, 138, 138, 139, 141, 141,
                   142, 144, 146, 148, 149, 151, 153, 155, 157, 158, 160, 162, 164,
                   166, 167, 169, 171, 172, 174, 176, 178, 180, 181, 183, 185, 187,
                   189, 190, 192, 194, 196, 198, 199, 201, 203, 204, 206, 208, 210,
                   212, 213, 215, 217, 219, 221, 222, 224, 226, 228, 230, 232, 233,
                   235, 236, 238, 240, 242, 244, 245, 247, 249, 251, 253, 255])
    fg = np.array([0, 0, 1, 1, 2, 3, 3, 4, 4, 5, 6, 6, 7, 7, 8, 9, 9, 10, 10, 11,
                   12, 12, 13, 13, 14, 15, 15, 16, 16, 17, 18, 18, 19, 19, 20, 21,
                   21, 22, 22, 23, 24, 24, 25, 25, 26, 27, 27, 28, 28, 29, 30, 30,
                   31, 31, 32, 33, 33, 34, 34, 35, 36, 36, 37, 37, 38, 39, 39, 40,
                   40, 41, 42, 42, 43, 43, 44, 45, 45, 46, 46, 47, 48, 48, 49, 49,
                   50, 51, 51, 52, 52, 53, 54, 54, 55, 55, 56, 57, 57, 58, 59, 60,
                   61, 62, 63, 64, 65, 66, 67, 67, 68, 69, 70, 71, 72, 73, 74, 75,
                   76, 77, 78, 78, 79, 81, 82, 83, 84, 86, 87, 88, 90, 91, 92, 94,
                   95, 96, 97, 99, 100, 101, 103, 104, 105, 107, 108, 109, 110, 112,
                   113, 114, 116, 117, 118, 120, 121, 122, 124, 125, 126, 127, 129,
                   130, 131, 133, 134, 135, 137, 138, 139, 140, 142, 143, 144, 146,
                   147, 148, 150, 151, 152, 153, 155, 156, 157, 159, 160, 161, 162,
                   164, 165, 166, 168, 169, 170, 172, 173, 174, 175, 177, 178, 179,
                   181, 182, 183, 185, 186, 187, 188, 190, 191, 192, 194, 195, 196,
                   197, 199, 200, 201, 203, 204, 205, 207, 208, 209, 210, 212, 213,
                   214, 216, 217, 218, 220, 221, 222, 223, 225, 226, 227, 229, 230,
                   231, 232, 234, 235, 236, 238, 239, 240, 242, 243, 244, 245, 247,
                   248, 249, 251, 252, 253, 255])
    fb = np.array([0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5,
                   5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 9, 9, 9, 9, 10, 10, 10,
                   10, 11, 11, 11, 12, 12, 12, 12, 13, 13, 13, 13, 14, 14, 14, 14,
                   15, 15, 15, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 19, 19,
                   19, 19, 20, 20, 20, 20, 21, 21, 21, 22, 22, 22, 22, 23, 23, 23,
                   23, 24, 24, 24, 25, 25, 25, 25, 26, 26, 26, 26, 27, 27, 27, 28,
                   28, 28, 28, 29, 29, 29, 29, 30, 30, 30, 30, 31, 31, 31, 32, 32,
                   32, 32, 33, 33, 33, 33, 34, 34, 34, 35, 35, 35, 35, 36, 36, 36,
                   36, 37, 37, 37, 38, 38, 38, 38, 39, 39, 39, 39, 40, 40, 40, 41,
                   41, 41, 41, 42, 42, 42, 42, 43, 43, 43, 44, 44, 44, 44, 45, 45,
                   45, 45, 46, 46, 46, 47, 47, 47, 47, 48, 48, 48, 48, 50, 53, 55,
                   57, 60, 62, 65, 67, 69, 72, 74, 77, 80, 83, 86, 89, 92, 95, 97,
                   100, 103, 106, 109, 111, 115, 117, 120, 123, 126, 129, 131, 135,
                   137, 140, 143, 146, 149, 151, 154, 157, 160, 163, 166, 169, 172,
                   174, 177, 180, 183, 186, 188, 192, 194, 197, 200, 203, 206, 208,
                   212, 214, 217, 220, 223, 226, 228, 231, 234, 237, 240, 243, 246,
                   249, 251, 255])
    # channels intentionally swapped (see docstring note)
    fedic = create_cdict(fb, fg, fr)
    ferdic = create_cdict(fb[::-1], fg[::-1], fr[::-1])
    if r:
        return LinearSegmentedColormap('mytables', ferdic)
    else:
        return LinearSegmentedColormap('mytables', fedic)
def allwhite():
    """Colormap whose every entry is white (useful for blanking a layer)."""
    return ListedColormap(['w'] * 3)
def allblack():
    """Colormap whose every entry is black (useful for blanking a layer)."""
    return ListedColormap(['k'] * 3)
# Publish ready-made colormap instances as module attributes. Plain
# module-level assignment is equivalent to the setattr(sys.modules[...])
# idiom. Note that 'allwhite' and 'allblack' are deliberately rebound here
# from the factory functions above to the colormap instances they produce.
ca = cac()
ca_r = cac(r=True)
ha = hac()
ha_r = hac(r=True)
na = nac()
na_r = nac(r=True)
fe = fec()
fe_r = fec(r=True)
allwhite = allwhite()
allblack = allblack()
| SNU-sunday/fisspy | fisspy/cm.py | Python | bsd-2-clause | 19,011 |
"""
System services
===============
This module provides low-level tools for managing system services,
using the ``service`` command. It supports both `upstart`_ services
and traditional SysV-style ``/etc/init.d/`` scripts.
.. _upstart: http://upstart.ubuntu.com/
"""
from __future__ import with_statement
from fabric.api import *
def is_running(service):
    """
    Check whether a system service is currently running.

    ::

        import fabtools

        if fabtools.service.is_running('foo'):
            print "Service foo is running!"
    """
    quiet = hide('running', 'stdout', 'stderr', 'warnings')
    with settings(quiet, warn_only=True):
        status = sudo('service %s status' % service)
        return status.succeeded
def start(service):
    """
    Start a service.

    ::

        import fabtools

        # Start service if it is not running
        if not fabtools.service.is_running('foo'):
            fabtools.service.start('foo')
    """
    sudo('service %s start' % service)
def stop(service):
    """
    Stop a service.

    ::

        import fabtools

        # Stop service if it is running
        if fabtools.service.is_running('foo'):
            fabtools.service.stop('foo')
    """
    sudo('service %s stop' % service)
def restart(service):
    """
    Restart a service.

    ::

        import fabtools

        # Start service, or restart it if it is already running
        if fabtools.service.is_running('foo'):
            fabtools.service.restart('foo')
        else:
            fabtools.service.start('foo')
    """
    sudo('service %s restart' % service)
def reload(service):
    """
    Reload a service's configuration.

    ::

        import fabtools

        # Reload service
        fabtools.service.reload('foo')

    .. warning::
        The service needs to support the ``reload`` operation.
    """
    sudo('service %s reload' % service)
def force_reload(service):
    """
    Force-reload a service's configuration.

    ::

        import fabtools

        # Force reload service
        fabtools.service.force_reload('foo')

    .. warning::
        The service needs to support the ``force-reload`` operation.
    """
    sudo('service %s force-reload' % service)
| pahaz/fabtools | fabtools/service.py | Python | bsd-2-clause | 2,235 |
from pylab import *

# Time bases: coarse and fine sampling for the damped trace, plus a
# shorter span for the undamped one.
t_coarse = arange(0.0, 5.0, 0.1)
t_fine = arange(0.0, 5.0, 0.02)
t_short = arange(0.0, 2.0, 0.01)

# Upper panel: exponentially damped cosine (dots = coarse, line = fine).
subplot(211)
plot(t_coarse, cos(2*pi*t_coarse)*exp(-t_coarse), 'bo',
     t_fine, cos(2*pi*t_fine)*exp(-t_fine), 'k')
grid(True)
title('A tale of 2 subplots')
ylabel('Damped')

# Lower panel: pure cosine over the shorter span.
subplot(212)
plot(t_short, cos(2*pi*t_short), 'r--')
grid(True)
xlabel('time (s)')
ylabel('Undamped')

show()
| sniemi/SamPy | sandbox/src1/subplot_demo.py | Python | bsd-2-clause | 349 |
# Flat 256-bytes-per-row lookup table stored as one bytes object built from
# hex-escaped chunks. Given the module context (ia32 instruction decoding),
# each byte presumably encodes operand/flag information for the opcode at
# that index -- TODO confirm against the code that indexes into this table.
# Generated data: do not edit by hand.
OperandLookupTable = b''.join([
    b'\x81\xbd\x81\xbd\x41\x7d\x00\x00\x81\xbd\x81\xbd\x41\x7d\x00\x00'
    b'\x81\xbd\x81\xbd\x41\x7d\x00\x00\x81\xbd\x81\xbd\x41\x7d\x00\x00'
    b'\x81\xbd\x81\xbd\x41\x7d\x00\x00\x81\xbd\x81\xbd\x41\x7d\x00\x00'
    b'\x81\xbd\x81\xbd\x41\x7d\x00\x00\x81\xbd\x81\xbd\x41\x7d\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x00\x00\xbf\x82\x00\x00\x00\x00\x7d\xfd\x41\xc1\x00\x00\x00\x00'
    b'\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41'
    b'\xc1\xfd\xc1\xc1\x81\xbd\x81\xbd\x81\xbd\x81\xbd\x82\x88\x82\xbd'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7a\x00\x00\x00\x00\x00'
    b'\x41\x7d\x41\x7d\x00\x00\x00\x00\x41\x7d\x00\x00\x00\x00\x00\x00'
    b'\x41\x41\x41\x41\x41\x41\x41\x41\x7d\x7d\x7d\x7d\x7d\x7d\x7d\x7d'
    b'\xc1\xc1\x42\x00\xba\xba\xc1\xfd\x41\x00\x42\x00\x00\x41\x00\x00'
    b'\x81\xbd\x81\xbd\x41\x41\x00\x00\x84\x84\x84\x84\x84\x84\x82\x82'
    b'\x41\x41\x41\x41\x41\x41\x41\x41\x7d\x7d\x7a\x41\x00\x00\x00\x00'
    b'\x00\x00\x00\x00\x00\x00\xc1\xfd\x00\x00\x00\x00\x00\x00\x81\xbd'
    b'\x82\x84\x82\x82\x00\x00\x00\x00\x00\x00\x00\x00\x00\xbd\x00\xc1'
    b'\x84\x84\x88\x88\x88\x88\x88\x88\xbd\xbd\xbd\xbd\xbd\xbd\xbd\xbd'
    b'\x84\x84\x84\x84\x84\x00\x84\x00\x84\x84\x88\x84\x84\x84\x84\x84'
    b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\xbd\xbd\xbd\xbd\xbd\xbd\xbd\xbd\xbd\xbd\xbd\xbd\xbd\xbd\xbd\xbd'
    b'\x84\x84\x84\x84\x84\x84\x84\x84\x84\x84\x84\x90\x84\x84\x84\x84'
    b'\x84\x84\x84\x84\x84\x84\x84\x88\x88\x88\x88\x88\x90\x90\x84\x88'
    b'\xc1\xc1\xc1\xc1\x88\x88\x88\x00\x84\x84\x00\x00\x84\x84\x88\x88'
    b'\x7d\x7d\x7d\x7d\x7d\x7d\x7d\x7d\x7d\x7d\x7d\x7d\x7d\x7d\x7d\x7d'
    b'\x81\x81\x81\x81\x81\x81\x81\x81\x81\x81\x81\x81\x81\x81\x81\x81'
    b'\x00\x00\x00\xbd\xc1\xbd\x00\x00\x00\x00\x00\xbd\xc1\xbd\x84\xbd'
    b'\x81\xbd\xba\xbd\xba\xba\x81\x82\x00\x00\xc1\xbd\xbd\xbd\x81\x82'
    b'\x81\xbd\xc1\xbc\xc1\xc1\xc1\x88\x00\x00\x00\x00\x00\x00\x00\x00'
    b'\x84\x88\x88\x88\x88\x88\x88\x88\x88\x88\x88\x84\x88\x88\x88\x88'
    b'\x88\x88\x88\x88\x88\x88\x84\x88\x88\x88\x88\x88\x88\x88\x88\x88'
    b'\x90\x88\x88\x88\x88\x84\x88\x88\x88\x88\x88\x88\x88\x88\x88\x00'
])
| arizvisa/syringe | lib/ia32/_optable.py | Python | bsd-2-clause | 2,339 |
import numpy as np
# f(x) = a*x*x*x + b*x*x + c*x + d
# f'(x) = 3*a*x*x + 2*b*x + c
#
# d = x0
# c = dx0
# a + b + c + d = x1
# 3*a + 2*b + c = dx1
#
# a + b + dx0 + x0 = x1
# a + b = x1 - x0 - dx0
# a = x1 - x0 - dx0 - b
#
# 3*a + 2*b + dx0 = dx1
# 3*a + 2*b = dx1 - dx0
# 3*(x1 - x0 - dx0 - b) + 2*b = dx1 - dx0
# -3*b + 2*b = dx1 - dx0 - 3*(x1 - x0 - dx0)
# b = -dx1 + dx0 + 3*(x1 - x0 - dx0)
#
# a = x1 - x0 - dx0 - 0.5 * (dx1 - dx0 - 3*(x1 - x0 - dx0))
def cubic_spline_coeffs(p0, v0, p1, v1):
    """Return [a, b, c, d] for the cubic f(t) = a*t^3 + b*t^2 + c*t + d on [0, 1].

    The coefficients satisfy the Hermite endpoint conditions
    f(0) = p0, f'(0) = v0, f(1) = p1, f'(1) = v1
    (see the derivation in the comments above this function).
    """
    delta = p1 - p0 - v0          # residual displacement beyond the v0 drift
    b = 3 * delta + v0 - v1
    a = delta - b
    return [a, b, v0, p0]
def cubic_spline_coeffs_list(ps, vs):
    """Per-segment cubic coefficients for consecutive (position, velocity) pairs."""
    coeffs = []
    for i in range(len(ps) - 1):
        coeffs.append(cubic_spline_coeffs(ps[i], vs[i], ps[i + 1], vs[i + 1]))
    return coeffs
def cubic_spline(N, ps=None, vs=None, coeffs_list=None):
    """Sample each cubic segment at N parameter values on [0, 1].

    Either pass positions *ps* and velocities *vs* (per-segment coefficients
    are derived), or pass precomputed *coeffs_list* directly. Segments are
    concatenated along the sample axis and the result transposed, so the
    sample index is the last axis.
    """
    if coeffs_list is None:
        coeffs_list = cubic_spline_coeffs_list(ps, vs)
    t = np.linspace(0, 1, N)[np.newaxis].T  # column vector of parameters
    segments = [a * t**3 + b * t**2 + c * t + d for a, b, c, d in coeffs_list]
    return np.concatenate(segments).T
if __name__ == "__main__":
    import matplotlib.pyplot as plt

    num_points = 4     # number of control points
    num_samples = 100  # samples per spline segment

    # Random 2-D control points and velocities in [-1, 1).
    p = np.random.random((num_points, 2)) * 2 - 1
    v = np.random.random((num_points, 2)) * 2 - 1

    # BUG FIX: cubic_spline's signature is (N, ps, vs); the original demo
    # called cubic_spline(p, v, Nt), which passes the sample count where the
    # velocities belong and crashes inside cubic_spline_coeffs_list.
    xy = cubic_spline(num_samples, p, v)
    plt.plot(xy[0, :], xy[1, :])
    plt.show()
| dyf/primopt | spline.py | Python | bsd-2-clause | 1,314 |
import sys, unittest, struct, math, ctypes
from binascii import hexlify
from ctypes import *
def bin(s):
    """Render a bytes-like object as an uppercase hex string."""
    return memoryview(s).hex().upper()
# Each *simple* type that supports different byte orders has an
# __ctype_be__ attribute that specifies the same type in BIG ENDIAN
# byte order, and a __ctype_le__ attribute that is the same type in
# LITTLE ENDIAN byte order.
#
# For Structures and Unions, these types are created on demand.
class Test(unittest.TestCase):
@unittest.skip('test disabled')
def test_X(self):
print(sys.byteorder, file=sys.stderr)
for i in range(32):
bits = BITS()
setattr(bits, "i%s" % i, 1)
dump(bits)
    def test_slots(self):
        # Structures declaring __slots__ = () must reject assignment to
        # any attribute not listed in _fields_, for both byte orders.
        class BigPoint(BigEndianStructure):
            __slots__ = ()
            _fields_ = [("x", c_int), ("y", c_int)]
        class LowPoint(LittleEndianStructure):
            __slots__ = ()
            _fields_ = [("x", c_int), ("y", c_int)]
        big = BigPoint()
        little = LowPoint()
        big.x = 4
        big.y = 2
        little.x = 2
        little.y = 4
        # the undeclared attribute "z" must raise on both variants
        with self.assertRaises(AttributeError):
            big.z = 42
        with self.assertRaises(AttributeError):
            little.z = 24
def test_endian_short(self):
if sys.byteorder == "little":
self.assertIs(c_short.__ctype_le__, c_short)
self.assertIs(c_short.__ctype_be__.__ctype_le__, c_short)
else:
self.assertIs(c_short.__ctype_be__, c_short)
self.assertIs(c_short.__ctype_le__.__ctype_be__, c_short)
s = c_short.__ctype_be__(0x1234)
self.assertEqual(bin(struct.pack(">h", 0x1234)), "1234")
self.assertEqual(bin(s), "1234")
self.assertEqual(s.value, 0x1234)
s = c_short.__ctype_le__(0x1234)
self.assertEqual(bin(struct.pack("<h", 0x1234)), "3412")
self.assertEqual(bin(s), "3412")
self.assertEqual(s.value, 0x1234)
s = c_ushort.__ctype_be__(0x1234)
self.assertEqual(bin(struct.pack(">h", 0x1234)), "1234")
self.assertEqual(bin(s), "1234")
self.assertEqual(s.value, 0x1234)
s = c_ushort.__ctype_le__(0x1234)
self.assertEqual(bin(struct.pack("<h", 0x1234)), "3412")
self.assertEqual(bin(s), "3412")
self.assertEqual(s.value, 0x1234)
def test_endian_int(self):
if sys.byteorder == "little":
self.assertIs(c_int.__ctype_le__, c_int)
self.assertIs(c_int.__ctype_be__.__ctype_le__, c_int)
else:
self.assertIs(c_int.__ctype_be__, c_int)
self.assertIs(c_int.__ctype_le__.__ctype_be__, c_int)
s = c_int.__ctype_be__(0x12345678)
self.assertEqual(bin(struct.pack(">i", 0x12345678)), "12345678")
self.assertEqual(bin(s), "12345678")
self.assertEqual(s.value, 0x12345678)
s = c_int.__ctype_le__(0x12345678)
self.assertEqual(bin(struct.pack("<i", 0x12345678)), "78563412")
self.assertEqual(bin(s), "78563412")
self.assertEqual(s.value, 0x12345678)
s = c_uint.__ctype_be__(0x12345678)
self.assertEqual(bin(struct.pack(">I", 0x12345678)), "12345678")
self.assertEqual(bin(s), "12345678")
self.assertEqual(s.value, 0x12345678)
s = c_uint.__ctype_le__(0x12345678)
self.assertEqual(bin(struct.pack("<I", 0x12345678)), "78563412")
self.assertEqual(bin(s), "78563412")
self.assertEqual(s.value, 0x12345678)
def test_endian_longlong(self):
if sys.byteorder == "little":
self.assertIs(c_longlong.__ctype_le__, c_longlong)
self.assertIs(c_longlong.__ctype_be__.__ctype_le__, c_longlong)
else:
self.assertIs(c_longlong.__ctype_be__, c_longlong)
self.assertIs(c_longlong.__ctype_le__.__ctype_be__, c_longlong)
s = c_longlong.__ctype_be__(0x1234567890ABCDEF)
self.assertEqual(bin(struct.pack(">q", 0x1234567890ABCDEF)), "1234567890ABCDEF")
self.assertEqual(bin(s), "1234567890ABCDEF")
self.assertEqual(s.value, 0x1234567890ABCDEF)
s = c_longlong.__ctype_le__(0x1234567890ABCDEF)
self.assertEqual(bin(struct.pack("<q", 0x1234567890ABCDEF)), "EFCDAB9078563412")
self.assertEqual(bin(s), "EFCDAB9078563412")
self.assertEqual(s.value, 0x1234567890ABCDEF)
s = c_ulonglong.__ctype_be__(0x1234567890ABCDEF)
self.assertEqual(bin(struct.pack(">Q", 0x1234567890ABCDEF)), "1234567890ABCDEF")
self.assertEqual(bin(s), "1234567890ABCDEF")
self.assertEqual(s.value, 0x1234567890ABCDEF)
s = c_ulonglong.__ctype_le__(0x1234567890ABCDEF)
self.assertEqual(bin(struct.pack("<Q", 0x1234567890ABCDEF)), "EFCDAB9078563412")
self.assertEqual(bin(s), "EFCDAB9078563412")
self.assertEqual(s.value, 0x1234567890ABCDEF)
def test_endian_float(self):
if sys.byteorder == "little":
self.assertIs(c_float.__ctype_le__, c_float)
self.assertIs(c_float.__ctype_be__.__ctype_le__, c_float)
else:
self.assertIs(c_float.__ctype_be__, c_float)
self.assertIs(c_float.__ctype_le__.__ctype_be__, c_float)
s = c_float(math.pi)
self.assertEqual(bin(struct.pack("f", math.pi)), bin(s))
# Hm, what's the precision of a float compared to a double?
self.assertAlmostEqual(s.value, math.pi, places=6)
s = c_float.__ctype_le__(math.pi)
self.assertAlmostEqual(s.value, math.pi, places=6)
self.assertEqual(bin(struct.pack("<f", math.pi)), bin(s))
s = c_float.__ctype_be__(math.pi)
self.assertAlmostEqual(s.value, math.pi, places=6)
self.assertEqual(bin(struct.pack(">f", math.pi)), bin(s))
def test_endian_double(self):
if sys.byteorder == "little":
self.assertIs(c_double.__ctype_le__, c_double)
self.assertIs(c_double.__ctype_be__.__ctype_le__, c_double)
else:
self.assertIs(c_double.__ctype_be__, c_double)
self.assertIs(c_double.__ctype_le__.__ctype_be__, c_double)
s = c_double(math.pi)
self.assertEqual(s.value, math.pi)
self.assertEqual(bin(struct.pack("d", math.pi)), bin(s))
s = c_double.__ctype_le__(math.pi)
self.assertEqual(s.value, math.pi)
self.assertEqual(bin(struct.pack("<d", math.pi)), bin(s))
s = c_double.__ctype_be__(math.pi)
self.assertEqual(s.value, math.pi)
self.assertEqual(bin(struct.pack(">d", math.pi)), bin(s))
def test_endian_other(self):
self.assertIs(c_byte.__ctype_le__, c_byte)
self.assertIs(c_byte.__ctype_be__, c_byte)
self.assertIs(c_ubyte.__ctype_le__, c_ubyte)
self.assertIs(c_ubyte.__ctype_be__, c_ubyte)
self.assertIs(c_char.__ctype_le__, c_char)
self.assertIs(c_char.__ctype_be__, c_char)
def test_struct_fields_1(self):
if sys.byteorder == "little":
base = BigEndianStructure
else:
base = LittleEndianStructure
class T(base):
pass
_fields_ = [("a", c_ubyte),
("b", c_byte),
("c", c_short),
("d", c_ushort),
("e", c_int),
("f", c_uint),
("g", c_long),
("h", c_ulong),
("i", c_longlong),
("k", c_ulonglong),
("l", c_float),
("m", c_double),
("n", c_char),
("b1", c_byte, 3),
("b2", c_byte, 3),
("b3", c_byte, 2),
("a", c_int * 3 * 3 * 3)]
T._fields_ = _fields_
# these fields do not support different byte order:
for typ in c_wchar, c_void_p, POINTER(c_int):
_fields_.append(("x", typ))
class T(base):
pass
self.assertRaises(TypeError, setattr, T, "_fields_", [("x", typ)])
def test_struct_struct(self):
# nested structures with different byteorders
# create nested structures with given byteorders and set memory to data
for nested, data in (
(BigEndianStructure, b'\0\0\0\1\0\0\0\2'),
(LittleEndianStructure, b'\1\0\0\0\2\0\0\0'),
):
for parent in (
BigEndianStructure,
LittleEndianStructure,
Structure,
):
class NestedStructure(nested):
_fields_ = [("x", c_uint32),
("y", c_uint32)]
class TestStructure(parent):
_fields_ = [("point", NestedStructure)]
self.assertEqual(len(data), sizeof(TestStructure))
ptr = POINTER(TestStructure)
s = cast(data, ptr)[0]
del ctypes._pointer_type_cache[TestStructure]
self.assertEqual(s.point.x, 1)
self.assertEqual(s.point.y, 2)
def test_struct_fields_2(self):
# standard packing in struct uses no alignment.
# So, we have to align using pad bytes.
#
# Unaligned accesses will crash Python (on those platforms that
# don't allow it, like sparc solaris).
if sys.byteorder == "little":
base = BigEndianStructure
fmt = ">bxhid"
else:
base = LittleEndianStructure
fmt = "<bxhid"
class S(base):
_fields_ = [("b", c_byte),
("h", c_short),
("i", c_int),
("d", c_double)]
s1 = S(0x12, 0x1234, 0x12345678, 3.14)
s2 = struct.pack(fmt, 0x12, 0x1234, 0x12345678, 3.14)
self.assertEqual(bin(s1), bin(s2))
def test_unaligned_nonnative_struct_fields(self):
if sys.byteorder == "little":
base = BigEndianStructure
fmt = ">b h xi xd"
else:
base = LittleEndianStructure
fmt = "<b h xi xd"
class S(base):
_pack_ = 1
_fields_ = [("b", c_byte),
("h", c_short),
("_1", c_byte),
("i", c_int),
("_2", c_byte),
("d", c_double)]
s1 = S()
s1.b = 0x12
s1.h = 0x1234
s1.i = 0x12345678
s1.d = 3.14
s2 = struct.pack(fmt, 0x12, 0x1234, 0x12345678, 3.14)
self.assertEqual(bin(s1), bin(s2))
def test_unaligned_native_struct_fields(self):
if sys.byteorder == "little":
fmt = "<b h xi xd"
else:
base = LittleEndianStructure
fmt = ">b h xi xd"
class S(Structure):
_pack_ = 1
_fields_ = [("b", c_byte),
("h", c_short),
("_1", c_byte),
("i", c_int),
("_2", c_byte),
("d", c_double)]
s1 = S()
s1.b = 0x12
s1.h = 0x1234
s1.i = 0x12345678
s1.d = 3.14
s2 = struct.pack(fmt, 0x12, 0x1234, 0x12345678, 3.14)
self.assertEqual(bin(s1), bin(s2))
if __name__ == "__main__":
unittest.main()
| Suwmlee/XX-Net | Python3/lib/ctypes/test/test_byteswap.py | Python | bsd-2-clause | 11,726 |
# -*- coding: UTF-8 -*-
# Copyright 2008-2018 Rumma & Ko Ltd
# License: BSD (see file COPYING for details)
"""Database models for this plugin.
"""
from __future__ import unicode_literals, print_function
import six
from builtins import str
import logging
logger = logging.getLogger(__name__)
import datetime
from dateutil.relativedelta import relativedelta
from django.db import models
from atelier.utils import last_day_of_month
from lino.api import dd, rt, _
from lino import mixins
from lino.utils import mti
from etgen.html import E
from lino.mixins.periods import DateRange
from lino.modlib.users.mixins import UserAuthored
from lino.modlib.printing.mixins import PrintableType
from lino.modlib.checkdata.choicelists import Checker
from lino_xl.lib.accounts.utils import DEBIT, CREDIT, ZERO
from lino_xl.lib.accounts.fields import DebitOrCreditField
from lino_xl.lib.contacts.choicelists import PartnerEvents
from lino.modlib.system.choicelists import ObservedEvent
from .utils import get_due_movements, check_clearings_by_partner
from .choicelists import (FiscalYears, VoucherTypes, VoucherStates,
PeriodStates, JournalGroups, TradeTypes)
from .mixins import ProjectRelated, VoucherNumber, JournalRef, PeriodRangeObservable
from .roles import VoucherSupervisor
# from .mixins import FKMATCH
from .ui import *
class LedgerInfo(dd.Model):
    """Per-user ledger state.  Currently only remembers the last entry
    date the user worked with, so new vouchers can default to it."""
    class Meta:
        app_label = 'ledger'
        allow_cascaded_delete = 'user'
    # one row per user; the user is also the primary key
    user = dd.OneToOneField('users.User', primary_key=True)
    entry_date = models.DateField(
        _("Last entry date"), null=True, blank=True)
    @classmethod
    def get_for_user(cls, user):
        # Return the existing row for `user`, or an *unsaved* fresh
        # instance when none exists yet (caller decides whether to save).
        try:
            return cls.objects.get(user=user)
        except cls.DoesNotExist:
            return cls(user=user)
@dd.python_2_unicode_compatible
class Journal(mixins.BabelNamed,
mixins.Sequenced,
mixins.Referrable,
PrintableType):
class Meta:
app_label = 'ledger'
verbose_name = _("Journal")
verbose_name_plural = _("Journals")
trade_type = TradeTypes.field(blank=True)
voucher_type = VoucherTypes.field()
journal_group = JournalGroups.field()
auto_check_clearings = models.BooleanField(
_("Check clearing"), default=True)
auto_fill_suggestions = models.BooleanField(
_("Fill suggestions"), default=True)
force_sequence = models.BooleanField(
_("Force chronological sequence"), default=False)
account = dd.ForeignKey('accounts.Account', blank=True, null=True)
partner = dd.ForeignKey('contacts.Company', blank=True, null=True)
printed_name = dd.BabelCharField(
_("Printed document designation"), max_length=100, blank=True)
dc = DebitOrCreditField(_("Primary booking direction"))
yearly_numbering = models.BooleanField(
_("Yearly numbering"), default=True)
must_declare = models.BooleanField(default=True)
# invert_due_dc = models.BooleanField(
# _("Invert booking direction"),
# help_text=_("Whether to invert booking direction of due movement."),
# default=True)
def get_doc_model(self):
"""The model of vouchers in this Journal.
"""
# print self,DOCTYPE_CLASSES, self.doctype
return self.voucher_type.model
#~ return DOCTYPES[self.doctype][0]
def get_doc_report(self):
return self.voucher_type.table_class
#~ return DOCTYPES[self.doctype][1]
def get_voucher(self, year=None, number=None, **kw):
cl = self.get_doc_model()
kw.update(journal=self, accounting_period__year=year, number=number)
return cl.objects.get(**kw)
def create_voucher(self, **kw):
"""Create an instance of this Journal's voucher model
(:meth:`get_doc_model`).
"""
cl = self.get_doc_model()
kw.update(journal=self)
try:
doc = cl()
# ~ doc = cl(**kw) # wouldn't work. See Django ticket #10808
#~ doc.journal = self
for k, v in kw.items():
setattr(doc, k, v)
#~ print 20120825, kw
except TypeError:
#~ print 20100804, cl
raise
doc.on_create(None)
#~ doc.full_clean()
#~ doc.save()
return doc
def get_allowed_accounts(self, **kw):
if self.trade_type:
return self.trade_type.get_allowed_accounts(**kw)
# kw.update(chart=self.chart)
return rt.models.accounts.Account.objects.filter(**kw)
    def get_next_number(self, voucher):
        """Return the number to assign to `voucher` in this journal:
        one more than the highest number used so far, scoped to the
        voucher's fiscal year when yearly numbering is active."""
        # ~ self.save() # 20131005 why was this?
        cl = self.get_doc_model()
        flt = dict()
        if self.yearly_numbering:
            flt.update(accounting_period__year=voucher.accounting_period.year)
        d = cl.objects.filter(journal=self, **flt).aggregate(
            models.Max('number'))
        number = d['number__max']
        #~ logger.info("20121206 get_next_number %r",number)
        if number is None:
            return 1
        return number + 1
def __str__(self):
# s = super(Journal, self).__str__()
s = dd.babelattr(self, 'name')
if self.ref:
s += " (%s)" % self.ref
#~ return '%s (%s)' % (d.BabelNamed.__unicode__(self),self.ref or self.id)
return s
#~ return self.ref +'%s (%s)' % mixins.BabelNamed.__unicode__(self)
#~ return self.id +' (%s)' % mixins.BabelNamed.__unicode__(self)
def save(self, *args, **kw):
#~ self.before_save()
r = super(Journal, self).save(*args, **kw)
self.after_save()
return r
def after_save(self):
pass
def full_clean(self, *args, **kw):
if self.dc is None:
if self.trade_type:
self.dc = self.trade_type.dc
elif self.account:
self.dc = self.account.type.dc
else:
self.dc = DEBIT # cannot be NULL
if not self.name:
self.name = self.id
#~ if not self.pos:
#~ self.pos = self.__class__.objects.all().count() + 1
super(Journal, self).full_clean(*args, **kw)
def disable_voucher_delete(self, doc):
# print "pre_delete_voucher", doc.number, self.get_next_number()
if self.force_sequence:
if doc.number + 1 != self.get_next_number(doc):
return _("%s is not the last voucher in journal"
% str(doc))
def get_template_groups(self):
"""Here we override the class method by an instance method. This
means that we must also override all other methods of
Printable who call the *class* method. This is currently only
:meth:`template_choices`.
"""
return [self.voucher_type.model.get_template_group()]
@dd.chooser(simple_values=True)
def template_choices(cls, build_method, voucher_type):
# Overrides PrintableType.template_choices to not use the class
# method `get_template_groups`.
if not voucher_type:
return []
#~ print 20131006, voucher_type
template_groups = [voucher_type.model.get_template_group()]
return cls.get_template_choices(build_method, template_groups)
#
#
#
@dd.python_2_unicode_compatible
class AccountingPeriod(DateRange, mixins.Referrable):
class Meta:
app_label = 'ledger'
verbose_name = _("Accounting period")
verbose_name_plural = _("Accounting periods")
ordering = ['ref']
preferred_foreignkey_width = 10
state = PeriodStates.field(default=PeriodStates.as_callable('open'))
year = FiscalYears.field(blank=True)
remark = models.CharField(_("Remark"), max_length=250, blank=True)
@classmethod
def get_available_periods(cls, entry_date):
"""Return a queryset of peruiods available for booking."""
if entry_date is None: # added 20160531
entry_date = dd.today()
fkw = dict(start_date__lte=entry_date, end_date__gte=entry_date)
return rt.models.ledger.AccountingPeriod.objects.filter(**fkw)
@classmethod
def get_ref_for_date(cls, d):
"""Return a text to be used as :attr:`ref` for a new period.
Alternative implementation for usage on a site with movements
before year 2000::
@classmethod
def get_ref_for_date(cls, d):
if d.year < 2000:
y = str(d.year - 1900)
elif d.year < 2010:
y = "A" + str(d.year - 2000)
elif d.year < 2020:
y = "B" + str(d.year - 2010)
elif d.year < 2030:
y = "C" + str(d.year - 2020)
return y + "{:0>2}".format(d.month)
"""
if dd.plugins.ledger.fix_y2k:
return FiscalYears.from_int(d.year).value \
+ "{:0>2}".format(d.month)
return "{0.year}-{0.month:0>2}".format(d)
"""The template used for building the :attr:`ref` of an
:class:`AccountingPeriod`.
`Format String Syntax
<https://docs.python.org/2/library/string.html#formatstrings>`_
"""
@classmethod
def get_periods_in_range(cls, p1, p2):
return cls.objects.filter(ref__gte=p1.ref, ref__lte=p2.ref)
@classmethod
def get_period_filter(cls, fieldname, p1, p2, **kwargs):
if p1 is None:
return kwargs
if p2 is None:
kwargs[fieldname] = p1
else:
periods = cls.get_periods_in_range(p1, p2)
kwargs[fieldname+'__in'] = periods
return kwargs
@classmethod
def get_default_for_date(cls, d):
ref = cls.get_ref_for_date(d)
obj = rt.models.ledger.AccountingPeriod.get_by_ref(ref, None)
if obj is None:
values = dict(start_date=d.replace(day=1))
values.update(end_date=last_day_of_month(d))
values.update(ref=ref)
obj = AccountingPeriod(**values)
obj.full_clean()
obj.save()
return obj
def full_clean(self, *args, **kwargs):
if self.start_date is None:
self.start_date = dd.today().replace(day=1)
if not self.year:
self.year = FiscalYears.from_date(self.start_date)
super(AccountingPeriod, self).full_clean(*args, **kwargs)
def __str__(self):
if not self.ref:
return dd.obj2str(self)
# "{0} {1} (#{0})".format(self.pk, self.year)
return self.ref
AccountingPeriod.set_widget_options('ref', width=6)
class PaymentTerm(mixins.BabelNamed, mixins.Referrable):
    """A convention for deriving an invoice's due date from its base
    date (e.g. "30 days end of month")."""
    class Meta:
        app_label = 'ledger'
        verbose_name = _("Payment term")
        verbose_name_plural = _("Payment terms")
    # offsets added to the base date
    days = models.IntegerField(_("Days"), default=0)
    months = models.IntegerField(_("Months"), default=0)
    # when set, the due date is snapped to the last day of its month
    end_of_month = models.BooleanField(_("End of month"), default=False)
    printed_text = dd.BabelTextField(
        _("Printed text"), blank=True, format='plain')
    def get_due_date(self, date1):
        """Return `date1` shifted by the configured month/day offsets,
        optionally snapped to the end of the month."""
        assert isinstance(date1, datetime.date), \
            "%s is not a date" % date1
        d = date1 + relativedelta(months=self.months, days=self.days)
        if self.end_of_month:
            d = last_day_of_month(d)
        return d
@dd.python_2_unicode_compatible
class Voucher(UserAuthored, mixins.Registrable, PeriodRangeObservable):
manager_roles_required = dd.login_required(VoucherSupervisor)
class Meta:
app_label = 'ledger'
verbose_name = _("Voucher")
verbose_name_plural = _("Vouchers")
journal = JournalRef()
entry_date = models.DateField(_("Entry date"))
voucher_date = models.DateField(_("Voucher date"))
accounting_period = dd.ForeignKey(
'ledger.AccountingPeriod', blank=True)
number = VoucherNumber(_("No."), blank=True, null=True)
narration = models.CharField(_("Narration"), max_length=200, blank=True)
state = VoucherStates.field(
default=VoucherStates.as_callable('draft'))
workflow_state_field = 'state'
#~ @classmethod
#~ def create_journal(cls,id,**kw):
#~ doctype = get_doctype(cls)
#~ jnl = Journal(doctype=doctype,id=id,**kw)
#~ return jnl
@property
def currency(self):
"""This is currently used only in some print templates.
"""
return dd.plugins.ledger.currency_symbol
# @classmethod
# def setup_parameters(cls, **fields):
# fields.setdefault(
# 'accounting_period', dd.ForeignKey(
# 'ledger.AccountingPeriod', blank=True, null=True))
# return super(Voucher, cls).setup_parameters(**fields)
# @classmethod
# def get_simple_parameters(cls):
# s = super(Voucher, cls).get_simple_parameters()
# s.add('accounting_period')
# return s
@dd.displayfield(_("No."))
def number_with_year(self, ar):
return "{0}/{1}".format(self.number, self.accounting_period.year)
@classmethod
def quick_search_filter(model, search_text, prefix=''):
"""Overrides :meth:`lino.core.model.Model.quick_search_filter`.
Examples:
123 -> voucher number 123 in current year
123/2014 -> voucher number 123 in 2014
"""
# logger.info(
# "20160612 Voucher.quick_search_filter(%s, %r, %r)",
# model, search_text, prefix)
parts = search_text.split('/')
if len(parts) == 2:
kw = {
prefix + 'number': parts[0],
prefix + 'accounting_period__year': parts[1]}
return models.Q(**kw)
if search_text.isdigit() and not search_text.startswith('0'):
kw = {
prefix + 'number': int(search_text),
prefix + 'accounting_period__year':
FiscalYears.from_date(dd.today())}
return models.Q(**kw)
return super(Voucher, model).quick_search_filter(search_text, prefix)
def full_clean(self, *args, **kwargs):
if self.entry_date is None:
self.entry_date = dd.today()
if self.voucher_date is None:
self.voucher_date = self.entry_date
if not self.accounting_period_id:
self.accounting_period = AccountingPeriod.get_default_for_date(
self.entry_date)
if self.number is None:
self.number = self.journal.get_next_number(self)
super(Voucher, self).full_clean(*args, **kwargs)
def on_create(self, ar):
super(Voucher, self).on_create(ar)
if self.entry_date is None:
if ar is None:
self.entry_date = dd.today()
else:
info = LedgerInfo.get_for_user(ar.get_user())
self.entry_date = info.entry_date or dd.today()
def entry_date_changed(self, ar):
self.accounting_period = AccountingPeriod.get_default_for_date(
self.entry_date)
self.voucher_date = self.entry_date
self.accounting_period_changed(ar)
info = LedgerInfo.get_for_user(ar.get_user())
info.entry_date = self.entry_date
info.full_clean()
info.save()
def accounting_period_changed(self, ar):
"""If user changes the :attr:`accounting_period`, then the `number`
might need to change.
"""
self.number = self.journal.get_next_number(self)
def get_due_date(self):
return self.entry_date
def get_trade_type(self):
return self.journal.trade_type
def get_printed_name(self):
return dd.babelattr(self.journal, 'printed_name')
def get_partner(self):
"""
Return the partner related to this voucher. Overridden by
PartnerRelated vouchers.
"""
return None
def after_ui_save(self, ar, cw):
super(Voucher, self).after_ui_save(ar, cw)
p = self.get_partner()
if p is None:
return
tt = self.get_trade_type()
account = tt.get_partner_invoice_account(p)
if account is None:
return
if self.items.exists():
return
i = self.add_voucher_item(account=account)
i.full_clean()
i.save()
@classmethod
def get_journals(cls):
vt = VoucherTypes.get_for_model(cls)
return Journal.objects.filter(voucher_type=vt).order_by('seqno')
@dd.chooser()
def unused_accounting_period_choices(cls, entry_date):
# deactivated because it also limits the choices of the
# parameter field (which is a Lino bug)
return rt.models.ledger.AccountingPeriod.get_available_periods(
entry_date)
@dd.chooser()
def journal_choices(cls):
# logger.info("20140603 journal_choices %r", cls)
return cls.get_journals()
@classmethod
def create_journal(cls, trade_type=None, account=None, **kw):
vt = VoucherTypes.get_for_model(cls)
if isinstance(trade_type, six.string_types):
trade_type = TradeTypes.get_by_name(trade_type)
if isinstance(account, six.string_types):
account = rt.models.accounts.Account.get_by_ref(account)
if account is not None:
kw.update(account=account)
return Journal(trade_type=trade_type, voucher_type=vt, **kw)
def __str__(self):
if self.number is None:
return "{0}#{1}".format(self.journal.ref, self.id)
if self.journal.yearly_numbering:
return "{0} {1}/{2}".format(self.journal.ref, self.number,
self.accounting_period.year)
return "{0} {1}".format(self.journal.ref, self.number)
# if self.journal.ref:
# return "%s %s" % (self.journal.ref,self.number)
# return "#%s (%s %s)" % (self.number,self.journal,self.year)
def get_default_match(self):
return str(self)
# return "%s#%s" % (self.journal.ref, self.id)
# return "%s%s" % (self.id, self.journal.ref)
# def get_voucher_match(self):
# return str(self) # "{0}{1}".format(self.journal.ref, self.number)
def set_workflow_state(self, ar, state_field, newstate):
""""""
if newstate.name == 'registered':
self.register_voucher(ar)
elif newstate.name == 'draft':
self.deregister_voucher(ar)
super(Voucher, self).set_workflow_state(ar, state_field, newstate)
# doit(ar)
# if newstate.name == 'registered':
# ar.confirm(
# doit,
# _("Are you sure you want to register "
# "voucher {0}?").format(self))
# else:
# doit(ar)
# def before_state_change(self, ar, old, new):
# if new.name == 'registered':
# self.register_voucher(ar)
# elif new.name == 'draft':
# self.deregister_voucher(ar)
# super(Voucher, self).before_state_change(ar, old, new)
def register_voucher(self, ar, do_clear=True):
"""
Delete any existing movements and re-create them
"""
# dd.logger.info("20151211 cosi.Voucher.register_voucher()")
# self.year = FiscalYears.from_date(self.entry_date)
# dd.logger.info("20151211 movement_set.all().delete()")
def doit(partners):
seqno = 0
# dd.logger.info("20151211 gonna call get_wanted_movements()")
movements = self.get_wanted_movements()
# dd.logger.info("20151211 gonna save %d movements", len(movements))
# self.full_clean()
# self.save()
fcu = dd.plugins.ledger.force_cleared_until
for m in movements:
seqno += 1
m.seqno = seqno
if fcu and self.entry_date <= fcu:
m.cleared = True
m.full_clean()
m.save()
if m.partner:
partners.add(m.partner)
self.do_and_clear(doit, do_clear)
def deregister_voucher(self, ar, do_clear=True):
def doit(partners):
pass
self.do_and_clear(doit, do_clear)
def do_and_clear(self, func, do_clear):
"""Delete all movements of this voucher, then run the given callable
`func`, passing it a set with all partners who had at least
one movement in this voucher. The function is expected to add
more partners to this set. Then call `check_clearings` for
all these partners.
"""
existing_mvts = self.movement_set.all()
partners = set()
# accounts = set()
if not self.journal.auto_check_clearings:
do_clear = False
if do_clear:
for m in existing_mvts.filter(
account__clearable=True, partner__isnull=False):
partners.add(m.partner)
existing_mvts.delete()
func(partners)
if do_clear:
for p in partners:
check_clearings_by_partner(p)
# for a in accounts:
# check_clearings_by_account(a)
# dd.logger.info("20151211 Done cosi.Voucher.register_voucher()")
def disable_delete(self, ar=None):
msg = self.journal.disable_voucher_delete(self)
if msg is not None:
return msg
return super(Voucher, self).disable_delete(ar)
def get_wanted_movements(self):
raise NotImplementedError()
def create_movement(self, item, acc_tuple, project, dc, amount, **kw):
"""Create a movement for this voucher.
The specified `item` may be `None` if this the movement is
caused by more than one item. It is used by
:class:`DatedFinancialVoucher
<lino_xl.lib.finan.mixins.DatedFinancialVoucher>`.
"""
# dd.logger.info("20151211 ledger.create_movement()")
account, ana_account = acc_tuple
if account is None and item is not None:
raise Warning("No account specified for {}".format(item))
if not isinstance(account, rt.models.accounts.Account):
raise Warning("{} is not an Account object".format(account))
kw['voucher'] = self
kw['account'] = account
if ana_account is not None:
kw['ana_account'] = ana_account
kw['value_date'] = self.entry_date
if account.clearable:
kw.update(cleared=False)
else:
kw.update(cleared=True)
if dd.plugins.ledger.project_model:
kw['project'] = project
if amount < 0:
amount = - amount
dc = not dc
kw['amount'] = amount
kw['dc'] = dc
b = rt.models.ledger.Movement(**kw)
return b
#~ def get_row_permission(self,ar,state,ba):
#~ """
#~ Only invoices in an editable state may be edited.
#~ """
#~ if not ba.action.readonly and self.state is not None and not self.state.editable:
#~ return False
#~ return super(Voucher,self).get_row_permission(ar,state,ba)
def get_mti_leaf(self):
return mti.get_child(self, self.journal.voucher_type.model)
# def obj2html(self, ar):
def obj2href(self, ar):
return ar.obj2html(self.get_mti_leaf())
#~ def add_voucher_item(self,account=None,**kw):
#~ if account is not None:
#~ if not isinstance(account,accounts.Account):
#~ if isinstance(account,six.string_types):
#~ account = self.journal.chart.get_account_by_ref(account)
#~ kw['account'] = account
def add_voucher_item(self, account=None, **kw):
if account is not None:
if isinstance(account, six.string_types):
account = rt.models.accounts.Account.get_by_ref(account)
kw['account'] = account
kw.update(voucher=self)
#~ logger.info("20131116 %s",self.items.model)
return self.items.model(**kw)
#~ return super(AccountInvoice,self).add_voucher_item(**kw)
def get_bank_account(self):
"""Return the `sepa.Account` object to which this voucher is to be
paid. This is needed by
:class:`lino_xl.lib.ledger.utils.DueMovement`.
"""
return None
# raise NotImplementedError()
Voucher.set_widget_options('number_with_year', width=8)
@dd.python_2_unicode_compatible
class Movement(ProjectRelated, PeriodRangeObservable):
allow_cascaded_delete = ['voucher']
class Meta:
app_label = 'ledger'
verbose_name = _("Movement")
verbose_name_plural = _("Movements")
observable_period_field = 'voucher__accounting_period'
voucher = dd.ForeignKey('ledger.Voucher')
partner = dd.ForeignKey(
'contacts.Partner',
related_name="%(app_label)s_%(class)s_set_by_partner",
blank=True, null=True)
seqno = models.IntegerField(_("Seq.No."))
account = dd.ForeignKey('accounts.Account')
amount = dd.PriceField(default=0)
dc = DebitOrCreditField()
match = models.CharField(_("Match"), blank=True, max_length=20)
# match = MatchField(blank=True, null=True)
cleared = models.BooleanField(_("Cleared"), default=False)
# 20160327: rename "satisfied" to "cleared"
value_date = models.DateField(_("Value date"), null=True, blank=True)
@dd.chooser(simple_values=True)
def match_choices(cls, partner, account):
qs = cls.objects.filter(
partner=partner, account=account, cleared=False)
qs = qs.order_by('value_date')
return qs.values_list('match', flat=True)
def select_text(self):
v = self.voucher.get_mti_leaf()
if v is None:
return str(self.voucher)
return "%s (%s)" % (v, v.entry_date)
@dd.virtualfield(dd.PriceField(_("Debit")))
def debit(self, ar):
if self.dc:
return None
return self.amount
@dd.virtualfield(dd.PriceField(_("Credit")))
def credit(self, ar):
if self.dc:
return self.amount
return None
@dd.displayfield(_("Voucher"))
def voucher_link(self, ar):
if ar is None:
return ''
return ar.obj2html(self.voucher.get_mti_leaf())
@dd.displayfield(_("Voucher partner"))
def voucher_partner(self, ar):
if ar is None:
return ''
voucher = self.voucher.get_mti_leaf()
if voucher is None:
return ''
p = voucher.get_partner()
if p is None:
return ''
return ar.obj2html(p)
@dd.displayfield(_("Match"))
def match_link(self, ar):
if ar is None or not self.match:
return ''
sar = rt.models.ledger.MovementsByMatch.request(
master_instance=self.match, parent=ar)
return sar.ar2button(label=self.match)
#~ @dd.displayfield(_("Matched by"))
#~ def matched_by(self,ar):
#~ elems = [obj.voucher_link(ar) for obj in Movement.objects.filter(match=self)]
#~ return E.div(*elems)
def get_siblings(self):
return self.voucher.movement_set.all()
#~ return self.__class__.objects.filter().order_by('seqno')
def __str__(self):
return "%s.%d" % (str(self.voucher), self.seqno)
# def get_match(self):
# return self.match or str(self.voucher)
@classmethod
def get_balance(cls, dc, qs):
bal = ZERO
for mvt in qs:
if mvt.dc == dc:
bal += mvt.amount
else:
bal -= mvt.amount
return bal
@classmethod
def balance_info(cls, dc, **kwargs):
qs = cls.objects.filter(**kwargs)
qs = qs.order_by('value_date')
bal = ZERO
s = ''
for mvt in qs:
amount = mvt.amount
if mvt.dc == dc:
bal -= amount
s += ' -' + str(amount)
else:
bal += amount
s += ' +' + str(amount)
s += " ({0}) ".format(mvt.voucher)
# s += " ({0} {1}) ".format(
# mvt.voucher,
# dd.fds(mvt.voucher.voucher_date))
if bal:
return s + "= " + str(bal)
return ''
if False:
from lino_xl.lib.cal.utils import day_and_month
mvts = []
for dm in get_due_movements(CREDIT, partner=self.pupil):
s = dm.match
s += " [{0}]".format(day_and_month(dm.due_date))
s += " ("
s += ', '.join([str(i.voucher) for i in dm.debts])
if len(dm.payments):
s += " - "
s += ', '.join([str(i.voucher) for i in dm.payments])
s += "): {0}".format(dm.balance)
mvts.append(s)
return '\n'.join(mvts)
Movement.set_widget_options('voucher_link', width=12)
class MatchRule(dd.Model):
    """Declares that a given account may receive matching movements in
    a given journal (one rule per unique account/journal pair)."""
    class Meta:
        app_label = 'ledger'
        verbose_name = _("Match rule")
        verbose_name_plural = _("Match rules")
        unique_together = ['account', 'journal']
    account = dd.ForeignKey('accounts.Account')
    journal = JournalRef()
    @dd.chooser()
    def unused_account_choices(self, journal):
        # would be nice, but doesn't work because matchrules are
        # usually entered via MatchRulesByJournal where journal is
        # always None.
        if journal:
            fkw = {journal.trade_type.name + '_allowed': True}
            return rt.models.accounts.Account.objects.filter(**fkw)
        print("20151221 journal is None")
        return []
# For every trade type, add a boolean field "<tradetype>_allowed" to
# accounts.Account, used by MatchRule.unused_account_choices above.
for tt in TradeTypes.objects():
    dd.inject_field(
        'accounts.Account',
        tt.name + '_allowed',
        models.BooleanField(verbose_name=tt.text, default=False))

# Every partner gets a default payment term, applied to new sales
# invoices for that partner.
dd.inject_field(
    'contacts.Partner',
    'payment_term',
    dd.ForeignKey(
        'ledger.PaymentTerm',
        blank=True, null=True,
        help_text=_("The default payment term for "
                    "sales invoices to this customer.")))
class VoucherChecker(Checker):
    "Check for wrong ledger movements"

    verbose_name = _("Check integrity of ledger movements")
    messages = dict(
        missing=_("Missing movement {0}."),
        unexpected=_("Unexpected movement {0}."),
        diff=_("Movement {0} : {1} {2} != {3}."),
    )

    def get_checkable_models(self):
        # Check every concrete voucher subclass, but not the abstract-ish
        # base Voucher model itself.
        for m in rt.models_by_base(Voucher):
            if m is not Voucher:
                yield m

    def get_checkdata_problems(self, obj, fix=False):
        """Compare the stored movements of voucher *obj* with the
        movements it *should* generate, yielding ``(fixable, message)``
        problems for unexpected, differing or missing movements.
        """
        if obj.__class__ is rt.models.ledger.Voucher:
            return

        def m2k(obj):
            # Movements are keyed by their sequence number within the
            # voucher.
            return obj.seqno

        wanted = dict()
        seqno = 0
        fcu = dd.plugins.ledger.force_cleared_until
        for m in obj.get_wanted_movements():
            seqno += 1
            m.seqno = seqno
            if fcu and obj.entry_date <= fcu:
                m.cleared = True
            m.full_clean()
            wanted[m2k(m)] = m

        for em in obj.movement_set.all():
            wm = wanted.pop(m2k(em), None)
            if wm is None:
                # NOTE(review): reporting stops at the first problem
                # found (early return after each yield) -- presumably
                # deliberate to avoid cascades of follow-up diffs.
                yield (False, self.messages['unexpected'].format(em))
                return
            for k in ('partner_id', 'account_id', 'dc', 'amount',
                      'value_date'):
                emv = getattr(em, k)
                wmv = getattr(wm, k)
                if emv != wmv:
                    yield (False, self.messages['diff'].format(
                        em, k, emv, wmv))
                    return

        if wanted:
            for missing in wanted.values():
                yield (False, self.messages['missing'].format(missing))
            return


VoucherChecker.activate()
class PartnerHasOpenMovements(ObservedEvent):
    """Observed event that selects partners having at least one
    uncleared ledger movement, optionally restricted to the observed
    date range."""

    text = _("Has open movements")

    def add_filter(self, qs, pv):
        # Each .filter() call on the multi-valued "movement" relation is
        # kept separate deliberately: chained filters over a to-many
        # relation have different semantics from a single combined
        # filter in Django.
        qs = qs.filter(movement__cleared=False)
        if pv.end_date:
            qs = qs.filter(movement__value_date__lte=pv.end_date)
        if pv.start_date:
            qs = qs.filter(movement__value_date__gte=pv.start_date)
        # distinct() because a partner with several matching movements
        # would otherwise appear once per movement.
        return qs.distinct()


PartnerEvents.add_item_instance(
    PartnerHasOpenMovements("has_open_movements"))
| khchine5/xl | lino_xl/lib/ledger/models.py | Python | bsd-2-clause | 32,456 |
from __future__ import print_function, division
import sys,os
qspin_path = os.path.join(os.getcwd(),"../")
sys.path.insert(0,qspin_path)
#
from quspin.operators import hamiltonian # Hamiltonians and operators
from quspin.basis import spin_basis_1d # Hilbert space spin basis
from quspin.tools.measurements import mean_level_spacing
import numpy as np # generic math functions
#
L=12 # system size
# coupling strengths
J=1.0 # spin-spin coupling
h=0.8945 # x-field strength
g=0.945 # z-field strength
# create site-coupling lists
J_zz=[[J,i,(i+1)%L] for i in range(L)] # PBC
x_field=[[h,i] for i in range(L)]
z_field=[[g,i] for i in range(L)]
# create static and dynamic lists
static_2=[["zz",J_zz],["x",x_field],["z",z_field]]
dynamic=[]
# create spin-1/2 basis (zero-momentum, positive-parity sector)
basis=spin_basis_1d(L,kblock=0,pblock=1)
# set up Hamiltonian
H2=hamiltonian(static_2,dynamic,basis=basis,dtype=np.float64)
# compute eigensystem of H2
E2=H2.eigvalsh()
# calculate mean level spacing of spectrum E2
r=mean_level_spacing(E2)
print("mean level spacing is", r)
# duplicate an eigenvalue to make the spectrum degenerate -- presumably
# to exercise mean_level_spacing's handling of zero spacings; confirm
# against the quspin docs.
E2=np.insert(E2,-1,E2[-1])
r=mean_level_spacing(E2)
print("mean level spacing is", r)
# degenerate again, this time with warnings silenced via verbose=False
E2=np.insert(E2,-1,E2[-1])
r=mean_level_spacing(E2,verbose=False)
print("mean level spacing is", r) | weinbe58/QuSpin | tests/mean_level_spacing_test.py | Python | bsd-3-clause | 1,223 |
from django.contrib import admin
from puzzle_captcha.models import Puzzle, PuzzlePiece
class PuzzlePieceInline(admin.StackedInline):
    """Read-only stacked inline showing the pieces of a Puzzle; pieces
    cannot be deleted or added from the admin."""
    model = PuzzlePiece
    readonly_fields = ('key', 'image', 'order')
    can_delete = False
    extra = 0
class PuzzleAdmin(admin.ModelAdmin):
    """Admin for Puzzle: all fields read-only, pieces shown inline."""
    list_display = ('key', 'rows', 'cols')
    readonly_fields = ('key', 'rows', 'cols')

    # NOTE(review): ModelAdmin does not read an inner Meta class -- this
    # declaration is ignored by Django (the model is bound at
    # admin.site.register() time below).  Harmless, but could be removed.
    class Meta:
        model = Puzzle

    inlines = [
        PuzzlePieceInline,
    ]
admin.site.register(Puzzle, PuzzleAdmin)
| MegaMark16/django-puzzle-captcha | puzzle_captcha/admin.py | Python | bsd-3-clause | 505 |
# proxy module
from __future__ import absolute_import
from apptools.type_manager.hook import *
| enthought/etsproxy | enthought/type_manager/hook.py | Python | bsd-3-clause | 95 |
# This file is part of the ISIS IBEX application.
# Copyright (C) 2012-2016 Science & Technology Facilities Council.
# All rights reserved.
#
# This program is distributed in the hope that it will be useful.
# This program and the accompanying materials are made available under the
# terms of the Eclipse Public License v1.0 which accompanies this distribution.
# EXCEPT AS EXPRESSLY SET FORTH IN THE ECLIPSE PUBLIC LICENSE V1.0, THE PROGRAM
# AND ACCOMPANYING MATERIALS ARE PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND. See the Eclipse Public License v1.0 for more details.
#
# You should have received a copy of the Eclipse Public License v1.0
# along with this program; if not, you can obtain a copy from
# https://www.eclipse.org/org/documents/epl-v10.php or
# http://opensource.org/licenses/eclipse-1.0.php
import unittest
import json
import os
from mock import Mock
from parameterized import parameterized
from BlockServer.config.block import Block
from BlockServer.config.configuration import Configuration
from BlockServer.config.ioc import IOC
from BlockServer.core.active_config_holder import (ActiveConfigHolder, _blocks_changed, _blocks_changed_in_config,
_compare_ioc_properties)
from BlockServer.mocks.mock_ioc_control import MockIocControl
from BlockServer.core.macros import MACROS
from BlockServer.mocks.mock_file_manager import MockConfigurationFileManager
from BlockServer.test_modules.helpers import modify_active
from server_common.constants import IS_LINUX
CONFIG_PATH = "./test_configs/"
BASE_PATH = "./example_base/"
# Helper methods
def quick_block_to_json(name, pv, group, local=True):
    """Build the dict describing one block, in the shape expected by
    ``ActiveConfigHolder.add_block()``.
    """
    return dict(name=name, pv=pv, group=group, local=local)
def add_basic_blocks_and_iocs(config_holder):
    """Populate *config_holder* with the four standard test blocks
    (TESTBLOCK1..4) and two IOCs (SIMPLE1, SIMPLE2) used across tests.
    """
    standard_blocks = (
        ("TESTBLOCK1", "PV1", "GROUP1"),
        ("TESTBLOCK2", "PV2", "GROUP2"),
        ("TESTBLOCK3", "PV3", "GROUP2"),
        ("TESTBLOCK4", "PV4", "NONE"),
    )
    for block_name, pv, group in standard_blocks:
        config_holder.add_block(quick_block_to_json(block_name, pv, group, True))
    for ioc_name in ("SIMPLE1", "SIMPLE2"):
        config_holder._add_ioc(ioc_name)
def get_groups_and_blocks(jsondata):
    """Decode a JSON string describing groups and blocks into the
    corresponding Python objects."""
    return json.loads(jsondata)
def create_grouping(groups):
    """Encode ``{group_name: [block, ...]}`` as the JSON list-of-objects
    format the BlockServer uses for group configuration.
    """
    grouping = []
    for group_name, block_names in groups.items():
        grouping.append({"name": group_name, "blocks": block_names})
    return json.dumps(grouping)
def create_dummy_component():
    """Build a Configuration flagged as a component, containing two
    blocks (COMPBLOCK1/2) and one IOC (COMPSIMPLE1), for the
    component-related tests.
    """
    component = Configuration(MACROS)
    for block_name, pv, group in (("COMPBLOCK1", "PV1", "GROUP1"),
                                  ("COMPBLOCK2", "PV2", "COMPGROUP")):
        component.add_block(block_name, pv, group, True)
    component.add_ioc("COMPSIMPLE1")
    component.is_component = True
    return component
# Note that the ActiveConfigServerManager contains an instance of the Configuration class and hands a lot of
# work off to this object. Rather than testing whether the functionality in the configuration class works
# correctly (e.g. by checking that a block has been edited properly after calling configuration.edit_block),
# we should instead test that ActiveConfigServerManager passes the correct parameters to the Configuration object.
# We are testing that ActiveConfigServerManager correctly interfaces with Configuration, not testing the
# functionality of Configuration, which is done in Configuration's own suite of tests.
class TestActiveConfigHolderSequence(unittest.TestCase):
    """Tests for ActiveConfigHolder: saving/loading configurations and
    components, block/IOC change detection, and archiver updates.

    All file and IOC access goes through mocks, so nothing touches disk
    (except where skipped on Linux) or real IOCs.
    """

    def setUp(self):
        # Note: All configurations are saved in memory
        self.mock_archive = Mock()
        self.mock_archive.update_archiver = Mock()
        self.mock_file_manager = MockConfigurationFileManager()
        self.active_config_holder = self.create_active_config_holder()

    def create_active_config_holder(self):
        # Build a fresh holder wired to the shared mocks; several tests
        # create their own instance to start from a clean state.
        config_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "settings")
        return ActiveConfigHolder(MACROS, self.mock_archive, self.mock_file_manager, MockIocControl(""), config_dir)

    def test_add_ioc(self):
        config_holder = self.active_config_holder
        iocs = config_holder.get_ioc_names()
        self.assertEqual(len(iocs), 0)
        config_holder._add_ioc("SIMPLE1")
        config_holder._add_ioc("SIMPLE2")
        iocs = config_holder.get_ioc_names()
        self.assertTrue("SIMPLE1" in iocs)
        self.assertTrue("SIMPLE2" in iocs)

    @unittest.skipIf(IS_LINUX, "Unable to save config on Linux")
    def test_save_config(self):
        config_holder = self.active_config_holder
        add_basic_blocks_and_iocs(config_holder)
        try:
            config_holder.save_active("TEST_CONFIG")
        except Exception as e:
            self.fail(f"test_save_config raised Exception unexpectedly: {e}")

    @unittest.skipIf(IS_LINUX, "Location of last_config.txt not correctly configured on Linux")
    def test_load_config(self):
        config_holder = self.active_config_holder
        add_basic_blocks_and_iocs(config_holder)
        config_holder.save_active("TEST_CONFIG")
        config_holder.clear_config()
        # After clearing, holder must be empty before the load.
        blocks = config_holder.get_blocknames()
        self.assertEqual(len(blocks), 0)
        iocs = config_holder.get_ioc_names()
        self.assertEqual(len(iocs), 0)
        config_holder.load_active("TEST_CONFIG")
        blocks = config_holder.get_blocknames()
        self.assertEqual(len(blocks), 4)
        self.assertTrue('TESTBLOCK1' in blocks)
        self.assertTrue('TESTBLOCK2' in blocks)
        self.assertTrue('TESTBLOCK3' in blocks)
        self.assertTrue('TESTBLOCK4' in blocks)
        iocs = config_holder.get_ioc_names()
        self.assertTrue("SIMPLE1" in iocs)
        self.assertTrue("SIMPLE2" in iocs)

    @unittest.skipIf(IS_LINUX, "Location of last_config.txt not correctly configured on Linux")
    def test_GIVEN_load_config_WHEN_load_config_again_THEN_no_ioc_changes(self):
        # This test is checking that a load will correctly cache the IOCs that are running so that a comparison will
        # return no change
        config_holder = self.active_config_holder
        add_basic_blocks_and_iocs(config_holder)
        config_holder.save_active("TEST_CONFIG")
        config_holder.clear_config()
        blocks = config_holder.get_blocknames()
        self.assertEqual(len(blocks), 0)
        iocs = config_holder.get_ioc_names()
        self.assertEqual(len(iocs), 0)
        config_holder.load_active("TEST_CONFIG")
        config_holder.load_active("TEST_CONFIG")
        iocs_to_start, iocs_to_restart, iocs_to_stop = config_holder.iocs_changed()
        self.assertEqual(len(iocs_to_start), 0)
        self.assertEqual(len(iocs_to_restart), 0)
        self.assertEqual(len(iocs_to_stop), 0)

    def test_load_notexistant_config(self):
        config_holder = self.active_config_holder
        self.assertRaises(IOError, lambda: config_holder.load_active("DOES_NOT_EXIST"))

    def test_save_as_component(self):
        config_holder = self.active_config_holder
        try:
            config_holder.save_active("TEST_CONFIG1", as_comp=True)
        except Exception as e:
            self.fail(f"test_save_as_component raised Exception unexpectedly: {e}")

    @unittest.skipIf(IS_LINUX, "Unable to save config on Linux")
    def test_save_config_for_component(self):
        # Saving a config with the same name as an existing component
        # must still succeed.
        config_holder = self.active_config_holder
        config_holder.save_active("TEST_CONFIG1", as_comp=True)
        try:
            config_holder.save_active("TEST_CONFIG1")
        except Exception as e:
            self.fail(f"test_save_config_for_component raised Exception unexpectedly: {e}")

    def test_load_component_fails(self):
        # A component must not be loadable as if it were a configuration.
        config_holder = self.active_config_holder
        add_basic_blocks_and_iocs(config_holder)
        config_holder.save_active("TEST_COMPONENT", as_comp=True)
        config_holder.clear_config()
        self.assertRaises(IOError, lambda: config_holder.load_active("TEST_COMPONENT"))

    @unittest.skipIf(IS_LINUX, "Location of last_config.txt not correctly configured on Linux")
    def test_load_last_config(self):
        config_holder = self.active_config_holder
        add_basic_blocks_and_iocs(config_holder)
        config_holder.save_active("TEST_CONFIG")
        config_holder.clear_config()
        blocks = config_holder.get_blocknames()
        self.assertEqual(len(blocks), 0)
        iocs = config_holder.get_ioc_names()
        self.assertEqual(len(iocs), 0)
        config_holder.load_last_config()
        # 3 groups expected: GROUP1, GROUP2 and NONE.
        grps = config_holder.get_group_details()
        self.assertTrue(len(grps) == 3)
        blocks = config_holder.get_blocknames()
        self.assertEqual(len(blocks), 4)
        self.assertTrue('TESTBLOCK1' in blocks)
        self.assertTrue('TESTBLOCK2' in blocks)
        self.assertTrue('TESTBLOCK3' in blocks)
        self.assertTrue('TESTBLOCK4' in blocks)
        iocs = config_holder.get_ioc_names()
        self.assertTrue("SIMPLE1" in iocs)
        self.assertTrue("SIMPLE2" in iocs)

    def test_reloading_current_config_with_blank_name_does_nothing(self):
        # arrange
        config_name = self.active_config_holder.get_config_name()
        self.assertEqual(config_name, "")
        load_requests = self.mock_file_manager.get_load_config_history()
        self.assertEqual(len(load_requests), 0)
        # act
        self.active_config_holder.reload_current_config()
        # assert
        load_requests = self.mock_file_manager.get_load_config_history()
        self.assertEqual(len(load_requests), 0)

    @unittest.skipIf(IS_LINUX, "Location of last_config.txt not correctly configured on Linux")
    def test_reloading_current_config_sends_load_request_correctly(self):
        # arrange
        config_holder = self.active_config_holder
        config_name = "TEST_CONFIG"
        add_basic_blocks_and_iocs(config_holder)
        config_holder.save_active(config_name)
        load_requests = self.mock_file_manager.get_load_config_history()
        self.assertEqual(len(load_requests), 0)
        # act
        config_holder.reload_current_config()
        # assert
        load_requests = self.mock_file_manager.get_load_config_history()
        self.assertEqual(load_requests.count(config_name), 1)

    def _modify_active(self, config_holder, new_details, name="config1"):
        # Helper: push *new_details* into the holder as if the user had
        # edited and saved the active configuration.
        modify_active(name, MACROS, self.mock_file_manager, new_details, config_holder)

    def test_iocs_changed_no_changes(self):
        # Arrange
        config_holder = self.create_active_config_holder()
        details = config_holder.get_config_details()
        self._modify_active(config_holder, details)
        # Assert
        start, restart, stop = config_holder.iocs_changed()
        self.assertEqual(len(start), 0)
        self.assertEqual(len(restart), 0)
        self.assertEqual(len(stop), 0)

    def test_iocs_changed_ioc_added(self):
        # Arrange
        config_holder = self.create_active_config_holder()
        details = config_holder.get_config_details()
        # Act
        details['iocs'].append(IOC("NAME"))
        self._modify_active(config_holder, details)
        # Assert
        start, restart, stop = config_holder.iocs_changed()
        self.assertEqual(len(start), 1)
        self.assertEqual(len(restart), 0)
        self.assertEqual(len(stop), 0)

    def test_iocs_changed_ioc_removed(self):
        # Arrange
        config_holder = self.create_active_config_holder()
        details = config_holder.get_config_details()
        details['iocs'].append(IOC("NAME"))
        self._modify_active(config_holder, details)
        # Act
        details['iocs'].pop(0)
        self._modify_active(config_holder, details)
        # Assert
        start, restart, stop = config_holder.iocs_changed()
        self.assertEqual(len(start), 0)
        self.assertEqual(len(restart), 0)
        self.assertEqual(len(stop), 1)

    def test_GIVEN_an_ioc_defined_in_a_component_WHEN_the_component_is_removed_THEN_the_ioc_is_stopped(self):
        # Arrange
        config_holder = self.create_active_config_holder()
        component = create_dummy_component()
        component.iocs = {"DUMMY_IOC": IOC("dummyname")}
        self.mock_file_manager.comps["component_name"] = component
        config_holder.add_component("component_name")
        details = config_holder.get_config_details()
        details["blocks"] = [block for block in details["blocks"] if block["component"] is None]
        self._modify_active(config_holder, details)
        # Act
        config_holder.remove_comp("component_name")
        # Assert
        start, restart, stop = config_holder.iocs_changed()
        self.assertEqual(len(start), 0)
        self.assertEqual(len(restart), 0)
        self.assertEqual(len(stop), 1)

    def test_GIVEN_an_ioc_defined_in_a_component_WHEN_the_ioc_simlevel_is_changed_THEN_the_ioc_is_restarted(self):
        # Arrange
        config_holder = self.create_active_config_holder()
        component = create_dummy_component()
        component.iocs = {"DUMMY_IOC": IOC("dummyname", simlevel="devsim")}
        self.mock_file_manager.comps["component_name"] = component
        config_holder.add_component("component_name")
        details = config_holder.get_config_details()
        details["blocks"] = [block for block in details["blocks"] if block["component"] is None]
        self._modify_active(config_holder, details)
        # Act
        config_holder.remove_comp("component_name")
        new_component = create_dummy_component()
        new_component.iocs = {"DUMMY_IOC": IOC("dummyname", simlevel="recsim")}  # Change simlevel
        self.mock_file_manager.comps["component_name"] = new_component
        config_holder.add_component("component_name")
        # Assert
        start, restart, stop = config_holder.iocs_changed()
        self.assertEqual(len(start), 0)
        self.assertEqual(len(restart), 1)
        self.assertEqual(len(stop), 0)

    def test_GIVEN_an_ioc_defined_in_a_component_WHEN_the_ioc_macros_are_changed_THEN_the_ioc_is_restarted(self):
        # Arrange
        config_holder = self.create_active_config_holder()
        component = create_dummy_component()
        component.iocs = {"DUMMY_IOC": IOC("dummyname", macros={"macros": {"A_MACRO": "VALUE1"}})}
        self.mock_file_manager.comps["component_name"] = component
        config_holder.add_component("component_name")
        details = config_holder.get_config_details()
        details["blocks"] = [block for block in details["blocks"] if block["component"] is None]
        self._modify_active(config_holder, details)
        # Act
        config_holder.remove_comp("component_name")
        new_component = create_dummy_component()
        new_component.iocs = {"DUMMY_IOC": IOC("dummyname", macros={"macros": {"A_MACRO": "VALUE2"}})}
        self.mock_file_manager.comps["component_name"] = new_component
        config_holder.add_component("component_name")
        # Assert
        start, restart, stop = config_holder.iocs_changed()
        self.assertEqual(len(start), 0)
        self.assertEqual(len(restart), 1)
        self.assertEqual(len(stop), 0)

    def test_GIVEN_an_ioc_defined_in_a_component_WHEN_the_ioc_macros_are_not_changed_THEN_the_ioc_is_not_restarted(self):
        # Arrange
        config_holder = self.create_active_config_holder()
        component = create_dummy_component()
        component.iocs = {"DUMMY_IOC": IOC("dummyname", macros={"macros": {"A_MACRO": "VALUE1"}})}
        self.mock_file_manager.comps["component_name"] = component
        config_holder.add_component("component_name")
        details = config_holder.get_config_details()
        details["blocks"] = [block for block in details["blocks"] if block["component"] is None]
        self._modify_active(config_holder, details)
        # Act
        config_holder.remove_comp("component_name")
        new_component = create_dummy_component()
        new_component.iocs = {"DUMMY_IOC": IOC("dummyname", macros={"macros": {"A_MACRO": "VALUE1"}})}
        self.mock_file_manager.comps["component_name"] = new_component
        config_holder.add_component("component_name")
        # Assert
        start, restart, stop = config_holder.iocs_changed()
        self.assertEqual(len(start), 0)
        self.assertEqual(len(restart), 0)
        self.assertEqual(len(stop), 0)

    def test_GIVEN_an_ioc_defined_in_the_top_level_config_WHEN_the_ioc_is_removed_THEN_the_ioc_is_stopped(self):
        # Arrange
        config_holder = self.create_active_config_holder()
        details = config_holder.get_config_details()
        details['iocs'].append(IOC("NAME"))
        self._modify_active(config_holder, details)
        # Act
        details['iocs'].pop(0)
        self._modify_active(config_holder, details)
        # Assert
        start, restart, stop = config_holder.iocs_changed()
        self.assertEqual(len(start), 0)
        self.assertEqual(len(restart), 0)
        self.assertEqual(len(stop), 1)

    def test_given_empty_config_when_block_added_then_blocks_changed_returns_true(self):
        # Arrange
        config_holder = self.create_active_config_holder()
        details = config_holder.get_config_details()
        # Act
        details['blocks'].append(Block(name="TESTNAME", pv="TESTPV").to_dict())
        self._modify_active(config_holder, details)
        # Assert
        self.assertTrue(config_holder.blocks_changed())

    def test_given_config_when_block_params_changed_then_blocks_changed_returns_true(self):
        # Arrange
        config_holder = self.create_active_config_holder()
        details = config_holder.get_config_details()
        details['blocks'].append(Block(name="TESTNAME", pv="TESTPV").to_dict())
        self._modify_active(config_holder, details)
        # Act
        details['blocks'][0]['local'] = False
        self._modify_active(config_holder, details)
        # Assert
        self.assertTrue(config_holder.blocks_changed())

    def test_given_config_with_one_block_when_block_removed_then_blocks_changed_returns_true(self):
        # Arrange
        config_holder = self.create_active_config_holder()
        details = config_holder.get_config_details()
        details['blocks'].append(Block(name="TESTNAME", pv="TESTPV").to_dict())
        self._modify_active(config_holder, details)
        # Act
        details['blocks'].pop(0)
        self._modify_active(config_holder, details)
        # Assert
        self.assertTrue(config_holder.blocks_changed())

    def test_given_empty_config_when_component_added_then_blocks_changed_returns_true(self):
        # Arrange
        config_holder = self.create_active_config_holder()
        # Act
        self.mock_file_manager.comps["component_name"] = create_dummy_component()
        config_holder.add_component("component_name")
        # Assert
        self.assertTrue(config_holder.blocks_changed())

    def test_given_empty_config_when_no_change_then_blocks_changed_returns_false(self):
        # Arrange
        config_holder = self.create_active_config_holder()
        details = config_holder.get_config_details()
        # Act
        self._modify_active(config_holder, details)
        # Assert
        self.assertFalse(config_holder.blocks_changed())

    def test_given_config_when_no_change_then_blocks_changed_returns_false(self):
        # Arrange
        config_holder = self.create_active_config_holder()
        details = config_holder.get_config_details()
        details['blocks'].append(Block(name="TESTNAME", pv="TESTPV").to_dict())
        self._modify_active(config_holder, details)
        # Act
        self._modify_active(config_holder, details)
        # Assert
        self.assertFalse(config_holder.blocks_changed())

    def test_given_no_blocks_changed_when_update_archiver_archiver_not_restarted(self):
        # Arrange
        config_holder = self.create_active_config_holder()
        details = config_holder.get_config_details()
        details['blocks'].append(Block(name="TESTNAME", pv="TESTPV").to_dict())
        self._modify_active(config_holder, details)
        # Act
        self._modify_active(config_holder, details)
        config_holder.update_archiver()
        # Assert
        self.assertFalse(self.mock_archive.update_archiver.called)

    def test_given_blocks_changed_when_update_archiver_archiver_is_restarted(self):
        # Arrange
        config_holder = self.create_active_config_holder()
        details = config_holder.get_config_details()
        details['blocks'].append(Block(name="TESTNAME", pv="TESTPV").to_dict())
        self._modify_active(config_holder, details)
        # Act
        details['blocks'].append(Block(name="TESTNAME2", pv="TESTPV2").to_dict())
        self._modify_active(config_holder, details)
        config_holder.update_archiver()
        # Assert
        self.assertTrue(self.mock_archive.update_archiver.called)

    def test_given_no_blocks_changed_but_full_init_when_update_archiver_archiver_is_restarted(self):
        # Arrange
        config_holder = self.create_active_config_holder()
        details = config_holder.get_config_details()
        details['blocks'].append(Block(name="TESTNAME", pv="TESTPV").to_dict())
        self._modify_active(config_holder, details)
        # Act
        self._modify_active(config_holder, details)
        # full_init=True forces the archiver update regardless of changes.
        config_holder.update_archiver(True)
        # Assert
        self.assertTrue(self.mock_archive.update_archiver.called)

    @parameterized.expand([
        (Block(name="name", pv="pv"), Block(name="other", pv="pv")),
        (Block(name="name", pv="pv"), Block(name="name", pv="other")),
        (Block(name="name", pv="pv", local=True), Block(name="name", pv="pv", local=False)),
        (Block(name="name", pv="pv", component="A"), Block(name="name", pv="pv", component="B")),
        (Block(name="name", pv="pv", runcontrol=True), Block(name="name", pv="pv", runcontrol=False)),
        (Block(name="name", pv="pv", lowlimit=True), Block(name="name", pv="pv", lowlimit=False)),
        (Block(name="name", pv="pv", highlimit=True), Block(name="name", pv="pv", highlimit=False)),
        (Block(name="name", pv="pv", log_periodic=True), Block(name="name", pv="pv", log_periodic=False)),
        (Block(name="name", pv="pv", log_rate=True), Block(name="name", pv="pv", log_rate=False)),
        (Block(name="name", pv="pv", log_deadband=True), Block(name="name", pv="pv", log_deadband=False)),
    ])
    def test_WHEN_block_attributes_different_THEN_blocks_changed_returns_true(self, block1, block2):
        self.assertTrue(_blocks_changed(block1, block2))

    def test_WHEN_block_attributes_different_THEN_blocks_changed_returns_false(self):
        # NOTE(review): the blocks here are identical, so the name
        # "attributes_different" looks like a copy-paste slip -- the
        # behaviour under test is "identical blocks report no change".
        self.assertFalse(_blocks_changed(Block(name="name", pv="pv"), Block(name="name", pv="pv")))

    def test_WHEN_blocks_changed_in_config_called_for_configs_which_contain_same_blocks_THEN_returns_false(self):
        config1 = Mock()
        config1.blocks = {"a": Block(name="a", pv="pv")}
        config2 = Mock()
        config2.blocks = {"a": Block(name="a", pv="pv")}
        self.assertFalse(_blocks_changed_in_config(config1, config2))

    def test_WHEN_blocks_changed_in_config_called_for_configs_with_removed_blocks_THEN_returns_true(self):
        config1 = Mock()
        config1.blocks = {"a": Block(name="a", pv="pv")}
        config2 = Mock()
        config2.blocks = {}
        self.assertTrue(_blocks_changed_in_config(config1, config2))

    def test_WHEN_blocks_changed_in_config_called_for_configs_with_added_blocks_THEN_returns_true(self):
        config1 = Mock()
        config1.blocks = {}
        config2 = Mock()
        config2.blocks = {"a": Block(name="a", pv="pv")}
        self.assertTrue(_blocks_changed_in_config(config1, config2))

    def test_WHEN_blocks_changed_in_config_called_and_block_comparator_says_they_are_different_THEN_returns_true(self):
        config1 = Mock()
        config1.blocks = {"a": Block(name="a", pv="pv")}
        config2 = Mock()
        config2.blocks = {"a": Block(name="a", pv="pv")}
        self.assertTrue(_blocks_changed_in_config(config1, config2, block_comparator=lambda block1, block2: True))

    def test_WHEN_blocks_changed_in_config_called_and_block_comparator_says_they_are_the_same_THEN_returns_false(self):
        config1 = Mock()
        config1.blocks = {"a": Block(name="a", pv="pv")}
        config2 = Mock()
        config2.blocks = {"a": Block(name="a", pv="pv")}
        self.assertFalse(_blocks_changed_in_config(config1, config2, block_comparator=lambda block1, block2: False))

    def test_WHEN_compare_ioc_properties_called_with_the_same_ioc_then_returns_empty_set_of_iocs_to_start_restart(self):
        old_config = Mock()
        old_config.iocs = {"a": IOC("a")}
        new_config = Mock()
        new_config.iocs = {"a": IOC("a")}
        start, restart = _compare_ioc_properties(old_config, new_config)
        self.assertEqual(len(start), 0)
        self.assertEqual(len(restart), 0)

    @parameterized.expand([
        ({"a": IOC("a", macros=True)}, {"a": IOC("a", macros=False)}),
        ({"a": IOC("a", pvs=True)}, {"a": IOC("a", pvs=False)}),
        ({"a": IOC("a", pvsets=True)}, {"a": IOC("a", pvsets=False)}),
        ({"a": IOC("a", simlevel="recsim")}, {"a": IOC("a", simlevel="devsim")}),
        ({"a": IOC("a", restart=True)}, {"a": IOC("a", restart=False)}),
    ])
    def test_WHEN_compare_ioc_properties_called_with_different_then_restarts_ioc(self, old_iocs, new_iocs):
        old_config = Mock()
        old_config.iocs = old_iocs
        new_config = Mock()
        new_config.iocs = new_iocs
        start, restart = _compare_ioc_properties(old_config, new_config)
        self.assertEqual(len(start), 0)
        self.assertEqual(len(restart), 1)

    def test_WHEN_compare_ioc_properties_called_with_new_ioc_then_starts_new_ioc(self):
        old_config = Mock()
        old_config.iocs = {}
        new_config = Mock()
        new_config.iocs = {"a": IOC("a", macros=True)}
        start, restart = _compare_ioc_properties(old_config, new_config)
        self.assertEqual(len(start), 1)
        self.assertEqual(len(restart), 0)
if __name__ == '__main__':
    # Run tests
    unittest.main()
| ISISComputingGroup/EPICS-inst_servers | BlockServer/test_modules/test_active_config_holder.py | Python | bsd-3-clause | 26,201 |
# Copyright (c) 2015, Simone Margaritelli <evilsocket at gmail dot com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of ARM Inject nor the names of its contributors may be used
# to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from pyadb.adb import ADB
import sys
# NOTE: this file is Python 2 (print statements); run it with a Python 2
# interpreter.
if len(sys.argv) != 2:
    print "Usage: python %s <pid>" % sys.argv[0]
    quit()

# Target process id to inject libhook.so into.
pid = int(sys.argv[1])

try:
    adb = ADB()

    # Deploy a fresh copy of the injector binary and the hook library.
    print "@ Pushing files to /data/local/tmp ..."

    adb.sh( "rm -rf /data/local/tmp/injector /data/local/tmp/libhook.so" )
    adb.push( "libs/armeabi-v7a/injector", "/data/local/tmp/injector" )
    adb.push( "libs/armeabi-v7a/libhook.so", "/data/local/tmp/libhook.so" )
    adb.sh( "chmod 777 /data/local/tmp/injector" )

    # we need to set selinux to permissive in order to make ptrace work
    adb.set_selinux_level( 0 )

    # Clear the log first so that the logcat below shows only output
    # produced by this injection run.
    adb.clear_log()

    print "@ Injection into PID %d starting ..." % pid

    adb.sudo( "/data/local/tmp/injector %d /data/local/tmp/libhook.so" % pid )

    # Stream the hook library's log output until interrupted (Ctrl-C).
    adb.logcat("LIBHOOK")

except KeyboardInterrupt:
    pass
| evilsocket/arminject | trace_pid.py | Python | bsd-3-clause | 2,396 |
def extractMtllightnovelCom(item):
    '''
    Parser for 'mtllightnovel.com'

    Maps a feed item to a release message: returns None for items with
    no volume/chapter info or marked as previews, a release message for
    known series tags, and (via the fall-through below) False when no
    tag matches.
    '''
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or "preview" in item['title'].lower():
        return None

    # (feed tag, canonical series name, translation type)
    tagmap = [
        ('devil\'s son-in-law', 'Devil\'s Son-in-Law', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]

    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False | fake-name/ReadableWebProxy | WebMirror/management/rss_parser_funcs/feed_parse_extractMtllightnovelCom.py | Python | bsd-3-clause | 635 |
#!/usr/bin/env python
# Copyright (c) 2015, Ecole Polytechnique Federale de Lausanne, Blue Brain Project
# All rights reserved.
#
# This file is part of NeuroM <https://github.com/BlueBrain/NeuroM>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of
# its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Load and view multiple somas."""
from pathlib import Path
from neurom import load_morphology
from neurom.view import matplotlib_utils
import matplotlib.pyplot as plt
import numpy as np
DATA_PATH = Path(__file__).parent.parent / 'tests/data'
SWC_PATH = Path(DATA_PATH, 'swc')
def random_color():
    """Return a random RGB color as a (3, 1) float array with entries in [0, 1)."""
    # Column-vector shape is kept for compatibility with plot_somas, which
    # passes the result straight through as a matplotlib color argument.
    rgb_column = np.random.rand(3, 1)
    return rgb_column
def plot_somas(somas):
    """Plot set of somas on same figure as spheres, each with different color.

    :param somas: iterable of soma objects exposing ``center`` and ``radius``.
    Blocks until the interactive matplotlib window is closed (``plt.show``).
    """
    # One shared 3D axes; 'aspect': 'equal' keeps the spheres round.
    _, ax = matplotlib_utils.get_figure(new_fig=True, subplot=111,
                                        params={'projection': '3d', 'aspect': 'equal'})
    for s in somas:
        # Each soma gets its own random color so overlapping spheres are distinguishable.
        matplotlib_utils.plot_sphere(ax, s.center, s.radius, color=random_color(), alpha=1)
    plt.show()
if __name__ == '__main__':
    # define set of files containing relevant morphs
    file_nms = [Path(SWC_PATH, file_nm) for file_nm in ['Soma_origin.swc',
                                                        'Soma_translated_1.swc',
                                                        'Soma_translated_2.swc']]
    # load from file and plot; only the .soma attribute of each morphology is kept
    sms = [load_morphology(file_nm).soma for file_nm in file_nms]
    plot_somas(sms)
| BlueBrain/NeuroM | examples/plot_somas.py | Python | bsd-3-clause | 2,929 |
from holodeck.settings import *
import os
import sys
# Django settings for Holodeck project.
# NOTE(review): derives the project root from sys.argv[0], so the value depends
# on how the process was launched -- presumably always manage.py; confirm.
PATH = os.path.split(os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]))))[0]
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
    # ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = False
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = os.path.join(PATH, 'media')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = os.path.join(PATH, 'static')
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
#    'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
# NOTE(review): this SECRET_KEY is committed to version control; for any
# non-development deployment it should be rotated and loaded from the
# environment instead.
SECRET_KEY = 'nj1p6t#2(fe(e=e_96o05fhti6p#@^mwaqioq=(f(ma_unqvt='
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
#     'django.template.loaders.eggs.Loader',
)
TEMPLATE_CONTEXT_PROCESSORS = (
    "django.contrib.auth.context_processors.auth",
    "django.core.context_processors.debug",
    "django.core.context_processors.i18n",
    "django.core.context_processors.media",
    "django.core.context_processors.request",
    "django.core.context_processors.static",
    "django.core.context_processors.tz",
    "django.contrib.messages.context_processors.messages"
)
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    # Uncomment the next line for simple clickjacking protection:
    # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'holodeck.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'holodeck.wsgi.application'
TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.admin',
    'holodeck',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
| shaunsephton/holodeck | holodeck/django_settings.py | Python | bsd-3-clause | 5,258 |
# project.py
import signac
def classify(job):
    """Yield status labels for *job*.

    Every job gets 'init'; jobs whose document already stores a volume 'V'
    additionally get 'volume-computed'.
    """
    labels = ['init']
    if 'V' in job.document:
        labels.append('volume-computed')
    for label in labels:
        yield label
def next_operation(job):
    """Return the name of the next operation to run for *job*.

    Returns 'compute_volume' until the job carries the 'volume-computed'
    label, then None (implicitly: nothing left to do).
    """
    labels = classify(job)
    if 'volume-computed' in labels:
        return None
    return 'compute_volume'
if __name__ == '__main__':
    # Print a one-line status summary (job id, pressure statepoint, labels)
    # for every job in the current signac project.
    project = signac.get_project()
    print(project)
    for job in project.find_jobs():
        labels = ','.join(classify(job))
        # zero-padded pressure, e.g. '01.5', for aligned output
        p = '{:04.1f}'.format(job.statepoint()['p'])
        print(job, p, labels)
| csadorf/signac | examples/ideal_gas_project/project.py | Python | bsd-3-clause | 472 |
from crispy_forms.bootstrap import FormActions
from crispy_forms.helper import FormHelper
from crispy_forms.layout import ButtonHolder, Div, Fieldset, HTML, Layout, Submit
from django import forms
from django.core.validators import EmailValidator, email_re
from django.core.urlresolvers import reverse
from django.forms.widgets import PasswordInput, HiddenInput
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _, ugettext_noop, ugettext_lazy
from django.template.loader import get_template
from django.template import Context
from corehq.apps.locations.models import Location
from corehq.apps.users.models import CouchUser
from corehq.apps.users.util import format_username
from corehq.apps.app_manager.models import validate_lang
from corehq.apps.commtrack.models import CommTrackUser, Program
import re
# required to translate inside of a mark_safe tag
from django.utils.functional import lazy
import six # Python 3 compatibility
mark_safe_lazy = lazy(mark_safe, six.text_type)
def wrapped_language_validation(value):
    """Form-level validator for application language codes.

    Wraps app_manager's ``validate_lang`` so that the ``ValueError`` it
    raises surfaces as a ``forms.ValidationError`` with a user-readable
    message.
    """
    try:
        validate_lang(value)
    except ValueError:
        # Language codes are letters, not digits -- the old message said
        # "digit", which contradicted what the validator accepts.
        raise forms.ValidationError("%s is not a valid language code! Please "
                                    "enter a valid two or three letter code." % value)
class LanguageField(forms.CharField):
    """
    CharField that validates its value as a two or three letter language code.
    """
    default_error_messages = {
        # ugettext_lazy so the message is translated at render time, not at
        # module import time; "letter" matches what language codes contain.
        'invalid': ugettext_lazy(u'Please enter a valid two or three letter language code.'),
    }
    default_validators = [wrapped_language_validation]

    def __init__(self, *args, **kwargs):
        # CharField builds its Min/MaxLengthValidators from the values passed
        # to __init__; the previous code assigned self.min_length/max_length
        # *after* the super() call, which never registered any validator.
        # Injecting defaults into kwargs makes the length limits effective
        # while still letting callers override them.
        kwargs.setdefault('min_length', 2)
        kwargs.setdefault('max_length', 3)
        super(LanguageField, self).__init__(*args, **kwargs)
class BaseUpdateUserForm(forms.Form):
    """Base form for editing a CouchUser.

    Subclasses declare, via ``direct_properties``, which cleaned fields are
    copied verbatim onto the user object by ``update_user``.
    """
    @property
    def direct_properties(self):
        # Names of cleaned_data keys that map 1:1 onto user attributes.
        return []
    def update_user(self, existing_user=None, **kwargs):
        """Apply the form's cleaned data to *existing_user* and save.

        When no user is given but an email was submitted, a fresh Django
        auth User (and matching CouchUser) is created first. Returns True
        when anything was written.
        """
        is_update_successful = False
        if not existing_user and 'email' in self.cleaned_data:
            from django.contrib.auth.models import User
            django_user = User()
            django_user.username = self.cleaned_data['email']
            django_user.save()
            existing_user = CouchUser.from_django_user(django_user)
            existing_user.save()
            is_update_successful = True
        for prop in self.direct_properties:
            setattr(existing_user, prop, self.cleaned_data[prop])
            is_update_successful = True
        if is_update_successful:
            existing_user.save()
        return is_update_successful
    def initialize_form(self, existing_user=None, **kwargs):
        """Seed ``self.initial`` from the user's current attribute values."""
        if existing_user is None:
            return
        for prop in self.direct_properties:
            self.initial[prop] = getattr(existing_user, prop, "")
class UpdateUserRoleForm(BaseUpdateUserForm):
    """Adds a domain-scoped role selection on top of the base user update."""
    role = forms.ChoiceField(choices=(), required=False)
    def update_user(self, existing_user=None, domain=None, **kwargs):
        is_update_successful = super(UpdateUserRoleForm, self).update_user(existing_user)
        if domain and 'role' in self.cleaned_data:
            role = self.cleaned_data['role']
            try:
                existing_user.set_role(domain, role)
                existing_user.save()
                is_update_successful = True
            except KeyError:
                # set_role raises KeyError for an unknown role; treat as no-op.
                pass
        return is_update_successful
    def load_roles(self, role_choices=None, current_role=None):
        """Populate the role dropdown and optionally preselect a role."""
        if role_choices is None:
            role_choices = []
        self.fields['role'].choices = role_choices
        if current_role:
            self.initial['role'] = current_role
class BaseUserInfoForm(forms.Form):
    """Common name/email/language fields shared by user-editing forms."""
    first_name = forms.CharField(label=ugettext_lazy('First Name'), max_length=50, required=False)
    last_name = forms.CharField(label=ugettext_lazy('Last Name'), max_length=50, required=False)
    email = forms.EmailField(label=ugettext_lazy("E-mail"), max_length=75, required=False)
    language = forms.ChoiceField(
        choices=(),
        initial=None,
        required=False,
        help_text=mark_safe_lazy(
            ugettext_lazy(
                "<i class=\"icon-info-sign\"></i> "
                "Becomes default language seen in CloudCare and reports (if applicable). "
                "Supported languages for reports are en, fr (partial), and hin (partial)."
            )
        )
    )
    def load_language(self, language_choices=None):
        """Populate the language dropdown, always prefixed with a blank choice."""
        if language_choices is None:
            language_choices = []
        self.fields['language'].choices = [('', '')] + language_choices
class UpdateMyAccountInfoForm(BaseUpdateUserForm, BaseUserInfoForm):
    """Self-service account form: all declared fields map straight onto the user."""
    email_opt_out = forms.BooleanField(
        required=False,
        label="",
        help_text=ugettext_lazy("Opt out of emails about new features and other CommCare updates.")
    )
    @property
    def direct_properties(self):
        return self.fields.keys()
class UpdateCommCareUserInfoForm(BaseUserInfoForm, UpdateUserRoleForm):
    """Mobile-worker edit form; every field except 'role' maps directly onto the user."""
    @property
    def direct_properties(self):
        indirect_props = ['role']
        return [k for k in self.fields.keys() if k not in indirect_props]
class RoleForm(forms.Form):
    """Form whose ``role`` field choices are injected at construction time.

    NOTE(review): no ``role`` field is declared on this class, so
    ``self.fields['role']`` presumably relies on a subclass declaring it --
    confirm against usages before relying on RoleForm directly.
    """

    def __init__(self, *args, **kwargs):
        # dict.has_key() is deprecated (and removed in Python 3); pop with a
        # default expresses the optional-kwarg intent in one step.
        role_choices = kwargs.pop('role_choices', ())
        super(RoleForm, self).__init__(*args, **kwargs)
        self.fields['role'].choices = role_choices

    class Meta:
        app_label = 'users'
class CommCareAccountForm(forms.Form):
    """
    Form for creating CommCareAccounts (mobile workers).

    The submitted username is combined with the hidden ``domain`` field into
    ``user@domain.commcarehq.org`` during ``clean()``.

    NOTE(review): ``clean()`` reads ``self.password_format``, which is not set
    anywhere in this class -- presumably attached by the view; confirm.
    """
    # 128 is max length in DB
    # 25 is domain max length
    # @{domain}.commcarehq.org adds 16
    # left over is 87 and 80 just sounds better
    max_len_username = 80
    username = forms.CharField(max_length=max_len_username, required=True)
    password = forms.CharField(widget=PasswordInput(), required=True, min_length=1, help_text="Only numbers are allowed in passwords")
    password_2 = forms.CharField(label='Password (reenter)', widget=PasswordInput(), required=True, min_length=1)
    domain = forms.CharField(widget=HiddenInput())
    phone_number = forms.CharField(max_length=80, required=False)

    class Meta:
        app_label = 'users'

    def __init__(self, *args, **kwargs):
        # Was ``super(forms.Form, self)`` -- that only worked by accident of
        # the MRO; name this class explicitly as super() expects.
        super(CommCareAccountForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.layout = Layout(
            Fieldset(
                'Create new Mobile Worker account',
                'username',
                'password',
                HTML("{% if only_numeric %}"
                     "<div class=\"control-group\"><div class=\"controls\">"
                     "To enable alphanumeric passwords, go to the "
                     "applications this user will use, go to CommCare "
                     "Settings, and change Password Format to Alphanumeric."
                     "</div></div>"
                     "{% endif %}"
                ),
                'password_2',
                'phone_number',
                Div(
                    Div(HTML("Please enter number, including international code, in digits only."),
                        css_class="controls"),
                    css_class="control-group"
                )
            ),
            FormActions(
                ButtonHolder(
                    Submit('submit', 'Create Mobile Worker')
                )
            )
        )

    def clean_phone_number(self):
        """Strip separators and require an all-digit number; '' becomes None."""
        phone_number = self.cleaned_data['phone_number']
        # raw string: '\s' etc. are invalid string escapes without it
        phone_number = re.sub(r'\s|\+|\-', '', phone_number)
        if phone_number == '':
            return None
        elif not re.match(r'\d+$', phone_number):
            raise forms.ValidationError(_("%s is an invalid phone number." % phone_number))
        return phone_number

    def clean_username(self):
        username = self.cleaned_data['username']
        if username == 'admin' or username == 'demo_user':
            raise forms.ValidationError("The username %s is reserved for CommCare." % username)
        return username

    def clean(self):
        """Cross-field validation: matching passwords, password format,
        username length/charset, and uniqueness of the fully-qualified
        username within the domain."""
        try:
            password = self.cleaned_data['password']
            password_2 = self.cleaned_data['password_2']
        except KeyError:
            pass
        else:
            if password != password_2:
                raise forms.ValidationError("Passwords do not match")
            if self.password_format == 'n' and not password.isnumeric():
                raise forms.ValidationError("Password is not numeric")

        try:
            username = self.cleaned_data['username']
        except KeyError:
            pass
        else:
            if len(username) > CommCareAccountForm.max_len_username:
                raise forms.ValidationError(
                    "Username %s is too long.  Must be under %d characters."
                    % (username, CommCareAccountForm.max_len_username))
            validate_username('%s@commcarehq.org' % username)
            domain = self.cleaned_data['domain']
            username = format_username(username, domain)
            num_couch_users = len(CouchUser.view("users/by_username",
                                                 key=username))
            if num_couch_users > 0:
                raise forms.ValidationError("CommCare user already exists")

            # set the cleaned username to user@domain.commcarehq.org
            self.cleaned_data['username'] = username
        return self.cleaned_data
# EmailValidator with Django's email regex: usernames must form a valid
# e-mail address once '@commcarehq.org' is appended (see CommCareAccountForm.clean).
validate_username = EmailValidator(email_re, _(u'Username contains invalid characters.'), 'invalid')
class MultipleSelectionForm(forms.Form):
    """
    Form for selecting groups (used by the group UI on the user page)
    """
    selected_ids = forms.MultipleChoiceField(
        label="",
        required=False,
    )
    def __init__(self, *args, **kwargs):
        self.helper = FormHelper()
        # form_tag=False: rendered inside an outer <form> supplied by the template
        self.helper.form_tag = False
        self.helper.add_input(Submit('submit', 'Update'))
        super(MultipleSelectionForm, self).__init__(*args, **kwargs)
class SupplyPointSelectWidget(forms.Widget):
    """Autocomplete widget for picking a supply point, backed by the
    commtrack supply-point query API for the given domain."""
    def __init__(self, attrs=None, domain=None, id='supply-point'):
        super(SupplyPointSelectWidget, self).__init__(attrs)
        self.domain = domain
        self.id = id
    def render(self, name, value, attrs=None):
        # Delegates all markup to the shared autocomplete template.
        return get_template('locations/manage/partials/autocomplete_select_widget.html').render(Context({
            'id': self.id,
            'name': name,
            'value': value or '',
            'query_url': reverse('corehq.apps.commtrack.views.api_query_supply_point', args=[self.domain]),
        }))
class CommtrackUserForm(forms.Form):
    """Commtrack-specific user settings: supply point location and program.

    Requires a ``domain`` kwarg at construction time to scope both the
    supply-point autocomplete and the program choices.
    """
    supply_point = forms.CharField(label='Supply Point:', required=False)
    program_id = forms.ChoiceField(label="Program", choices=(), required=False)

    def __init__(self, *args, **kwargs):
        # pop() with a default replaces the previous membership-check/del
        # dance; behavior is identical (domain stays None when absent).
        domain = kwargs.pop('domain', None)
        super(CommtrackUserForm, self).__init__(*args, **kwargs)
        self.fields['supply_point'].widget = SupplyPointSelectWidget(domain=domain)
        programs = Program.by_domain(domain, wrap=False)
        choices = list((prog['_id'], prog['name']) for prog in programs)
        # blank option first so "no program" is selectable
        choices.insert(0, ('', ''))
        self.fields['program_id'].choices = choices

    def save(self, user):
        """Point the user at the selected supply-point location.

        NOTE(review): when no location is selected this is a no-op -- the
        existing locations are left untouched, not cleared; confirm intended.
        """
        commtrack_user = CommTrackUser.wrap(user.to_json())
        location_id = self.cleaned_data['supply_point']
        if location_id:
            loc = Location.get(location_id)
            commtrack_user.clear_locations()
            commtrack_user.add_location(loc)
| gmimano/commcaretest | corehq/apps/users/forms.py | Python | bsd-3-clause | 11,739 |
import os, os.path
import pexpect
import subprocess
from astropy.io import fits
import numpy as np
from optparse import OptionParser
import sys
import tempfile
from ftplib import FTP
import shutil
_ERASESTR= " "
def get_agetables(args,options):
    """Download the Martig et al. (2016) age tables (Vizier cat. J/MNRAS/456/3655)
    into the output directory args[0] and convert both tables to FITS.
    """
    cat = 'J/MNRAS/456/3655/'
    tab1name = 'table1.dat'
    tab2name = 'table2.dat.gz'
    table2out = 'DR12_martigages_vizier.fits'
    table1out = 'martig_table1.fits'
    out = args[0]
    _download_file_vizier(cat, out, catalogname=tab1name)
    _download_file_vizier(cat, out, catalogname=tab2name)
    # table2 ships gzipped; decompress in place so genfromtxt can read it
    subprocess.call(['gunzip', os.path.join(out,tab2name)])
    ztab2name = 'table2.dat'
    # column names taken from the Vizier ReadMe for this catalog
    tab1_colnames = '2MASS_ID, Teff, Logg, [M/H], [C/M], [N/M], Massin, e_Massin, Massout, Age_in, e_Agein, E_Agein, Age_out'
    tab2_colnames = '2MASS_ID, Teff, Logg, [M/H], [C/M], [N/M], Massout, Age'
    tab1 = np.genfromtxt(os.path.join(out,tab1name), dtype=None, names=tab1_colnames)
    tab2 = np.genfromtxt(os.path.join(out,ztab2name), dtype=None, names=tab2_colnames)
    hdu1 = fits.BinTableHDU.from_columns(tab1)
    hdu2 = fits.BinTableHDU.from_columns(tab2)
    hdu1.writeto(os.path.join(out,table1out))
    hdu2.writeto(os.path.join(out,table2out))
def _download_file_vizier(cat, filePath, catalogname='catalog.dat', max_ntries=2):
    '''
    Download *catalogname* from Vizier catalog *cat* (via anonymous FTP to
    cdsarc.u-strasbg.fr) into the directory *filePath*, retrying up to
    *max_ntries* times on failure.

    Stolen from Jo Bovy's gaia_tools package!
    '''
    sys.stdout.write('\r'+"Downloading file %s ...\r" \
                         % (os.path.basename(filePath)))
    sys.stdout.flush()
    try:
        # make all intermediate directories
        os.makedirs(os.path.dirname(filePath))
    except OSError: pass
    # Safe way of downloading: write to a temp file, move into place on success
    downloading= True
    interrupted= False
    fd, tmp_savefilename= tempfile.mkstemp()
    os.close(fd) #Easier this way
    ntries= 1
    while downloading:
        try:
            ftp= FTP('cdsarc.u-strasbg.fr')
            ftp.login('anonymous', 'test')
            ftp.cwd(os.path.join('pub','cats',cat))
            with open(tmp_savefilename,'wb') as savefile:
                ftp.retrbinary('RETR %s' % catalogname,savefile.write)
            shutil.move(tmp_savefilename,os.path.join(filePath,catalogname))
            downloading= False
            if interrupted:
                raise KeyboardInterrupt
        except KeyboardInterrupt:
            # propagate user interrupts immediately
            raise
        except:
            # BUGFIX: the original re-raised unconditionally here, making the
            # retry logic below unreachable (and it referenced an undefined
            # _MAX_NTRIES, a latent NameError). Now: give up with IOError
            # once the retry budget is spent, otherwise loop and retry.
            if ntries >= max_ntries:
                raise IOError('File %s does not appear to exist on the server ...' % (os.path.basename(filePath)))
        finally:
            if os.path.exists(tmp_savefilename):
                os.remove(tmp_savefilename)
            ntries+= 1
    sys.stdout.write('\r'+_ERASESTR+'\r')
    sys.stdout.flush()
    return None
def get_options():
    """Build and return the command-line option parser for this script."""
    parser = OptionParser(usage="usage: %prog [options] <outpath>")
    # --convert already defaults to True; kept for interface compatibility.
    parser.add_option("--convert", dest='convert', default=True,
                      action='store_true', help="convert to fits?")
    return parser
if __name__ == '__main__':
    # Parse CLI options and download/convert the age tables into <outpath>.
    parser = get_options()
    options, args= parser.parse_args()
    get_agetables(args,options)
| jobovy/apogee-maps | py/get_agetables.py | Python | bsd-3-clause | 3,237 |
import collections
import hashlib
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from corehq.util.soft_assert import soft_assert
from corehq import privileges, toggles
from corehq.apps.hqwebapp.templatetags.hq_shared_tags import toggle_enabled
from corehq.apps.userreports.const import (
REPORT_BUILDER_EVENTS_KEY,
UCR_ES_BACKEND
)
from django_prbac.utils import has_privilege
from corehq.apps.userreports.dbaccessors import get_all_es_data_sources
from corehq.apps.userreports.exceptions import BadBuilderConfigError
def localize(value, lang):
    """
    Localize the given value.

    This function is intended to be used within UCR to localize user supplied
    translations.

    :param value: A dict-like object (language code -> translation) or string
    :param lang: A language code.

    Falls back to the default language, then to the alphabetically-first key;
    non-mappings (and empty mappings) are returned unchanged.
    """
    # collections.Mapping was removed in Python 3.10; collections.abc.Mapping
    # does not exist on Python 2, so resolve the right one at call time.
    try:
        from collections.abc import Mapping
    except ImportError:  # Python 2
        from collections import Mapping
    if isinstance(value, Mapping) and len(value):
        return (
            value.get(lang, None) or
            value.get(default_language(), None) or
            value[sorted(value.keys())[0]]
        )
    return value
def default_language():
    """Fallback language code used when a translation for the requested
    language is missing."""
    fallback = "en"
    return fallback
def has_report_builder_add_on_privilege(request):
    """True when the request holds any of the report-builder add-on privileges."""
    return any(
        has_privilege(request, p) for p in privileges.REPORT_BUILDER_ADD_ON_PRIVS
    )
def has_report_builder_access(request):
    """True when the request may use the report builder, via any of the three
    access paths: legacy toggle+privilege, beta group, or an add-on privilege."""
    builder_enabled = toggle_enabled(request, toggles.REPORT_BUILDER)
    legacy_builder_priv = has_privilege(request, privileges.REPORT_BUILDER)
    beta_group_enabled = toggle_enabled(request, toggles.REPORT_BUILDER_BETA_GROUP)
    has_add_on_priv = has_report_builder_add_on_privilege(request)
    return (builder_enabled and legacy_builder_priv) or beta_group_enabled or has_add_on_priv
def add_event(request, event):
    """Append a report-builder analytics event to the session's event list."""
    events = request.session.get(REPORT_BUILDER_EVENTS_KEY, [])
    request.session[REPORT_BUILDER_EVENTS_KEY] = events + [event]
def has_report_builder_trial(request):
    """True when the request is on the report-builder trial privilege."""
    return has_privilege(request, privileges.REPORT_BUILDER_TRIAL)
def can_edit_report(request, report):
    """True when the request may edit *report*.

    Builder-created reports require report-builder access; hand-written UCR
    reports require the USER_CONFIGURABLE_REPORTS toggle.
    """
    ucr_toggle = toggle_enabled(request, toggles.USER_CONFIGURABLE_REPORTS)
    report_builder_toggle = toggle_enabled(request, toggles.REPORT_BUILDER)
    report_builder_beta_toggle = toggle_enabled(request, toggles.REPORT_BUILDER_BETA_GROUP)
    add_on_priv = has_report_builder_add_on_privilege(request)
    created_by_builder = report.spec.report_meta.created_by_builder
    if created_by_builder:
        return report_builder_toggle or report_builder_beta_toggle or add_on_priv
    else:
        return ucr_toggle
def allowed_report_builder_reports(request):
    """
    Return the number of report builder reports allowed
    """
    builder_enabled = toggle_enabled(request, toggles.REPORT_BUILDER)
    legacy_builder_priv = has_privilege(request, privileges.REPORT_BUILDER)
    beta_group_enabled = toggle_enabled(request, toggles.REPORT_BUILDER_BETA_GROUP)
    if toggle_enabled(request, toggles.UNLIMITED_REPORT_BUILDER_REPORTS):
        return float("inf")
    if has_privilege(request, privileges.REPORT_BUILDER_30):
        return 30
    if has_privilege(request, privileges.REPORT_BUILDER_15):
        return 15
    if (
        has_privilege(request, privileges.REPORT_BUILDER_TRIAL) or
        has_privilege(request, privileges.REPORT_BUILDER_5) or
        beta_group_enabled or
        (builder_enabled and legacy_builder_priv)
    ):
        return 5
    # NOTE(review): falls through to an implicit None when no privilege
    # matches -- callers presumably treat None as "no reports allowed";
    # confirm before relying on numeric comparison.
def number_of_report_builder_reports(domain):
    """Count the builder-created ReportConfigurations in *domain*."""
    from corehq.apps.userreports.models import ReportConfiguration
    existing_reports = ReportConfiguration.by_domain(domain)
    builder_reports = filter(
        lambda report: report.report_meta.created_by_builder, existing_reports
    )
    return len(builder_reports)
def get_indicator_adapter(config, raise_errors=False):
    """Return the ES or SQL indicator adapter matching the data source's backend.

    :param raise_errors: SQL only -- wrap the adapter so errors propagate
        instead of being swallowed.
    """
    from corehq.apps.userreports.sql.adapter import IndicatorSqlAdapter, ErrorRaisingIndicatorSqlAdapter
    from corehq.apps.userreports.es.adapter import IndicatorESAdapter
    if get_backend_id(config) == UCR_ES_BACKEND:
        return IndicatorESAdapter(config)
    else:
        if raise_errors:
            return ErrorRaisingIndicatorSqlAdapter(config)
        return IndicatorSqlAdapter(config)
def get_table_name(domain, table_id):
    """Build the backing table name for a UCR data source.

    The name embeds a short hash of (domain, table_id) so that truncation
    (PostgreSQL's 63-char identifier limit) cannot cause collisions.
    NOTE(review): hashlib.sha1 is fed a str directly -- Python 2 semantics;
    under Python 3 this would need an explicit encode.
    """
    def _hash(domain, table_id):
        return hashlib.sha1('{}_{}'.format(hashlib.sha1(domain).hexdigest(), table_id)).hexdigest()[:8]
    return truncate_value(
        'config_report_{}_{}_{}'.format(domain, table_id, _hash(domain, table_id)),
        from_left=False
    )
def is_ucr_table(table_name):
    """True when *table_name* follows the UCR data-source naming scheme
    produced by get_table_name()."""
    prefix = 'config_report_'
    return table_name[:len(prefix)] == prefix
def truncate_value(value, max_length=63, from_left=True):
    """
    Truncate a value (typically a column name) to a certain number of characters,
    using a hash to ensure uniqueness.

    With ``from_left=True`` the *end* of the value is kept; otherwise the
    beginning. Values already within ``max_length`` pass through unchanged.
    """
    hash_length = 8
    # reserve room for '_' + hash suffix
    keep = max_length - hash_length - 1
    truncated = value[-keep:] if from_left else value[:keep]
    if len(value) <= max_length:
        return value
    short_hash = hashlib.sha1(value).hexdigest()[:hash_length]
    return '{}_{}'.format(truncated, short_hash)
def get_ucr_es_indices():
    """Return the index/table names for every ES-backed UCR data source."""
    sources = get_all_es_data_sources()
    return [get_table_name(s.domain, s.table_id) for s in sources]
def get_backend_id(config):
    """Return the backend id for a data-source config, honoring the
    settings-level override when set."""
    if settings.OVERRIDE_UCR_BACKEND:
        return settings.OVERRIDE_UCR_BACKEND
    return config.backend_id
| qedsoftware/commcare-hq | corehq/apps/userreports/util.py | Python | bsd-3-clause | 5,418 |
# coding: utf-8
from hyper import hyper
from numba import jit
@jit(nopython=True)
def hyp_battin(x):
    # Gauss hypergeometric 2F1(3, 1; 5/2; x), as used in Battin's Lambert
    # formulation. NOTE(review): under Python 2, 5/2 is integer division (2),
    # silently changing the c parameter -- confirm this runs on Python 3 only.
    return hyper.hyp2f1(3, 1, 5/2, x)
if __name__ == '__main__':
    print(hyp_battin(0.5))
| Pybonacci/cffi_test | examples/example_numba.py | Python | bsd-3-clause | 197 |
import sys
import inspect
import os
import numpy as np
import time
import timeit
import collections
import subprocess
from qtools.qtpy import QtCore, QtGui
from qtools.utils import get_application, show_window
from functools import wraps
from galry import log_debug, log_info, log_warn
from collections import OrderedDict as ordict
# try importing numexpr
try:
import numexpr
except:
numexpr = None
__all__ = [
'get_application',
'get_intermediate_classes',
'show_window',
'run_all_scripts',
'enforce_dtype',
'FpsCounter',
'ordict',
]
def hsv_to_rgb(hsv):
    """
    convert hsv values in a numpy array to rgb values

    both input and output arrays have shape (M,N,3)
    (any leading shape with a trailing axis of 3 channels is accepted)
    """
    h = hsv[..., 0]
    s = hsv[..., 1]
    v = hsv[..., 2]
    # Standard HSV->RGB: the hue circle splits into six sectors; in each,
    # (r, g, b) is a fixed permutation of the intermediates (v, p, q, t).
    i = (h * 6.0).astype(int)  # np.int is deprecated; builtin int is equivalent
    f = (h * 6.0) - i
    p = v * (1.0 - s)
    q = v * (1.0 - s * f)
    t = v * (1.0 - s * (1.0 - f))
    r = np.empty_like(h)
    g = np.empty_like(h)
    b = np.empty_like(h)
    # Channel sources per sector 0..5; replaces six copy-pasted index blocks.
    # Using i % 6 everywhere matches the original for h in [0, 1] and also
    # defines wrap-around behavior for out-of-range hues.
    sectors = [(v, t, p), (q, v, p), (p, v, t), (p, q, v), (t, p, v), (v, p, q)]
    for sector, (rs, gs, bs) in enumerate(sectors):
        idx = (i % 6) == sector
        r[idx] = rs[idx]
        g[idx] = gs[idx]
        b[idx] = bs[idx]
    # Achromatic pixels (s == 0): every channel equals the value.
    idx = s == 0
    r[idx] = v[idx]
    g[idx] = v[idx]
    b[idx] = v[idx]
    rgb = np.empty_like(hsv)
    rgb[..., 0] = r
    rgb[..., 1] = g
    rgb[..., 2] = b
    return rgb
def get_intermediate_classes(cls, baseclass):
    """Return all intermediate classes in the OO hierarchy between a base
    class and a child class."""
    mro = inspect.getmro(cls)
    # keep only the classes lying on the path down to (and including) baseclass
    return [klass for klass in mro if issubclass(klass, baseclass)]
def run_all_scripts(dir=".", autodestruct=True, condition=None, ignore=[]):
    """Run all scripts successively.

    Runs every file in *dir* matching *condition* (default: ``*.py`` not
    starting with '_') as a subprocess, optionally passing 'autodestruct'.
    NOTE(review): ``ignore=[]`` is a mutable default (read-only here, so
    benign), and ``dir``/``file`` shadow builtins; Python 2 print statements
    throughout.
    """
    if condition is None:
        condition = lambda file: file.endswith(".py") and not file.startswith("_")
    # side effect: changes the process working directory for the caller too
    os.chdir(dir)
    files = sorted([file for file in os.listdir(dir) if condition(file)])
    for file in files:
        if file in ignore:
            continue
        print "Running %s..." % file
        args = ["python", file]
        if autodestruct:
            # scripts check sys.argv for this flag to close their window automatically
            args += ["autodestruct"]
        subprocess.call(args)
        print "Done!"
        print
def enforce_dtype(arr, dtype, msg=""):
    """Force the dtype of a Numpy array.

    Returns a converted copy when *arr* is an ndarray of a different dtype,
    otherwise returns *arr* unchanged; non-arrays pass through untouched.
    """
    if isinstance(arr, np.ndarray):
        # Compare dtypes with != : the previous `is not` check only worked
        # because NumPy happens to cache builtin dtype objects.
        if arr.dtype != np.dtype(dtype):
            log_debug("enforcing dtype for array %s %s" % (str(arr.dtype), msg))
            return np.array(arr, dtype)
    return arr
def memoize(func):
    """Decorator for memoizing a function.

    Results are cached per positional-argument tuple; arguments must be
    hashable. Keyword arguments are not supported.
    """
    results = {}
    @wraps(func)
    def memoized(*args):
        try:
            return results[args]
        except KeyError:
            results[args] = func(*args)
            return results[args]
    return memoized
def nid(x):
    """Return the address of an array data, used to check whether two arrays
    refer to the same data in memory."""
    # __array_interface__['data'] is a (pointer, read_only_flag) pair.
    data_ptr, _read_only = x.__array_interface__['data']
    return data_ptr
class FpsCounter(object):
    """Count FPS from a rolling window of tick timestamps."""
    # default number of most-recent timestamps kept for the estimate
    maxlen = 10

    def __init__(self, maxlen=None):
        if maxlen is None:
            maxlen = self.maxlen
        self.times = collections.deque(maxlen=maxlen)
        self.fps = 0.
        self.delta = 0.

    def tick(self):
        """Record the current time stamp.

        To be called by paintGL().
        """
        self.times.append(timeit.default_timer())

    def get_fps(self):
        """Return the current FPS (0. until at least two ticks were recorded)."""
        if len(self.times) >= 2:
            dif = np.diff(self.times)
            delta = dif.min()
            # Guard against two ticks landing on the same timer value, which
            # previously raised ZeroDivisionError.
            if delta <= 0:
                return self.fps
            fps = 1. / delta
            # if the FPS crosses 500, do not update it
            if fps <= 500:
                self.fps = fps
            return self.fps
        else:
            return 0.
| rossant/galry | galry/tools.py | Python | bsd-3-clause | 4,093 |
from model_utils import Choices
# Conference sponsorship tiers. Each Choices triple is
# (database value, Python identifier, human-readable display name),
# e.g. SPONSOR_TYPES.DIAMOND == 'diamond'. Order defines display precedence.
SPONSOR_TYPES = Choices(
    ('diamond', 'DIAMOND', 'Diamond Sponsor'),
    ('lanyard', 'LANYARD', 'Lanyard Sponsor'),
    ('track', 'TRACK', 'Track Sponsor'),
    ('foodanddrinks', 'FOOD_AND_DRINKS', 'Food & Drinks Sponsor'),
    ('lounge', 'LOUNGE', 'Lounge Sponsor'),
    ('standard', 'STANDARD', 'Standard Sponsor'),
    ('supporter', 'SUPPORTER', 'Supporter Sponsor'),
    ('mainmedia', 'MAIN_MEDIA', 'Main Media Sponsor'),
    ('media', 'MEDIA', 'Media sponsors'),
    ('video', 'VIDEO', 'Video sponsors'),
)
| WebCampZg/conference-web | sponsors/choices.py | Python | bsd-3-clause | 548 |
from codecs import open # To use a consistent encoding
from os import path
from setuptools import setup
# Directory containing this setup.py; all data files are resolved against it.
HERE = path.dirname(path.abspath(__file__))
# Get version info
ABOUT = {}
with open(path.join(HERE, 'datadog_checks', 'riak_repl', '__about__.py')) as f:
    exec(f.read(), ABOUT)
# Get the long description from the README file
with open(path.join(HERE, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()
def get_dependencies():
    """Return the lines of requirements.in next to this file, or [] when the
    file does not exist."""
    dep_file = path.join(HERE, 'requirements.in')
    if path.isfile(dep_file):
        with open(dep_file, encoding='utf-8') as f:
            return f.readlines()
    return []
def parse_pyproject_array(name):
    """Extract the list assigned to *name* in pyproject.toml and return it as
    a Python list (parsed with ast.literal_eval)."""
    import os
    import re
    from ast import literal_eval

    pattern = r'^{} = (\[.*?\])$'.format(name)
    with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f:
        # Windows \r\n prevents match
        contents = '\n'.join(line.rstrip() for line in f.readlines())
    match = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL)
    return literal_eval(match.group(1))
# First entry of [project] dependencies is the datadog-checks-base pin.
CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0]
setup(
    name='datadog-riak_repl',
    version=ABOUT['__version__'],
    description='The Riak_repl check',
    long_description=long_description,
    long_description_content_type='text/markdown',
    keywords='datadog agent riak_repl check',
    # The project's main homepage.
    url='https://github.com/DataDog/integrations-extras',
    # Author details
    author='Britt Treece',
    author_email='britt.treece@gmail.com',
    # License
    license='BSD-3-Clause',
    # See https://pypi.org/classifiers
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'Topic :: System :: Monitoring',
        'License :: OSI Approved :: BSD License',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
    ],
    # The package we're going to ship
    packages=['datadog_checks', 'datadog_checks.riak_repl'],
    # Run-time dependencies
    install_requires=[CHECKS_BASE_REQ],
    extras_require={'deps': parse_pyproject_array('deps')},
    # Extra files to ship with the wheel package
    include_package_data=True,
)
| DataDog/integrations-extras | riak_repl/setup.py | Python | bsd-3-clause | 2,385 |
from __future__ import unicode_literals
import datetime
import re
import warnings
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db.models.loading import get_model
from django.utils import six
import haystack
from haystack.backends import BaseEngine, BaseSearchBackend, BaseSearchQuery, log_query
from haystack.constants import ID, DJANGO_CT, DJANGO_ID, DEFAULT_OPERATOR
from haystack.exceptions import MissingDependency, MoreLikeThisError
from haystack.inputs import PythonData, Clean, Exact, Raw
from haystack.models import SearchResult
from haystack.utils import get_identifier
from haystack.utils import log as logging
# Fail fast with haystack's own error type if the backend's hard
# dependencies are missing, instead of an opaque ImportError later.
try:
    import requests
except ImportError:
    raise MissingDependency("The 'elasticsearch' backend requires the installation of 'requests'.")
try:
    import pyelasticsearch
except ImportError:
    raise MissingDependency("The 'elasticsearch' backend requires the installation of 'pyelasticsearch'. Please refer to the documentation.")
# Parses ISO-8601 datetimes as returned by Elasticsearch (optional
# fractional seconds are matched but discarded by the named groups).
DATETIME_REGEX = re.compile(
    r'^(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})T'
    r'(?P<hour>\d{2}):(?P<minute>\d{2}):(?P<second>\d{2})(\.\d+)?$')
class ElasticsearchSearchBackend(BaseSearchBackend):
    """Haystack search backend speaking to Elasticsearch via pyelasticsearch."""
    # Word reserved by Elasticsearch for special use.
    RESERVED_WORDS = (
        'AND',
        'NOT',
        'OR',
        'TO',
    )

    # Characters reserved by Elasticsearch for special use.
    # The '\\' must come first, so as not to overwrite the other slash replacements.
    RESERVED_CHARACTERS = (
        '\\', '+', '-', '&&', '||', '!', '(', ')', '{', '}',
        '[', ']', '^', '"', '~', '*', '?', ':', '/',
    )

    # Settings to add an n-gram & edge n-gram analyzer.
    # Used as the index body in setup(); the custom analyzers back haystack's
    # NgramField / EdgeNgramField (partial-word matching).
    DEFAULT_SETTINGS = {
        'settings': {
            "analysis": {
                "analyzer": {
                    "ngram_analyzer": {
                        "type": "custom",
                        "tokenizer": "lowercase",
                        "filter": ["haystack_ngram"]
                    },
                    "edgengram_analyzer": {
                        "type": "custom",
                        "tokenizer": "lowercase",
                        "filter": ["haystack_edgengram"]
                    }
                },
                "tokenizer": {
                    "haystack_ngram_tokenizer": {
                        "type": "nGram",
                        "min_gram": 3,
                        "max_gram": 15,
                    },
                    "haystack_edgengram_tokenizer": {
                        "type": "edgeNGram",
                        "min_gram": 2,
                        "max_gram": 15,
                        "side": "front"
                    }
                },
                "filter": {
                    "haystack_ngram": {
                        "type": "nGram",
                        "min_gram": 3,
                        "max_gram": 15
                    },
                    "haystack_edgengram": {
                        "type": "edgeNGram",
                        "min_gram": 2,
                        "max_gram": 15
                    }
                }
            }
        }
    }
def __init__(self, connection_alias, **connection_options):
super(ElasticsearchSearchBackend, self).__init__(connection_alias, **connection_options)
if not 'URL' in connection_options:
raise ImproperlyConfigured("You must specify a 'URL' in your settings for connection '%s'." % connection_alias)
if not 'INDEX_NAME' in connection_options:
raise ImproperlyConfigured("You must specify a 'INDEX_NAME' in your settings for connection '%s'." % connection_alias)
self.conn = pyelasticsearch.ElasticSearch(connection_options['URL'], timeout=self.timeout)
self.index_name = connection_options['INDEX_NAME']
self.log = logging.getLogger('haystack')
self.setup_complete = False
self.existing_mapping = {}
def setup(self):
"""
Defers loading until needed.
"""
# Get the existing mapping & cache it. We'll compare it
# during the ``update`` & if it doesn't match, we'll put the new
# mapping.
try:
self.existing_mapping = self.conn.get_mapping(index=self.index_name)
except Exception:
if not self.silently_fail:
raise
unified_index = haystack.connections[self.connection_alias].get_unified_index()
self.content_field_name, field_mapping = self.build_schema(unified_index.all_searchfields())
current_mapping = {
'modelresult': {
'properties': field_mapping,
'_boost': {
'name': 'boost',
'null_value': 1.0
}
}
}
if current_mapping != self.existing_mapping:
try:
# Make sure the index is there first.
self.conn.create_index(self.index_name, self.DEFAULT_SETTINGS)
self.conn.put_mapping(self.index_name, 'modelresult', current_mapping)
self.existing_mapping = current_mapping
except Exception:
if not self.silently_fail:
raise
self.setup_complete = True
def update(self, index, iterable, commit=True):
if not self.setup_complete:
try:
self.setup()
except (requests.RequestException, pyelasticsearch.ElasticHttpError) as e:
if not self.silently_fail:
raise
self.log.error("Failed to add documents to Elasticsearch: %s", e)
return
prepped_docs = []
for obj in iterable:
try:
prepped_data = index.full_prepare(obj)
final_data = {}
# Convert the data to make sure it's happy.
for key, value in prepped_data.items():
final_data[key] = self._from_python(value)
prepped_docs.append(final_data)
except (requests.RequestException, pyelasticsearch.ElasticHttpError) as e:
if not self.silently_fail:
raise
# We'll log the object identifier but won't include the actual object
# to avoid the possibility of that generating encoding errors while
# processing the log message:
self.log.error(u"%s while preparing object for update" % e.__class__.__name__, exc_info=True, extra={
"data": {
"index": index,
"object": get_identifier(obj)
}
})
self.conn.bulk_index(self.index_name, 'modelresult', prepped_docs, id_field=ID)
if commit:
self.conn.refresh(index=self.index_name)
def remove(self, obj_or_string, commit=True):
doc_id = get_identifier(obj_or_string)
if not self.setup_complete:
try:
self.setup()
except (requests.RequestException, pyelasticsearch.ElasticHttpError) as e:
if not self.silently_fail:
raise
self.log.error("Failed to remove document '%s' from Elasticsearch: %s", doc_id, e)
return
try:
self.conn.delete(self.index_name, 'modelresult', doc_id)
if commit:
self.conn.refresh(index=self.index_name)
except (requests.RequestException, pyelasticsearch.ElasticHttpError) as e:
if not self.silently_fail:
raise
self.log.error("Failed to remove document '%s' from Elasticsearch: %s", doc_id, e)
def clear(self, models=[], commit=True):
# We actually don't want to do this here, as mappings could be
# very different.
# if not self.setup_complete:
# self.setup()
try:
if not models:
self.conn.delete_index(self.index_name)
else:
models_to_delete = []
for model in models:
models_to_delete.append("%s:%s.%s" % (DJANGO_CT, model._meta.app_label, model._meta.module_name))
# Delete by query in Elasticsearch asssumes you're dealing with
# a ``query`` root object. :/
query = {'query_string': {'query': " OR ".join(models_to_delete)}}
self.conn.delete_by_query(self.index_name, 'modelresult', query)
except (requests.RequestException, pyelasticsearch.ElasticHttpError) as e:
if not self.silently_fail:
raise
if len(models):
self.log.error("Failed to clear Elasticsearch index of models '%s': %s", ','.join(models_to_delete), e)
else:
self.log.error("Failed to clear Elasticsearch index: %s", e)
def build_search_kwargs(self, query_string, sort_by=None, start_offset=0, end_offset=None,
fields='', highlight=False, facets=None,
date_facets=None, query_facets=None,
narrow_queries=None, spelling_query=None,
within=None, dwithin=None, distance_point=None,
models=None, limit_to_registered_models=None,
result_class=None):
index = haystack.connections[self.connection_alias].get_unified_index()
content_field = index.document_field
if query_string == '*:*':
kwargs = {
'query': {
'filtered': {
'query': {
"match_all": {}
},
},
},
}
else:
kwargs = {
'query': {
'filtered': {
'query': {
'query_string': {
'default_field': content_field,
'default_operator': DEFAULT_OPERATOR,
'query': query_string,
'analyze_wildcard': True,
'auto_generate_phrase_queries': True,
},
},
},
},
}
if fields:
if isinstance(fields, (list, set)):
fields = " ".join(fields)
kwargs['fields'] = fields
if sort_by is not None:
order_list = []
for field, direction in sort_by:
if field == 'distance' and distance_point:
# Do the geo-enabled sort.
lng, lat = distance_point['point'].get_coords()
sort_kwargs = {
"_geo_distance": {
distance_point['field']: [lng, lat],
"order": direction,
"unit": "km"
}
}
else:
if field == 'distance':
warnings.warn("In order to sort by distance, you must call the '.distance(...)' method.")
# Regular sorting.
sort_kwargs = {field: {'order': direction}}
order_list.append(sort_kwargs)
kwargs['sort'] = order_list
# From/size offsets don't seem to work right in Elasticsearch's DSL. :/
# if start_offset is not None:
# kwargs['from'] = start_offset
# if end_offset is not None:
# kwargs['size'] = end_offset - start_offset
if highlight is True:
kwargs['highlight'] = {
'fields': {
content_field: {'store': 'yes'},
}
}
if self.include_spelling:
kwargs['suggest'] = {
'suggest': {
'text': spelling_query or query_string,
'term': {
# Using content_field here will result in suggestions of stemmed words.
'field': '_all',
},
},
}
if narrow_queries is None:
narrow_queries = set()
if facets is not None:
kwargs.setdefault('facets', {})
for facet_fieldname, extra_options in facets.items():
facet_options = {
'terms': {
'field': facet_fieldname,
'size': 100,
},
}
# Special cases for options applied at the facet level (not the terms level).
if extra_options.pop('global_scope', False):
# Renamed "global_scope" since "global" is a python keyword.
facet_options['global'] = True
if 'facet_filter' in extra_options:
facet_options['facet_filter'] = extra_options.pop('facet_filter')
facet_options['terms'].update(extra_options)
kwargs['facets'][facet_fieldname] = facet_options
if date_facets is not None:
kwargs.setdefault('facets', {})
for facet_fieldname, value in date_facets.items():
# Need to detect on gap_by & only add amount if it's more than one.
interval = value.get('gap_by').lower()
# Need to detect on amount (can't be applied on months or years).
if value.get('gap_amount', 1) != 1 and not interval in ('month', 'year'):
# Just the first character is valid for use.
interval = "%s%s" % (value['gap_amount'], interval[:1])
kwargs['facets'][facet_fieldname] = {
'date_histogram': {
'field': facet_fieldname,
'interval': interval,
},
'facet_filter': {
"range": {
facet_fieldname: {
'from': self._from_python(value.get('start_date')),
'to': self._from_python(value.get('end_date')),
}
}
}
}
if query_facets is not None:
kwargs.setdefault('facets', {})
for facet_fieldname, value in query_facets:
kwargs['facets'][facet_fieldname] = {
'query': {
'query_string': {
'query': value,
}
},
}
if limit_to_registered_models is None:
limit_to_registered_models = getattr(settings, 'HAYSTACK_LIMIT_TO_REGISTERED_MODELS', True)
if models and len(models):
model_choices = sorted(['%s.%s' % (model._meta.app_label, model._meta.module_name) for model in models])
elif limit_to_registered_models:
# Using narrow queries, limit the results to only models handled
# with the current routers.
model_choices = self.build_models_list()
else:
model_choices = []
if len(model_choices) > 0:
if narrow_queries is None:
narrow_queries = set()
narrow_queries.add('%s:(%s)' % (DJANGO_CT, ' OR '.join(model_choices)))
if narrow_queries:
kwargs['query'].setdefault('filtered', {})
kwargs['query']['filtered'].setdefault('filter', {})
kwargs['query']['filtered']['filter'] = {
'fquery': {
'query': {
'query_string': {
'query': u' AND '.join(list(narrow_queries)),
},
},
'_cache': True,
}
}
if within is not None:
from haystack.utils.geo import generate_bounding_box
((min_lat, min_lng), (max_lat, max_lng)) = generate_bounding_box(within['point_1'], within['point_2'])
within_filter = {
"geo_bounding_box": {
within['field']: {
"top_left": {
"lat": max_lat,
"lon": min_lng
},
"bottom_right": {
"lat": min_lat,
"lon": max_lng
}
}
},
}
kwargs['query'].setdefault('filtered', {})
kwargs['query']['filtered'].setdefault('filter', {})
if kwargs['query']['filtered']['filter']:
compound_filter = {
"and": [
kwargs['query']['filtered']['filter'],
within_filter,
]
}
kwargs['query']['filtered']['filter'] = compound_filter
else:
kwargs['query']['filtered']['filter'] = within_filter
if dwithin is not None:
lng, lat = dwithin['point'].get_coords()
dwithin_filter = {
"geo_distance": {
"distance": dwithin['distance'].km,
dwithin['field']: {
"lat": lat,
"lon": lng
}
}
}
kwargs['query'].setdefault('filtered', {})
kwargs['query']['filtered'].setdefault('filter', {})
if kwargs['query']['filtered']['filter']:
compound_filter = {
"and": [
kwargs['query']['filtered']['filter'],
dwithin_filter
]
}
kwargs['query']['filtered']['filter'] = compound_filter
else:
kwargs['query']['filtered']['filter'] = dwithin_filter
# Remove the "filtered" key if we're not filtering. Otherwise,
# Elasticsearch will blow up.
if not kwargs['query']['filtered'].get('filter'):
kwargs['query'] = kwargs['query']['filtered']['query']
return kwargs
@log_query
def search(self, query_string, **kwargs):
if len(query_string) == 0:
return {
'results': [],
'hits': 0,
}
if not self.setup_complete:
self.setup()
search_kwargs = self.build_search_kwargs(query_string, **kwargs)
search_kwargs['from'] = kwargs.get('start_offset', 0)
order_fields = set()
for order in search_kwargs.get('sort', []):
for key in order.keys():
order_fields.add(key)
geo_sort = '_geo_distance' in order_fields
end_offset = kwargs.get('end_offset')
start_offset = kwargs.get('start_offset', 0)
if end_offset is not None and end_offset > start_offset:
search_kwargs['size'] = end_offset - start_offset
try:
raw_results = self.conn.search(search_kwargs,
index=self.index_name,
doc_type='modelresult')
except (requests.RequestException, pyelasticsearch.ElasticHttpError) as e:
if not self.silently_fail:
raise
self.log.error("Failed to query Elasticsearch using '%s': %s", query_string, e)
raw_results = {}
return self._process_results(raw_results,
highlight=kwargs.get('highlight'),
result_class=kwargs.get('result_class', SearchResult),
distance_point=kwargs.get('distance_point'), geo_sort=geo_sort)
def more_like_this(self, model_instance, additional_query_string=None,
start_offset=0, end_offset=None, models=None,
limit_to_registered_models=None, result_class=None, **kwargs):
from haystack import connections
if not self.setup_complete:
self.setup()
# Deferred models will have a different class ("RealClass_Deferred_fieldname")
# which won't be in our registry:
model_klass = model_instance._meta.concrete_model
index = connections[self.connection_alias].get_unified_index().get_index(model_klass)
field_name = index.get_content_field()
params = {}
if start_offset is not None:
params['search_from'] = start_offset
if end_offset is not None:
params['search_size'] = end_offset - start_offset
doc_id = get_identifier(model_instance)
try:
raw_results = self.conn.more_like_this(self.index_name, 'modelresult', doc_id, [field_name], **params)
except (requests.RequestException, pyelasticsearch.ElasticHttpError) as e:
if not self.silently_fail:
raise
self.log.error("Failed to fetch More Like This from Elasticsearch for document '%s': %s", doc_id, e)
raw_results = {}
return self._process_results(raw_results, result_class=result_class)
def _process_results(self, raw_results, highlight=False,
result_class=None, distance_point=None,
geo_sort=False):
from haystack import connections
results = []
hits = raw_results.get('hits', {}).get('total', 0)
facets = {}
spelling_suggestion = None
if result_class is None:
result_class = SearchResult
if self.include_spelling and 'suggest' in raw_results:
raw_suggest = raw_results['suggest']['suggest']
spelling_suggestion = ' '.join([word['text'] if len(word['options']) == 0 else word['options'][0]['text'] for word in raw_suggest])
if 'facets' in raw_results:
facets = {
'fields': {},
'dates': {},
'queries': {},
}
for facet_fieldname, facet_info in raw_results['facets'].items():
if facet_info.get('_type', 'terms') == 'terms':
facets['fields'][facet_fieldname] = [(individual['term'], individual['count']) for individual in facet_info['terms']]
elif facet_info.get('_type', 'terms') == 'date_histogram':
# Elasticsearch provides UTC timestamps with an extra three
# decimals of precision, which datetime barfs on.
facets['dates'][facet_fieldname] = [(datetime.datetime.utcfromtimestamp(individual['time'] / 1000), individual['count']) for individual in facet_info['entries']]
elif facet_info.get('_type', 'terms') == 'query':
facets['queries'][facet_fieldname] = facet_info['count']
unified_index = connections[self.connection_alias].get_unified_index()
indexed_models = unified_index.get_indexed_models()
content_field = unified_index.document_field
for raw_result in raw_results.get('hits', {}).get('hits', []):
source = raw_result['_source']
app_label, model_name = source[DJANGO_CT].split('.')
additional_fields = {}
model = get_model(app_label, model_name)
if model and model in indexed_models:
for key, value in source.items():
index = unified_index.get_index(model)
string_key = str(key)
if string_key in index.fields and hasattr(index.fields[string_key], 'convert'):
additional_fields[string_key] = index.fields[string_key].convert(value)
else:
additional_fields[string_key] = self._to_python(value)
del(additional_fields[DJANGO_CT])
del(additional_fields[DJANGO_ID])
if 'highlight' in raw_result:
additional_fields['highlighted'] = raw_result['highlight'].get(content_field, '')
if distance_point:
additional_fields['_point_of_origin'] = distance_point
if geo_sort and raw_result.get('sort'):
from haystack.utils.geo import Distance
additional_fields['_distance'] = Distance(km=float(raw_result['sort'][0]))
else:
additional_fields['_distance'] = None
result = result_class(app_label, model_name, source[DJANGO_ID], raw_result['_score'], **additional_fields)
results.append(result)
else:
hits -= 1
return {
'results': results,
'hits': hits,
'facets': facets,
'spelling_suggestion': spelling_suggestion,
}
def build_schema(self, fields):
content_field_name = ''
mapping = {}
for field_name, field_class in fields.items():
field_mapping = {
'boost': field_class.boost,
'index': 'analyzed',
'store': 'yes',
'type': 'string',
}
if field_class.document is True:
content_field_name = field_class.index_fieldname
# DRL_FIXME: Perhaps move to something where, if none of these
# checks succeed, call a custom method on the form that
# returns, per-backend, the right type of storage?
if field_class.field_type in ['date', 'datetime']:
field_mapping['type'] = 'date'
elif field_class.field_type == 'integer':
field_mapping['type'] = 'long'
elif field_class.field_type == 'float':
field_mapping['type'] = 'float'
elif field_class.field_type == 'boolean':
field_mapping['type'] = 'boolean'
elif field_class.field_type == 'ngram':
field_mapping['analyzer'] = "ngram_analyzer"
elif field_class.field_type == 'edge_ngram':
field_mapping['analyzer'] = "edgengram_analyzer"
elif field_class.field_type == 'location':
field_mapping['type'] = 'geo_point'
# The docs claim nothing is needed for multivalue...
# if field_class.is_multivalued:
# field_data['multi_valued'] = 'true'
if field_class.stored is False:
field_mapping['store'] = 'no'
# Do this last to override `text` fields.
if field_class.indexed is False or hasattr(field_class, 'facet_for'):
field_mapping['index'] = 'not_analyzed'
if field_mapping['type'] == 'string' and field_class.indexed:
field_mapping["term_vector"] = "with_positions_offsets"
if not hasattr(field_class, 'facet_for') and not field_class.field_type in('ngram', 'edge_ngram'):
field_mapping["analyzer"] = "snowball"
mapping[field_class.index_fieldname] = field_mapping
return (content_field_name, mapping)
def _iso_datetime(self, value):
"""
If value appears to be something datetime-like, return it in ISO format.
Otherwise, return None.
"""
if hasattr(value, 'strftime'):
if hasattr(value, 'hour'):
return value.isoformat()
else:
return '%sT00:00:00' % value.isoformat()
def _from_python(self, value):
"""Convert more Python data types to ES-understandable JSON."""
iso = self._iso_datetime(value)
if iso:
return iso
elif isinstance(value, six.binary_type):
# TODO: Be stricter.
return six.text_type(value, errors='replace')
elif isinstance(value, set):
return list(value)
return value
def _to_python(self, value):
"""Convert values from ElasticSearch to native Python values."""
if isinstance(value, (int, float, complex, list, tuple, bool)):
return value
if isinstance(value, six.string_types):
possible_datetime = DATETIME_REGEX.search(value)
if possible_datetime:
date_values = possible_datetime.groupdict()
for dk, dv in date_values.items():
date_values[dk] = int(dv)
return datetime.datetime(
date_values['year'], date_values['month'],
date_values['day'], date_values['hour'],
date_values['minute'], date_values['second'])
try:
# This is slightly gross but it's hard to tell otherwise what the
# string's original type might have been. Be careful who you trust.
converted_value = eval(value)
# Try to handle most built-in types.
if isinstance(
converted_value,
(int, list, tuple, set, dict, float, complex)):
return converted_value
except Exception:
# If it fails (SyntaxError or its ilk) or we don't trust it,
# continue on.
pass
return value
# Sucks that this is almost an exact copy of what's in the Solr backend,
# but we can't import due to dependencies.
class ElasticsearchSearchQuery(BaseSearchQuery):
    """
    Builds Lucene-style query fragments and the kwargs dict consumed by
    ``ElasticsearchSearchBackend.search``.
    """

    def matching_all_fragment(self):
        """Return the fragment that matches every document."""
        return '*:*'

    def add_spatial(self, lat, lon, sfield, distance, filter='bbox'):
        """Adds spatial query parameters to search query"""
        # NOTE(review): 'long' below resolves to the (Python 2) builtin type,
        # not the 'lon' parameter -- the longitude argument is never stored.
        # Looks like a copy/paste bug from add_order_by_distance; confirm and
        # change to 'lon'.
        kwargs = {
            'lat': lat,
            'long': long,
            'sfield': sfield,
            'distance': distance,
        }
        self.spatial_query.update(kwargs)

    def add_order_by_distance(self, lat, long, sfield):
        """Orders the search result by distance from point."""
        kwargs = {
            'lat': lat,
            'long': long,
            'sfield': sfield,
        }
        self.order_by_distance.update(kwargs)

    def build_query_fragment(self, field, filter_type, value):
        """
        Convert one (field, filter_type, value) triple into a Lucene-style
        fragment, e.g. u'title:(foo*)' or u'pub_date:(["a" TO "b"])'.
        """
        from haystack import connections
        query_frag = ''

        if not hasattr(value, 'input_type_name'):
            # Handle when we've got a ``ValuesListQuerySet``...
            if hasattr(value, 'values_list'):
                value = list(value)

            if isinstance(value, six.string_types):
                # It's not an ``InputType``. Assume ``Clean``.
                value = Clean(value)
            else:
                value = PythonData(value)

        # Prepare the query using the InputType.
        prepared_value = value.prepare(self)

        if not isinstance(prepared_value, (set, list, tuple)):
            # Then convert whatever we get back to what pysolr wants if needed.
            prepared_value = self.backend._from_python(prepared_value)

        # 'content' is a special reserved word, much like 'pk' in
        # Django's ORM layer. It indicates 'no special field'.
        if field == 'content':
            index_fieldname = ''
        else:
            index_fieldname = u'%s:' % connections[self._using].get_unified_index().get_index_fieldname(field)

        # Template for each filter type; %s is the prepared value.
        filter_types = {
            'contains': u'%s',
            'startswith': u'%s*',
            'exact': u'%s',
            'gt': u'{%s TO *}',
            'gte': u'[%s TO *]',
            'lt': u'{* TO %s}',
            'lte': u'[* TO %s]',
        }

        if value.post_process is False:
            query_frag = prepared_value
        else:
            if filter_type in ['contains', 'startswith']:
                if value.input_type_name == 'exact':
                    query_frag = prepared_value
                else:
                    # Iterate over terms & incorportate the converted form of each into the query.
                    terms = []

                    if isinstance(prepared_value, six.string_types):
                        for possible_value in prepared_value.split(' '):
                            terms.append(filter_types[filter_type] % self.backend._from_python(possible_value))
                    else:
                        terms.append(filter_types[filter_type] % self.backend._from_python(prepared_value))

                    if len(terms) == 1:
                        query_frag = terms[0]
                    else:
                        query_frag = u"(%s)" % " AND ".join(terms)
            elif filter_type == 'in':
                in_options = []

                for possible_value in prepared_value:
                    in_options.append(u'"%s"' % self.backend._from_python(possible_value))

                query_frag = u"(%s)" % " OR ".join(in_options)
            elif filter_type == 'range':
                start = self.backend._from_python(prepared_value[0])
                end = self.backend._from_python(prepared_value[1])
                query_frag = u'["%s" TO "%s"]' % (start, end)
            elif filter_type == 'exact':
                if value.input_type_name == 'exact':
                    query_frag = prepared_value
                else:
                    prepared_value = Exact(prepared_value).prepare(self)
                    query_frag = filter_types[filter_type] % prepared_value
            else:
                if value.input_type_name != 'exact':
                    prepared_value = Exact(prepared_value).prepare(self)

                query_frag = filter_types[filter_type] % prepared_value

        # Wrap in parens unless the value is Raw (user controls the syntax).
        if len(query_frag) and not isinstance(value, Raw):
            if not query_frag.startswith('(') and not query_frag.endswith(')'):
                query_frag = "(%s)" % query_frag

        return u"%s%s" % (index_fieldname, query_frag)

    def build_alt_parser_query(self, parser_name, query_string='', **kwargs):
        """
        Build a Solr-style ``{!parser k=v ...}`` alternate-parser expression.

        NOTE(review): this syntax is Solr-specific; presumably carried over
        from the Solr backend (see the module comment above this class) --
        confirm whether it is ever exercised against Elasticsearch.
        """
        if query_string:
            kwargs['v'] = query_string

        kwarg_bits = []

        for key in sorted(kwargs.keys()):
            if isinstance(kwargs[key], six.string_types) and ' ' in kwargs[key]:
                kwarg_bits.append(u"%s='%s'" % (key, kwargs[key]))
            else:
                kwarg_bits.append(u"%s=%s" % (key, kwargs[key]))

        return u"{!%s %s}" % (parser_name, ' '.join(kwarg_bits))

    def build_params(self, spelling_query=None, **kwargs):
        """Collect the kwargs dict that the backend's search() expects."""
        search_kwargs = {
            'start_offset': self.start_offset,
            'result_class': self.result_class
        }
        order_by_list = None

        if self.order_by:
            if order_by_list is None:
                order_by_list = []

            for field in self.order_by:
                direction = 'asc'
                # A leading '-' means descending order.
                if field.startswith('-'):
                    direction = 'desc'
                    field = field[1:]
                order_by_list.append((field, direction))

            search_kwargs['sort_by'] = order_by_list

        if self.date_facets:
            search_kwargs['date_facets'] = self.date_facets

        if self.distance_point:
            search_kwargs['distance_point'] = self.distance_point

        if self.dwithin:
            search_kwargs['dwithin'] = self.dwithin

        if self.end_offset is not None:
            search_kwargs['end_offset'] = self.end_offset

        if self.facets:
            search_kwargs['facets'] = self.facets

        if self.fields:
            search_kwargs['fields'] = self.fields

        if self.highlight:
            search_kwargs['highlight'] = self.highlight

        if self.models:
            search_kwargs['models'] = self.models

        if self.narrow_queries:
            search_kwargs['narrow_queries'] = self.narrow_queries

        if self.query_facets:
            search_kwargs['query_facets'] = self.query_facets

        if self.within:
            search_kwargs['within'] = self.within

        if spelling_query:
            search_kwargs['spelling_query'] = spelling_query

        return search_kwargs

    def run(self, spelling_query=None, **kwargs):
        """Builds and executes the query. Returns a list of search results."""
        final_query = self.build_query()
        search_kwargs = self.build_params(spelling_query, **kwargs)
        results = self.backend.search(final_query, **search_kwargs)
        self._results = results.get('results', [])
        self._hit_count = results.get('hits', 0)
        self._facet_counts = self.post_process_facets(results)
        self._spelling_suggestion = results.get('spelling_suggestion', None)

    def run_mlt(self, **kwargs):
        """Builds and executes the query. Returns a list of search results."""
        if self._more_like_this is False or self._mlt_instance is None:
            raise MoreLikeThisError("No instance was provided to determine 'More Like This' results.")

        additional_query_string = self.build_query()
        search_kwargs = {
            'start_offset': self.start_offset,
            'result_class': self.result_class,
            'models': self.models
        }

        if self.end_offset is not None:
            search_kwargs['end_offset'] = self.end_offset - self.start_offset

        results = self.backend.more_like_this(self._mlt_instance, additional_query_string, **search_kwargs)
        self._results = results.get('results', [])
        self._hit_count = results.get('hits', 0)
class ElasticsearchSearchEngine(BaseEngine):
    """Wires the Elasticsearch backend & query classes together for Haystack."""
    backend = ElasticsearchSearchBackend
    query = ElasticsearchSearchQuery
| zhangtianyi1234/django-haystack | haystack/backends/elasticsearch_backend.py | Python | bsd-3-clause | 37,888 |
from casexml.apps.case.xform import get_case_ids_from_form
from corehq.apps.change_feed import topics
from corehq.apps.change_feed.producer import producer
from corehq.apps.change_feed import data_sources
from corehq.form_processor.interfaces.dbaccessors import FormAccessors, CaseAccessors
from corehq.form_processor.signals import sql_case_post_save
from pillowtop.feed.interface import ChangeMeta
def republish_all_changes_for_form(domain, form_id):
    """
    Re-publish changes for a form and everything it touched.

    Order: the form change first, then one change per affected case
    (without firing the post-save signal), then the ledger changes derived
    from the form's stock transactions.
    """
    xform = FormAccessors(domain=domain).get_form(form_id)
    publish_form_saved(xform)
    touched_cases = _get_cases_from_form(domain, xform)
    for touched_case in touched_cases:
        publish_case_saved(touched_case, send_post_save_signal=False)
    _publish_ledgers_from_form(domain, xform)
def publish_form_saved(form):
    """Publish a change for a saved SQL form to the form topic."""
    producer.send_change(topics.FORM_SQL, change_meta_from_sql_form(form))
def change_meta_from_sql_form(form):
    """Build the ChangeMeta describing a save of the given SQL form."""
    meta_kwargs = {
        'document_id': form.form_id,
        'data_source_type': data_sources.FORM_SQL,
        'data_source_name': 'form-sql',  # todo: this isn't really needed.
        'document_type': form.doc_type,
        'document_subtype': form.xmlns,
        'domain': form.domain,
        'is_deletion': form.is_deleted,
    }
    return ChangeMeta(**meta_kwargs)
def publish_form_deleted(domain, form_id):
    """Publish a deletion change for a form to the form topic."""
    deletion_meta = ChangeMeta(
        document_id=form_id,
        data_source_type=data_sources.FORM_SQL,
        data_source_name='form-sql',
        document_type='XFormInstance-Deleted',
        domain=domain,
        is_deletion=True,
    )
    producer.send_change(topics.FORM_SQL, deletion_meta)
def publish_case_saved(case, send_post_save_signal=True):
    """
    Publish the change to kafka and run case post-save signals.
    """
    change_meta = change_meta_from_sql_case(case)
    producer.send_change(topics.CASE_SQL, change_meta)
    if send_post_save_signal:
        sql_case_post_save.send(case.__class__, case=case)
def change_meta_from_sql_case(case):
    """Build the ChangeMeta describing a save of the given SQL case."""
    meta_kwargs = {
        'document_id': case.case_id,
        'data_source_type': data_sources.CASE_SQL,
        'data_source_name': 'case-sql',  # todo: this isn't really needed.
        'document_type': 'CommCareCase',
        'document_subtype': case.type,
        'domain': case.domain,
        'is_deletion': case.is_deleted,
    }
    return ChangeMeta(**meta_kwargs)
def publish_case_deleted(domain, case_id):
    """Publish a deletion change for a case to the case topic."""
    deletion_meta = ChangeMeta(
        document_id=case_id,
        data_source_type=data_sources.CASE_SQL,
        data_source_name='case-sql',  # todo: this isn't really needed.
        document_type='CommCareCase-Deleted',
        domain=domain,
        is_deletion=True,
    )
    producer.send_change(topics.CASE_SQL, deletion_meta)
def publish_ledger_v2_saved(ledger_value):
    """Publish a change for a saved v2 ledger value to the ledger topic."""
    producer.send_change(topics.LEDGER, change_meta_from_ledger_v2(ledger_value))
def change_meta_from_ledger_v2(ledger_value):
    """Build the ChangeMeta for a saved v2 ledger value.

    Delegates to ``_change_meta_from_ledger_reference`` — the original body
    duplicated that function field-for-field (same document_id via
    ``ledger_reference.as_id()``, same source type/name, ``is_deletion=False``),
    so delegating keeps the two v2-ledger publish paths in sync.
    """
    return _change_meta_from_ledger_reference(
        ledger_value.domain, ledger_value.ledger_reference)
def publish_ledger_v1_saved(stock_state):
    """Publish a change for a saved v1 ledger (StockState) to the ledger topic."""
    producer.send_change(topics.LEDGER, change_meta_from_ledger_v1(stock_state))
def change_meta_from_ledger_v1(stock_state):
    """Build the ChangeMeta for a saved v1 ledger (StockState row)."""
    meta_kwargs = {
        'document_id': stock_state.pk,
        'data_source_type': data_sources.LEDGER_V1,
        'data_source_name': 'ledger-v1',  # todo: this isn't really needed.
        'domain': stock_state.domain,
        'is_deletion': False,
    }
    return ChangeMeta(**meta_kwargs)
def _get_cases_from_form(domain, form):
    """Return case objects for every case the form touched, including
    cases referenced only through stock transactions."""
    from corehq.form_processor.parsers.ledgers.form import get_case_ids_from_stock_transactions
    all_case_ids = get_case_ids_from_form(form)
    all_case_ids |= get_case_ids_from_stock_transactions(form)
    return CaseAccessors(domain).get_cases(list(all_case_ids))
def _publish_ledgers_from_form(domain, form):
    """Publish one ledger change per unique ledger reference in the form's
    stock report transactions."""
    from corehq.form_processor.parsers.ledgers.form import get_all_stock_report_helpers_from_form
    seen_references = set()
    for helper in get_all_stock_report_helpers_from_form(form):
        for transaction in helper.transactions:
            seen_references.add(transaction.ledger_reference)
    for reference in seen_references:
        producer.send_change(topics.LEDGER, _change_meta_from_ledger_reference(domain, reference))
def _change_meta_from_ledger_reference(domain, ledger_reference):
    """Build the ChangeMeta for a (v2) ledger identified by its reference."""
    meta_kwargs = {
        'document_id': ledger_reference.as_id(),
        'data_source_type': data_sources.LEDGER_V2,
        'data_source_name': 'ledger-v2',  # todo: this isn't really needed.
        'domain': domain,
        'is_deletion': False,
    }
    return ChangeMeta(**meta_kwargs)
| qedsoftware/commcare-hq | corehq/form_processor/change_publishers.py | Python | bsd-3-clause | 4,539 |
# last modified 10.3.2009
class StatsError(Exception):
    """
    Exception raised for errors resulting from collection
    of statistical information about the grid.

    Attributes:
        expression -- input expression in which error occurred
        message -- explanation of error
    """

    def __init__(self, expression, message):
        # Initialize the Exception base with the message so that str(e)
        # and e.args carry the explanation (the original skipped this,
        # leaving str(e) == '').
        super(StatsError, self).__init__(message)
        self.expression = expression
        self.message = message

    def desc(self):
        """Return the human-readable explanation of the error."""
        return self.message
class TYPE_ERROR(StatsError):
    """Exception raised if the type of a statistical container
    is not known.
    """
    pass
| placiflury/gridmonitor-infocache | infocache/errors/stats.py | Python | bsd-3-clause | 593 |
# -*- coding: utf-8 -*-
"""
banner
Description goes here...
:copyright: (c) 2014 by Openlabs Technologies & Consulting (P) Limited
:license: BSD, see LICENSE for more details.
"""
import unittest
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException, \
NoAlertPresentException
from base import Selenium2OnSauce
class Banner(Selenium2OnSauce):
    def test_advanced_skills(self):
        # Advanced-skills article page loads and renders the 'wfmis' element.
        driver = self.driver
        driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/foundation-advance-skills-development/advanced-skills/")
        self.assertTrue(self.is_element_present(By.ID, "wfmis"))
    def test_advanced_skills_epp(self):
        # NOTE(review): no assertion -- only verifies the page loads without raising.
        driver = self.driver
        driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/ssc-nasscom-training-programs/software-products/advanced-skills/")
    def test_advanced_skills_erd(self):
        # NOTE(review): no assertion -- only verifies the page loads without raising.
        driver = self.driver
        driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/ssc-nasscom-training-programs/erd/advanced-skills/")
    def test_bpm(self):
        # NOTE(review): no assertion -- only verifies the page loads without raising.
        driver = self.driver
        driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/ssc-nasscom-training-programs/bpm/")
    def test_central_overnment(self):
        # Central-government research page renders its four content panels.
        # NOTE(review): method name has a typo ('overnment'); left unchanged
        # since test names may be referenced by CI configuration.
        driver = self.driver
        driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/research/government-research/central-government/")
        self.assertTrue(self.is_element_present(By.CSS_SELECTOR, "div.mid-box-flip"))
        self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[2]/div"))
        self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[3]/div"))
        self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[4]/div"))
    def test_company_research(self):
        # NOTE(review): no assertion -- only verifies the page loads without raising.
        driver = self.driver
        driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/research/company-research/")
    def test_company_training_provider(self):
        # NOTE(review): no assertion -- only verifies the page loads without raising.
        driver = self.driver
        driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/company-training-programs/")
    def test_courseware(self):
        # Courseware page renders its three content panels.
        driver = self.driver
        driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/foundation-advance-skills-development/foundation-skills/courseware/")
        self.assertTrue(self.is_element_present(By.CSS_SELECTOR, "div.mid-box-flip"))
        self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[2]/div"))
        self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[3]/div"))
def test_developing_tomorrow(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/")
def test_download(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/foundation-advance-skills-development/foundation-skills/courseware/download/")
def test_epp(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/ssc-nasscom-training-programs/erd/foundation-skills/epp/")
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, "div.mid-box-flip"))
self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[2]/div"))
def test_erd(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/ssc-nasscom-training-programs/erd/")
def test_event(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/event-workforce-enablement/")
def test_executive_summary(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/foundation-advance-skills-development/foundation-skills/courseware/read-only/executive-summary/")
def test_foundation_advance_skills_devlopment(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/foundation-advance-skills-development/")
def test_foundation_convocation_banner(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/")
self.assertTrue(self.is_element_present(By.XPATH, "(//a[contains(text(),'Know More')])[3]"))
driver.get("http://pursuite.openlabs.us/about-us/ssc-nasscom/vision-mission/")
def test_foundation_skills_bpm(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/ssc-nasscom-training-programs/bpm/foundation-skills/")
def test_foundation_skills_ed(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/ssc-nasscom-training-programs/erd/foundation-skills/")
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, "div.mid-box-flip"))
def test_foundation_skills_epp(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/ssc-nasscom-training-programs/software-products/foundation-skills/")
def test_full_course(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/foundation-advance-skills-development/foundation-skills/courseware/read-only/full-course/")
def test_gbfs_bpm(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/ssc-nasscom-training-programs/bpm/foundation-skills/gbfs/")
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, "span.filetitle"))
def test_government(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/research/government-research/")
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, "div.mid-box-flip"))
self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[2]/div"))
def test_government_research(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/research/government-research/")
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, "div.mid-box-flip"))
self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[2]/div"))
self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[3]/div"))
def test_government_training_program(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/government-training-programs/")
def test_healp_you_choose(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/")
self.assertTrue(self.is_element_present(By.LINK_TEXT, "Know More"))
def test_ict_academy_tamilnadu(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/private-sector-training-programs/ict-academy-tamilnadu/")
def test_il_fs(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resougvrces/private-sector-training-programs/ilfs/")
def test_implementation_cycle_bpm(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/ssc-nasscom-training-programs/bpm/foundation-skills/gbfs/implementation-cycle/")
def test_interactive_tools(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/interactive-tools/")
def test_it_initiative(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/")
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, "div.mid-box-flip"))
def test_it_ites(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/it-ites-initiativesprograms/")
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, "div.mid-box-flip"))
self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[2]/div"))
self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[7]/div"))
def test_listining_of_programs(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/event-workforce-enablement/listing-programs/")
def test_nasscom_research(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/research/nasscom-research/")
def test_niit(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/private-sector-training-programs/niit/")
def test_obf_bpm(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/ssc-nasscom-training-programs/bpm/foundation-skills/gbfs/outcome-based-framework-gbfs/")
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, "span.filetitle"))
def test_other_bodies_government(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/government-training-programs/other-bodies/")
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, "div.mid-box-flip"))
def test_other_bodies(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/research/government-research/other-bodies/")
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, "div.mid-box-flip"))
self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[2]/div"))
self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[3]/div"))
self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[4]/div"))
self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[5]/div"))
def test_other_publication(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/foundation-advance-skills-development/foundation-skills/courseware/other-publication/")
def test_policy_development(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/policy-development/")
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, "div.mid-box-flip"))
self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[2]/div"))
self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[3]/div"))
self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[4]/div"))
def test_private_sector_training_programs(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/private-sector-training-programs/")
def test_program_registration(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/event-workforce-enablement/program-registration/")
def test_promotion_marketing(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/promotion-marketing/")
def test_read_only(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/foundation-advance-skills-development/foundation-skills/courseware/read-only/")
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, "div.mid-box-flip"))
self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[2]/div"))
def test_research(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/research/")
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, "div.mid-box-flip"))
self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[2]/div"))
def test_skills_academy(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/private-sector-training-programs/skills-academy/")
def test_software_products(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/ssc-nasscom-training-programs/software-products/")
def test_ssc_training_programs(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/ssc-nasscom-training-programs/")
def test_state_government(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/research/government-research/state-government/")
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, "div.mid-box-flip"))
self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[2]/div"))
self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[3]/div"))
self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[4]/div"))
def test_talent_sprint(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/private-sector-training-programs/talent-sprint/")
def test_training_materials(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/training-materials/")
self.assertTrue(self.is_element_present(By.CSS_SELECTOR, "div.mid-box-flip"))
self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[2]/div"))
self.assertTrue(self.is_element_present(By.XPATH, "//div[@id='contentAndSidebars']/div/div[2]/div[2]/div/div/div[3]/div"))
def test_training_that_helps_you(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/")
self.assertTrue(self.is_element_present(By.XPATH, "(//a[contains(text(),'Know More')])[2]"))
def test_training_tools(self):
driver = self.driver
driver.get("http://pursuite.openlabs.us/ssc-article/it-ites-initiative/developing-tomorrows-workforce-today/training-programs-tools-resources/training-tools/")
def is_element_present(self, how, what):
try: self.driver.find_element(by=how, value=what)
except NoSuchElementException, e: return False
return True
def is_alert_present(self):
try: self.driver.switch_to_alert()
except NoAlertPresentException, e: return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally: self.accept_next_alert = True
# Run the suite when executed directly; unittest discovers every test_*
# method on the TestCase class above.
if __name__ == "__main__":
    unittest.main()
| arpitprogressive/arpittest | intergration_test/banner.py | Python | bsd-3-clause | 18,254 |
""" Computes the Control Flow Graph of a function. """
from pythran.passmanager import FunctionAnalysis
import ast
import networkx as nx
class CFG(FunctionAnalysis):
    """
    Computes the Control Flow Graph of a function.

    The processing of a node yields a pair containing
    * the OUT nodes, to be linked with the IN nodes of the successor
    * the RAISE nodes, nodes that stop the control flow (exception/break/...)
    """
    def __init__(self):
        # An edge u -> v in `result` means control may flow directly from
        # statement u to statement v.
        self.result = nx.DiGraph()
        super(CFG, self).__init__()
    def visit_FunctionDef(self, node):
        # the function itself is the entry point
        self.result.add_node(node)
        currs = (node,)
        for n in node.body:
            self.result.add_node(n)
            for curr in currs:
                self.result.add_edge(curr, n)
            # each statement's OUT set becomes the IN set of its successor
            currs, _ = self.visit(n)
        # add an edge to None for nodes that end the control flow
        # without a return
        self.result.add_node(None)
        for curr in currs:
            self.result.add_edge(curr, None)
    def visit_Pass(self, node):
        """OUT = node, RAISES = ()"""
        return (node,), ()
    # All these nodes have the same behavior as pass
    visit_Assign = visit_AugAssign = visit_Import = visit_Pass
    visit_Expr = visit_Print = visit_ImportFrom = visit_Pass
    visit_Yield = visit_Delete = visit_Pass
    def visit_Return(self, node):
        """OUT = (), RAISES = ()"""
        # a return ends the flow: nothing to chain, nothing propagated
        return (), ()
    def visit_For(self, node):
        """
        OUT = (node,) + last body statements
        RAISES = body's that are not break or continue
        """
        currs = (node,)
        # loop exits: the loop header itself plus any `break` in the body
        break_currs = (node,)
        raises = ()
        # handle body
        for n in node.body:
            self.result.add_node(n)
            for curr in currs:
                self.result.add_edge(curr, n)
            currs, nraises = self.visit(n)
            for nraise in nraises:
                if type(nraise) is ast.Break:
                    break_currs += (nraise,)
                elif type(nraise) is ast.Continue:
                    # continue jumps back to the loop header
                    self.result.add_edge(nraise, node)
                else:
                    raises += (nraise,)
        # add the backward loop
        for curr in currs:
            self.result.add_edge(curr, node)
        # the else statement if needed
        if node.orelse:
            for n in node.orelse:
                self.result.add_node(n)
                for curr in currs:
                    self.result.add_edge(curr, n)
                # NOTE(review): nraises from the orelse body is discarded
                # here (not folded into `raises`) -- confirm intended.
                currs, nraises = self.visit(n)
        return break_currs + currs, raises
    visit_While = visit_For
    def visit_If(self, node):
        """
        OUT = true branch U false branch
        RAISES = true branch U false branch
        """
        currs = (node,)
        raises = ()
        # true branch
        for n in node.body:
            self.result.add_node(n)
            for curr in currs:
                self.result.add_edge(curr, n)
            currs, nraises = self.visit(n)
            raises += nraises
        tcurrs = currs
        # false branch
        currs = (node,)
        for n in node.orelse:
            self.result.add_node(n)
            for curr in currs:
                self.result.add_edge(curr, n)
            currs, nraises = self.visit(n)
            raises += nraises
        return tcurrs + currs, raises
    def visit_Raise(self, node):
        """OUT = (), RAISES = (node)"""
        return (), (node,)
    visit_Break = visit_Continue = visit_Raise
    def visit_Assert(self, node):
        """OUT = RAISES = (node)"""
        # an assert may fall through (OUT) or abort the flow (RAISES)
        return (node,), (node,)
    def visit_TryExcept(self, node):
        """
        OUT = body's U handler's
        RAISES = handler's

        this equation is not as good as it could be...
        but we need type information to be more accurate
        """
        currs = (node,)
        raises = ()
        for handler in node.handlers:
            self.result.add_node(handler)
        for n in node.body:
            self.result.add_node(n)
            for curr in currs:
                self.result.add_edge(curr, n)
            currs, nraises = self.visit(n)
            for nraise in nraises:
                if type(nraise) is ast.Raise:
                    # a raise inside the body may reach any handler
                    for handler in node.handlers:
                        self.result.add_edge(nraise, handler)
                else:
                    raises += (nraise,)
        for handler in node.handlers:
            ncurrs, nraises = self.visit(handler)
            currs += ncurrs
            raises += nraises
        return currs, raises
    def visit_ExceptHandler(self, node):
        """OUT = body's, RAISES = body's"""
        currs = (node,)
        raises = ()
        for n in node.body:
            self.result.add_node(n)
            for curr in currs:
                self.result.add_edge(curr, n)
            currs, nraises = self.visit(n)
            raises += nraises
        return currs, raises
| artas360/pythran | pythran/analyses/cfg.py | Python | bsd-3-clause | 4,991 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include
from django.conf.urls import url
from django.contrib import admin
from django.views.i18n import JavaScriptCatalog
from demo.apps.app import application
# Packages whose translations the JavaScript catalog exposes.
# NOTE(review): appears unused -- JavaScriptCatalog.as_view() below is
# called without arguments; verify whether it should receive
# packages=['base'] instead.
js_info_dict = {
    'packages': ('base', ),
}
urlpatterns = [
    # Translated strings for client-side JavaScript.
    url(r'^jsi18n/$', JavaScriptCatalog.as_view(), name='javascript_catalog'),
    # Admin
    url(r'^' + settings.ADMIN_URL, admin.site.urls),
    # Apps
    url(r'', include(application.urls)),
]
if settings.DEBUG:
    # Add the Debug Toolbar's URLs to the project's URLconf
    import debug_toolbar
    urlpatterns += [url(r'^__debug__/', include(debug_toolbar.urls)), ]
    # In DEBUG mode, serve media files through Django.
    from django.contrib.staticfiles.urls import staticfiles_urlpatterns
    from django.views import static
    urlpatterns += staticfiles_urlpatterns()
    # Remove leading and trailing slashes so the regex matches.
    media_url = settings.MEDIA_URL.lstrip('/').rstrip('/')
    urlpatterns += [
        url(r'^%s/(?P<path>.*)$' % media_url, static.serve,
            {'document_root': settings.MEDIA_ROOT}),
    ]
| reinbach/django-machina | example_projects/demo/demo_project/urls.py | Python | bsd-3-clause | 1,208 |
from django import forms
from order.models import Pizza, Bread, Customer
class PizzaForm(forms.ModelForm):
    """Order form for a single pizza (size, crust, toppings)."""
    class Meta:
        model = Pizza
        fields = ('size', 'toppings', 'crust')
        widgets = {
            'size': forms.RadioSelect(),
            'crust': forms.RadioSelect(),
            'toppings': forms.CheckboxSelectMultiple(),
        }

    def process(self, order):
        """Create a Pizza from the cleaned data and attach it to *order*."""
        data = self.cleaned_data
        # Create the row with its values in one INSERT; the original issued
        # an empty create() followed by attribute assignment and a second
        # save(), writing the row twice.
        pizza = Pizza.objects.create(size=data['size'], crust=data['crust'])
        # Many-to-many links require the pizza to have a primary key first.
        pizza.toppings.add(*data['toppings'])
        order.pizzas.add(pizza)
        order.save()
class BreadForm(forms.ModelForm):
    """Order form for a bread side (flavor only)."""
    class Meta:
        model = Bread
        fields = ('flavor',)
        widgets = {
            # BUG FIX: the widgets key must match a declared field name for
            # Django to apply it; the original used 'type', so the
            # RadioSelect was silently ignored and 'flavor' rendered with
            # its default widget.
            'flavor': forms.RadioSelect(),
        }

    def process(self, order):
        """Create a Bread from the cleaned data and attach it to *order*."""
        bread = Bread.objects.create(flavor=self.cleaned_data['flavor'])
        order.breads.add(bread)
        order.save()
class CustomerForm(forms.ModelForm):
    """Order form capturing the customer's name and phone number."""
    class Meta:
        model = Customer
        # Declaring fields explicitly is required by modern Django (a bare
        # ModelForm Meta raises ImproperlyConfigured) and documents intent.
        # The two names match what process() reads from cleaned_data.
        fields = ('name', 'number')

    def process(self, order):
        """Create a Customer from the cleaned data and attach it to *order*."""
        data = self.cleaned_data
        # cleaned_data values are already text; the explicit str() calls of
        # the original were redundant.
        customer = Customer.objects.create(name=data['name'],
                                           number=data['number'])
        order.customer = customer
        order.save()
| ajpocus/pizzeria | order/forms.py | Python | bsd-3-clause | 1,411 |
from unittest import mock
from .. import *
from bfg9000.tools.ld import LdLinker
from bfg9000.path import abspath
from bfg9000.versioning import Version
def mock_execute(args, **kwargs):
    """Stand-in for bfg9000.shell.execute: emit a fixed ld search-dir dump.

    The second entry is sysroot-prefixed ("=") so that sysroot substitution
    is exercised by the tests below.
    """
    search_dirs = ('SEARCH_DIR("/dir1")', 'SEARCH_DIR("=/dir2")')
    return '\n'.join(search_dirs) + '\n'
class TestLdLinker(CrossPlatformTestCase):
    """Behavioral tests for the LdLinker tool wrapper."""

    def __init__(self, *args, **kwargs):
        super().__init__(clear_variables=True, *args, **kwargs)

    def _make_linker(self, version_output, builder=None):
        # Small factory so each test states only what it cares about.
        return LdLinker(builder, self.env, ['ld'], version_output)

    def test_flavor(self):
        self.assertEqual(self._make_linker('version').flavor, 'ld')

    def test_lang(self):
        class FakeBuilder:
            lang = 'c++'
        linker = self._make_linker('version', FakeBuilder())
        self.assertEqual(linker.lang, 'c++')

    def test_family(self):
        class FakeBuilder:
            family = 'native'
        linker = self._make_linker('version', FakeBuilder())
        self.assertEqual(linker.family, 'native')

    def test_gnu_ld(self):
        linker = self._make_linker('GNU ld (GNU Binutils for Ubuntu) 2.26.1')
        self.assertEqual(linker.brand, 'bfd')
        self.assertEqual(linker.version, Version('2.26.1'))

    def test_gnu_gold(self):
        linker = self._make_linker(
            'GNU gold (GNU Binutils for Ubuntu 2.26.1) 1.11')
        self.assertEqual(linker.brand, 'gold')
        self.assertEqual(linker.version, Version('1.11'))

    def test_unknown_brand(self):
        linker = self._make_linker('unknown')
        self.assertEqual(linker.brand, 'unknown')
        self.assertEqual(linker.version, None)

    def test_search_dirs(self):
        with mock.patch('bfg9000.shell.execute', mock_execute):
            linker = self._make_linker('version')
            self.assertEqual(linker.search_dirs(),
                             [abspath('/dir1'), abspath('/dir2')])

    def test_search_dirs_sysroot(self):
        with mock.patch('bfg9000.shell.execute', mock_execute):
            linker = self._make_linker('version')
            self.assertEqual(linker.search_dirs(sysroot='/sysroot'),
                             [abspath('/dir1'), abspath('/sysroot/dir2')])

    def test_search_dirs_fail(self):
        def failing_execute(*args, **kwargs):
            raise OSError()
        with mock.patch('bfg9000.shell.execute', failing_execute):
            linker = self._make_linker('version')
            self.assertEqual(linker.search_dirs(), [])
            self.assertRaises(OSError, lambda: linker.search_dirs(strict=True))
| jimporter/bfg9000 | test/unit/tools/test_ld.py | Python | bsd-3-clause | 2,557 |
# Package version, kept as an integer tuple for programmatic comparison and
# rendered to the conventional dotted string for display/packaging.
VERSION = (0, 1, 5)
__version__ = '.'.join(str(part) for part in VERSION)
| briandailey/django-pyres | django_pyres/__init__.py | Python | bsd-3-clause | 63 |
# -*- coding: utf-8 -*-
from setuptools import find_packages, setup
# Assemble the long description from the three source files. The authors
# list is "reSt-ified" into a line block: one "| name" entry per line.
with open('README.rst') as readme_file:
    readme = readme_file.read()
with open('AUTHORS.txt') as authors_file:
    # join() replaces the original quadratic string "+=" loop; the trailing
    # empty element from split('\n') still yields a final "| \n" entry,
    # matching the original output exactly.
    authors = ''.join('| ' + author + '\n'
                      for author in authors_file.read().split('\n'))
with open('LICENSE.txt') as license_file:
    # Renamed from the ambiguous single-letter 'l' and to avoid shadowing
    # the 'license' builtin name used elsewhere.
    license_text = license_file.read()
setup(
    name='jupyternotify',
    version='0.1.15',
    description='A Jupyter Notebook %%magic for Browser Notifications of Cell Completion',
    long_description=readme + '\n\n' + authors + '\nLicense\n-------\n' + license_text,
    author='Michelangelo D\'Agostino',
    author_email='mdagostino@shoprunner.com',
    url='https://github.com/shoprunner/jupyter-notify',
    license='BSD-3-Clause',
    packages=find_packages(exclude=('tests', 'docs')),
    package_data={'jupyternotify': ['js/*.js']},
    install_requires=[
        'ipython',
        'jupyter'
    ],
    classifiers=[
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.6'
    ]
)
| ShopRunner/jupyter-notify | setup.py | Python | bsd-3-clause | 1,019 |
import soco
from collections import namedtuple
SonosTrack = namedtuple('SonosTrack', [
    'title', 'artist', 'album', 'album_art_uri', 'position',
    'playlist_position', 'duration', 'uri', 'resources', 'album_art',
    'metadata'
])
# Every field defaults to None so partially-known tracks can be represented.
SonosTrack.__new__.__defaults__ = (None,) * len(SonosTrack._fields)


class Track(SonosTrack):
    """A SonosTrack with a stable identifier derived from its metadata."""

    def get_unique_id(self):
        """Return a hex SHA-256 digest identifying (artist, album, title).

        BUG FIX: the components are joined with an ASCII unit separator
        before hashing. The original concatenated them directly, so
        distinct tracks such as artist='ab'/album='c' and
        artist='a'/album='bc' produced the same id. Note that ids computed
        by the old code therefore do not match the new ones.
        """
        from hashlib import sha256
        key = '\x1f'.join(
            str(part) for part in (self.artist, self.album, self.title))
        return sha256(key.encode('utf-8')).hexdigest()
# Metadata for a single media resource entry; every field is optional and
# defaults to None.
_RESOURCE_FIELDS = [
    'bitrate', 'bits_per_sample', 'color_depth', 'duration', 'import_uri',
    'nr_audio_channels', 'protection', 'protocol_info', 'resolution',
    'sample_frequency', 'size', 'uri'
]
Resources = namedtuple('Resources', _RESOURCE_FIELDS)
Resources.__new__.__defaults__ = tuple(None for _ in _RESOURCE_FIELDS)
class SonosWrapper(object):
    """ A wrapper around some SoCo calls to simplify things. """
    # When True, canned fixture data is returned instead of querying real
    # speakers (useful for development without hardware on the network).
    debug = False
    # Mapping of speaker IP -> SoCo instance, set in __init__.
    speakers = None
    # NOTE(review): `sonos` is never assigned in this class, yet __getattr__
    # delegates calls to it -- confirm it is attached externally.
    sonos = None

    def __init__(self, speakers):
        self.speakers = speakers

    def toggle_debug(self):
        # Flip fixture mode on/off (see get_current_track_info/get_queue).
        self.debug = not(self.debug)

    def get_speakers(self):
        return self.speakers

    def get_current_track_info(self, ip):
        """Return the track currently playing on the speaker at `ip`."""
        if self.debug:
            # Canned single-track fixture.
            return Track(**{
                'title': '99',
                'artist': 'Toto',
                'album': 'The Essential Toto',
                'album_art_uri': 'http://127.0.0.1:1400/getaa?s=1&u=x-sonos-spotify%3aspotify%253atrack%253a4oz7fKT4bJ04KCaMM7Sp03%3fsid%3d9%26flags%3d8224%26sn%3d1',
                'position': '0:00:11',
                'playlist_position': '0',
                'duration': '0:05:12',
                'resources': [Resources(uri='x-sonos-spotify:spotify%3atrack%3a4oz7fKT4bJ04KCaMM7Sp03?sid=9&flags=8224&sn=1')],
            })
        else:
            # SoCo returns a plain dict; re-wrap it as our Track type.
            return Track(**self.speakers[ip].get_current_track_info())

    def get_queue(self, ip):
        """Return the play queue of the speaker at `ip` as Track objects."""
        songs = []
        if self.debug:
            # Canned two-track fixture queue.
            songs.extend([
                Track(**{
                    'title': '99',
                    'artist': 'Toto',
                    'album': 'The Essential Toto',
                    'album_art_uri': 'http://127.0.0.1:1400/getaa?s=1&u=x-sonos-spotify%3aspotify%253atrack%253a4oz7fKT4bJ04KCaMM7Sp03%3fsid%3d9%26flags%3d8224%26sn%3d1',
                    'position': '0:00:11',
                    'playlist_position': '0',
                    'duration': '0:05:12',
                    'resources': [
                        Resources(uri='x-sonos-spotify:spotify%3atrack%3a4oz7fKT4bJ04KCaMM7Sp03?sid=9&flags=8224&sn=1')
                    ],
                }),
                Track(**{
                    'title': 'Africa',
                    'artist': 'Toto',
                    'album': 'The Essential Toto',
                    'album_art_uri': 'http://127.0.0.1:1400/getaa?s=1&u=x-sonos-spotify%3aspotify%253atrack%253a5ob66YV6bJ04KCaMM7Sp03%3fsid%3d9%26flags%3d8224%26sn%3d1',
                    'position': '0:00:11',
                    'playlist_position': '2',
                    'duration': '0:05:12',
                    'resources': [Resources(uri='x-sonos-spotify:spotify%3atrack%3a5ob66YV6bJ04KCaMM7Sp03?sid=9&flags=8224&sn=1')],
                })
            ])
        else:
            sonos_songs = self.speakers[ip].get_queue()
            for song in sonos_songs:
                # Map SoCo's attribute names (e.g. `creator`) onto ours.
                s = {
                    'title': song.title,
                    'artist': song.creator,
                    'album': song.album,
                    'album_art_uri': song.album_art_uri,
                    'resources': song.resources
                }
                songs.append(Track(**s))
        return songs

    def __getattr__(self, name):
        # Any unknown attribute becomes a deferred method call on
        # `self.sonos` (see NOTE above about `sonos` never being assigned).
        def wrapper(*args, **kwargs):
            return getattr(self.sonos, name)(*args, **kwargs)
        return wrapper
from distutils.core import setup
# Read the long description with an explicitly closed file handle; the
# original leaked the handle from an inline open().read().
with open('README.rst') as readme_file:
    long_description = readme_file.read()
setup(
    name='Korail',
    packages=['korail'],
    version='0.0.3',
    # Typo fix: "unoffical" -> "unofficial".
    description='An unofficial API for Korail.',
    long_description=long_description,
    license='BSD License',
    author='Su Yeol Jeon',
    author_email='devxoul@gmail.com',
    url='https://github.com/devxoul/korail',
    keywords=['Korail'],
    classifiers=[],
    install_requires=[
        'requests',
        'BeautifulSoup4'
    ]
)
| devxoul/korail | setup.py | Python | bsd-3-clause | 460 |
import unittest
import os
from pprint import pprint
import pym_elfinder.exceptions as exc
from .. import lib
from .. import lib_localfilesystem as lfs
class TestCmdRm(unittest.TestCase):
    """Functional tests for the elFinder 'rm' command."""

    @classmethod
    def setUpClass(cls):
        # One shared finder plus the request/response fixture pairs; the
        # files and directories below are created per-test in the sandbox.
        cls.finder = lib.create_finder()
        cls.fixt = lib.CMD_FIXT['cmd_rm.txt']
        cls.file_1 = os.path.join(lfs.DIR, 'file_1.txt')
        cls.file_2 = os.path.join(lfs.DIR, 'file_2.txt')
        cls.dir_1 = os.path.join(lfs.DIR, 'dir_1')
        cls.file_1_1 = os.path.join(lfs.DIR, 'dir_1', 'file_1_1.txt')

    def test_rm_files(self):
        """
        Test removal of file_1 and file_2.
        """
        lfs.mkfile(self.file_1)
        lfs.mkfile(self.file_2)
        req = self.fixt[0]['request']
        r0 = self.fixt[0]['response'] # expected response
        cmd, args = lib.prepare_request(req)
        assert cmd == 'rm'
        # This throws exception on error
        self.finder.run(cmd, args, debug=True)
        r = self.finder.response
        # Normalize volatile fields so fixture and live response compare
        # structurally.
        lib.prepare_response(r0, r)
        self.assertEqual(r0.keys(), r.keys())
        # The debug payload is environment-specific; exclude it from the
        # deep comparison.
        del r0['debug']
        del r['debug']
        self.maxDiff = None
        self.assertEqual(r0, r)

    def test_rm_non_empty_dir(self):
        """
        Test removal of dir_1
        """
        # Removing a non-empty directory must fail with ERROR_RM.
        os.mkdir(self.dir_1)
        lfs.mkfile(self.file_1_1)
        req = self.fixt[1]['request']
        cmd, args = lib.prepare_request(req)
        assert cmd == 'rm'
        with self.assertRaisesRegexp(exc.FinderError, exc.ERROR_RM):
            self.finder.run(cmd, args, debug=True)
        r = self.finder.response
        self.assertTrue('error' in r)
        self.assertEqual(r['error'][0], exc.ERROR_RM)
        # Clean up so later fixtures see a pristine sandbox.
        os.remove(self.file_1_1)
        os.rmdir(self.dir_1)
| dmdm/Pym-elFinder | pym_elfinder_tests/t002_functional/test_3080_cmd_rm.py | Python | bsd-3-clause | 1,935 |
import time
from subprocess import *
PATH = "/home/richie_rich/OSProj/redis-OS-project/src/redis-cli"
p1 = Popen([PATH], shell=True, stdin=PIPE)
p1.communicate(input="FLUSHALL")
strength = 1000000
rangeVal = strength + 1
string = "set key"
string1 = ""
count = 0
for i in xrange(1,rangeVal):
count = count + 1
string1 = string1 + string + str(i) + " val" + str(i) + "\n"
if (i % 1000) == 0 :
p1 = Popen([PATH], shell=True, stdin=PIPE)
p1.communicate(input=string1)
string = "set key"
string1 = ""
print string1
print "Inserted %d items" %(count)
| richasinha/redis-OS-project | src/setRedis.py | Python | bsd-3-clause | 595 |
import unittest
from TASSELpy.TASSELbridge import TASSELbridge
from TASSELpy.test.net.maizegenetics.analysis.association.FixedEffectLMPlugin import easy_GLMTest
class associationTestSuite(unittest.TestSuite):
    """Suite bundling every association-analysis test case."""

    def __init__(self):
        super(associationTestSuite, self).__init__()
        # One makeSuite() entry per TestCase class in this package.
        self.addTests([unittest.makeSuite(easy_GLMTest)])
if __name__ == "__main__":
    # Run the suite directly, then shut down the Java bridge backing
    # TASSELpy (otherwise the JVM can keep the process alive).
    runner = unittest.TextTestRunner()
    runner.run(associationTestSuite())
    TASSELbridge.stop()
| er432/TASSELpy | TASSELpy/test/net/maizegenetics/analysis/association/associationTestSuite.py | Python | bsd-3-clause | 472 |
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
# Copyright (c) 2019 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Module for the downloading, checking, and unpacking of necessary files into the source tree.
"""
import argparse
import configparser
import enum
import hashlib
import shutil
import subprocess
import sys
import urllib.request
from pathlib import Path
from _common import ENCODING, USE_REGISTRY, ExtractorEnum, get_logger, \
get_chromium_version, add_common_params
from _extraction import extract_tar_file, extract_with_7z, extract_with_winrar
sys.path.insert(0, str(Path(__file__).parent / 'third_party'))
import schema #pylint: disable=wrong-import-position
sys.path.pop(0)
# Constants
class HashesURLEnum(str, enum.Enum):
    """Enum for supported hash URL schemes"""
    # str mixin lets values compare equal to their plain-string scheme name.
    chromium = 'chromium'
class HashMismatchError(BaseException):
    """Exception for computed hashes not matching expected hashes"""
    # NOTE(review): derives from BaseException (not Exception), so generic
    # "except Exception" handlers will not catch it -- confirm intentional.
class DownloadInfo: #pylint: disable=too-few-public-methods
"""Representation of an downloads.ini file for downloading files"""
    # Hash algorithm names accepted as per-download INI keys.
    _hashes = ('md5', 'sha1', 'sha256', 'sha512')
    # Separator used inside a "hash_url" value: scheme|url|hash_name.
    hash_url_delimiter = '|'
    # Keys that must be present and non-empty in every section.
    _nonempty_keys = ('url', 'download_filename')
    _optional_keys = (
        'version',
        'strip_leading_dirs',
    )
    # Keys exposed verbatim as attributes of _DownloadsProperties.
    _passthrough_properties = (*_nonempty_keys, *_optional_keys, 'extractor', 'output_path')
    # Interpolation variables made available to every downloads.ini section.
    _ini_vars = {
        '_chromium_version': get_chromium_version(),
    }

    @staticmethod
    def _is_hash_url(value):
        # A hash URL has exactly two delimiters and a known scheme prefix.
        return value.count(DownloadInfo.hash_url_delimiter) == 2 and value.split(
            DownloadInfo.hash_url_delimiter)[0] in iter(HashesURLEnum)

    # Schema each downloads.ini section is validated against in _parse_data.
    _schema = schema.Schema({
        schema.Optional(schema.And(str, len)): {
            **{x: schema.And(str, len)
               for x in _nonempty_keys},
            'output_path': (lambda x: str(Path(x).relative_to(''))),
            **{schema.Optional(x): schema.And(str, len)
               for x in _optional_keys},
            schema.Optional('extractor'): schema.Or(ExtractorEnum.TAR, ExtractorEnum.SEVENZIP,
                                                    ExtractorEnum.WINRAR),
            schema.Optional(schema.Or(*_hashes)): schema.And(str, len),
            schema.Optional('hash_url'): lambda x: DownloadInfo._is_hash_url(x), #pylint: disable=unnecessary-lambda
        }
    })
    class _DownloadsProperties: #pylint: disable=too-few-public-methods
        """Attribute-style view over a single downloads.ini section."""
        def __init__(self, section_dict, passthrough_properties, hashes):
            self._section_dict = section_dict
            self._passthrough_properties = passthrough_properties
            self._hashes = hashes

        def has_hash_url(self):
            """
            Returns a boolean indicating whether the current
            download has a hash URL"""
            return 'hash_url' in self._section_dict

        def __getattr__(self, name):
            # Passthrough keys map straight to INI values (None if absent).
            if name in self._passthrough_properties:
                return self._section_dict.get(name, fallback=None)
            if name == 'hashes':
                # Collect all hash entries present in the section;
                # 'hash_url' values are split into their components.
                hashes_dict = {}
                for hash_name in (*self._hashes, 'hash_url'):
                    value = self._section_dict.get(hash_name, fallback=None)
                    if value:
                        if hash_name == 'hash_url':
                            value = value.split(DownloadInfo.hash_url_delimiter)
                        hashes_dict[hash_name] = value
                return hashes_dict
            raise AttributeError('"{}" has no attribute "{}"'.format(type(self).__name__, name))
def _parse_data(self, path):
"""
Parses an INI file located at path
Raises schema.SchemaError if validation fails
"""
def _section_generator(data):
for section in data:
if section == configparser.DEFAULTSECT:
continue
yield section, dict(
filter(lambda x: x[0] not in self._ini_vars, data.items(section)))
new_data = configparser.ConfigParser(defaults=self._ini_vars)
with path.open(encoding=ENCODING) as ini_file:
new_data.read_file(ini_file, source=str(path))
try:
self._schema.validate(dict(_section_generator(new_data)))
except schema.SchemaError as exc:
get_logger().error('downloads.ini failed schema validation (located in %s)', path)
raise exc
return new_data
def __init__(self, ini_paths):
"""Reads an iterable of pathlib.Path to download.ini files"""
self._data = configparser.ConfigParser()
for path in ini_paths:
self._data.read_dict(self._parse_data(path))
def __getitem__(self, section):
"""
Returns an object with keys as attributes and
values already pre-processed strings
"""
return self._DownloadsProperties(self._data[section], self._passthrough_properties,
self._hashes)
def __contains__(self, item):
"""
Returns True if item is a name of a section; False otherwise.
"""
return self._data.has_section(item)
def __iter__(self):
"""Returns an iterator over the section names"""
return iter(self._data.sections())
def properties_iter(self):
"""Iterator for the download properties sorted by output path"""
return sorted(
map(lambda x: (x, self[x]), self), key=(lambda x: str(Path(x[1].output_path))))
class _UrlRetrieveReportHook: #pylint: disable=too-few-public-methods
"""Hook for urllib.request.urlretrieve to log progress information to console"""
def __init__(self):
self._max_len_printed = 0
self._last_percentage = None
def __call__(self, block_count, block_size, total_size):
# Use total_blocks to handle case total_size < block_size
# total_blocks is ceiling of total_size / block_size
# Ceiling division from: https://stackoverflow.com/a/17511341
total_blocks = -(-total_size // block_size)
if total_blocks > 0:
# Do not needlessly update the console. Since the console is
# updated synchronously, we don't want updating the console to
# bottleneck downloading. Thus, only refresh the output when the
# displayed value should change.
percentage = round(block_count / total_blocks, ndigits=3)
if percentage == self._last_percentage:
return
self._last_percentage = percentage
print('\r' + ' ' * self._max_len_printed, end='')
status_line = 'Progress: {:.1%} of {:,d} B'.format(percentage, total_size)
else:
downloaded_estimate = block_count * block_size
status_line = 'Progress: {:,d} B of unknown size'.format(downloaded_estimate)
self._max_len_printed = len(status_line)
print('\r' + status_line, end='')
def _download_via_urllib(url, file_path, show_progress, disable_ssl_verification):
    """Download url to file_path with urllib.

    show_progress enables a console progress line; disable_ssl_verification
    temporarily swaps in an unverified HTTPS context (a known hack).
    """
    reporthook = _UrlRetrieveReportHook() if show_progress else None
    if disable_ssl_verification:
        import ssl
        # TODO: Remove this or properly implement disabling SSL certificate verification
        orig_https_context = ssl._create_default_https_context #pylint: disable=protected-access
        ssl._create_default_https_context = ssl._create_unverified_context #pylint: disable=protected-access
    try:
        urllib.request.urlretrieve(url, str(file_path), reporthook=reporthook)
    finally:
        if disable_ssl_verification:
            # Restore the original HTTPS context as soon as possible to limit
            # the blast radius of the verification-disabling hack.
            ssl._create_default_https_context = orig_https_context #pylint: disable=protected-access
    if show_progress:
        print()  # terminate the carriage-return progress line
def _download_if_needed(file_path, url, show_progress, disable_ssl_verification):
    """
    Downloads a file from url to the specified path file_path if necessary.

    If show_progress is True, download progress is printed to the console.
    """
    if file_path.exists():
        get_logger().info('%s already exists. Skipping download.', file_path)
        return
    # In-flight downloads use a '.partial' suffix so interrupted transfers
    # are never mistaken for complete files.
    tmp_file_path = file_path.with_name(file_path.name + '.partial')
    if tmp_file_path.exists():
        get_logger().debug('Resuming downloading URL %s ...', url)
    else:
        get_logger().debug('Downloading URL %s ...', url)
    if shutil.which('curl'):
        # Prefer curl: it can resume a partial download ('-C -').
        get_logger().debug('Using curl')
        try:
            subprocess.run(['curl', '-L', '-o', str(tmp_file_path), '-C', '-', url], check=True)
        except subprocess.CalledProcessError as exc:
            get_logger().error('curl failed. Re-run the download command to resume downloading.')
            raise exc
    else:
        get_logger().debug('Using urllib')
        _download_via_urllib(url, tmp_file_path, show_progress, disable_ssl_verification)
    # Download complete; promote the partial file to its final name.
    tmp_file_path.rename(file_path)
def _chromium_hashes_generator(hashes_path):
    """Yield (hash_name, hash_hex) pairs parsed from a chromium-style hash file.

    Each line is expected to hold three space-separated fields; algorithms
    unknown to hashlib are skipped with a warning.
    """
    with hashes_path.open(encoding=ENCODING) as hashes_file:
        lines = hashes_file.read().splitlines()
    for line in lines:
        hash_name, hash_hex, _ = line.lower().split(' ')
        if hash_name in hashlib.algorithms_available:
            yield hash_name, hash_hex
        else:
            get_logger().warning('Skipping unknown hash algorithm: %s', hash_name)
def _get_hash_pairs(download_properties, cache_dir):
"""Generator of (hash_name, hash_hex) for the given download"""
for entry_type, entry_value in download_properties.hashes.items():
if entry_type == 'hash_url':
hash_processor, hash_filename, _ = entry_value
if hash_processor == 'chromium':
yield from _chromium_hashes_generator(cache_dir / hash_filename)
else:
raise ValueError('Unknown hash_url processor: %s' % hash_processor)
else:
yield entry_type, entry_value
def retrieve_downloads(download_info, cache_dir, show_progress, disable_ssl_verification=False):
    """
    Retrieve downloads into the downloads cache.

    download_info is the DownloadInfo of downloads to retrieve.
    cache_dir is the pathlib.Path to the downloads cache.
    show_progress is a boolean indicating if download progress is printed to the console.
    disable_ssl_verification is a boolean indicating if certificate verification
    should be disabled for downloads using HTTPS.

    Raises FileNotFoundError if the downloads path does not exist.
    Raises NotADirectoryError if the downloads path is not a directory.
    """
    if not cache_dir.exists():
        raise FileNotFoundError(cache_dir)
    if not cache_dir.is_dir():
        raise NotADirectoryError(cache_dir)
    for name, properties in download_info.properties_iter():
        get_logger().info('Downloading "%s" to "%s" ...', name,
                          properties.download_filename)
        _download_if_needed(cache_dir / properties.download_filename, properties.url,
                            show_progress, disable_ssl_verification)
        if not properties.has_hash_url():
            continue
        # The hash listing itself is fetched like any other download.
        get_logger().info('Downloading hashes for "%s"', name)
        _, hash_filename, hash_url = properties.hashes['hash_url']
        _download_if_needed(cache_dir / hash_filename, hash_url, show_progress,
                            disable_ssl_verification)
def check_downloads(download_info, cache_dir):
    """
    Check integrity of the downloads cache.

    download_info is the DownloadInfo of downloads to verify.
    cache_dir is the pathlib.Path to the downloads cache.

    Raises HashMismatchError when a computed hash differs from the expected one.
    """
    for name, properties in download_info.properties_iter():
        get_logger().info('Verifying hashes for "%s" ...', name)
        download_path = cache_dir / properties.download_filename
        archive_data = download_path.read_bytes()
        for hash_name, hash_hex in _get_hash_pairs(properties, cache_dir):
            get_logger().debug('Verifying %s hash...', hash_name)
            computed = hashlib.new(hash_name, data=archive_data).hexdigest()
            if computed.lower() != hash_hex.lower():
                raise HashMismatchError(download_path)
def unpack_downloads(download_info, cache_dir, output_dir, extractors=None):
    """
    Unpack downloads in the downloads cache to output_dir. Assumes all downloads are retrieved.

    download_info is the DownloadInfo of downloads to unpack.
    cache_dir is the pathlib.Path directory containing the download cache
    output_dir is the pathlib.Path directory to unpack the downloads to.
    extractors is a dictionary of PlatformEnum to a command or path to the
    extractor binary. Defaults to 'tar' for tar, and '_use_registry' for 7-Zip and WinRAR.

    May raise undetermined exceptions during archive unpacking.
    """
    # Dispatch table replaces the if/elif chain over extractor names.
    extractor_funcs = {
        ExtractorEnum.TAR: extract_tar_file,
        ExtractorEnum.SEVENZIP: extract_with_7z,
        ExtractorEnum.WINRAR: extract_with_winrar,
    }
    for name, properties in download_info.properties_iter():
        download_path = cache_dir / properties.download_filename
        get_logger().info('Unpacking "%s" to %s ...', name,
                          properties.output_path)
        extractor_name = properties.extractor or ExtractorEnum.TAR
        if extractor_name not in extractor_funcs:
            raise NotImplementedError(extractor_name)
        strip_spec = properties.strip_leading_dirs
        strip_leading_dirs_path = None if strip_spec is None else Path(strip_spec)
        extractor_funcs[extractor_name](
            archive_path=download_path,
            output_dir=output_dir / Path(properties.output_path),
            relative_to=strip_leading_dirs_path,
            extractors=extractors)
def _add_common_args(parser):
parser.add_argument(
'-i',
'--ini',
type=Path,
nargs='+',
help='The downloads INI to parse for downloads. Can be specified multiple times.')
parser.add_argument(
'-c', '--cache', type=Path, required=True, help='Path to the directory to cache downloads.')
def _retrieve_callback(args):
    """Callback for the 'retrieve' subcommand: download then verify.

    Builds DownloadInfo once and reuses it for both phases; the original
    constructed it twice, re-reading and re-validating every INI file.
    Exits with status 1 on a hash mismatch.
    """
    download_info = DownloadInfo(args.ini)
    retrieve_downloads(download_info, args.cache, args.show_progress,
                       args.disable_ssl_verification)
    try:
        check_downloads(download_info, args.cache)
    except HashMismatchError as exc:
        get_logger().error('File checksum does not match: %s', exc)
        sys.exit(1)
def _unpack_callback(args):
    """Callback for the 'unpack' subcommand: verify-free extraction."""
    extractor_paths = {
        ExtractorEnum.TAR: args.tar_path,
        ExtractorEnum.SEVENZIP: args.sevenz_path,
        ExtractorEnum.WINRAR: args.winrar_path,
    }
    unpack_downloads(DownloadInfo(args.ini), args.cache, args.output, extractor_paths)
def main():
    """CLI Entrypoint.

    Builds the 'retrieve' and 'unpack' subcommands and dispatches to the
    callback registered by the chosen subparser.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    add_common_params(parser)
    subparsers = parser.add_subparsers(title='Download actions', dest='action')
    # retrieve
    retrieve_parser = subparsers.add_parser(
        'retrieve',
        help='Retrieve and check download files',
        description=('Retrieves and checks downloads without unpacking. '
                     'The downloader will attempt to use CLI command "curl". '
                     'If it is not present, Python\'s urllib will be used. However, only '
                     'the CLI-based downloaders can be resumed if the download is aborted.'))
    _add_common_args(retrieve_parser)
    retrieve_parser.add_argument(
        '--hide-progress-bar',
        action='store_false',
        dest='show_progress',
        help='Hide the download progress.')
    retrieve_parser.add_argument(
        '--disable-ssl-verification',
        action='store_true',
        help='Disables certification verification for downloads using HTTPS.')
    retrieve_parser.set_defaults(callback=_retrieve_callback)
    # unpack
    unpack_parser = subparsers.add_parser(
        'unpack',
        help='Unpack download files',
        description='Verifies hashes of and unpacks download files into the specified directory.')
    _add_common_args(unpack_parser)
    unpack_parser.add_argument(
        '--tar-path',
        default='tar',
        help=('(Linux and macOS only) Command or path to the BSD or GNU tar '
              'binary for extraction. Default: %(default)s'))
    unpack_parser.add_argument(
        '--7z-path',
        dest='sevenz_path',
        default=USE_REGISTRY,
        help=('Command or path to 7-Zip\'s "7z" binary. If "_use_registry" is '
              'specified, determine the path from the registry. Default: %(default)s'))
    unpack_parser.add_argument(
        '--winrar-path',
        dest='winrar_path',
        default=USE_REGISTRY,
        help=('Command or path to WinRAR\'s "winrar" binary. If "_use_registry" is '
              'specified, determine the path from the registry. Default: %(default)s'))
    unpack_parser.add_argument('output', type=Path, help='The directory to unpack to.')
    unpack_parser.set_defaults(callback=_unpack_callback)
    args = parser.parse_args()
    if not hasattr(args, 'callback'):
        # No subcommand given: dest='action' is not required by argparse, so
        # 'callback' would be unset and args.callback(args) would crash with
        # AttributeError. Emit a proper usage error instead.
        parser.error('a download action is required (retrieve or unpack)')
    args.callback(args)
# Script entry point: only run the CLI when executed directly.
if __name__ == '__main__':
    main()
| Eloston/ungoogled-chromium | utils/downloads.py | Python | bsd-3-clause | 17,917 |
from skimage.data import coffee, camera
from sklearn_theano.feature_extraction.caffe.googlenet import (
GoogLeNetTransformer, GoogLeNetClassifier)
import numpy as np
from nose import SkipTest
import os
# Sample inputs shared by the smoke tests below: a colour photo (coffee) and
# a grayscale photo (camera) broadcast to three identical float32 channels.
co = coffee().astype(np.float32)
ca = camera().astype(np.float32)[:, :, np.newaxis] * np.ones((1, 1, 3),
                                                             dtype='float32')
def test_googlenet_transformer():
    """smoke test for googlenet transformer"""
    if os.environ.get('CI', None) is not None:
        raise SkipTest("Skipping heavy data loading on CI")
    transformer = GoogLeNetTransformer()
    # Exercise both the RGB and the replicated-grayscale sample images.
    for image in (co, ca):
        transformer.transform(image)
def test_googlenet_classifier():
    """smoke test for googlenet classifier"""
    if os.environ.get('CI', None) is not None:
        raise SkipTest("Skipping heavy data loading on CI")
    classifier = GoogLeNetClassifier()
    # Exercise both the RGB and the replicated-grayscale sample images.
    for image in (co, ca):
        classifier.predict(image)
| kastnerkyle/sklearn-theano | sklearn_theano/feature_extraction/caffe/tests/test_googlenet.py | Python | bsd-3-clause | 908 |
import logging
import os.path
import shutil
import sys
import tempfile
import fiona
def test_options(tmpdir=None):
    """Test that setting CPL_DEBUG=ON works.

    Fixes over the original:
    - initializes ``tempdir`` in both branches; the old cleanup check
      ``if tempdir and tmpdir is None`` raised NameError when pytest passed
      a tmpdir fixture (tempdir was never bound in that branch)
    - reads the log via a context manager instead of leaking the handle
      from ``open(logfile).read()``
    - detaches and closes the FileHandler from the shared 'Fiona' logger in
      a finally block, so repeated runs don't accumulate handlers
    """
    if tmpdir is None:
        tempdir = tempfile.mkdtemp()
        logfile = os.path.join(tempdir, 'example.log')
    else:
        tempdir = None
        logfile = str(tmpdir.join('example.log'))
    logger = logging.getLogger('Fiona')
    logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler(logfile)
    fh.setLevel(logging.DEBUG)
    logger.addHandler(fh)
    try:
        with fiona.drivers(CPL_DEBUG=True):
            c = fiona.open("docs/data/test_uk.shp")
            c.close()
        with open(logfile) as log_file:
            log = log_file.read()
        assert "Option CPL_DEBUG" in log
    finally:
        logger.removeHandler(fh)
        fh.close()
        if tempdir is not None:
            shutil.rmtree(tempdir)
| johanvdw/Fiona | tests/test_drivers.py | Python | bsd-3-clause | 760 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
# Read the long-description sources with context managers so the file
# handles are closed promptly (the originals were left open).
with open('README.rst') as readme_file:
    readme = readme_file.read()
with open('HISTORY.rst') as history_file:
    history = history_file.read().replace('.. :changelog:', '')
# Runtime dependencies installed alongside the package.
requirements = [
    # TODO: put package requirements here
]

# Extra dependencies needed only by the test suite.
test_requirements = [
    # TODO: put package test requirements here
]

# NOTE(review): the description below still carries the cookiecutter
# boilerplate placeholder text — confirm intended metadata before release.
setup(
    name='sphinx_numfig',
    version='0.1.0',
    description='Python Boilerplate contains all the boilerplate you need to create a Python package.',
    long_description=readme + '\n\n' + history,
    author='Matthias Bussonnier',
    author_email='bussonniermatthias@gmail.com',
    url='https://github.com/Carreau/sphinx_numfig',
    packages=[
        'sphinx_numfig',
    ],
    package_dir={'sphinx_numfig':
                 'sphinx_numfig'},
    include_package_data=True,
    install_requires=requirements,
    license="BSD",
    zip_safe=False,
    keywords='sphinx_numfig',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        "Programming Language :: Python :: 2",
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
    ],
    test_suite='tests',
    tests_require=test_requirements
)
# -*- coding: utf-8 -*-
# Copyright 2012 splinter authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
class CookiesTest(object):
    """Mixin of cookie-handling tests.

    Expects the host TestCase to provide ``self.browser`` exposing a
    splinter-style ``cookies`` manager (add/delete/all, dict access) —
    the mixin itself defines no setup."""

    def test_create_and_access_a_cookie(self):
        "should be able to create and access a cookie"
        self.browser.cookies.add({'sha': 'zam'})
        self.assertEqual(self.browser.cookies['sha'], 'zam')

    def test_create_many_cookies_at_once_as_dict(self):
        "should be able to create many cookies at once as dict"
        cookies = {'sha': 'zam', 'foo': 'bar'}
        self.browser.cookies.add(cookies)
        self.assertEqual(self.browser.cookies['sha'], 'zam')
        self.assertEqual(self.browser.cookies['foo'], 'bar')

    def test_create_many_cookies_at_once_as_list(self):
        "should be able to create many cookies at once as list"
        cookies = [{'sha': 'zam'}, {'foo': 'bar'}]
        self.browser.cookies.add(cookies)
        self.assertEqual(self.browser.cookies['sha'], 'zam')
        self.assertEqual(self.browser.cookies['foo'], 'bar')

    def test_create_some_cookies_and_delete_them_all(self):
        "should be able to delete all cookies"
        self.browser.cookies.add({'whatever': 'and ever'})
        self.browser.cookies.add({'anothercookie': 'im bored'})
        # delete() with no arguments removes every cookie
        self.browser.cookies.delete()
        self.assertEqual(self.browser.cookies, {})

    def test_create_and_delete_a_cookie(self):
        "should be able to create and destroy a cookie"
        self.browser.cookies.delete()
        self.browser.cookies.add({'cookie': 'with milk'})
        self.browser.cookies.delete('cookie')
        self.assertEqual(self.browser.cookies, {})

    def test_create_and_delete_many_cookies(self):
        "should be able to create and destroy many cookies"
        self.browser.cookies.delete()
        self.browser.cookies.add({'acookie': 'cooked'})
        self.browser.cookies.add({'anothercookie': 'uncooked'})
        self.browser.cookies.add({'notacookie': 'halfcooked'})
        # delete() accepts several names at once
        self.browser.cookies.delete('acookie', 'notacookie')
        self.assertEqual('uncooked', self.browser.cookies['anothercookie'])

    def test_try_to_destroy_an_absent_cookie_and_nothing_happens(self):
        "deleting a cookie that does not exist should be a silent no-op"
        self.browser.cookies.delete()
        self.browser.cookies.add({'foo': 'bar'})
        self.browser.cookies.delete('mwahahahaha')
        self.assertEqual(self.browser.cookies, {'foo': 'bar'})

    def test_create_and_get_all_cookies(self):
        "should be able to create some cookies and retrieve them all"
        self.browser.cookies.delete()
        self.browser.cookies.add({'taco': 'shrimp'})
        self.browser.cookies.add({'lavar': 'burton'})
        self.assertEqual(len(self.browser.cookies.all()), 2)
        self.browser.cookies.delete()
        self.assertEqual(self.browser.cookies.all(), {})

    def test_create_and_use_contains(self):
        "cookie manager should support the 'in' operator"
        cookies = {'sha': 'zam'}
        self.browser.cookies.add(cookies)
        self.assertIn('sha', self.browser.cookies)
        self.assertNotIn('foo', self.browser.cookies)
| nikolas/splinter | tests/cookies.py | Python | bsd-3-clause | 3,162 |
try:
import simplejson
except ImportError:
import json as simplejson
from .meta import DocumentMeta, BaseDocumentSession
# Module-level accumulator: every JSONDocument instantiation appends its
# field dict here so Session.commit can serialize them all at once.
json_objects = []


class JSONDocument(object):
    """
    JSON Document base class
    """
    # Python 2 style metaclass hook (silently ignored on Python 3).
    __metaclass__ = DocumentMeta

    def __init__(self, **kwargs):
        # Record this document's fields for later serialization by the session.
        json_objects.append(kwargs)
class Session(BaseDocumentSession):
    """
    A class featuring a database session
    """

    def commit(self):
        """
        Dumps the scraped data to the filesystem
        """
        # self.file_name is presumably supplied by BaseDocumentSession —
        # TODO confirm against the base class.
        with open(self.file_name, 'w') as f:
            simplejson.dump(json_objects, f)

    def close(self):
        super(Session, self).close()


# Shared module-level session used by importers of this module.
json_session = Session()
| bossiernesto/onyx | persistance/documents/json_doc.py | Python | bsd-3-clause | 703 |
from __future__ import absolute_import, print_function
from django.db import models
from django.utils import timezone
from sentry.db.models import FlexibleForeignKey, Model, sane_repr
from sentry.models.search_common import SearchType
class SavedSearch(Model):
    """
    A saved search query.
    """
    __core__ = True
    # TODO: Remove this column and rows where it's not null once we've
    # completely removed Sentry 9
    project = FlexibleForeignKey("sentry.Project", null=True)
    organization = FlexibleForeignKey("sentry.Organization", null=True)
    # Kind of search (defaults to issue search); see SearchType.
    type = models.PositiveSmallIntegerField(default=SearchType.ISSUE.value, null=True)
    name = models.CharField(max_length=128)
    # Raw search query string.
    query = models.TextField()
    date_added = models.DateTimeField(default=timezone.now)
    # TODO: Remove this column once we've completely removed Sentry 9
    is_default = models.BooleanField(default=False)
    is_global = models.NullBooleanField(null=True, default=False, db_index=True)
    # Owning user, when the search is personal/pinned rather than org-wide.
    owner = FlexibleForeignKey("sentry.User", null=True)

    class Meta:
        app_label = "sentry"
        db_table = "sentry_savedsearch"
        # Note that we also have a partial unique constraint on:
        # (organization_id, name, type) WHERE owner_id IS NULL
        # (is_global, name) WHERE is_global
        unique_together = (
            ("project", "name"),
            # Each user can have one default search per org
            ("organization", "owner", "type"),
        )

    @property
    def is_pinned(self):
        # Explicitly-set flag (via the setter) wins over the derived value.
        if hasattr(self, "_is_pinned"):
            return self._is_pinned
        return self.owner is not None and self.organization is not None

    @is_pinned.setter
    def is_pinned(self, value):
        self._is_pinned = value

    @property
    def is_org_custom_search(self):
        # Org-wide custom search: owned by no one, scoped to an organization.
        return self.owner is None and self.organization is not None

    __repr__ = sane_repr("project_id", "name")
# TODO: Remove once we've completely removed sentry 9
class SavedSearchUserDefault(Model):
    """
    Indicates the default saved search for a given user
    """
    __core__ = True
    savedsearch = FlexibleForeignKey("sentry.SavedSearch")
    project = FlexibleForeignKey("sentry.Project")
    user = FlexibleForeignKey("sentry.User")

    class Meta:
        # At most one default search per (project, user) pair.
        unique_together = (("project", "user"),)
        app_label = "sentry"
        db_table = "sentry_savedsearch_userdefault"
| beeftornado/sentry | src/sentry/models/savedsearch.py | Python | bsd-3-clause | 2,419 |
import re, datetime
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.http import Http404
from django.views.generic import date_based, list_detail
from basic.events.models import *
def event_list(request, page=0):
    # Paginated listing of all EventTime objects, 20 per page.
    return list_detail.object_list(
        request,
        queryset=EventTime.objects.all(),
        paginate_by=20,
        page=page,
    )
# Reuse the generic view's docstring so introspection/docs show it.
event_list.__doc__ = list_detail.object_list.__doc__
def event_archive_year(request, year):
    # Yearly archive of events keyed on EventTime.start; future events allowed.
    return date_based.archive_year(
        request,
        year=year,
        date_field='start',
        queryset=EventTime.objects.all(),
        make_object_list=True,
        allow_future=True,
    )
event_archive_year.__doc__ = date_based.archive_year.__doc__
def event_archive_month(request, year, month):
    # Monthly archive of events keyed on EventTime.start; future events allowed.
    return date_based.archive_month(
        request,
        year=year,
        month=month,
        date_field='start',
        queryset=EventTime.objects.all(),
        allow_future=True,
    )
event_archive_month.__doc__ = date_based.archive_month.__doc__
def event_archive_day(request, year, month, day):
    # Daily archive of events keyed on EventTime.start; future events allowed.
    return date_based.archive_day(
        request,
        year=year,
        month=month,
        day=day,
        date_field='start',
        queryset=EventTime.objects.all(),
        allow_future=True,
    )
event_archive_day.__doc__ = date_based.archive_day.__doc__
def event_detail(request, slug, year, month, day, id):
    # Detail page for a single EventTime, resolved by date + primary key.
    # NOTE(review): 'slug' is accepted for URL purposes but never passed on —
    # presumably only used in the URLconf pattern; confirm.
    return date_based.object_detail(
        request,
        year=year,
        month=month,
        day=day,
        date_field='start',
        object_id=id,
        queryset=EventTime.objects.all(),
        allow_future=True,
    )
event_detail.__doc__ = date_based.object_detail.__doc__
import learn
import inject
import project
from build_surrogate import build_surrogate
from ActiveSubspace import ActiveSubspace
| aerialhedgehog/VyPy | trunk/VyPy/regression/active_subspace/__init__.py | Python | bsd-3-clause | 139 |
# -*- coding: utf-8 -*-
"""
:copyright: (c) 2014 by Openlabs Technologies & Consulting (P) Limited
:license: BSD, see LICENSE for more details.
"""
from trytond.model import ModelSQL, ModelView, fields
from trytond.pool import PoolMeta, Pool
from trytond.pyson import Eval, Bool
from trytond.transaction import Transaction
__all__ = ['Carrier', 'CarrierZonePriceList']
__metaclass__ = PoolMeta
class Carrier:
    """Carrier extended with zone-based pricing ('zone' cost method)."""
    __name__ = 'carrier'
    # Currency in which zone prices are expressed; locked once any zone
    # price-list lines exist.
    zone_currency = fields.Many2One(
        'currency.currency', 'Currency',
        states={
            'invisible': Eval('carrier_cost_method') != 'zone',
            'required': Eval('carrier_cost_method') == 'zone',
            'readonly': Bool(Eval('zone_price_list', [])),
        },
        depends=['carrier_cost_method', 'zone_price_list'])
    # Per-destination price lines (country/subdivision -> price).
    zone_price_list = fields.One2Many(
        'carrier.zone_price_list', 'carrier',
        'Price List',
        states={
            'invisible': Eval('carrier_cost_method') != 'zone',
        },
        depends=['carrier_cost_method'])
    zone_currency_digits = fields.Function(
        fields.Integer(
            'Zone Currency Digits', on_change_with=['zone_currency']
        ), 'on_change_with_zone_currency_digits'
    )

    @classmethod
    def __setup__(cls):
        super(Carrier, cls).__setup__()
        # Register the 'zone' cost method exactly once.
        selection = ('zone', 'Zone')
        if selection not in cls.carrier_cost_method.selection:
            cls.carrier_cost_method.selection.append(selection)

    def on_change_with_zone_currency_digits(self, name=None):
        # Fall back to 2 decimal digits when no zone currency is set.
        if self.zone_currency:
            return self.zone_currency.digits
        return 2

    def get_sale_price(self):
        """Return (price, currency id); zone lookup overrides the default.

        The destination is taken from the transaction context: either an
        'address' id or a 'zone' (zone_price_list) id.
        """
        Address = Pool().get('party.address')
        ZonePriceList = Pool().get('carrier.zone_price_list')
        price, currency_id = super(Carrier, self).get_sale_price()
        if self.carrier_cost_method == 'zone':
            zone = None
            if 'address' in Transaction().context:
                zone = self.find_zone_for_address(
                    Address(Transaction().context['address'])
                )
            elif 'zone' in Transaction().context:
                # NOTE(review): strict unpack raises if the context zone id
                # does not belong to this carrier — confirm this is intended.
                zone, = ZonePriceList.search([
                    ('carrier', '=', self.id),
                    ('id', '=', Transaction().context['zone']),
                ])
            if zone is not None:
                return zone.price, self.zone_currency.id
        return price, currency_id

    def get_purchase_price(self):
        """Return (price, currency id); mirrors get_sale_price for purchases."""
        Address = Pool().get('party.address')
        ZonePriceList = Pool().get('carrier.zone_price_list')
        price, currency_id = super(Carrier, self).get_purchase_price()
        if self.carrier_cost_method == 'zone':
            zone = None
            if 'address' in Transaction().context:
                zone = self.find_zone_for_address(
                    Address(Transaction().context['address'])
                )
            elif 'zone' in Transaction().context:
                # NOTE(review): same strict unpack caveat as get_sale_price.
                zone, = ZonePriceList.search([
                    ('carrier', '=', self.id),
                    ('id', '=', Transaction().context['zone']),
                ])
            if zone is not None:
                return zone.price, self.zone_currency.id
        return price, currency_id

    def find_zone_for_address(self, address):
        """
        A helper function that finds the most matching zone from the given
        address.

        Tries an exact (country, subdivision) match first, then falls back to
        a country-wide line (subdivision unset). Returns None when no line
        matches (implicit).

        :param address: Active Record of the address
        :return: Active Record of the zone_price_list
        """
        CarrierZone = Pool().get('carrier.zone_price_list')
        zones = CarrierZone.search([
            ('country', '=', address.country),
            ('subdivision', '=', address.subdivision),
        ], limit=1)
        if not zones:
            zones = CarrierZone.search([
                ('country', '=', address.country),
                ('subdivision', '=', None),
            ], limit=1)
        if zones:
            return zones[0]
class CarrierZonePriceList(ModelSQL, ModelView):
    'Carrier Zone price List'
    __name__ = 'carrier.zone_price_list'
    carrier = fields.Many2One('carrier', 'Carrier', required=True, select=True)
    country = fields.Many2One(
        'country.country', 'Country', required=True, select=True
    )
    # Optional: when unset, the line applies country-wide (see
    # Carrier.find_zone_for_address fallback).
    subdivision = fields.Many2One(
        'country.subdivision', 'Subdivision', select=True,
        domain=[('country', '=', Eval('country'))],
        depends=['country']
    )
    # Shipping price for this zone, in the carrier's zone currency.
    price = fields.Numeric(
        'Price', required=True,
        digits=(16, Eval('_parent_carrier.weight_currency_digits', 2))
    )
    # TODO add a sequence and order by sequence
| openlabs/trytond-carrier-zone | carrier.py | Python | bsd-3-clause | 4,654 |