| repo_name (string, 5-100 chars) | ref (string, 12-67 chars) | path (string, 4-244 chars) | copies (string, 1-8 chars) | content (string, 0-1.05M chars, may be null ⌀) |
|---|---|---|---|---|
willvousden/emcee
|
refs/heads/master
|
tests/integration/test_stretch.py
|
1
|
# -*- coding: utf-8 -*-
from __future__ import division, print_function
from emcee import moves
from .test_proposal import _test_normal, _test_uniform
__all__ = ["test_normal_stretch", "test_uniform_stretch",
"test_nsplits_stretch"]
def test_normal_stretch(**kwargs):
_test_normal(moves.StretchMove(), **kwargs)
def test_uniform_stretch(**kwargs):
_test_uniform(moves.StretchMove(), **kwargs)
def test_nsplits_stretch(**kwargs):
_test_normal(moves.StretchMove(nsplits=5), **kwargs)
def test_randomize_stretch(**kwargs):
_test_normal(moves.StretchMove(randomize_split=True), **kwargs)
|
sarchar/pyspv
|
refs/heads/master
|
pyspv/network.py
|
1
|
import collections
import ipaddress
import random
import socket
import struct
import threading
import time
import traceback
from . import socks
from .block import Block, BlockHeader
from .bloom import Bloom
from .inv import Inv
from .serialize import Serialize
from .transaction import Transaction
from .util import *
################################################################################
################################################################################
class OutOfPeers(Exception):
pass
################################################################################
################################################################################
class Manager(threading.Thread):
REQUEST_WAIT = 0
REQUEST_GO = 1
REQUEST_DONT = 2
PEER_RECORD_SIZE = 14
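    # 14 bytes per record: 4-byte packed IPv4 + "<Hd" (2-byte port + 8-byte double
    # timestamp); see update_peer_address() and load_peer_addresses().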
PROTOCOL_VERSION = 60002
SERVICES = 1
BLOCKCHAIN_SYNC_WAIT_TIME = 10
HEADERS_REQUEST_TIMEOUT = 25
GETBLOCKS_REQUEST_TIMEOUT = 60
BLOCK_REQUEST_TIMEOUT = 120
TX_REQUEST_TIMEOUT = 30
MAX_MESSAGE_SIZE = 2*1024*1024
INVENTORY_CHECK_TIME = 3
MANAGE_INVENTORY_CHECK_TIME = 60
KEEP_BLOCK_IN_INVENTORY_TIME = 120*60
KEEP_TRANSACTION_IN_INVENTORY_TIME = 30*60
REBROADCAST_TRANSACTION_TIME = 30*60
INVENTORY_FLAG_HOLD_FOREVER = 0x01
INVENTORY_FLAG_MUST_CONFIRM = 0x02
def __init__(self, spv=None, peer_goal=1, listen=('', 0), tor=False, user_agent='pyspv'):
threading.Thread.__init__(self)
self.spv = spv
self.peer_goal = peer_goal
self.user_agent = '/{}/'.format(user_agent).replace(' ', ':')
self.peers = {}
self.peer_addresses_db_file = self.spv.config.get_file("addresses.dat")
self.peer_address_lock = threading.Lock()
self.load_peer_addresses()
self.inv_lock = threading.Lock()
self.inprogress_invs = {}
self.inventory = collections.deque()
self.inventory_items = {}
self.last_manage_inventory_time = time.time()
self.blockchain_sync_lock = threading.Lock()
self.tx_bloom_filter = Bloom(hash_count=32, size=2**23) # Use 8MB for our tx bloom filter
self.headers_request = None
self.headers_request_last_peer = None
self.tor = tor
if tor:
# Using Tor disables incoming connections
listen = None
if listen is not None:
if listen[0] == '':
listen = ('0.0.0.0', listen[1])
if listen[1] == 0:
listen = (listen[0], self.spv.coin.DEFAULT_PORT)
self.listen_address = listen
def start(self):
self.running = False
threading.Thread.start(self)
# Wait for thread to start ...
while not self.running:
pass
def shutdown(self):
# Shutdown all peers first
for _, p in self.peers.items():
p.shutdown()
self.running = False
def join(self, *args, **kwargs):
kwargs['timeout'] = 3
for _, p in self.peers.items():
p.join(*args, **kwargs)
if p.is_alive():
import sys
print("*** STACKTRACE - START :: peer({}) ***".format(p.peer_address))
code = []
for thread_id, stack in sys._current_frames().items():
code.append("\n# Thread ID: {}".format(thread_id))
for filename, lineno, name, line in traceback.extract_stack(stack):
code.append('\nFile: "{}", line {}, in {}'.format(filename, lineno, name))
if line:
code.append(" {}".format(line.strip()))
for line in code:
print(line, end='')
print("\n*** STACKTRACE - END ***")
threading.Thread.join(self, *args, **kwargs)
def run(self):
self.running = True
if self.spv.logging_level <= DEBUG:
print("[NETWORK] starting")
self.start_listening()
while self.running:
now = time.time()
if len(self.peer_addresses) < 5:
self.get_new_addresses_from_peer_sources()
self.check_for_incoming_connections()
self.check_for_dead_peers()
self.check_for_new_peers()
self.manage_inventory()
with self.blockchain_sync_lock:
if self.headers_request is not None and \
((self.headers_request['peer'].inprogress_command != 'headers' and (now - self.headers_request['time']) >= Manager.HEADERS_REQUEST_TIMEOUT) or \
(self.headers_request['peer'].inprogress_command == 'headers' and (now - self.headers_request['peer'].last_data_time) >= Manager.HEADERS_REQUEST_TIMEOUT)):
# Misbehaving/dead peer?
self.peer_is_bad(self.headers_request['peer'].peer_address)
self.headers_request['peer'].state = 'dead'
time.sleep(0.01)
if self.spv.logging_level <= DEBUG:
print("[NETWORK] stopping")
if self.listen_socket is not None:
self.listen_socket.close()
def get_new_addresses_from_peer_sources(self):
for seed in self.spv.coin.SEEDS:
for _, _, _, _, ipport in socket.getaddrinfo(seed, None):
if len(ipport) != 2: # no IPv6 support yet
continue
ip, _ = ipport
self.add_peer_address((ip, self.spv.coin.DEFAULT_PORT))
def add_peer_address(self, peer_address):
if peer_address in self.peer_addresses:
return True
try:
ipaddress.IPv4Address(peer_address[0]).packed
except ipaddress.AddressValueError:
# peer_address[0] is probably an IPv6 address
if self.spv.logging_level <= INFO:
print("[NETWORK] peer address {} is not valid IPv4".format(peer_address[0]))
return False
if self.spv.logging_level <= DEBUG:
print("[NETWORK] new peer found", peer_address)
self.peer_addresses[peer_address] = {
'last_successful_connection_time': 0.0,
'index': self.peer_index,
}
self.update_peer_address(peer_address)
self.peer_index += 1
return True
def update_peer_address(self, peer_address):
if peer_address not in self.peer_addresses:
return
        # Open "r+b" rather than append mode: with "ab", every write lands at the
        # end of the file regardless of seek(), corrupting in-place record updates.
        try:
            fp = open(self.peer_addresses_db_file, "r+b")
        except FileNotFoundError:
            fp = open(self.peer_addresses_db_file, "wb")
        with fp:
data = ipaddress.IPv4Address(peer_address[0]).packed + struct.pack("<Hd", peer_address[1], self.peer_addresses[peer_address]['last_successful_connection_time'])
fp.seek(self.peer_addresses[peer_address]['index'] * Manager.PEER_RECORD_SIZE, 0)
fp.write(data)
def delete_peer_address(self, peer_address):
if peer_address not in self.peer_addresses:
return
old = self.peer_addresses.pop(peer_address)
self.peer_index -= 1
        # "r+b", not append mode, so the final seek()+write() lands at the vacated slot
        with open(self.peer_addresses_db_file, "r+b") as fp:
            fp.seek(0, 2)  # start at end of file, as an append-mode open would
assert fp.tell() >= Manager.PEER_RECORD_SIZE # This has to be true, since self.peer_addresses has at least one entry
# When files are opened for append, they are positioned at the end of the file. Back up and read the final record, it'll be used to replace 'old'
fp.seek(fp.tell()-Manager.PEER_RECORD_SIZE, 0)
data = fp.read(Manager.PEER_RECORD_SIZE)
fp.truncate(self.peer_index * Manager.PEER_RECORD_SIZE)
if old['index'] == (fp.tell() // Manager.PEER_RECORD_SIZE):
return
port, _ = struct.unpack("<Hd", data[4:])
peer_address = (ipaddress.IPv4Address(data[0:4]).exploded, port)
self.peer_addresses[peer_address]['index'] = old['index']
fp.seek(old['index'] * Manager.PEER_RECORD_SIZE)
fp.write(data)
def load_peer_addresses(self):
self.peer_addresses = {}
self.peer_index = 0
try:
with open(self.peer_addresses_db_file, "rb") as fp:
while True:
data = fp.read(Manager.PEER_RECORD_SIZE)
if len(data) == 0:
break
port, last = struct.unpack("<Hd", data[4:])
peer_address = (ipaddress.IPv4Address(data[0:4]).exploded, port)
self.peer_addresses[peer_address] = {
'last_successful_connection_time': last,
'index': self.peer_index,
}
self.peer_index += 1
if self.spv.logging_level <= DEBUG:
print("[NETWORK] {} peer addresses loaded".format(len(self.peer_addresses)))
except FileNotFoundError:
pass
def start_listening(self):
self.listen_socket = None
if self.listen_address is None:
return
self.listen_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
self.listen_socket.bind(self.listen_address)
except OSError:
if self.spv.logging_level <= WARNING:
print("[NETWORK] couldn't listen on address {}".format(self.listen_address))
self.listen_socket.close()
self.listen_socket = None
return
self.listen_socket.setblocking(False)
self.listen_socket.listen(5)
def check_for_incoming_connections(self):
if self.listen_socket is None:
return
try:
sock, peer_address = self.listen_socket.accept()
except (socket.timeout, BlockingIOError):
return
if self.spv.logging_level <= DEBUG:
print('[MANAGER] incoming connection from {}'.format(peer_address))
if not self.add_peer_address(peer_address):
sock.close()
return
self.peers[peer_address] = Peer(self, peer_address, sock)
self.peers[peer_address].start()
def check_for_dead_peers(self):
dead_peers = set()
for peer_address, peer in self.peers.items():
if peer.is_alive():
continue
dead_peers.add(peer_address)
with self.inv_lock:
for peer_address in dead_peers:
peer = self.peers.pop(peer_address)
if self.headers_request is not None and self.headers_request['peer'] is peer:
# We lost a peer who was requesting headers, so let someone else do it.
self.headers_request = None
for inv in peer.inprogress_invs:
if inv in self.inprogress_invs:
self.inprogress_invs.pop(inv)
def check_for_new_peers(self):
try:
while len(self.peers) < self.peer_goal:
self.start_new_peer()
except OutOfPeers:
# TODO - handle out of peers case
if self.spv.logging_level <= WARNING:
traceback.print_exc()
def start_new_peer(self):
peer_addresses = list(self.peer_addresses.keys())
while len(peer_addresses) > 0:
k = random.randrange(0, len(peer_addresses))
peer_addresses[k], peer_addresses[len(peer_addresses)-1] = peer_addresses[len(peer_addresses)-1], peer_addresses[k]
p = peer_addresses.pop()
#p_ = ('127.0.0.1', 18333)
#if p_ not in self.peers:
# p = p_
if p not in self.peers:
self.peers[p] = Peer(self, p)
self.peers[p].start()
break
else:
raise OutOfPeers()
def peer_is_bad(self, peer_address):
with self.peer_address_lock:
self.delete_peer_address(peer_address)
def peer_is_good(self, peer_address):
p = self.peer_addresses.get(peer_address, None)
if p is not None:
p['last_successful_connection_time'] = time.time()
with self.peer_address_lock:
self.update_peer_address(peer_address)
def peer_found(self, peer_address):
with self.peer_address_lock:
self.add_peer_address(peer_address)
def will_request_inv(self, inv):
        # We need to determine if we've ever seen this transaction before. The
        # easy case is if we've previously saved the transaction (for whatever
        # reason) to the txdb. The harder case is if we've seen it previously
        # but chose to ignore it because it wasn't important. For the harder
        # case, we can use a bloom filter for broadcasted transactions, which
        # means we will sometimes get a false positive on a transaction we
        # actually do want. Theoretically that's OK, because those one-in-a-
        # million times when we get a false positive will be covered when the
        # transaction makes it into a block. Once we get a block, all
        # transactions in the block are examined.
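        # (Hedged aside, assuming Bloom's size argument counts bits: with k hash
        # functions, m bits and n inserted items, the false-positive rate is about
        # (1 - e**(-k*n/m))**k, so k=32 and m=2**23 stay under one in a million
        # until n approaches roughly 3*10**5 transactions.)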
with self.inv_lock:
if inv in self.inprogress_invs:
return Manager.REQUEST_WAIT
if inv.type == Inv.MSG_TX:
if self.spv.txdb.has_tx(inv.hash):
return Manager.REQUEST_DONT
if self.tx_bloom_filter.has(inv.hash):
return Manager.REQUEST_DONT
elif inv.type == Inv.MSG_BLOCK:
if self.spv.blockchain.get_needs_headers():
return Manager.REQUEST_WAIT
if inv.hash in self.spv.blockchain.blocks:
return Manager.REQUEST_DONT
self.inprogress_invs[inv] = time.time()
return Manager.REQUEST_GO
def will_request_headers(self, peer):
with self.blockchain_sync_lock:
if not self.spv.blockchain.get_needs_headers():
return Manager.REQUEST_DONT
if self.headers_request is not None:
assert peer is not self.headers_request['peer'], "Don't do that"
return Manager.REQUEST_WAIT
if peer is self.headers_request_last_peer:
return Manager.REQUEST_WAIT
self.headers_request = {
'time': time.time(),
'peer': peer
}
self.headers_request_last_peer = peer
return Manager.REQUEST_GO
def will_request_blocks(self):
if self.spv.blockchain.get_needs_headers():
return Manager.REQUEST_DONT
return Manager.REQUEST_GO
def received_transaction(self, inv, tx):
'''tx is None -> peer failed to deliver the transaction'''
if tx is not None:
self.add_to_inventory(inv, tx)
self.tx_bloom_filter.add(inv.hash)
self.spv.on_tx(tx)
# Do this after adding the tx to the wallet to handle race condition
with self.inv_lock:
if inv in self.inprogress_invs:
self.inprogress_invs.pop(inv)
def received_headers(self, headers):
try:
return self.spv.blockchain.add_block_headers(headers)
finally:
with self.blockchain_sync_lock:
self.headers_request = None
def received_block(self, inv, block, syncing_blockchain):
if not syncing_blockchain:
self.add_to_inventory(inv, block)
self.spv.on_block(block)
self.spv.blockchain.add_block(block)
with self.inv_lock:
if inv in self.inprogress_invs:
self.inprogress_invs.pop(inv)
def add_to_inventory(self, inv, item, flags=0):
with self.inv_lock:
if inv in self.inventory_items:
return
self.inventory.append(inv)
self.inventory_items[inv] = {
'sent_to' : set(),
'inv_to' : set(),
'data' : item.serialize(),
'time_added': time.time(),
'time_check': time.time(),
'last_sent' : 0,
'flags' : flags
}
# Transactions that have MUST_CONFIRM set have to be added to our txdb, otherwise
# we'll never be able to confirm their depth
if (flags & Manager.INVENTORY_FLAG_MUST_CONFIRM) != 0:
if not self.spv.txdb.has_tx(inv.hash):
raise Exception("tx must be present in the transaction database in order to check confirmations")
def get_inventory_data(self, inv):
with self.inv_lock:
if inv not in self.inventory_items:
return None
return self.inventory_items[inv]['data']
def manage_inventory(self):
# drop blocks and transactions from self.inventory as necessary
now = time.time()
if now < self.last_manage_inventory_time + Manager.MANAGE_INVENTORY_CHECK_TIME:
return
with self.inv_lock:
for _ in range(len(self.inventory)):
inv = self.inventory.popleft()
item = self.inventory_items.pop(inv)
if (item['flags'] & Manager.INVENTORY_FLAG_HOLD_FOREVER) == 0:
if inv.type == Inv.MSG_BLOCK:
if (now - item['time_added']) >= Manager.KEEP_BLOCK_IN_INVENTORY_TIME:
continue
elif inv.type == Inv.MSG_TX:
# If this tx is one that we produced, we hold onto it until it has enough confirmations
                        # If it's a relayed transaction, we hold onto it for a period of time or until it's been broadcasted
# through enough peers.
if (item['flags'] & Manager.INVENTORY_FLAG_MUST_CONFIRM) != 0:
if self.spv.get_tx_depth(inv.hash) < self.spv.coin.TRANSACTION_CONFIRMATION_DEPTH:
continue
# If we want it confirmed and it was last relayed some time ago, rebroadcast
# by clearing the inv_to and sent_to sets.
                            if (now - item['last_sent']) >= Manager.REBROADCAST_TRANSACTION_TIME:
item['sent_to'] = set()
item['inv_to'] = set()
else:
if (now - item['time_added']) >= Manager.KEEP_TRANSACTION_IN_INVENTORY_TIME:
if len(item['sent_to']) >= min(8, self.peer_goal):
continue
item['time_check'] = now
self.inventory_items[inv] = item
self.inventory.append(inv)
self.last_manage_inventory_time = now
def inventory_filter(self, peer_address, count=200):
with self.inv_lock:
r = []
for inv in self.inventory:
if len(r) == count:
break
if peer_address not in self.inventory_items[inv]['inv_to']:
r.append(inv)
return r
def inventory_sent(self, peer_address, invs):
with self.inv_lock:
for inv in invs:
if inv in self.inventory_items:
self.inventory_items[inv]['inv_to'].add(peer_address)
def will_send_inventory(self, peer_address, inv):
now = time.time()
with self.inv_lock:
if inv not in self.inventory_items:
return Manager.REQUEST_DONT
            if peer_address in self.inventory_items[inv]['sent_to']:
return Manager.REQUEST_DONT
self.inventory_items[inv]['sent_to'].add(peer_address)
self.inventory_items[inv]['last_sent'] = time.time()
return Manager.REQUEST_GO
################################################################################
################################################################################
class Peer(threading.Thread):
MAX_INVS_IN_PROGRESS = 10
def __init__(self, manager, peer_address, sock=None):
threading.Thread.__init__(self)
self.manager = manager
self.peer_address = peer_address
self.socket = sock
def shutdown(self):
self.running = False
def start(self):
self.running = False
threading.Thread.start(self)
while not self.running:
pass
def run(self):
self.state = 'init'
self.running = True
if self.manager.spv.logging_level <= DEBUG:
print("[PEER] {} Peer starting...".format(self.peer_address))
while self.running:
try:
self.step()
except:
traceback.print_exc()
break
time.sleep(0.1)
if self.manager.spv.logging_level <= DEBUG:
print("[PEER] {} Peer exiting ({} bytes recv/{} bytes sent)...".format(self.peer_address, self.bytes_received, self.bytes_sent))
def step(self):
if self.state == 'init':
self.sent_version = False
self.data_buffer = bytes()
self.bytes_sent = 0
self.bytes_received = 0
self.last_data_time = time.time()
self.last_block_inv_time = time.time()
self.inprogress_command = ''
self.outgoing_data_queue = collections.deque()
self.peer_verack = 0
self.invs = {}
self.inprogress_invs = {}
self.handshake_time = None
self.headers_request = None
self.blocks_request = None
self.syncing_blockchain = 1
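            # 0 = fully synced, 1 = initial sync in progress, 2 = re-syncing after
            # the peer sent a block that didn't connect (see cmd_block)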
self.next_sync_time = 0
self.last_inventory_check_time = time.time()
self.requested_invs = collections.deque()
if self.socket is None:
if self.make_connection():
self.send_version()
self.state = 'connected'
else:
self.socket.settimeout(0.1)
self.state = 'connected'
elif self.state == 'connected':
self.handle_outgoing_data()
self.handle_incoming_data()
self.handle_initial_blockchain_sync()
self.handle_invs()
self.handle_inventory()
elif self.state == 'dead':
self.close_connection()
self.running = False
def make_connection(self):
if self.manager.tor:
self.socket = socks.socksocket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.setproxy(socks.PROXY_TYPE_SOCKS5, *self.manager.spv.args.torproxy)
else:
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.settimeout(5)
try:
self.socket.connect(self.peer_address)
self.socket.settimeout(0.1)
if self.manager.spv.logging_level <= DEBUG:
print("[PEER] {} connected.".format(self.peer_address))
return True
except:
self.state = 'dead'
self.manager.peer_is_bad(self.peer_address)
if self.manager.spv.logging_level <= DEBUG:
print("[PEER] {} could not connect.".format(self.peer_address))
return False
def close_connection(self):
try:
if self.socket is not None:
self.socket.close()
self.socket = None
except:
# TODO :: catch the proper exception / close properly
traceback.print_exc()
def handle_incoming_data(self):
try:
data = self.socket.recv(4096)
self.bytes_received += len(data)
except ConnectionResetError:
data = b''
except socket.timeout:
# Normal, no new data
return
# zero length data means we've lost connection
if len(data) == 0:
if self.manager.spv.logging_level <= DEBUG:
print("[PEER] {} connection lost.".format(self.peer_address))
self.state = 'dead'
return
self.data_buffer = self.data_buffer + data
self.last_data_time = time.time()
while self.state != 'dead':
command, payload, length, self.data_buffer = Serialize.unwrap_network_message(self.manager.spv.coin, self.data_buffer)
self.inprogress_command = command
if length is not None and length > Manager.MAX_MESSAGE_SIZE:
if self.manager.spv.logging_level <= WARNING:
print("[PEER] {} sent a large message. dropping.".format(self.peer_address))
self.state = 'dead'
break
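            # unwrap_network_message returns a None payload when the buffer holds
            # only a partial message; break and wait for more data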
if payload is None:
break
self.handle_command(command, payload)
def handle_outgoing_data(self):
while len(self.outgoing_data_queue) > 0:
q = self.outgoing_data_queue.popleft()
try:
r = self.socket.send(q)
self.bytes_sent += r
if r < len(q):
self.outgoing_data_queue.appendleft(q[r:])
return
except (ConnectionAbortedError, OSError):
if self.manager.spv.logging_level <= DEBUG:
traceback.print_exc()
self.state = 'dead'
break
def queue_outgoing_data(self, data):
self.outgoing_data_queue.append(data)
def handle_command(self, command, payload):
# We only allow 'version' and 'verack' commands if we haven't finished handshake
if self.peer_verack < 2 and command not in ('version', 'verack'):
raise Exception("invalid command")
try:
cmd = getattr(self, 'cmd_' + command)
except AttributeError:
if self.manager.spv.logging_level <= WARNING:
print('[PEER] {} unhandled command {}'.format(self.peer_address, command))
return
cmd(payload)
def handle_initial_blockchain_sync(self):
# Sync headers until we're within some window of blocks
# of the creation date of our wallet. From that point forward
# sync and process full blocks.
#
# Some magic happens here to make sure we're not just downloading
        # headers and blocks from a small group of peers.
if self.syncing_blockchain == 0:
return
now = time.time()
if self.headers_request is not None:
# Manager checks to see if our headers_request has timed out, so we don't need to.
return
if self.blocks_request is not None:
if (now - self.blocks_request) > Manager.GETBLOCKS_REQUEST_TIMEOUT:
# The only safe assumption we can make here is that the peer doesn't know about any more blocks. Thus, we have everything.
self.blocks_request = None
if self.syncing_blockchain == 2:
self.state = 'dead'
self.manager.peer_is_bad(self.peer_address)
if self.manager.spv.logging_level <= DEBUG:
print("[PEER] {} peer is messing with our blockchain sync".format(self.peer_address))
else:
self.syncing_blockchain = 0
return
# Delay requests as necessary
if time.time() < self.next_sync_time:
return
# Wait for a bit before requesting from peer
if self.handshake_time is None or (time.time() - self.handshake_time) < Manager.BLOCKCHAIN_SYNC_WAIT_TIME:
return
# Requesting from peer wouldn't work, says the peer!
if self.manager.spv.blockchain.get_best_chain_height() >= self.peer_last_block:
return
r = self.manager.will_request_headers(self)
if r == Manager.REQUEST_GO:
self.headers_request = time.time()
self.send_getheaders(self.manager.spv.blockchain.get_best_chain_locator())
return
elif r == Manager.REQUEST_WAIT:
# Manager wants to give another peer the chance to deliver headers
self.next_sync_time = time.time() + 5
return
elif r == Manager.REQUEST_DONT:
# We're done syncing headers. try getblocks...
pass
# We don't need to call getblocks if we know about any blocks
# handle_invs will eventually request the blocks
if any(inv.type == Inv.MSG_BLOCK for inv in self.invs.keys()):
return
r = self.manager.will_request_blocks()
if r == Manager.REQUEST_GO:
self.blocks_request = time.time()
self.send_getblocks(self.manager.spv.blockchain.get_best_chain_locator())
return
elif r == Manager.REQUEST_WAIT:
# We never really get here...
self.next_sync_time = time.time() + 5
return
elif r == Manager.REQUEST_DONT:
# Manager says so!
pass
def handle_invs(self):
now = time.time()
if len(self.inprogress_invs) > 0:
inprogress_block_invs = [inv for inv in self.inprogress_invs if inv.type == Inv.MSG_BLOCK]
if len(inprogress_block_invs):
if self.inprogress_command != 'block' and (now - self.last_block_inv_time) > Manager.BLOCK_REQUEST_TIMEOUT:
# Peer is ignoring our request for blocks...
if self.manager.spv.logging_level <= WARNING:
print('[PEER] {} peer is ignoring our request for blocks'.format(self.peer_address))
self.state = 'dead'
return
# TODO - should we consider the peer misbehaving if its ignoring our request for transactions?
inprogress_tx_invs = ((inv, when) for inv, when in self.inprogress_invs.items() if inv.type == Inv.MSG_TX)
timedout_invs = set()
for inv, when in inprogress_tx_invs:
if (now - when) > Manager.TX_REQUEST_TIMEOUT:
# Tell manager (by passing None) that the tx request timed out
self.manager.received_transaction(inv, None)
timedout_invs.add(inv)
# Fix for #3 - don't pop items out of inprogress_invs during iteration
for inv in timedout_invs:
self.inprogress_invs.pop(inv)
if len(self.inprogress_invs):
return
requests = set()
aborts = set()
# This sorted() call prioritizes blocks before transactions
for inv, when in sorted(self.invs.items(), key=lambda x: 1 if x[0].type == Inv.MSG_BLOCK else 2):
if when > now:
# This mechanism allows us to "retry" fetching the item later if one request fails
continue
res = self.manager.will_request_inv(inv)
if res == Manager.REQUEST_GO:
assert inv not in self.inprogress_invs
requests.add(inv)
self.invs[inv] = now + 2 # it'll get retried later if it doesn't get removed below
elif res == Manager.REQUEST_DONT:
aborts.add(inv)
elif res == Manager.REQUEST_WAIT:
self.invs[inv] = now + 5
if len(requests) + len(self.inprogress_invs) >= Peer.MAX_INVS_IN_PROGRESS:
break
for inv in aborts:
self.invs.pop(inv)
for inv in self.request_invs(requests):
self.invs.pop(inv)
def request_invs(self, invs):
if len(invs) != 0:
now = time.time()
for inv in invs:
# Requesting a new block, so allow time to timeout...
if inv.type == Inv.MSG_BLOCK:
self.last_block_inv_time = time.time()
self.inprogress_invs[inv] = now
yield inv
self.send_getdata(invs)
def handle_inventory(self):
now = time.time()
if len(self.requested_invs):
# Queue up an inv if there isn't any other outgoing data
if len(self.outgoing_data_queue):
return
for _ in range(len(self.requested_invs)):
inv, when = self.requested_invs.popleft()
r = self.manager.will_send_inventory(self.peer_address, inv)
if r == Manager.REQUEST_GO:
data = self.manager.get_inventory_data(inv)
if data is None:
continue
if inv.type == Inv.MSG_TX:
self.send_tx(inv, data)
elif inv.type == Inv.MSG_BLOCK:
self.send_block(inv, data)
return
elif r == Manager.REQUEST_WAIT:
self.requested_invs.append((inv, when + 3))
continue
elif r == Manager.REQUEST_DONT:
continue
if now < (self.last_inventory_check_time + Manager.INVENTORY_CHECK_TIME):
return
invs = self.manager.inventory_filter(self.peer_address)
if len(invs):
self.send_inv(invs)
self.manager.inventory_sent(self.peer_address, invs)
self.last_inventory_check_time = now
def send_version(self):
assert not self.sent_version, "don't call this twice"
version = Manager.PROTOCOL_VERSION
services = Manager.SERVICES
now = int(time.time())
recipient_address = Serialize.serialize_network_address(self.peer_address, services, with_timestamp=False)
sender_address = Serialize.serialize_network_address(None, services, with_timestamp=False)
nonce = random.randrange(0, 1 << 64)
user_agent = Serialize.serialize_string(self.manager.user_agent)
last_block = 0 # we aren't a full node...
payload = struct.pack("<LQQ", version, services, now) + recipient_address + sender_address + struct.pack("<Q", nonce) + user_agent + struct.pack("<L", last_block)
self.queue_outgoing_data(Serialize.wrap_network_message(self.manager.spv.coin, "version", payload))
self.sent_version = True
def send_verack(self):
self.queue_outgoing_data(Serialize.wrap_network_message(self.manager.spv.coin, "verack", b''))
def send_pong(self, payload):
self.queue_outgoing_data(Serialize.wrap_network_message(self.manager.spv.coin, "pong", payload))
def send_inv(self, invs):
data = []
data.append(Serialize.serialize_variable_int(len(invs)))
for inv in invs:
data.append(inv.serialize())
payload = b''.join(data)
self.queue_outgoing_data(Serialize.wrap_network_message(self.manager.spv.coin, "inv", payload))
if self.manager.spv.logging_level <= DEBUG:
print("[PEER] {} sent inv for {} items".format(self.peer_address, len(invs)))
def send_getdata(self, invs):
data = []
for inv in invs:
data.append(inv.serialize())
payload = Serialize.serialize_variable_int(len(data)) + b''.join(data)
self.queue_outgoing_data(Serialize.wrap_network_message(self.manager.spv.coin, "getdata", payload))
if self.manager.spv.logging_level <= DEBUG:
print("[PEER] {} sent getdata for {} items".format(self.peer_address, len(invs)))
def send_getheaders(self, block_locator):
last_block = (b'\x00' * 32)
payload = struct.pack("<L", Manager.PROTOCOL_VERSION) + block_locator.serialize() + last_block
self.queue_outgoing_data(Serialize.wrap_network_message(self.manager.spv.coin, "getheaders", payload))
if self.manager.spv.logging_level <= DEBUG:
print("[PEER] {} sent getheaders (block_locator top={})".format(self.peer_address, bytes_to_hexstring(block_locator.hashes[0])))
def send_getblocks(self, block_locator):
last_block = (b'\x00' * 32)
payload = struct.pack("<L", Manager.PROTOCOL_VERSION) + block_locator.serialize() + last_block
self.queue_outgoing_data(Serialize.wrap_network_message(self.manager.spv.coin, "getblocks", payload))
if self.manager.spv.logging_level <= DEBUG:
print("[PEER] {} sent getblocks".format(self.peer_address))
def send_tx(self, inv, tx_data):
self.queue_outgoing_data(Serialize.wrap_network_message(self.manager.spv.coin, "tx", tx_data))
if self.manager.spv.logging_level <= DEBUG:
print("[PEER] {} sent tx {}".format(self.peer_address, bytes_to_hexstring(inv.hash)))
def send_block(self, inv, block_data):
self.queue_outgoing_data(Serialize.wrap_network_message(self.manager.spv.coin, "block", block_data))
if self.manager.spv.logging_level <= DEBUG:
print("[PEER] {} sent block {}".format(self.peer_address, bytes_to_hexstring(inv.hash)))
def send_addr(self, addresses):
data = []
for address in addresses:
data.append(Serialize.serialize_network_address(address, Manager.SERVICES, with_timestamp=False))
payload = Serialize.serialize_variable_int(len(addresses)) + b''.join(data)
self.queue_outgoing_data(Serialize.wrap_network_message(self.manager.spv.coin, "addr", payload))
if self.manager.spv.logging_level <= DEBUG:
print("[PEER] {} sent addr for {} addresses".format(self.peer_address, len(addresses)))
def cmd_version(self, payload):
if len(payload) < 20:
if self.manager.spv.logging_level <= WARNING:
print('[PEER] {} sent badly formatted version command'.format(self.peer_address))
self.state = 'dead'
return
self.peer_version = 0
try:
self.peer_version, self.peer_services, self.peer_time = struct.unpack("<LQQ", payload[:20])
_, _, payload = Serialize.unserialize_network_address(payload[20:], with_timestamp=False)
_, _, payload = Serialize.unserialize_network_address(payload, with_timestamp=False)
nonce = struct.unpack("<Q", payload[:8])[0]
self.peer_user_agent, payload = Serialize.unserialize_string(payload[8:])
self.peer_last_block = struct.unpack("<L", payload)[0]
except struct.error:
# Not enough data usually
self.state = 'dead'
self.manager.peer_is_bad(self.peer_address)
if self.manager.spv.logging_level <= DEBUG:
print("[PEER] {} bad version {}".format(self.peer_address, self.peer_version))
return
if self.manager.spv.logging_level <= INFO:
print("[PEER] {} version {} (User-agent {}, last block {})".format(self.peer_address, self.peer_version, self.peer_user_agent, self.peer_last_block))
time_offset = abs(self.peer_time - time.time())
if time_offset > 140*60:
            # Peer time is just too out of whack.
if self.manager.spv.logging_level <= WARNING:
print("[PEER] {} peer's clock (or yours!) is off by too much ({} sec)".format(self.peer_address, time_offset))
self.state = 'dead'
return
# Let's only connect to peers that are fully synced. If we connect to a syncing peer, it doesn't
# really benefit us and it possibly harms them since we can't send them blocks.
if self.peer_last_block < self.manager.spv.blockchain.get_best_chain_height():
if self.manager.spv.logging_level <= INFO:
print("[PEER] {} peer doesn't have a blockchain longer than ours".format(self.peer_address))
self.state = 'dead'
return
self.send_verack()
self.peer_verack += 1
if not self.sent_version:
self.send_version()
if self.peer_verack == 2:
self.manager.spv.add_time_data(self.peer_time)
self.handshake_time = time.time()
def cmd_verack(self, payload):
self.peer_verack += 1
if self.peer_verack == 2:
self.manager.spv.add_time_data(self.peer_time)
self.handshake_time = time.time()
def cmd_ping(self, payload):
self.send_pong(payload)
def cmd_addr(self, payload):
count, payload = Serialize.unserialize_variable_int(payload)
for i in range(min(count, 1024)):
addr, _, _, payload = Serialize.unserialize_network_address(payload, with_timestamp=self.peer_version >= 31402)
self.manager.peer_found(addr)
def cmd_inv(self, payload):
count, payload = Serialize.unserialize_variable_int(payload)
for i in range(count):
inv, payload = Inv.unserialize(payload)
if self.manager.spv.logging_level <= INFO:
print('[PEER] {} got {}'.format(self.peer_address, str(inv)))
if inv.type == Inv.MSG_BLOCK:
# Doesn't matter if this was a getblocks request or
# unsolicited. We now know about at least one block and should
# fetch it before calling getblocks again.
self.blocks_request = None
if inv not in self.invs and inv not in self.inprogress_invs:
self.invs[inv] = time.time()
def cmd_tx(self, payload):
tx, _ = Transaction.unserialize(payload, self.manager.spv.coin)
tx_hash = tx.hash()
inv = Inv(Inv.MSG_TX, tx_hash)
if self.manager.spv.logging_level <= INFO:
print("[PEER] {} got tx {}".format(self.peer_address, bytes_to_hexstring(inv.hash)))
if inv in self.inprogress_invs:
self.manager.received_transaction(inv, tx)
self.inprogress_invs.pop(inv)
else:
raise Exception("peer sent a tx without us asking it to")
def cmd_headers(self, payload):
count, payload = Serialize.unserialize_variable_int(payload)
headers = []
for i in range(count):
block_header, payload = BlockHeader.unserialize(payload, self.manager.spv.coin)
headers.append(block_header)
tx_count, payload = Serialize.unserialize_variable_int(payload)
bad_peer = not block_header.check() or tx_count != 0
if bad_peer:
# Misbehaving peer: all headers are actually blocks with 0 transactions
if self.manager.spv.logging_level <= WARNING:
print("[PEER] {} sent bad headers".format(self.peer_address, len(headers)))
self.manager.peer_is_bad(self.peer_address)
self.state = 'dead'
return
if self.manager.spv.logging_level <= INFO:
print("[PEER] {} got {} headers".format(self.peer_address, len(headers)))
if not self.manager.received_headers(headers):
if len(headers) != 0:
# Blockchain didn't accept our headers? bad...
self.manager.peer_is_bad(self.peer_address)
self.state = 'dead'
self.headers_request = None
def cmd_block(self, payload):
block, payload = Block.unserialize(payload, self.manager.spv.coin)
if not block.check():
# peer sent a bad block?
if self.manager.spv.logging_level <= WARNING:
print("[PEER] {} peer sent bad block {}".format(self.peer_address, block))
self.manager.peer_is_bad(self.peer_address)
self.state = 'dead'
return
inv = Inv(Inv.MSG_BLOCK, block.header.hash())
if inv in self.inprogress_invs:
if self.manager.spv.logging_level <= INFO:
print("[PEER] {} got {}".format(self.peer_address, block))
self.manager.received_block(inv, block, self.syncing_blockchain != 0)
self.inprogress_invs.pop(inv)
# If we are not syncing from this peer and the peer sends us a block that doesn't connect,
# we should try syncing again. If the peer again doesn't send us blocks, we should disconnect.
if self.syncing_blockchain == 0 and not self.manager.spv.blockchain.blocks[inv.hash]['connected']:
self.syncing_blockchain = 2
else:
raise Exception("peer sent a block without us asking it to")
def cmd_getdata(self, payload):
count, payload = Serialize.unserialize_variable_int(payload)
now = time.time()
for _ in range(count):
inv, payload = Inv.unserialize(payload)
self.requested_invs.append((inv, now))
if self.manager.spv.logging_level <= INFO:
print('[PEER] {} requested {} items'.format(self.peer_address, count))
def cmd_getblocks(self, payload):
if self.manager.spv.logging_level <= DEBUG:
print('[PEER] {} ignoring getblocks command'.format(self.peer_address))
def cmd_getaddr(self, payload):
# Select random addresses and send them
peer_addresses = list(self.manager.peer_addresses.keys())
random.shuffle(peer_addresses)
peer_addresses = peer_addresses[:10]
self.send_addr(peer_addresses)
|
spacecowboy/changelog-writer
|
refs/heads/master
|
tests/test_changelog.py
|
1
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division
import pytest
from changelog.changelog import (read_config,
DEFAULT_CONFIG)
class TestConfig():
def test_none(self):
with pytest.raises(Exception):
read_config(None)
def test_default(self):
config = read_config(DEFAULT_CONFIG)
assert len(config) > 0
assert config["git"] is not None
|
mcking49/apache-flask
|
refs/heads/master
|
Python/Lib/site-packages/requests/packages/chardet/universaldetector.py
|
1775
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
import sys
import codecs
from .latin1prober import Latin1Prober # windows-1252
from .mbcsgroupprober import MBCSGroupProber # multi-byte character sets
from .sbcsgroupprober import SBCSGroupProber # single-byte character sets
from .escprober import EscCharSetProber # ISO-2022, etc.
import re
MINIMUM_THRESHOLD = 0.20
ePureAscii = 0
eEscAscii = 1
eHighbyte = 2
class UniversalDetector:
def __init__(self):
self._highBitDetector = re.compile(b'[\x80-\xFF]')
self._escDetector = re.compile(b'(\033|~{)')
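        # b'\033' is ESC, the lead-in for ISO-2022 escape sequences; b'~{' opens
        # an HZ-GB-2312 shift sequence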
self._mEscCharSetProber = None
self._mCharSetProbers = []
self.reset()
def reset(self):
self.result = {'encoding': None, 'confidence': 0.0}
self.done = False
self._mStart = True
self._mGotData = False
self._mInputState = ePureAscii
self._mLastChar = b''
if self._mEscCharSetProber:
self._mEscCharSetProber.reset()
for prober in self._mCharSetProbers:
prober.reset()
def feed(self, aBuf):
if self.done:
return
aLen = len(aBuf)
if not aLen:
return
if not self._mGotData:
# If the data starts with BOM, we know it is UTF
if aBuf[:3] == codecs.BOM_UTF8:
# EF BB BF UTF-8 with BOM
self.result = {'encoding': "UTF-8-SIG", 'confidence': 1.0}
elif aBuf[:4] == codecs.BOM_UTF32_LE:
# FF FE 00 00 UTF-32, little-endian BOM
self.result = {'encoding': "UTF-32LE", 'confidence': 1.0}
elif aBuf[:4] == codecs.BOM_UTF32_BE:
# 00 00 FE FF UTF-32, big-endian BOM
self.result = {'encoding': "UTF-32BE", 'confidence': 1.0}
elif aBuf[:4] == b'\xFE\xFF\x00\x00':
# FE FF 00 00 UCS-4, unusual octet order BOM (3412)
self.result = {
'encoding': "X-ISO-10646-UCS-4-3412",
'confidence': 1.0
}
elif aBuf[:4] == b'\x00\x00\xFF\xFE':
# 00 00 FF FE UCS-4, unusual octet order BOM (2143)
self.result = {
'encoding': "X-ISO-10646-UCS-4-2143",
'confidence': 1.0
}
elif aBuf[:2] == codecs.BOM_LE:
# FF FE UTF-16, little endian BOM
self.result = {'encoding': "UTF-16LE", 'confidence': 1.0}
elif aBuf[:2] == codecs.BOM_BE:
# FE FF UTF-16, big endian BOM
self.result = {'encoding': "UTF-16BE", 'confidence': 1.0}
self._mGotData = True
if self.result['encoding'] and (self.result['confidence'] > 0.0):
self.done = True
return
if self._mInputState == ePureAscii:
if self._highBitDetector.search(aBuf):
self._mInputState = eHighbyte
elif ((self._mInputState == ePureAscii) and
self._escDetector.search(self._mLastChar + aBuf)):
self._mInputState = eEscAscii
self._mLastChar = aBuf[-1:]
if self._mInputState == eEscAscii:
if not self._mEscCharSetProber:
self._mEscCharSetProber = EscCharSetProber()
if self._mEscCharSetProber.feed(aBuf) == constants.eFoundIt:
self.result = {'encoding': self._mEscCharSetProber.get_charset_name(),
'confidence': self._mEscCharSetProber.get_confidence()}
self.done = True
elif self._mInputState == eHighbyte:
if not self._mCharSetProbers:
self._mCharSetProbers = [MBCSGroupProber(), SBCSGroupProber(),
Latin1Prober()]
for prober in self._mCharSetProbers:
if prober.feed(aBuf) == constants.eFoundIt:
self.result = {'encoding': prober.get_charset_name(),
'confidence': prober.get_confidence()}
self.done = True
break
def close(self):
if self.done:
return
if not self._mGotData:
if constants._debug:
sys.stderr.write('no data received!\n')
return
self.done = True
if self._mInputState == ePureAscii:
self.result = {'encoding': 'ascii', 'confidence': 1.0}
return self.result
if self._mInputState == eHighbyte:
proberConfidence = None
maxProberConfidence = 0.0
maxProber = None
for prober in self._mCharSetProbers:
if not prober:
continue
proberConfidence = prober.get_confidence()
if proberConfidence > maxProberConfidence:
maxProberConfidence = proberConfidence
maxProber = prober
if maxProber and (maxProberConfidence > MINIMUM_THRESHOLD):
self.result = {'encoding': maxProber.get_charset_name(),
'confidence': maxProber.get_confidence()}
return self.result
if constants._debug:
                sys.stderr.write('no probers hit minimum threshold\n')
for prober in self._mCharSetProbers[0].mProbers:
if not prober:
continue
sys.stderr.write('%s confidence = %s\n' %
(prober.get_charset_name(),
prober.get_confidence()))
|
keitaroyam/yamtbx
|
refs/heads/master
|
yamtbx/command_line/kamo_resolve_indexing_ambiguity.py
|
1
|
# LIBTBX_SET_DISPATCHER_NAME kamo.resolve_indexing_ambiguity
"""
(c) RIKEN 2015. All rights reserved.
Author: Keitaro Yamashita
This software is released under the new BSD License; see LICENSE.
"""
from yamtbx.dataproc.auto.multi_merging.resolve_reindex import ReferenceBased, BrehmDiederichs, KabschSelectiveBreeding
from yamtbx.util import read_path_list
from libtbx.utils import multi_out
import iotbx.phil
import libtbx.phil
import sys
import os
master_params_str = """
lstin = None
.type = path
.help = list of XDS_ASCII.HKL
method = brehm_diederichs *selective_breeding reference
.type = choice(multi=False)
.help = Method to resolve ambiguity
logfile = "reindexing.log"
.type = path
.help = logfile name
nproc = 1
.type = int
.help = number of processors
dry_run = False
.type = bool
.help = If true, do not modify files
skip_bad_files = False
.type = bool
.help = "Set true if you want to ignore bad files (too few reflections)"
d_min = 3
.type = float
.help = high resolution cutoff used in the method
min_ios = None
.type = float
.help = minimum I/sigma(I) cutoff used in the method
max_delta = 5
.type = float
.help = maximum obliquity used in determining the lattice symmetry, using a modified Le-Page algorithm.
max_cycles = 100
.type = int(value_min=1)
.help = Maximum number of cycles for selective_breeding algorithm.
reference_file = None
.type = path
.help = Only needed when method=reference
reference_label = None
.type = str
.help = data label of reference_file
"""
def run(params):
log_out = multi_out()
log_out.register("log", open(params.logfile, "w"), atexit_send_to=None)
log_out.register("stdout", sys.stdout)
libtbx.phil.parse(master_params_str).format(params).show(out=log_out, prefix=" ")
xac_files = read_path_list(params.lstin, only_exists=True, err_out=log_out)
if len(xac_files) == 0:
print >>log_out, "No (existing) files in the list: %s" % params.lstin
return
if params.method == "brehm_diederichs":
rb = BrehmDiederichs(xac_files, max_delta=params.max_delta,
d_min=params.d_min, min_ios=params.min_ios,
nproc=params.nproc, log_out=log_out)
elif params.method == "selective_breeding":
rb = KabschSelectiveBreeding(xac_files, max_delta=params.max_delta,
d_min=params.d_min, min_ios=params.min_ios,
nproc=params.nproc, log_out=log_out)
elif params.method == "reference":
import iotbx.file_reader
ref_file = iotbx.file_reader.any_file(params.reference_file)
if ref_file.file_type == "hkl":
ref_arrays = ref_file.file_server.miller_arrays
if not ref_arrays:
raise "No arrays in reference file"
if params.reference_label is not None:
ref_arrays = filter(lambda x: params.reference_label in x.info().labels, ref_arrays)
                if not ref_arrays: raise Exception("No arrays matched to specified label (%s)" % params.reference_label)
ref_array = ref_arrays[0].as_intensity_array()
else:
ref_array = None
for array in ref_arrays:
if array.is_xray_intensity_array():
ref_array = array
print >>log_out, "Using %s as reference data" % array.info().label_string()
break
elif array.is_xray_amplitude_array():
ref_array = array.f_as_f_sq()
print >>log_out, "Using %s as reference data" % array.info().label_string()
break
elif ref_file.file_type == "pdb":
import mmtbx.utils
xrs = ref_file.file_content.xray_structure_simple()
fmodel_params = mmtbx.command_line.fmodel.fmodel_from_xray_structure_master_params.extract()
fmodel_params.fmodel.k_sol = 0.35
fmodel_params.fmodel.b_sol = 50
fmodel_params.high_resolution = params.d_min
ref_array = mmtbx.utils.fmodel_from_xray_structure(xray_structure=xrs, params=fmodel_params).f_model.as_intensity_array()
else:
raise "input file type invalid"
if ref_array is None:
raise "suitable reference data not found"
rb = ReferenceBased(xac_files, ref_array, max_delta=params.max_delta,
d_min=params.d_min, min_ios=params.min_ios,
nproc=params.nproc, log_out=log_out)
else:
raise "Unknown method: %s" % params.method
if rb.bad_files:
print "%s: %d bad files are included:" % ("WARNING" if params.skip_bad_files else "ERROR", len(rb.bad_files))
for f in rb.bad_files: print " %s" % f
if not params.skip_bad_files:
print
print "You may want to change d_min= or min_ios= parameters to include these files."
print "Alternatively, specify skip_bad_files=true to ignore these files (they are not included in output files)"
return
if params.method == "selective_breeding":
rb.assign_operators(max_cycle=params.max_cycles)
else:
rb.assign_operators()
rb.show_assign_summary()
if params.dry_run:
print >>log_out, "This is dry-run. Exiting here."
else:
out_prefix = os.path.splitext(os.path.basename(params.lstin))[0]
ofs_cell = open(out_prefix+"_reindexed_cells.dat", "w")
new_files = rb.modify_xds_ascii_files(cells_dat_out=ofs_cell)
lstout = out_prefix + "_reindexed.lst"
ofs = open(lstout, "w")
ofs.write("\n".join(new_files)+"\n")
ofs.close()
print >>log_out, "Reindexing done. For merging, use %s instead!" % lstout
if params.method == "brehm_diederichs":
print >>log_out, """
CCTBX-implementation (by Richard Gildea) of the "algorithm 2" of the following paper was used.
For publication, please cite:
Brehm, W. and Diederichs, K. Breaking the indexing ambiguity in serial crystallography.
Acta Cryst. (2014). D70, 101-109
http://dx.doi.org/10.1107/S1399004713025431"""
elif params.method == "selective_breeding":
print >>log_out, """
"Selective breeding" algorithm was used. For publication, please cite:
Kabsch, W. Processing of X-ray snapshots from crystals in random orientations.
Acta Cryst. (2014). D70, 2204-2216
http://dx.doi.org/10.1107/S1399004714013534"""
# run()
def show_help():
print """
Use this command to resolve indexing ambiguity
Case 1) Reference-based (when you have isomorphous data)
kamo.resolve_indexing_ambiguity formerge.lst method=reference reference_file=yourdata.mtz [d_min=3]
Case 2) Using selective-breeding algorithm (when you don't have reference data)
kamo.resolve_indexing_ambiguity formerge.lst method=selective_breeding [d_min=3]
Case 3) Using Brehm & Diederichs algorithm (when you don't have reference data)
kamo.resolve_indexing_ambiguity formerge.lst method=brehm_diederichs [d_min=3]
You can also give min_ios= to cutoff data by I/sigma(I).
"""
iotbx.phil.parse(master_params_str).show(prefix=" ", attributes_level=1)
print
# show_help()
if __name__ == "__main__":
import sys
if "-h" in sys.argv or "--help" in sys.argv:
show_help()
quit()
cmdline = iotbx.phil.process_command_line(args=sys.argv[1:],
master_string=master_params_str)
params = cmdline.work.extract()
args = cmdline.remaining_args
for arg in args:
if os.path.isfile(arg) and params.lstin is None:
params.lstin = arg
if params.lstin is None:
show_help()
print "Error: Give .lst of XDS_ASCII files"
quit()
if params.method is None:
show_help()
print "Error: Give method="
quit()
if params.method == "reference" and params.reference_file is None:
show_help()
print "Error: Give reference_file= when you use params.method=reference"
quit()
if params.method == "brehm_diederichs" and params.reference_file is not None:
show_help()
print "Error: You can't give reference_file= when you use params.method=brehm_diederichs"
quit()
run(params)
|
alexweav/OpenAiGym-Solutions
|
refs/heads/master
|
classic_control/CartPole-v0.py
|
1
|
import gym
import numpy as np
import time
from numerical_gradient import *
#Displays numerical evidence that backprop gradients are correct
#Slows down performance dramatically and doesn't affect outcome, so disabled by default
check_gradient = False
verbose = False
render = False
print_every = 100
#Some useful numerical values
num_hidden_neurons = 10
learning_rate = 3e-4
gamma = 0.99
num_games = 100000
win_reward_threshold = 475.0
def main():
env = gym.make('CartPole-v0')
input_dim = env.observation_space.low.shape[0]
model = init_model(input_dim, num_hidden_neurons)
rmsprop_cache = init_rmsprop_cache(model)
num_wins = 0
final_rewards = np.empty((0, 0))
for game in range(num_games):
observation = env.reset()
observation = observation.reshape(1, input_dim)
done = False
reward = 0.0
observations = [] #List of all observations made throughout game
hidden_activations = [] #Store hidden layer activations at every stage for backprop
d_log_probs = [] #Store the derivative of the loss function at every stage
rewards = []
num_game_steps = 0
total_reward = 0.0
while not done:
#Single step of the game
if render:
env.render()
probability, hidden_activation = eval_model(model, observation)
action = decide_action(probability)
observation, reward, done, _ = env.step(action)
rewards.append(reward)
total_reward += reward
observation = observation.reshape(1, input_dim)
d_log_probability = action - probability
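            #For a Bernoulli policy with sigmoid output p and sampled action y in {0, 1},
            #d(log pi(y))/d(score) = y - p, so this is the log-probability gradient
            #with respect to the pre-sigmoid score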
#Store all the values for this game step and proceed
observations += [observation]
hidden_activations += [hidden_activation]
d_log_probs += [d_log_probability]
num_game_steps += 1
if verbose:
print("Game ", game, " final reward: ", total_reward)
if game < 100:
final_rewards = np.append(final_rewards, total_reward)
else:
final_rewards = np.delete(final_rewards, 0)
final_rewards = np.append(final_rewards, total_reward)
if total_reward >= win_reward_threshold:
num_wins += 1
if game % print_every == 0:
print("Last set number of wins: ", num_wins)
print("Last set average reward: ", np.mean(final_rewards))
num_wins = 0
observations = np.vstack(observations)
d_log_probs = np.vstack(d_log_probs)
hidden_activations = np.vstack(hidden_activations)
rewards = np.vstack(rewards)
accumulated_rewards = accumulate_reward(rewards)
accumulated_rewards -= np.mean(accumulated_rewards)
accumulated_rewards /= np.std(accumulated_rewards)
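        #Normalizing returns to zero mean and unit variance acts as a simple baseline,
        #reducing gradient variance in REINFORCE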
d_log_probs *= accumulated_rewards/accumulated_rewards.shape[0]
#Model derivatives for frame 0 of the episode
        #How to get derivatives for all frames without a loop?
model_derivatives = backprop(hidden_activations, d_log_probs, model, observations)
model = update(model, model_derivatives, rmsprop_cache)
#Initiates model and returns it in the form of a dict
def init_model(input_dim, num_hidden_neurons):
model = {}
model['W1'] = np.random.randn(input_dim, num_hidden_neurons) / np.sqrt(input_dim)
model['b1'] = np.zeros((1, num_hidden_neurons))
model['W2'] = np.random.randn(num_hidden_neurons) / np.sqrt(num_hidden_neurons)
model['b2'] = np.zeros(1)
return model
def init_rmsprop_cache(model):
rmsprop_cache = {}
for key, params in model.items():
rmsprop_cache[key] = np.zeros_like(params)
return rmsprop_cache
#Standard sigmoid function
def sigmoid(x):
return 1. / (1. + np.exp(-x))
#Applies rectified linear activation to input
def relu(x):
x[x<0] = 0
return x
#Performs a feedforward pass, returns the final output probability and hidden layer activations
def eval_model(model, data):
hidden_scores = np.dot(data, model['W1']) + model['b1']
hidden_activations = relu(hidden_scores)
final_scores = np.dot(hidden_activations, model['W2']) + model['b2']
probability = sigmoid(final_scores)
return probability, hidden_activations
#Decides an action to take given the probability of the "1" action
def decide_action(probability):
if np.random.uniform() < probability:
return 1
else:
return 0
#Backpropagation of a single frame
def backprop(hidden_activations, d_log_prob, model, episode_observations):
N = episode_observations.shape[0]
d_b2 = np.sum(d_log_prob, axis=0)
d_W2 = np.dot(hidden_activations.T, d_log_prob).ravel()
d_hidden_activations = (model['W2'] * d_log_prob).reshape(N, num_hidden_neurons)
d_hidden_activations[hidden_activations <= 0] = 0 #ReLU backprop, trivial, no need to check
d_b1 = np.sum(d_hidden_activations, axis=0)
d_W1 = np.dot(episode_observations.T, d_hidden_activations)
if check_gradient:
d_b2_num = numerical_gradient_layer(lambda b : np.dot(hidden_activations, model['W2']) + b, model['b2'], d_log_prob)
d_W2_num = numerical_gradient_layer(lambda w : np.dot(hidden_activations, w) + model['b2'], model['W2'], d_log_prob)
#d_hidden_activations_num = numerical_gradient_layer(lambda x : np.dot(x, model['W2']) + model['b2'], hidden_activations, d_log_prob)
print('d_b2 error:', np.max(relative_error(d_b2, d_b2_num)))
#print(d_b2)
#print(d_b2_num)
print('d_W2 error:', np.max(relative_error(d_W2, d_W2_num)))
#print(d_W2)
#print(d_W2_num)
d_b1_num = numerical_gradient_layer(lambda b : np.dot(episode_observations, model['W1']) + b, model['b1'], d_hidden_activations)
d_W1_num = numerical_gradient_layer(lambda w : np.dot(episode_observations, w) + model['b1'], model['W1'], d_hidden_activations)
print('d_b1 error:', np.max(relative_error(d_b1, d_b1_num)))
print('d_W1 error:', np.max(relative_error(d_W1, d_W1_num)))
return {'W1':d_W1, 'b1':d_b1, 'W2':d_W2, 'b2':d_b2}
#RMSProp update of a single matrix
def rmsprop(theta, dtheta, error, learning_rate, decay):
eps = 1e-8
error = decay * error + (1 - decay) * dtheta**2
return theta - learning_rate * dtheta / (np.sqrt(error) + eps), error
def update(model, model_derivatives, rmsprop_cache):
for key, layer in model.items():
model[key], rmsprop_cache[key] = rmsprop(layer, model_derivatives[key], rmsprop_cache[key], learning_rate, gamma)
return model
def accumulate_reward(rewards):
accumulated_reward = np.zeros_like(rewards)
accumulator = 0
for i in range(rewards.shape[0]):
accumulator = gamma * accumulator + rewards[i]
accumulated_reward[i] = accumulator
return accumulated_reward
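#Note (hedged): the forward loop above discounts *past* rewards into each step (reward-to-date);
#textbook REINFORCE uses reward-to-go, i.e. the same recursion run over reversed(range(len(rewards)))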
main()
|
kenlist/chromium_tools_gyp
|
refs/heads/master
|
test/win/gyptest-link-entrypointsymbol.py
|
342
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Make sure entrypointsymbol setting is extracted properly.
"""
import TestGyp
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'linker-flags'
test.run_gyp('entrypointsymbol.gyp', chdir=CHDIR)
test.build('entrypointsymbol.gyp', 'test_ok', chdir=CHDIR)
test.build('entrypointsymbol.gyp', 'test_fail', chdir=CHDIR, status=1)
test.pass_test()
|
maciekzdaleka/lab11
|
refs/heads/master
|
read-aws-queue.py
|
1
|
# This script reads a message from a queue
#
# Author - Paul Doyle Nov 2015
#
#
import boto.sqs
import boto.sqs.queue
from boto.sqs.message import Message
from boto.sqs.connection import SQSConnection
from boto.exception import SQSError
import sys
import urllib2
# Get the keys from a specific url and then use them to connect to AWS Service
response = urllib2.urlopen('http://ec2-52-30-7-5.eu-west-1.compute.amazonaws.com:81/key')
html=response.read()
result = html.split(':')
#print (result[0])
#print (result[1])
access_key_id = result[0]
secret_access_key = result[1]
#print (access_key_id,secret_access_key)
# Set up a connection to the AWS service.
conn = boto.sqs.connect_to_region("eu-west-1", aws_access_key_id=access_key_id, aws_secret_access_key=secret_access_key)
student_number = 'C13470112'
#conn.delete_queue(sys.argv[1])
# Get the queue by name, read a message from it, and print the message body
my_queue = conn.get_queue(student_number+sys.argv[1])
rs = my_queue.get_messages()
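# get_messages() returns a (possibly empty) list; rs[0] raises IndexError when the queue is empty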
m = rs[0]
print m.get_body()
|
chdecultot/erpnext
|
refs/heads/develop
|
erpnext/healthcare/web_form/patient_appointments/patient_appointments.py
|
30
|
from __future__ import unicode_literals
import frappe
def get_context(context):
context.read_only = 1
def get_list_context(context):
context.row_template = "erpnext/templates/includes/healthcare/appointment_row_template.html"
context.get_list = get_appointment_list
def get_appointment_list(doctype, txt, filters, limit_start, limit_page_length=20, order_by='modified desc'):
	patient = get_patient()
	appointments = frappe.db.sql("""select * from `tabPatient Appointment`
		where patient = %s and status in ('Open', 'Scheduled') order by appointment_date""", patient, as_dict=True)
	return appointments
def get_patient():
return frappe.get_value("Patient",{"email": frappe.session.user}, "name")
def has_website_permission(doc, ptype, user, verbose=False):
	return doc.patient == get_patient()
|
lokeshjindal15/pd-gem5
|
refs/heads/master
|
tests/configs/realview64-switcheroo-timing.py
|
33
|
# Copyright (c) 2012 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Andreas Sandberg
from m5.objects import *
from arm_generic import *
import switcheroo
root = LinuxArmFSSwitcheroo(
machine_type='VExpress_EMM64',
mem_class=DDR3_1600_x64,
cpu_classes=(TimingSimpleCPU, TimingSimpleCPU)
).create_root()
# Setup a custom test method that uses the switcheroo tester that
# switches between CPU models.
run_test = switcheroo.run_test
|
rameshvs/nipype
|
refs/heads/master
|
nipype/interfaces/fsl/tests/test_auto_FSLCommand.py
|
5
|
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.interfaces.fsl.base import FSLCommand
def test_FSLCommand_inputs():
input_map = dict(args=dict(argstr='%s',
),
environ=dict(nohash=True,
usedefault=True,
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
output_type=dict(),
terminal_output=dict(mandatory=True,
nohash=True,
),
)
inputs = FSLCommand.input_spec()
for key, metadata in input_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(inputs.traits()[key], metakey), value
|
tardyp/buildbot
|
refs/heads/master
|
master/buildbot/test/unit/data/test_patches.py
|
6
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.trial import unittest
from buildbot.data import patches
from buildbot.test.fake import fakemaster
from buildbot.test.util.misc import TestReactorMixin
class Patch(TestReactorMixin, unittest.TestCase):
def setUp(self):
self.setUpTestReactor()
self.master = fakemaster.make_master(self, wantMq=True, wantDb=True,
wantData=True)
self.rtype = patches.Patch(self.master)
# no update methods -> nothing to test
|
pozdnyakov/chromium-crosswalk
|
refs/heads/master
|
tools/telemetry/telemetry/page/__init__.py
|
461
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
|
smartpm/smart
|
refs/heads/master
|
smart/commands/search.py
|
2
|
#
# Copyright (c) 2004 Conectiva, Inc.
#
# Written by Gustavo Niemeyer <niemeyer@conectiva.com>
#
# This file is part of Smart Package Manager.
#
# Smart Package Manager is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# Smart Package Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Smart Package Manager; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
from smart.commands import query
from smart import *
USAGE=_("smart search expression ...")
DESCRIPTION=_("""
This command allows searching for the given expressions
in the name, summary, and description of known packages.
""")
EXAMPLES=_("""
smart search ldap
smart search kernel module
smart search rpm 'package manager'
smart search pkgname
smart search 'pkgn*e'
""")
def option_parser():
return query.option_parser(usage=USAGE, description=DESCRIPTION, \
examples=EXAMPLES)
def parse_options(argv):
opts = query.parse_options(argv, usage=USAGE, \
description=DESCRIPTION, examples=EXAMPLES)
if not argv:
raise Error, _("Search expression not specified")
opts.name = opts.args
opts.summary = opts.args
opts.description = opts.args
for arg in argv:
if ":/" in arg:
opts.url.append(arg)
elif "/" in arg:
opts.path.append(arg)
opts.show_summary = True
opts.hide_version = True
opts.args = []
return opts
main = query.main
|
Tim55667757/FileRevision
|
refs/heads/master
|
FileRevision.py
|
1
|
# -*- coding: utf-8 -*-
#
# Author: Timur Gilmullin
# (c) Positive Technologies, 2013
# This module implements work with revision files.
# A revision looks like a dictionary whose keys are function names and whose
# values are (hash, source) tuples:
# {"funcName1": (funcName1_hash, funcName1_source),
#  "funcName2": (funcName2_hash, funcName2_source), ...}
# A revision file contains a list:
# [revision's last date-and-time, {revisions}]
# An empty revision file looks like this: [None, {}]
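# Illustrative instance of the structures above (name and hash hypothetical):
#   revision = ["21.05.2013 12:00:00",
#               {"funcName1": (-1234567890, "def funcName1():\n    pass\n")}]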
import os
import inspect
from datetime import datetime
import traceback
# text messages:
MSG_CHECK = "Checking revision for function:"
MSG_NOT_MODIFIED = "The given function has not been modified since the last revision."
MSG_MODIFIED = "The given function has been modified since the last revision!"
MSG_UPDATE = "Starting update of function:"
MSG_UPDATED = "The given function was updated successfully."
MSG_UPDATE_ERROR = "There was an error while updating the given function!"
MSG_DELETE = "Starting the revision file's clean-up process..."
MSG_DELETED = "All function revisions were deleted successfully."
MSG_DELETE_ERROR = "There was an error while deleting revisions!"
class Revision():
"""
    Main class implementing work with revisions.
"""
def __init__(self, fileRevision='revision.txt'):
self.fileRevision = fileRevision
self.mainRevision = self._ReadFromFile(self.fileRevision) # get main revision first
def _ReadFromFile(self, file=None):
"""
        Helper function that parses and returns a revision from a file.
"""
revision = [None, {}]
        if file is None:
            file = self.fileRevision
try:
if os.path.exists(file) and os.path.isfile(file):
                with open(file) as fH:
                    # NOTE: eval() is unsafe on untrusted files; ast.literal_eval
                    # would be a safer drop-in for this data shape.
                    revision = eval(fH.read())
except:
traceback.print_exc()
finally:
return revision
def _WriteToFile(self, revision=[None, {}], file=None):
"""
        Helper procedure that tries to write the given revision to a file.
"""
status = False
        if file is None:
            file = self.fileRevision
try:
with open(file, "w") as fH:
fH.write(str(revision))
status = True
except:
traceback.print_exc()
finally:
return status
def _GetOld(self, func=None):
"""
Get old revision for given function and return tuple: (old_hash, old_source).
"""
        funcHashOld = None # old hash is None if function did not exist in previous revision
        funcSourceOld = None # old source is None if function did not exist in previous revision
try:
if func.__name__ in self.mainRevision[1]:
funcHashOld = self.mainRevision[1][func.__name__][0] # field with old hash of function
funcSourceOld = self.mainRevision[1][func.__name__][1] # field with old code of function
except:
traceback.print_exc()
finally:
return (funcHashOld, funcSourceOld)
def _GetNew(self, func=None):
"""
Get new revision for given function and return tuple: (new_hash, new_source).
"""
        funcSourceNew = None # if function doesn't exist, it also has no source code
        funcHashNew = None # hash is None if function doesn't exist
try:
funcSourceNew = inspect.getsource(func) # get function's source
funcHashNew = hash(funcSourceNew) # new hash of function
except:
traceback.print_exc()
finally:
return (funcHashNew, funcSourceNew)
def _Similar(self, hashOld, sourceOld, hashNew, sourceNew):
"""
        Compares the old and new (hash, source) pairs; returns True when the
        function is considered unmodified, otherwise False.
"""
similar = True # old and new functions are similar, by default
if hashNew != hashOld:
if sourceOld != sourceNew:
similar = False # modified if hashes are not similar and functions not contains similar code
return similar
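    # Illustrative truth table (hedged reading of the logic above): equal
    # hashes -> similar; differing hashes with identical sources -> still
    # similar; differing hashes and differing sources -> not similar (modified).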
def Update(self, func=None):
"""
Set new revision for function.
revision = [revision date-n-time,
{"funcName1": (funcName1_hash, funcName1_source),
{"funcName2": (funcName2_hash, funcName2_source), ...}]
"""
status = False
if func:
try:
funcSourceNew = inspect.getsource(func) # get function's source
funcHashNew = hash(funcSourceNew) # new hash of function
revisionDateNew = datetime.now().strftime('%d.%m.%Y %H:%M:%S') # revision's date
funcRevisionNew = {func.__name__: [funcHashNew, funcSourceNew]} # form for function's revision
self.mainRevision[0] = revisionDateNew # set new date for main revision
self.mainRevision[1].update(funcRevisionNew) # add function's revision to main revision
if self._WriteToFile(self.mainRevision): # write main revision to file
status = True
except:
traceback.print_exc()
finally:
return status
def DeleteAll(self):
"""
        Clears all revisions and writes an empty revision back to the file.
"""
status = False
try:
self.mainRevision = [None, {}] # clean revision
if self._WriteToFile(self.mainRevision): # write main revision to file
status = True
except:
traceback.print_exc()
finally:
return status
def ShowOld(self, func=None):
"""
        Function returns the old revision for the given function.
"""
funcHashOld, funcSourceOld = self._GetOld(func) # get old revision for given function
dateStr = "Last revision: " + str(self.mainRevision[0])
hashStr = "\nOld function's hash: " + str(funcHashOld)
codeStr = "\nOld function's code:\n" + "- " * 30 + "\n" + str(funcSourceOld) + "\n" + "- " * 30
oldRevision = dateStr + hashStr + codeStr
return oldRevision
def ShowNew(self, func=None):
"""
        Function returns the new revision for the given function.
        """
        funcHashNew, funcSourceNew = self._GetNew(func) # get new revision for given function
hashStr = "New function's hash: " + str(funcHashNew)
codeStr = "\nNew function's code:\n" + "- " * 30 + "\n" + str(funcSourceNew) + "\n" + "- " * 30
newRevision = hashStr + codeStr
return newRevision
def Diff(self, func=None):
"""
        Checks whether the given function was modified; if so, returns a tuple
        with the revision's diff (old_revision, new_revision), otherwise None.
"""
funcHashOld, funcSourceOld = self._GetOld(func) # get old revision for given function
funcHashNew, funcSourceNew = self._GetNew(func) # get new revision for given function
# check old and new revisions:
if self._Similar(funcHashOld, funcSourceOld, funcHashNew, funcSourceNew):
            diff = None # no difference
else:
diff = ("Last revision: " + str(self.mainRevision[0]) +
"\nOld function's hash: " + str(funcHashOld) +
"\nOld function's code:\n" + "- " * 30 + "\n" +
str(funcSourceOld) + "\n" + "- " * 30,
"\nNew function's hash: " + str(funcHashNew) +
"\nNew function's code:\n" + "- " * 30 + "\n" +
str(funcSourceNew) + "\n" + "- " * 30) # if new function not similar old function
return diff
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def _testFunction(a=None):
"""
    This is a fake test function for the module.
"""
# this is comment
if a:
return True
else:
return False
if __name__ == '__main__':
func = _testFunction # set function for review in revision
revision = Revision('revision.txt') # init revision class for using with revision.txt
# how to use this module for review revision of function:
print(MSG_CHECK, func.__name__)
funcModified = revision.Diff(func) # get function's diff as tuple (old_revision, new_revision)
if funcModified:
print(MSG_MODIFIED)
print(funcModified[0]) # old revision
print(funcModified[1]) # new revision
else:
print(MSG_NOT_MODIFIED)
# how to use this module for update revision:
action = input("Update function's revision? [y/n]: ")
if action == 'y':
print(MSG_UPDATE, func.__name__)
if revision.Update(func):
print(MSG_UPDATED)
else:
print(MSG_UPDATE_ERROR)
# how to use this module for clean file-revision:
action = input("Clean file-revision now? [y/n]: ")
if action == 'y':
print(MSG_DELETE)
if revision.DeleteAll():
print(MSG_DELETED)
else:
print(MSG_DELETE_ERROR)
# how to use this module for show old review:
action = input('Show old revision for function? [y/n]: ')
if action == 'y':
print(revision.ShowOld(func))
# how to use this module for show new review:
action = input('Show new revision for function? [y/n]: ')
if action == 'y':
print(revision.ShowNew(func))
|
fafaman/django
|
refs/heads/master
|
django/contrib/sites/__init__.py
|
808
|
default_app_config = 'django.contrib.sites.apps.SitesConfig'
|
fcracker79/pybitcointools
|
refs/heads/master
|
bitcoin/bci.py
|
4
|
#!/usr/bin/python
import json, re
import random
import sys
try:
from urllib.request import build_opener
except:
from urllib2 import build_opener
# Makes a request to a given URL (first arg) and optional params (second arg)
def make_request(*args):
opener = build_opener()
opener.addheaders = [('User-agent',
'Mozilla/5.0'+str(random.randrange(1000000)))]
try:
return opener.open(*args).read().strip()
except Exception as e:
try:
p = e.read().strip()
except:
p = e
raise Exception(p)
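# Usage sketch (illustrative, not part of the original file): a bare GET such as
#   raw = make_request('https://blockchain.info/latestblock')
# returns the stripped response body; HTTP errors are re-raised as a generic
# Exception carrying the server's error body when one is available.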
def is_testnet(inp):
'''Checks if inp is a testnet address or if UTXO is a known testnet TxID'''
if isinstance(inp, (list, tuple)) and len(inp) >= 1:
return any([is_testnet(x) for x in inp])
elif not isinstance(inp, basestring): # sanity check
raise TypeError("Input must be str/unicode, not type %s" % str(type(inp)))
if not inp or (inp.lower() in ("btc", "testnet")):
pass
## ADDRESSES
if inp[0] in "123mn":
if re.match("^[2mn][a-km-zA-HJ-NP-Z0-9]{26,33}$", inp):
return True
elif re.match("^[13][a-km-zA-HJ-NP-Z0-9]{26,33}$", inp):
return False
else:
#sys.stderr.write("Bad address format %s")
return None
## TXID
elif re.match('^[0-9a-fA-F]{64}$', inp):
base_url = "http://api.blockcypher.com/v1/btc/{network}/txs/{txid}?includesHex=false"
try:
# try testnet fetchtx
make_request(base_url.format(network="test3", txid=inp.lower()))
return True
except:
# try mainnet fetchtx
make_request(base_url.format(network="main", txid=inp.lower()))
return False
sys.stderr.write("TxID %s has no match for testnet or mainnet (Bad TxID)")
return None
else:
raise TypeError("{0} is unknown input".format(inp))
def set_network(*args):
'''Decides if args for unspent/fetchtx/pushtx are mainnet or testnet'''
r = []
for arg in args:
if not arg:
pass
if isinstance(arg, basestring):
r.append(is_testnet(arg))
elif isinstance(arg, (list, tuple)):
return set_network(*arg)
if any(r) and not all(r):
raise Exception("Mixed Testnet/Mainnet queries")
return "testnet" if any(r) else "btc"
def parse_addr_args(*args):
# Valid input formats: unspent([addr1, addr2, addr3])
# unspent([addr1, addr2, addr3], network)
# unspent(addr1, addr2, addr3)
# unspent(addr1, addr2, addr3, network)
addr_args = args
network = "btc"
if len(args) == 0:
return [], 'btc'
if len(args) >= 1 and args[-1] in ('testnet', 'btc'):
network = args[-1]
addr_args = args[:-1]
    if len(addr_args) == 1 and isinstance(addr_args[0], list):
network = set_network(*addr_args[0])
addr_args = addr_args[0]
if addr_args and isinstance(addr_args, tuple) and isinstance(addr_args[0], list):
addr_args = addr_args[0]
network = set_network(addr_args)
return addr_args, network # note params are "reversed" now
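# Usage sketch (illustrative): given the return order above,
#   parse_addr_args('1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa', 'btc')
# yields (('1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa',), 'btc'), so callers should
# unpack it as `addrs, network = parse_addr_args(*args)`.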
# Gets the unspent outputs of one or more addresses
def bci_unspent(*args):
addrs, network = parse_addr_args(*args)
u = []
for a in addrs:
try:
data = make_request('https://blockchain.info/unspent?active='+a)
except Exception as e:
if str(e) == 'No free outputs to spend':
continue
else:
raise Exception(e)
try:
jsonobj = json.loads(data.decode("utf-8"))
for o in jsonobj["unspent_outputs"]:
h = o['tx_hash'].decode('hex')[::-1].encode('hex')
u.append({
"output": h+':'+str(o['tx_output_n']),
"value": o['value']
})
except:
raise Exception("Failed to decode data: "+data)
return u
def blockr_unspent(*args):
# Valid input formats: blockr_unspent([addr1, addr2,addr3])
# blockr_unspent(addr1, addr2, addr3)
# blockr_unspent([addr1, addr2, addr3], network)
# blockr_unspent(addr1, addr2, addr3, network)
# Where network is 'btc' or 'testnet'
    addr_args, network = parse_addr_args(*args)
if network == 'testnet':
blockr_url = 'http://tbtc.blockr.io/api/v1/address/unspent/'
elif network == 'btc':
blockr_url = 'http://btc.blockr.io/api/v1/address/unspent/'
else:
raise Exception(
'Unsupported network {0} for blockr_unspent'.format(network))
if len(addr_args) == 0:
return []
elif isinstance(addr_args[0], list):
addrs = addr_args[0]
else:
addrs = addr_args
res = make_request(blockr_url+','.join(addrs))
data = json.loads(res.decode("utf-8"))['data']
o = []
if 'unspent' in data:
data = [data]
for dat in data:
for u in dat['unspent']:
o.append({
"output": u['tx']+':'+str(u['n']),
"value": int(u['amount'].replace('.', ''))
})
return o
def helloblock_unspent(*args):
addrs, network = parse_addr_args(*args)
if network == 'testnet':
url = 'https://testnet.helloblock.io/v1/addresses/%s/unspents?limit=500&offset=%s'
elif network == 'btc':
url = 'https://mainnet.helloblock.io/v1/addresses/%s/unspents?limit=500&offset=%s'
o = []
for addr in addrs:
for offset in xrange(0, 10**9, 500):
res = make_request(url % (addr, offset))
data = json.loads(res.decode("utf-8"))["data"]
if not len(data["unspents"]):
break
elif offset:
sys.stderr.write("Getting more unspents: %d\n" % offset)
for dat in data["unspents"]:
o.append({
"output": dat["txHash"]+':'+str(dat["index"]),
"value": dat["value"],
})
return o
unspent_getters = {
'bci': bci_unspent,
'blockr': blockr_unspent,
'helloblock': helloblock_unspent
}
def unspent(*args, **kwargs):
f = unspent_getters.get(kwargs.get('source', ''), bci_unspent)
return f(*args)
# Gets the transaction output history of a given set of addresses,
# including whether or not they have been spent
def history(*args):
# Valid input formats: history([addr1, addr2,addr3])
# history(addr1, addr2, addr3)
if len(args) == 0:
return []
elif isinstance(args[0], list):
addrs = args[0]
else:
addrs = args
txs = []
for addr in addrs:
offset = 0
while 1:
gathered = False
while not gathered:
try:
data = make_request(
'https://blockchain.info/address/%s?format=json&offset=%s' %
(addr, offset))
gathered = True
except Exception as e:
try:
sys.stderr.write(e.read().strip())
except:
sys.stderr.write(str(e))
gathered = False
try:
jsonobj = json.loads(data.decode("utf-8"))
except:
raise Exception("Failed to decode data: "+data)
txs.extend(jsonobj["txs"])
if len(jsonobj["txs"]) < 50:
break
offset += 50
sys.stderr.write("Fetching more transactions... "+str(offset)+'\n')
outs = {}
for tx in txs:
for o in tx["out"]:
if o.get('addr', None) in addrs:
key = str(tx["tx_index"])+':'+str(o["n"])
outs[key] = {
"address": o["addr"],
"value": o["value"],
"output": tx["hash"]+':'+str(o["n"]),
"block_height": tx.get("block_height", None)
}
for tx in txs:
for i, inp in enumerate(tx["inputs"]):
if "prev_out" in inp:
if inp["prev_out"].get("addr", None) in addrs:
key = str(inp["prev_out"]["tx_index"]) + \
':'+str(inp["prev_out"]["n"])
if outs.get(key):
outs[key]["spend"] = tx["hash"]+':'+str(i)
return [outs[k] for k in outs]
# Pushes a transaction to the network using https://blockchain.info/pushtx
def bci_pushtx(tx):
if not re.match('^[0-9a-fA-F]*$', tx):
tx = tx.encode('hex')
return make_request('https://blockchain.info/pushtx', 'tx='+tx)
def eligius_pushtx(tx):
if not re.match('^[0-9a-fA-F]*$', tx):
tx = tx.encode('hex')
s = make_request(
'http://eligius.st/~wizkid057/newstats/pushtxn.php',
'transaction='+tx+'&send=Push')
strings = re.findall('string[^"]*"[^"]*"', s)
for string in strings:
quote = re.findall('"[^"]*"', string)[0]
if len(quote) >= 5:
return quote[1:-1]
def blockr_pushtx(tx, network='btc'):
if network == 'testnet':
blockr_url = 'http://tbtc.blockr.io/api/v1/tx/push'
elif network == 'btc':
blockr_url = 'http://btc.blockr.io/api/v1/tx/push'
else:
raise Exception(
'Unsupported network {0} for blockr_pushtx'.format(network))
if not re.match('^[0-9a-fA-F]*$', tx):
tx = tx.encode('hex')
return make_request(blockr_url, '{"hex":"%s"}' % tx)
def helloblock_pushtx(tx):
if not re.match('^[0-9a-fA-F]*$', tx):
tx = tx.encode('hex')
return make_request('https://mainnet.helloblock.io/v1/transactions',
'rawTxHex='+tx)
pushtx_getters = {
'bci': bci_pushtx,
'blockr': blockr_pushtx,
'helloblock': helloblock_pushtx
}
def pushtx(*args, **kwargs):
f = pushtx_getters.get(kwargs.get('source', ''), bci_pushtx)
return f(*args)
def last_block_height(network='btc'):
if network == 'testnet':
data = make_request('http://tbtc.blockr.io/api/v1/block/info/last')
jsonobj = json.loads(data.decode("utf-8"))
return jsonobj["data"]["nb"]
data = make_request('https://blockchain.info/latestblock')
jsonobj = json.loads(data.decode("utf-8"))
return jsonobj["height"]
# Gets a specific transaction
def bci_fetchtx(txhash):
if isinstance(txhash, list):
return [bci_fetchtx(h) for h in txhash]
if not re.match('^[0-9a-fA-F]*$', txhash):
txhash = txhash.encode('hex')
data = make_request('https://blockchain.info/rawtx/'+txhash+'?format=hex')
return data
def blockr_fetchtx(txhash, network='btc'):
if network == 'testnet':
blockr_url = 'http://tbtc.blockr.io/api/v1/tx/raw/'
elif network == 'btc':
blockr_url = 'http://btc.blockr.io/api/v1/tx/raw/'
else:
raise Exception(
'Unsupported network {0} for blockr_fetchtx'.format(network))
if isinstance(txhash, list):
txhash = ','.join([x.encode('hex') if not re.match('^[0-9a-fA-F]*$', x)
else x for x in txhash])
jsondata = json.loads(make_request(blockr_url+txhash).decode("utf-8"))
return [d['tx']['hex'] for d in jsondata['data']]
else:
if not re.match('^[0-9a-fA-F]*$', txhash):
txhash = txhash.encode('hex')
jsondata = json.loads(make_request(blockr_url+txhash).decode("utf-8"))
return jsondata['data']['tx']['hex']
def helloblock_fetchtx(txhash, network='btc'):
if isinstance(txhash, list):
return [helloblock_fetchtx(h) for h in txhash]
if not re.match('^[0-9a-fA-F]*$', txhash):
txhash = txhash.encode('hex')
if network == 'testnet':
url = 'https://testnet.helloblock.io/v1/transactions/'
elif network == 'btc':
url = 'https://mainnet.helloblock.io/v1/transactions/'
else:
raise Exception(
'Unsupported network {0} for helloblock_fetchtx'.format(network))
data = json.loads(make_request(url + txhash).decode("utf-8"))["data"]["transaction"]
o = {
"locktime": data["locktime"],
"version": data["version"],
"ins": [],
"outs": []
}
for inp in data["inputs"]:
o["ins"].append({
"script": inp["scriptSig"],
"outpoint": {
"index": inp["prevTxoutIndex"],
"hash": inp["prevTxHash"],
},
"sequence": 4294967295
})
for outp in data["outputs"]:
o["outs"].append({
"value": outp["value"],
"script": outp["scriptPubKey"]
})
from bitcoin.transaction import serialize
from bitcoin.transaction import txhash as TXHASH
tx = serialize(o)
assert TXHASH(tx) == txhash
return tx
fetchtx_getters = {
'bci': bci_fetchtx,
'blockr': blockr_fetchtx,
'helloblock': helloblock_fetchtx
}
def fetchtx(*args, **kwargs):
f = fetchtx_getters.get(kwargs.get('source', ''), bci_fetchtx)
return f(*args)
def firstbits(address):
if len(address) >= 25:
return make_request('https://blockchain.info/q/getfirstbits/'+address)
else:
return make_request(
'https://blockchain.info/q/resolvefirstbits/'+address)
def get_block_at_height(height):
j = json.loads(make_request("https://blockchain.info/block-height/" +
str(height)+"?format=json").decode("utf-8"))
for b in j['blocks']:
if b['main_chain'] is True:
return b
raise Exception("Block at this height not found")
def _get_block(inp):
if len(str(inp)) < 64:
return get_block_at_height(inp)
else:
return json.loads(make_request(
'https://blockchain.info/rawblock/'+inp).decode("utf-8"))
def bci_get_block_header_data(inp):
j = _get_block(inp)
return {
'version': j['ver'],
'hash': j['hash'],
'prevhash': j['prev_block'],
'timestamp': j['time'],
'merkle_root': j['mrkl_root'],
'bits': j['bits'],
'nonce': j['nonce'],
}
def blockr_get_block_header_data(height, network='btc'):
if network == 'testnet':
blockr_url = "http://tbtc.blockr.io/api/v1/block/raw/"
elif network == 'btc':
blockr_url = "http://btc.blockr.io/api/v1/block/raw/"
else:
raise Exception(
'Unsupported network {0} for blockr_get_block_header_data'.format(network))
k = json.loads(make_request(blockr_url + str(height)).decode("utf-8"))
j = k['data']
return {
'version': j['version'],
'hash': j['hash'],
'prevhash': j['previousblockhash'],
'timestamp': j['time'],
'merkle_root': j['merkleroot'],
'bits': int(j['bits'], 16),
'nonce': j['nonce'],
}
def get_block_timestamp(height, network='btc'):
if network == 'testnet':
blockr_url = "http://tbtc.blockr.io/api/v1/block/info/"
elif network == 'btc':
blockr_url = "http://btc.blockr.io/api/v1/block/info/"
else:
raise Exception(
'Unsupported network {0} for get_block_timestamp'.format(network))
import time, calendar
if isinstance(height, list):
k = json.loads(make_request(blockr_url + ','.join([str(x) for x in height])).decode("utf-8"))
o = {x['nb']: calendar.timegm(time.strptime(x['time_utc'],
"%Y-%m-%dT%H:%M:%SZ")) for x in k['data']}
return [o[x] for x in height]
else:
k = json.loads(make_request(blockr_url + str(height)).decode("utf-8"))
j = k['data']['time_utc']
return calendar.timegm(time.strptime(j, "%Y-%m-%dT%H:%M:%SZ"))
block_header_data_getters = {
'bci': bci_get_block_header_data,
'blockr': blockr_get_block_header_data
}
def get_block_header_data(inp, **kwargs):
f = block_header_data_getters.get(kwargs.get('source', ''),
bci_get_block_header_data)
return f(inp, **kwargs)
def get_txs_in_block(inp):
j = _get_block(inp)
hashes = [t['hash'] for t in j['tx']]
return hashes
def get_block_height(txhash):
j = json.loads(make_request('https://blockchain.info/rawtx/'+txhash).decode("utf-8"))
return j['block_height']
# Example call shape: get_tx_composite(from_address, to_address, 12345, change_address)
def get_tx_composite(inputs, outputs, output_value, change_address=None, network=None):
"""mktx using blockcypher API"""
inputs = [inputs] if not isinstance(inputs, list) else inputs
outputs = [outputs] if not isinstance(outputs, list) else outputs
network = set_network(change_address or inputs) if not network else network.lower()
url = "http://api.blockcypher.com/v1/btc/{network}/txs/new?includeToSignTx=true".format(
network=('test3' if network=='testnet' else 'main'))
is_address = lambda a: bool(re.match("^[123mn][a-km-zA-HJ-NP-Z0-9]{26,33}$", a))
if any([is_address(x) for x in inputs]):
inputs_type = 'addresses' # also accepts UTXOs, only addresses supported presently
if any([is_address(x) for x in outputs]):
outputs_type = 'addresses' # TODO: add UTXO support
data = {
'inputs': [{inputs_type: inputs}],
'confirmations': 0,
'preference': 'high',
'outputs': [{outputs_type: outputs, "value": output_value}]
}
if change_address:
data["change_address"] = change_address #
    # serialize the JSON body; urllib2 cannot POST a raw dict
    jdata = json.loads(make_request(url, json.dumps(data)))
    from bitcoin.main import bin_dbl_sha256  # local import, matching the style used in helloblock_fetchtx
    hash, txh = jdata.get("tosign")[0], jdata.get("tosign_tx")[0]
    assert bin_dbl_sha256(txh.decode('hex')).encode('hex') == hash, "checksum mismatch %s" % hash
return txh.encode("utf-8")
blockcypher_mktx = get_tx_composite
|
rupran/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/webfaction/webfaction_site.py
|
6
|
#!/usr/bin/python
#
# Create Webfaction website using Ansible and the Webfaction API
#
# ------------------------------------------
#
# (c) Quentin Stafford-Fraser 2015
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: webfaction_site
short_description: Add or remove a website on a Webfaction host
description:
- Add or remove a website on a Webfaction host. Further documentation at http://github.com/quentinsf/ansible-webfaction.
author: Quentin Stafford-Fraser (@quentinsf)
version_added: "2.0"
notes:
- Sadly, you I(do) need to know your webfaction hostname for the C(host) parameter. But at least, unlike the API, you don't need to know the IP address - you can use a DNS name.
- If a site of the same name exists in the account but on a different host, the operation will exit.
- "You can run playbooks that use this on a local machine, or on a Webfaction host, or elsewhere, since the scripts use the remote webfaction API - the location is not important. However, running them on multiple hosts I(simultaneously) is best avoided. If you don't specify I(localhost) as your host, you may want to add C(serial: 1) to the plays."
- See `the webfaction API <http://docs.webfaction.com/xmlrpc-api/>`_ for more info.
options:
name:
description:
- The name of the website
required: true
state:
description:
- Whether the website should exist
required: false
choices: ['present', 'absent']
default: "present"
host:
description:
- The webfaction host on which the site should be created.
required: true
https:
description:
- Whether or not to use HTTPS
required: false
choices:
- true
- false
default: 'false'
site_apps:
description:
- A mapping of URLs to apps
required: false
subdomains:
description:
- A list of subdomains associated with this site.
required: false
default: null
login_name:
description:
- The webfaction account to use
required: true
login_password:
description:
- The webfaction password to use
required: true
'''
EXAMPLES = '''
- name: create website
webfaction_site:
name: testsite1
state: present
host: myhost.webfaction.com
subdomains:
- 'testsite1.my_domain.org'
site_apps:
- ['testapp1', '/']
https: no
login_name: "{{webfaction_user}}"
login_password: "{{webfaction_passwd}}"
'''
import socket
import xmlrpclib
webfaction = xmlrpclib.ServerProxy('https://api.webfaction.com/')
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(required=True),
state = dict(required=False, choices=['present', 'absent'], default='present'),
# You can specify an IP address or hostname.
host = dict(required=True),
https = dict(required=False, type='bool', default=False),
subdomains = dict(required=False, type='list', default=[]),
site_apps = dict(required=False, type='list', default=[]),
login_name = dict(required=True),
login_password = dict(required=True, no_log=True),
),
supports_check_mode=True
)
site_name = module.params['name']
site_state = module.params['state']
site_host = module.params['host']
site_ip = socket.gethostbyname(site_host)
session_id, account = webfaction.login(
module.params['login_name'],
module.params['login_password']
)
site_list = webfaction.list_websites(session_id)
site_map = dict([(i['name'], i) for i in site_list])
existing_site = site_map.get(site_name)
result = {}
# Here's where the real stuff happens
if site_state == 'present':
# Does a site with this name already exist?
if existing_site:
# If yes, but it's on a different IP address, then fail.
# If we wanted to allow relocation, we could add a 'relocate=true' option
# which would get the existing IP address, delete the site there, and create it
# at the new address. A bit dangerous, perhaps, so for now we'll require manual
# deletion if it's on another host.
if existing_site['ip'] != site_ip:
module.fail_json(msg="Website already exists with a different IP address. Please fix by hand.")
# If it's on this host and the key parameters are the same, nothing needs to be done.
if (existing_site['https'] == module.boolean(module.params['https'])) and \
(set(existing_site['subdomains']) == set(module.params['subdomains'])) and \
(dict(existing_site['website_apps']) == dict(module.params['site_apps'])):
module.exit_json(
changed = False
)
positional_args = [
session_id, site_name, site_ip,
module.boolean(module.params['https']),
module.params['subdomains'],
]
for a in module.params['site_apps']:
positional_args.append( (a[0], a[1]) )
if not module.check_mode:
# If this isn't a dry run, create or modify the site
result.update(
webfaction.create_website(
*positional_args
) if not existing_site else webfaction.update_website (
*positional_args
)
)
elif site_state == 'absent':
# If the site's already not there, nothing changed.
if not existing_site:
module.exit_json(
changed = False,
)
if not module.check_mode:
# If this isn't a dry run, delete the site
result.update(
webfaction.delete_website(session_id, site_name, site_ip)
)
else:
module.fail_json(msg="Unknown state specified: {}".format(site_state))
module.exit_json(
changed = True,
result = result
)
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
apache/incubator-airflow
|
refs/heads/master
|
airflow/contrib/hooks/ssh_hook.py
|
7
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use `airflow.providers.ssh.hooks.ssh`."""
import warnings
# pylint: disable=unused-import
from airflow.providers.ssh.hooks.ssh import SSHHook # noqa
warnings.warn(
"This module is deprecated. Please use `airflow.providers.ssh.hooks.ssh`.",
DeprecationWarning,
stacklevel=2,
)
|
EdgarSun/Django-Demo
|
refs/heads/master
|
django/contrib/localflavor/ch/ch_states.py
|
544
|
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
STATE_CHOICES = (
('AG', _('Aargau')),
('AI', _('Appenzell Innerrhoden')),
('AR', _('Appenzell Ausserrhoden')),
('BS', _('Basel-Stadt')),
('BL', _('Basel-Land')),
('BE', _('Berne')),
('FR', _('Fribourg')),
('GE', _('Geneva')),
('GL', _('Glarus')),
('GR', _('Graubuenden')),
('JU', _('Jura')),
('LU', _('Lucerne')),
('NE', _('Neuchatel')),
('NW', _('Nidwalden')),
('OW', _('Obwalden')),
('SH', _('Schaffhausen')),
('SZ', _('Schwyz')),
('SO', _('Solothurn')),
('SG', _('St. Gallen')),
('TG', _('Thurgau')),
('TI', _('Ticino')),
('UR', _('Uri')),
('VS', _('Valais')),
('VD', _('Vaud')),
('ZG', _('Zug')),
('ZH', _('Zurich'))
)
|
dhylands/micropython
|
refs/heads/master
|
tests/basics/dict_intern.py
|
117
|
# check that interned strings are compared against non-interned strings
di = {"key1": "value"}
# lookup interned string
k = "key1"
print(k in di)
# lookup non-interned string
k2 = "key" + "1"
print(k == k2)
print(k2 in di)
# lookup non-interned string
print("".join(['k', 'e', 'y', '1']) in di)
|
Captain-Coder/tribler
|
refs/heads/devel
|
Tribler/Core/RemoteTorrentHandler.py
|
1
|
"""
Handles the case where the user did a remote query and now selected one of the
returned torrents for download.
Author(s): Niels Zeilemaker
"""
import logging
import sys
import urllib
from abc import ABCMeta, abstractmethod
from binascii import hexlify, unhexlify
from collections import deque
from decorator import decorator
from twisted.internet import reactor
from twisted.internet.task import LoopingCall
from Tribler.Core.TFTP.handler import METADATA_PREFIX
from Tribler.Core.TorrentDef import TorrentDef
from Tribler.Core.simpledefs import INFOHASH_LENGTH, NTFY_TORRENTS
from Tribler.pyipv8.ipv8.taskmanager import TaskManager
TORRENT_OVERFLOW_CHECKING_INTERVAL = 30 * 60
LOW_PRIO_COLLECTING = 0
MAGNET_TIMEOUT = 5.0
MAX_PRIORITY = 1
@decorator
def pass_when_stopped(f, self, *argv, **kwargs):
if self.running:
return f(self, *argv, **kwargs)
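# Illustrative effect (hedged): once `self.running` is False, any method
# decorated with @pass_when_stopped silently returns None, e.g.
#   requester.running = False
#   requester.add_request(infohash, candidate)  # body never executes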
class RemoteTorrentHandler(TaskManager):
def __init__(self, session):
super(RemoteTorrentHandler, self).__init__()
self._logger = logging.getLogger(self.__class__.__name__)
self.running = False
self.torrent_callbacks = {}
self.metadata_callbacks = {}
self.torrent_requesters = {}
self.torrent_message_requesters = {}
self.magnet_requesters = {}
self.metadata_requester = None
self.num_torrents = 0
self.session = session
self.dispersy = None
self.max_num_torrents = 0
self.tor_col_dir = None
self.torrent_db = None
def initialize(self):
self.dispersy = self.session.get_dispersy_instance()
self.max_num_torrents = self.session.config.get_torrent_collecting_max_torrents()
self.torrent_db = None
if self.session.config.get_megacache_enabled():
self.torrent_db = self.session.open_dbhandler(NTFY_TORRENTS)
self.__check_overflow()
self.running = True
for priority in (0, 1):
self.magnet_requesters[priority] = MagnetRequester(self.session, self, priority)
self.torrent_requesters[priority] = TftpRequester(u"tftp_torrent_%s" % priority,
self.session, self, priority)
self.torrent_message_requesters[priority] = TorrentMessageRequester(self.session, self, priority)
self.metadata_requester = TftpRequester(u"tftp_metadata_%s" % 0, self.session, self, 0)
def shutdown(self):
self.running = False
for requester in self.torrent_requesters.itervalues():
requester.stop()
self.shutdown_task_manager()
def set_max_num_torrents(self, max_num_torrents):
self.max_num_torrents = max_num_torrents
def __check_overflow(self):
def clean_until_done(num_delete, deletions_per_step):
"""
Delete torrents in steps to avoid too much IO at once.
"""
if num_delete > 0:
to_remove = min(num_delete, deletions_per_step)
num_delete -= to_remove
self.torrent_db.freeSpace(to_remove)
self.register_task(u"remote_torrent clean_until_done",
reactor.callLater(5, clean_until_done, num_delete, deletions_per_step))
def torrent_overflow_check():
"""
Check if we have reached the collected torrent limit and throttle its collection if so.
"""
self.num_torrents = self.torrent_db.getNumberCollectedTorrents()
self._logger.debug(u"check overflow: current %d max %d", self.num_torrents, self.max_num_torrents)
if self.num_torrents > self.max_num_torrents:
num_delete = int(self.num_torrents - self.max_num_torrents * 0.95)
deletions_per_step = max(25, num_delete / 180)
clean_until_done(num_delete, deletions_per_step)
self._logger.info(u"** limit space:: %d %d %d", self.num_torrents, self.max_num_torrents, num_delete)
self.register_task(u"remote_torrent overflow_check",
LoopingCall(torrent_overflow_check)).start(TORRENT_OVERFLOW_CHECKING_INTERVAL, now=True)
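        # Worked example (illustrative): with max_num_torrents = 50000 and
        # num_torrents = 51000, num_delete = int(51000 - 47500) = 3500 and
        # deletions_per_step = max(25, 3500 / 180) = 25, so the backlog drains
        # in 140 steps scheduled 5 seconds apart.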
def schedule_task(self, name, task, delay_time=0.0, *args, **kwargs):
self.register_task(name, reactor.callLater(delay_time, task, *args, **kwargs))
def download_torrent(self, candidate, infohash, user_callback=None, priority=1, timeout=None):
assert isinstance(infohash, str), u"infohash has invalid type: %s" % type(infohash)
assert len(infohash) == INFOHASH_LENGTH, u"infohash has invalid length: %s" % len(infohash)
# fix prio levels to 1 and 0
priority = min(priority, 1)
        # we use DHT if we don't have a candidate
if candidate:
self.torrent_requesters[priority].add_request(infohash, candidate, timeout)
else:
self.magnet_requesters[priority].add_request(infohash)
if user_callback:
callback = lambda ih = infohash: user_callback(ih)
self.torrent_callbacks.setdefault(infohash, set()).add(callback)
def save_torrent(self, tdef, callback=None):
infohash = tdef.get_infohash()
infohash_str = hexlify(infohash)
if self.session.lm.torrent_store is None:
self._logger.error("Torrent store is not loaded")
return
# TODO(emilon): could we check the database instead of the store?
        # Checking if a key is present fetches the whole torrent from disk if it's
        # not in the writeback cache.
if infohash_str not in self.session.lm.torrent_store:
# save torrent to file
try:
bdata = tdef.encode()
except Exception as e:
self._logger.error(u"failed to encode torrent %s: %s", infohash_str, e)
return
try:
self.session.lm.torrent_store[infohash_str] = bdata
except Exception as e:
self._logger.error(u"failed to store torrent data for %s, exception was: %s", infohash_str, e)
# add torrent to database
if self.torrent_db.hasTorrent(infohash):
self.torrent_db.updateTorrent(infohash, is_collected=1)
else:
self.torrent_db.addExternalTorrent(tdef, extra_info={u"is_collected": 1, u"status": u"good"})
if callback:
# TODO(emilon): should we catch exceptions from the callback?
callback()
# notify all
self.notify_possible_torrent_infohash(infohash)
def download_torrentmessage(self, candidate, infohash, user_callback=None, priority=1):
assert isinstance(infohash, str), u"infohash has invalid type: %s" % type(infohash)
assert len(infohash) == INFOHASH_LENGTH, u"infohash has invalid length: %s" % len(infohash)
if user_callback:
callback = lambda ih = infohash: user_callback(ih)
self.torrent_callbacks.setdefault(infohash, set()).add(callback)
requester = self.torrent_message_requesters[priority]
# make request
requester.add_request(infohash, candidate)
self._logger.debug(u"adding torrent messages request: %s %s %s", hexlify(infohash), candidate, priority)
def has_metadata(self, thumb_hash):
thumb_hash_str = hexlify(thumb_hash)
return thumb_hash_str in self.session.lm.metadata_store
def get_metadata(self, thumb_hash):
thumb_hash_str = hexlify(thumb_hash)
return self.session.lm.metadata_store[thumb_hash_str]
def download_metadata(self, candidate, thumb_hash, usercallback=None, timeout=None):
if self.has_metadata(thumb_hash):
return
if usercallback:
self.metadata_callbacks.setdefault(thumb_hash, set()).add(usercallback)
self.metadata_requester.add_request(thumb_hash, candidate, timeout, is_metadata=True)
self._logger.debug(u"added metadata request: %s %s", hexlify(thumb_hash), candidate)
def save_metadata(self, thumb_hash, data):
        # save the metadata blob into the session's metadata store
thumb_hash_str = hexlify(thumb_hash)
if thumb_hash_str not in self.session.lm.metadata_store:
self.session.lm.metadata_store[thumb_hash_str] = data
# notify about the new metadata
if thumb_hash in self.metadata_callbacks:
for callback in self.metadata_callbacks[thumb_hash]:
reactor.callInThread(callback, hexlify(thumb_hash))
del self.metadata_callbacks[thumb_hash]
def notify_possible_torrent_infohash(self, infohash):
if infohash not in self.torrent_callbacks:
return
for callback in self.torrent_callbacks[infohash]:
reactor.callInThread(callback, hexlify(infohash))
del self.torrent_callbacks[infohash]
def get_queue_size_stats(self):
def get_queue_size_stats(qname, requesters):
qsize = {}
for requester in requesters.itervalues():
qsize[requester.priority] = requester.pending_request_queue_size
items = qsize.items()
items.sort()
return {"type": qname, "size_stats": [{"priority": prio, "size": size} for prio, size in items]}
return [stats_dict for stats_dict in (get_queue_size_stats("TFTP", self.torrent_requesters),
get_queue_size_stats("DHT", self.magnet_requesters),
get_queue_size_stats("Msg", self.torrent_message_requesters))]
def get_queue_stats(self):
def get_queue_stats(qname, requesters):
pending_requests = success = failed = 0
for requester in requesters.itervalues():
pending_requests += requester.pending_request_queue_size
success += requester.requests_succeeded
failed += requester.requests_failed
total_requests = pending_requests + success + failed
return {"type": qname, "total": total_requests, "success": success,
"pending": pending_requests, "failed": failed}
return [stats_dict for stats_dict in [get_queue_stats("TFTP", self.torrent_requesters),
get_queue_stats("DHT", self.magnet_requesters),
get_queue_stats("Msg", self.torrent_message_requesters)]]
def get_bandwidth_stats(self):
def get_bandwidth_stats(qname, requesters):
bw = 0
for requester in requesters.itervalues():
bw += requester.total_bandwidth
return {"type": qname, "bandwidth": bw}
return [stats_dict for stats_dict in [get_bandwidth_stats("TQueue", self.torrent_requesters),
get_bandwidth_stats("DQueue", self.magnet_requesters)]]
class Requester(object):
__metaclass__ = ABCMeta
REQUEST_INTERVAL = 0.5
def __init__(self, name, session, remote_torrent_handler, priority):
self._logger = logging.getLogger(self.__class__.__name__)
self._name = name
self._session = session
self._remote_torrent_handler = remote_torrent_handler
self._priority = priority
self._pending_request_queue = deque()
self._requests_succeeded = 0
self._requests_failed = 0
self._total_bandwidth = 0
self.running = True
def stop(self):
self._remote_torrent_handler.cancel_pending_task(self._name)
self.running = False
@property
def priority(self):
return self._priority
@property
def pending_request_queue_size(self):
return len(self._pending_request_queue)
@property
def requests_succeeded(self):
return self._requests_succeeded
@property
def requests_failed(self):
return self._requests_failed
@property
def total_bandwidth(self):
return self._total_bandwidth
@pass_when_stopped
def schedule_task(self, task, delay_time=0.0, *args, **kwargs):
"""
Uses RemoteTorrentHandler to schedule a task.
"""
self._remote_torrent_handler.schedule_task(self._name, task, delay_time=delay_time, *args, **kwargs)
@pass_when_stopped
def _start_pending_requests(self):
"""
Starts pending requests.
"""
if self._remote_torrent_handler.is_pending_task_active(self._name):
return
if self._pending_request_queue:
self.schedule_task(self._do_request,
delay_time=Requester.REQUEST_INTERVAL * (MAX_PRIORITY - self._priority))
@abstractmethod
def add_request(self, key, candidate, timeout=None):
"""
Adds a new request.
"""
pass
@abstractmethod
def _do_request(self):
"""
Starts processing pending requests.
"""
pass
class TorrentMessageRequester(Requester):
def __init__(self, session, remote_torrent_handler, priority):
super(TorrentMessageRequester, self).__init__(u"torrent_message_requester",
session, remote_torrent_handler, priority)
if sys.platform == "darwin":
# Mac has just 256 fds per process, be less aggressive
self.REQUEST_INTERVAL = 1.0
self._source_dict = {}
self._search_community = None
@pass_when_stopped
def add_request(self, infohash, candidate, timeout=None):
addr = candidate.sock_addr
queue_was_empty = len(self._pending_request_queue) == 0
if infohash in self._source_dict and candidate in self._source_dict[infohash]:
self._logger.debug(u"already has request %s from %s:%s, skip", hexlify(infohash), addr[0], addr[1])
if infohash not in self._pending_request_queue:
self._pending_request_queue.append(infohash)
self._source_dict[infohash] = []
if candidate in self._source_dict[infohash]:
self._logger.warn(u"ignore duplicate torrent message request %s from %s:%s",
hexlify(infohash), addr[0], addr[1])
return
self._source_dict[infohash].append(candidate)
self._logger.debug(u"added request %s from %s:%s", hexlify(infohash), addr[0], addr[1])
# start scheduling tasks if the queue was empty, which means there was no task running previously
if queue_was_empty:
self._start_pending_requests()
@pass_when_stopped
def _do_request(self):
# find search community
if not self._search_community:
for community in self._session.lm.dispersy.get_communities():
from Tribler.community.search.community import SearchCommunity
if isinstance(community, SearchCommunity):
self._search_community = community
break
if not self._search_community:
self._logger.error(u"no SearchCommunity found.")
return
# requesting messages
while self._pending_request_queue:
infohash = self._pending_request_queue.popleft()
for candidate in self._source_dict[infohash]:
self._logger.debug(u"requesting torrent message %s from %s:%s",
hexlify(infohash), candidate.sock_addr[0], candidate.sock_addr[1])
self._search_community.create_torrent_request(infohash, candidate)
del self._source_dict[infohash]
class MagnetRequester(Requester):
MAX_CONCURRENT = 1
TIMEOUT = 30.0
def __init__(self, session, remote_torrent_handler, priority):
super(MagnetRequester, self).__init__(u"magnet_requester", session, remote_torrent_handler, priority)
if sys.platform == "darwin":
# Mac has just 256 fds per process, be less aggressive
self.REQUEST_INTERVAL = 15.0
if priority <= 1 and not sys.platform == "darwin":
self.MAX_CONCURRENT = 3
self._torrent_db_handler = session.open_dbhandler(NTFY_TORRENTS)
self._running_requests = []
@pass_when_stopped
def add_request(self, infohash, candidate=None, timeout=None):
queue_was_empty = len(self._pending_request_queue) == 0
if infohash not in self._pending_request_queue and infohash not in self._running_requests:
self._pending_request_queue.append(infohash)
# start scheduling tasks if the queue was empty, which means there was no task running previously
if queue_was_empty:
self._start_pending_requests()
@pass_when_stopped
def _do_request(self):
while self._pending_request_queue and self.running:
if len(self._running_requests) >= self.MAX_CONCURRENT:
self._logger.debug(u"max concurrency %s reached, request later", self.MAX_CONCURRENT)
return
infohash = self._pending_request_queue.popleft()
infohash_str = hexlify(infohash)
# try magnet link
magnetlink = "magnet:?xt=urn:btih:" + infohash_str
# see if we know any trackers for this magnet
trackers = self._torrent_db_handler.getTrackerListByInfohash(infohash)
for tracker in trackers:
if tracker not in (u"no-DHT", u"DHT"):
magnetlink += "&tr=" + urllib.quote_plus(tracker)
self._logger.debug(u"requesting %s priority %s through magnet link %s",
infohash_str, self._priority, magnetlink)
self._session.lm.ltmgr.get_metainfo(magnetlink, self._success_callback,
timeout=self.TIMEOUT, timeout_callback=self._failure_callback)
self._running_requests.append(infohash)
def _success_callback(self, meta_info):
"""
The callback that will be called by LibtorrentMgr when a download was successful.
"""
tdef = TorrentDef.load_from_dict(meta_info)
assert tdef.get_infohash() in self._running_requests
infohash = tdef.get_infohash()
self._logger.debug(u"received torrent %s through magnet", hexlify(infohash))
self._remote_torrent_handler.save_torrent(tdef)
self._running_requests.remove(infohash)
self._requests_succeeded += 1
self._total_bandwidth += tdef.get_torrent_size()
self._start_pending_requests()
def _failure_callback(self, infohash):
"""
The callback that will be called by LibtorrentMgr when a download failed.
"""
if infohash not in self._running_requests:
self._logger.debug(u"++ failed INFOHASH: %s", hexlify(infohash))
for ih in self._running_requests:
self._logger.debug(u"++ INFOHASH in running_requests: %s", hexlify(ih))
self._logger.debug(u"failed to retrieve torrent %s through magnet", hexlify(infohash))
self._running_requests.remove(infohash)
self._requests_failed += 1
self._start_pending_requests()
class TftpRequester(Requester):
def __init__(self, name, session, remote_torrent_handler, priority):
super(TftpRequester, self).__init__(name, session, remote_torrent_handler, priority)
self.REQUEST_INTERVAL = 5.0
self._active_request_list = []
self._untried_sources = {}
self._tried_sources = {}
@pass_when_stopped
def add_request(self, key, candidate, timeout=None, is_metadata=False):
ip, port = candidate.sock_addr
# no binary for keys
if is_metadata:
key = "%s%s" % (METADATA_PREFIX, hexlify(key))
key_str = key
        else:
            key = hexlify(key)
            key_str = key
if key in self._pending_request_queue or key in self._active_request_list:
# append to the active one
if candidate in self._untried_sources[key] or candidate in self._tried_sources[key]:
self._logger.debug(u"already has request %s from %s:%s, skip", key_str, ip, port)
return
self._untried_sources[key].append(candidate)
self._logger.debug(u"appending to existing request: %s from %s:%s", key_str, ip, port)
else:
# new request
self._logger.debug(u"adding new request: %s from %s:%s", key_str, ip, port)
self._pending_request_queue.append(key)
self._untried_sources[key] = deque([candidate])
self._tried_sources[key] = deque()
# start pending tasks if there is no task running
if not self._active_request_list:
self._start_pending_requests()
@pass_when_stopped
def _do_request(self):
assert not self._active_request_list, "active_request_list is not empty = %s" % repr(self._active_request_list)
# starts to download a torrent
key = self._pending_request_queue.popleft()
candidate = self._untried_sources[key].popleft()
self._tried_sources[key].append(candidate)
ip, port = candidate.sock_addr
if key.startswith(METADATA_PREFIX):
            # metadata requests have a METADATA_PREFIX prefix
thumb_hash = unhexlify(key[len(METADATA_PREFIX):])
file_name = key
extra_info = {u'key': key, u'thumb_hash': thumb_hash}
else:
# key is the hexlified info hash
info_hash = unhexlify(key)
file_name = hexlify(info_hash) + u'.torrent'
extra_info = {u'key': key, u'info_hash': info_hash}
self._logger.debug(u"start TFTP download for %s from %s:%s", file_name, ip, port)
# do not download if TFTP has been shutdown
if self._session.lm.tftp_handler is None:
return
self._session.lm.tftp_handler.download_file(file_name, ip, port, extra_info=extra_info,
success_callback=self._on_download_successful,
failure_callback=self._on_download_failed)
self._active_request_list.append(key)
def _clear_active_request(self, key):
del self._untried_sources[key]
del self._tried_sources[key]
self._active_request_list.remove(key)
def _on_download_successful(self, address, file_name, file_data, extra_info):
self._logger.debug(u"successfully downloaded %s from %s:%s", file_name, address[0], address[1])
key = extra_info[u'key']
info_hash = extra_info.get(u"info_hash")
thumb_hash = extra_info.get(u"thumb_hash")
assert key in self._active_request_list, u"key = %s, active_request_list = %s" % (repr(key),
self._active_request_list)
self._requests_succeeded += 1
self._total_bandwidth += len(file_data)
# save data
try:
if info_hash is not None:
# save torrent
tdef = TorrentDef.load_from_memory(file_data)
self._remote_torrent_handler.save_torrent(tdef)
elif thumb_hash is not None:
# save metadata
self._remote_torrent_handler.save_metadata(thumb_hash, file_data)
except ValueError:
self._logger.warning("Remote peer sent us invalid (torrent) content over TFTP socket, ignoring it.")
finally:
# start the next request
self._clear_active_request(key)
self._start_pending_requests()
def _on_download_failed(self, address, file_name, error_msg, extra_info):
self._logger.debug(u"failed to download %s from %s:%s: %s", file_name, address[0], address[1], error_msg)
key = extra_info[u'key']
assert key in self._active_request_list, u"key = %s, active_request_list = %s" % (repr(key),
self._active_request_list)
self._requests_failed += 1
if self._untried_sources[key]:
# try to download this data from another candidate
self._logger.debug(u"scheduling next try for %s", repr(key))
self._pending_request_queue.appendleft(key)
self._active_request_list.remove(key)
self.schedule_task(self._do_request)
else:
# no more available candidates, download the next requested infohash
self._clear_active_request(key)
self._start_pending_requests()
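    # Request-lifecycle sketch (illustrative; ``requester`` and the candidate
    # objects are hypothetical stand-ins for real neighbour candidates):
    #
    #   requester.add_request(infohash, candidate_a)  # queues key, starts download
    #   requester.add_request(infohash, candidate_b)  # same key: kept as a fallback
    #
    # On failure, _on_download_failed() re-queues the key at the front of the
    # pending queue while untried sources remain; otherwise the request is
    # cleared and the next pending key is started.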
|
blzr/enigma2
|
refs/heads/develop
|
lib/python/Components/Sources/Clock.py
|
7
|
from Components.Element import cached
from enigma import eTimer
from time import time as getTime
from Source import Source
class Clock(Source):
def __init__(self):
Source.__init__(self)
self.clock_timer = eTimer()
self.clock_timer.callback.append(self.poll)
self.clock_timer.start(1000)
@cached
def getClock(self):
return getTime()
time = property(getClock)
def poll(self):
self.changed((self.CHANGED_POLL,))
def doSuspend(self, suspended):
if suspended:
self.clock_timer.stop()
else:
self.clock_timer.start(1000)
self.poll()
def destroy(self):
self.clock_timer.callback.remove(self.poll)
Source.destroy(self)
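# Usage sketch (assumes a running enigma2 main loop so that eTimer fires;
# values are illustrative):
#
#   clock = Clock()
#   current = clock.time        # epoch seconds via the cached property
#   clock.doSuspend(True)       # stop the 1-second poll timer
#   clock.doSuspend(False)      # restart it and poll immediately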
|
mats116/gae-boilerplate
|
refs/heads/master
|
bp_includes/external/babel/messages/tests/catalog.py
|
30
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://babel.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://babel.edgewall.org/log/.
import copy
import datetime
import doctest
import unittest
from babel.messages import catalog
class MessageTestCase(unittest.TestCase):
def test_python_format(self):
assert catalog.PYTHON_FORMAT.search('foo %d bar')
assert catalog.PYTHON_FORMAT.search('foo %s bar')
assert catalog.PYTHON_FORMAT.search('foo %r bar')
assert catalog.PYTHON_FORMAT.search('foo %(name).1f')
assert catalog.PYTHON_FORMAT.search('foo %(name)3.3f')
assert catalog.PYTHON_FORMAT.search('foo %(name)3f')
assert catalog.PYTHON_FORMAT.search('foo %(name)06d')
assert catalog.PYTHON_FORMAT.search('foo %(name)Li')
assert catalog.PYTHON_FORMAT.search('foo %(name)#d')
assert catalog.PYTHON_FORMAT.search('foo %(name)-4.4hs')
assert catalog.PYTHON_FORMAT.search('foo %(name)*.3f')
assert catalog.PYTHON_FORMAT.search('foo %(name).*f')
assert catalog.PYTHON_FORMAT.search('foo %(name)3.*f')
assert catalog.PYTHON_FORMAT.search('foo %(name)*.*f')
assert catalog.PYTHON_FORMAT.search('foo %()s')
def test_translator_comments(self):
mess = catalog.Message('foo', user_comments=['Comment About `foo`'])
self.assertEqual(mess.user_comments, ['Comment About `foo`'])
mess = catalog.Message('foo',
auto_comments=['Comment 1 About `foo`',
'Comment 2 About `foo`'])
self.assertEqual(mess.auto_comments, ['Comment 1 About `foo`',
'Comment 2 About `foo`'])
def test_clone_message_object(self):
msg = catalog.Message('foo', locations=[('foo.py', 42)])
clone = msg.clone()
clone.locations.append(('bar.py', 42))
self.assertEqual(msg.locations, [('foo.py', 42)])
msg.flags.add('fuzzy')
assert not clone.fuzzy and msg.fuzzy
class CatalogTestCase(unittest.TestCase):
def test_two_messages_with_same_singular(self):
cat = catalog.Catalog()
cat.add('foo')
cat.add(('foo', 'foos'))
self.assertEqual(1, len(cat))
def test_duplicate_auto_comment(self):
cat = catalog.Catalog()
cat.add('foo', auto_comments=['A comment'])
cat.add('foo', auto_comments=['A comment', 'Another comment'])
self.assertEqual(['A comment', 'Another comment'],
cat['foo'].auto_comments)
def test_duplicate_user_comment(self):
cat = catalog.Catalog()
cat.add('foo', user_comments=['A comment'])
cat.add('foo', user_comments=['A comment', 'Another comment'])
self.assertEqual(['A comment', 'Another comment'],
cat['foo'].user_comments)
def test_duplicate_location(self):
cat = catalog.Catalog()
cat.add('foo', locations=[('foo.py', 1)])
cat.add('foo', locations=[('foo.py', 1)])
self.assertEqual([('foo.py', 1)], cat['foo'].locations)
def test_update_message_changed_to_plural(self):
cat = catalog.Catalog()
cat.add(u'foo', u'Voh')
tmpl = catalog.Catalog()
tmpl.add((u'foo', u'foos'))
cat.update(tmpl)
self.assertEqual((u'Voh', ''), cat['foo'].string)
assert cat['foo'].fuzzy
def test_update_message_changed_to_simple(self):
cat = catalog.Catalog()
        cat.add((u'foo', u'foos'), (u'Voh', u'Vöhs'))
tmpl = catalog.Catalog()
tmpl.add(u'foo')
cat.update(tmpl)
self.assertEqual(u'Voh', cat['foo'].string)
assert cat['foo'].fuzzy
def test_update_message_updates_comments(self):
cat = catalog.Catalog()
cat[u'foo'] = catalog.Message('foo', locations=[('main.py', 5)])
self.assertEqual(cat[u'foo'].auto_comments, [])
self.assertEqual(cat[u'foo'].user_comments, [])
# Update cat[u'foo'] with a new location and a comment
cat[u'foo'] = catalog.Message('foo', locations=[('main.py', 7)],
user_comments=['Foo Bar comment 1'])
self.assertEqual(cat[u'foo'].user_comments, ['Foo Bar comment 1'])
# now add yet another location with another comment
cat[u'foo'] = catalog.Message('foo', locations=[('main.py', 9)],
auto_comments=['Foo Bar comment 2'])
self.assertEqual(cat[u'foo'].auto_comments, ['Foo Bar comment 2'])
def test_update_fuzzy_matching_with_case_change(self):
cat = catalog.Catalog()
cat.add('foo', 'Voh')
cat.add('bar', 'Bahr')
tmpl = catalog.Catalog()
tmpl.add('Foo')
cat.update(tmpl)
self.assertEqual(1, len(cat.obsolete))
assert 'foo' not in cat
self.assertEqual('Voh', cat['Foo'].string)
self.assertEqual(True, cat['Foo'].fuzzy)
def test_update_fuzzy_matching_with_char_change(self):
cat = catalog.Catalog()
cat.add('fo', 'Voh')
cat.add('bar', 'Bahr')
tmpl = catalog.Catalog()
tmpl.add('foo')
cat.update(tmpl)
self.assertEqual(1, len(cat.obsolete))
assert 'fo' not in cat
self.assertEqual('Voh', cat['foo'].string)
self.assertEqual(True, cat['foo'].fuzzy)
def test_update_fuzzy_matching_no_msgstr(self):
cat = catalog.Catalog()
cat.add('fo', '')
tmpl = catalog.Catalog()
tmpl.add('fo')
tmpl.add('foo')
cat.update(tmpl)
assert 'fo' in cat
assert 'foo' in cat
self.assertEqual('', cat['fo'].string)
self.assertEqual(False, cat['fo'].fuzzy)
self.assertEqual(None, cat['foo'].string)
self.assertEqual(False, cat['foo'].fuzzy)
def test_update_fuzzy_matching_no_cascading(self):
cat = catalog.Catalog()
cat.add('fo', 'Voh')
cat.add('foo', 'Vohe')
tmpl = catalog.Catalog()
tmpl.add('fo')
tmpl.add('foo')
tmpl.add('fooo')
cat.update(tmpl)
assert 'fo' in cat
assert 'foo' in cat
self.assertEqual('Voh', cat['fo'].string)
self.assertEqual(False, cat['fo'].fuzzy)
self.assertEqual('Vohe', cat['foo'].string)
self.assertEqual(False, cat['foo'].fuzzy)
self.assertEqual('Vohe', cat['fooo'].string)
self.assertEqual(True, cat['fooo'].fuzzy)
def test_update_without_fuzzy_matching(self):
cat = catalog.Catalog()
cat.add('fo', 'Voh')
cat.add('bar', 'Bahr')
tmpl = catalog.Catalog()
tmpl.add('foo')
cat.update(tmpl, no_fuzzy_matching=True)
self.assertEqual(2, len(cat.obsolete))
def test_fuzzy_matching_regarding_plurals(self):
cat = catalog.Catalog()
cat.add(('foo', 'foh'), ('foo', 'foh'))
ru = copy.copy(cat)
ru.locale = 'ru_RU'
ru.update(cat)
self.assertEqual(True, ru['foo'].fuzzy)
ru = copy.copy(cat)
ru.locale = 'ru_RU'
ru['foo'].string = ('foh', 'fohh', 'fohhh')
ru.update(cat)
self.assertEqual(False, ru['foo'].fuzzy)
def test_update_no_template_mutation(self):
tmpl = catalog.Catalog()
tmpl.add('foo')
cat1 = catalog.Catalog()
cat1.add('foo', 'Voh')
cat1.update(tmpl)
cat2 = catalog.Catalog()
cat2.update(tmpl)
self.assertEqual(None, cat2['foo'].string)
self.assertEqual(False, cat2['foo'].fuzzy)
def test_update_po_updates_pot_creation_date(self):
template = catalog.Catalog()
localized_catalog = copy.deepcopy(template)
localized_catalog.locale = 'de_DE'
self.assertNotEqual(template.mime_headers,
localized_catalog.mime_headers)
self.assertEqual(template.creation_date,
localized_catalog.creation_date)
template.creation_date = datetime.datetime.now() - \
datetime.timedelta(minutes=5)
localized_catalog.update(template)
self.assertEqual(template.creation_date,
localized_catalog.creation_date)
def test_update_po_keeps_po_revision_date(self):
template = catalog.Catalog()
localized_catalog = copy.deepcopy(template)
localized_catalog.locale = 'de_DE'
fake_rev_date = datetime.datetime.now() - datetime.timedelta(days=5)
localized_catalog.revision_date = fake_rev_date
self.assertNotEqual(template.mime_headers,
localized_catalog.mime_headers)
self.assertEqual(template.creation_date,
localized_catalog.creation_date)
template.creation_date = datetime.datetime.now() - \
datetime.timedelta(minutes=5)
localized_catalog.update(template)
self.assertEqual(localized_catalog.revision_date, fake_rev_date)
def test_stores_datetime_correctly(self):
localized = catalog.Catalog()
localized.locale = 'de_DE'
localized[''] = catalog.Message('',
"POT-Creation-Date: 2009-03-09 15:47-0700\n" +
"PO-Revision-Date: 2009-03-09 15:47-0700\n")
for key, value in localized.mime_headers:
if key in ('POT-Creation-Date', 'PO-Revision-Date'):
self.assertEqual(value, '2009-03-09 15:47-0700')
def suite():
suite = unittest.TestSuite()
if hasattr(doctest, 'ELLIPSIS'):
suite.addTest(doctest.DocTestSuite(catalog, optionflags=doctest.ELLIPSIS))
else:
# Python 2.3 has no doctest.ELLIPSIS option, it's implicit
suite.addTest(doctest.DocTestSuite(catalog))
suite.addTest(unittest.makeSuite(MessageTestCase))
suite.addTest(unittest.makeSuite(CatalogTestCase))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
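# Update-semantics sketch mirroring test_update_fuzzy_matching_with_char_change
# above (comment-only illustration):
#
#   cat = catalog.Catalog();  cat.add('fo', 'Voh')
#   tmpl = catalog.Catalog(); tmpl.add('foo')
#   cat.update(tmpl)                   # fuzzy-matches 'fo' -> 'foo'
#   assert cat['foo'].string == 'Voh' and cat['foo'].fuzzy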
|
MarcosCommunity/odoo
|
refs/heads/marcos-8.0
|
comunity_modules/website_facebook/__openerp__.py
|
1
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution, third party addon
# Copyright (C) 2004-2015 Vertel AB (<http://vertel.se>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Facebook',
'version': '0.1',
'category': 'Sales',
'description': """
Share Your Website Posts On Facebook With Control.
===================================================
""",
'author': 'Vertel AB',
'website': 'http://www.vertel.se',
'depends': ['base','website'],
'data': ['website_facebook_view.xml',
'views/snippet.xml'
],
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
storm-computers/odoo
|
refs/heads/9.0
|
addons/website_sale/controllers/website_mail.py
|
26
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import urlparse
from openerp import SUPERUSER_ID
from openerp import http
from openerp.addons.web.http import request
from openerp.addons.website_mail.controllers.main import WebsiteMail
class WebsiteMailController(WebsiteMail):
@http.route(['/website_mail/post/json'], type='json', auth='public', website=True)
def chatter_json(self, res_model='', res_id=None, message='', **kw):
params = kw.copy()
params.pop('rating', False)
message_data = super(WebsiteMailController, self).chatter_json(res_model=res_model, res_id=res_id, message=message, **params)
if message_data and kw.get('rating') and res_model == 'product.template': # restrict rating only for product template
rating = request.env['rating.rating'].create({
'rating': float(kw.get('rating')),
'res_model': res_model,
'res_id': res_id,
'message_id': message_data['id'],
})
message_data.update({
'rating_default_value': rating.rating,
'rating_disabled': True,
})
return message_data
@http.route(['/website_mail/post/post'], type='http', methods=['POST'], auth='public', website=True)
def chatter_post(self, res_model='', res_id=None, message='', redirect=None, **kw):
params = kw.copy()
        params.pop('rating', False)
response = super(WebsiteMailController, self).chatter_post(res_model=res_model, res_id=res_id, message=message, redirect=redirect, **params)
if kw.get('rating') and res_model == 'product.template': # restrict rating only for product template
try:
fragment = urlparse.urlparse(response.location).fragment
message_id = int(fragment.replace('message-', ''))
rating = request.env['rating.rating'].create({
'rating': float(kw.get('rating')),
'res_model': res_model,
'res_id': res_id,
'message_id': message_id,
})
except Exception:
pass
return response
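# Payload sketch for the JSON route above (field values are illustrative):
#
#   POST /website_mail/post/json
#   {"res_model": "product.template", "res_id": 42,
#    "message": "Great product!", "rating": 4.5}
#
# A rating.rating record is created and linked to the new chatter message;
# other models fall through to the parent controller unchanged.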
|
DirtyUnicorns/android_external_chromium_org
|
refs/heads/lollipop
|
tools/telemetry/telemetry/core/platform/profiler/android_prebuilt_profiler_helper.py
|
32
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Android-specific, installs pre-built profilers."""
import logging
import os
from telemetry import decorators
from telemetry.util import support_binaries
_DEVICE_PROFILER_DIR = '/data/local/tmp/profilers/'
def GetDevicePath(profiler_binary):
return os.path.join(_DEVICE_PROFILER_DIR, os.path.basename(profiler_binary))
@decorators.Cache
def InstallOnDevice(device, profiler_binary):
host_path = support_binaries.FindPath(profiler_binary, 'android')
if not host_path:
    logging.error('Profiler binary "%s" not found. Could not be installed',
                  profiler_binary)
return False
device_binary_path = GetDevicePath(profiler_binary)
device.PushChangedFiles(host_path, device_binary_path)
device.RunShellCommand('chmod 777 ' + device_binary_path)
return True
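# Usage sketch (assumes a telemetry device object; 'perf' is a hypothetical
# profiler binary name):
#
#   if InstallOnDevice(device, 'perf'):
#     device.RunShellCommand(GetDevicePath('perf') + ' --help')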
|
ui/django-post_office
|
refs/heads/master
|
post_office/tasks.py
|
1
|
"""
Only define the tasks and handler if we can import celery.
This allows the module to be imported in environments without Celery, for
example by other task queue systems such as Huey, which use the same pattern
of auto-discovering tasks in "tasks" submodules.
"""
import datetime
from django.utils.timezone import now
from post_office.mail import send_queued_mail_until_done
from post_office.utils import cleanup_expired_mails
from .settings import get_celery_enabled
try:
if get_celery_enabled():
from celery import shared_task
else:
raise NotImplementedError()
except (ImportError, NotImplementedError):
    def queued_mail_handler(sender, **kwargs):
        """
        No-op fallback connected to :func:`post_office.signals.email_queued.send()`
        when Celery is unavailable; queued mail must then be delivered by other
        means, such as the ``send_queued_mail`` management command.
        """
else:
@shared_task(ignore_result=True)
def send_queued_mail(*args, **kwargs):
"""
To be called by the Celery task manager.
"""
send_queued_mail_until_done()
def queued_mail_handler(sender, **kwargs):
"""
Trigger an asynchronous mail delivery.
"""
send_queued_mail.delay()
@shared_task(ignore_result=True)
def cleanup_mail(*args, **kwargs):
days = kwargs.get('days', 90)
cutoff_date = now() - datetime.timedelta(days)
delete_attachments = kwargs.get('delete_attachments', True)
cleanup_expired_mails(cutoff_date, delete_attachments)
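# Sketch of how another task queue could reuse the auto-discovery pattern
# described in the module docstring (hypothetical; Huey shown only as an
# example):
#
#   from huey.contrib.djhuey import task
#
#   @task()
#   def send_queued_mail_huey():
#       send_queued_mail_until_done()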
|
ChristopherRabotin/bungiesearch
|
refs/heads/master
|
bungiesearch/logger.py
|
2
|
import logging
logger = logging.getLogger('bungiesearch')
|
mozilla/addons-server
|
refs/heads/master
|
src/olympia/zadmin/views.py
|
4
|
from django.contrib import admin
from django.core.files.storage import default_storage as storage
from django.shortcuts import get_object_or_404
import olympia.core.logger
from olympia.amo import messages
from olympia.amo.decorators import json_view, post_required
from olympia.files.models import File
log = olympia.core.logger.getLogger('z.zadmin')
@admin.site.admin_view
@post_required
@json_view
def recalc_hash(request, file_id):
file = get_object_or_404(File, pk=file_id)
file.size = storage.size(file.file_path)
file.hash = file.generate_hash()
file.save()
log.info('Recalculated hash for file ID %d' % file.id)
messages.success(request, 'File hash and size recalculated for file %d.' % file.id)
return {'success': 1}
|
razrichter/namebench
|
refs/heads/master
|
libnamebench/util.py
|
172
|
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Little utility functions."""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import datetime
import math
import os.path
import random
import sys
import tempfile
import time
def CalculateListAverage(values):
"""Computes the arithmetic mean of a list of numbers."""
if not values:
return 0
return sum(values) / float(len(values))
def DrawTextBar(value, max_value, max_width=53):
"""Return a simple ASCII bar graph, making sure it fits within max_width.
Args:
value: integer or float representing the value of this bar.
max_value: integer or float representing the largest bar.
max_width: How many characters this graph can use (int)
Returns:
string
"""
hash_width = max_value / max_width
return int(math.ceil(value/hash_width)) * '#'
def SecondsToMilliseconds(seconds):
return seconds * 1000
def SplitSequence(seq, size):
"""Split a list.
Args:
seq: sequence
size: int
Returns:
New list.
Recipe From http://code.activestate.com/recipes/425397/ (Modified to not return blank values)
"""
newseq = []
splitsize = 1.0/size*len(seq)
for i in range(size):
newseq.append(seq[int(round(i*splitsize)):int(round((i+1)*splitsize))])
return [x for x in newseq if x]
def FindDataFile(filename):
"""Find a datafile, searching various relative and OS paths."""
filename = os.path.expanduser(filename)
if os.path.exists(filename):
return filename
# If it's not a relative path, we can't do anything useful.
if os.path.isabs(filename):
return filename
other_places = [os.getcwd(),
os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'Contents', 'Resources'),
os.path.join(os.getcwd(), 'namebench.app', 'Contents', 'Resources'),
os.path.join(os.getcwd(), '..'),
os.path.join(sys.prefix, 'namebench'),
                  '/usr/local/share/namebench',
                  '/usr/local/etc/namebench',
'/usr/local/namebench',
'/etc/namebench',
'/usr/share/namebench',
'/usr/namebench']
for directory in reversed(sys.path):
other_places.append(directory)
other_places.append(os.path.join(directory, 'namebench'))
for place in other_places:
path = os.path.join(place, filename)
if os.path.exists(path):
return path
print 'I could not find "%s". Tried:' % filename
for path in other_places:
print ' %s' % path
return filename
def GenerateOutputFilename(extension):
"""Generate a decent default output filename for a given extensio."""
# used for resolv.conf
if '.' in extension:
filename = extension
else:
output_base = 'namebench_%s' % datetime.datetime.strftime(datetime.datetime.now(),
'%Y-%m-%d %H%M')
output_base = output_base.replace(':', '').replace(' ', '_')
filename = '.'.join((output_base, extension))
output_dir = tempfile.gettempdir()
return os.path.join(output_dir, filename)
def GetLastExceptionString():
"""Get the last exception and return a good looking string for it."""
(exc, error) = sys.exc_info()[0:2]
exc_msg = str(exc)
if '<class' in exc_msg:
exc_msg = exc_msg.split("'")[1]
exc_msg = exc_msg.replace('dns.exception.', '')
error = '%s %s' % (exc_msg, error)
# We need to remove the trailing space at some point.
return error.rstrip()
def DoesClockGoBackwards():
"""Detect buggy Windows systems where time.clock goes backwards"""
reference = 0
print "Checking if time.clock() goes backwards (broken hardware)..."
for x in range(0, 200):
counter = time.clock()
if counter < reference:
print "Clock went backwards by %fms" % (counter - reference)
return True
reference = counter
time.sleep(random.random() / 500)
return False
def GetMostAccurateTimerFunction():
"""Pick the most accurate timer for a platform."""
  if sys.platform[:3] == 'win' and not DoesClockGoBackwards():
return time.clock
else:
return time.time
def DistanceBetweenCoordinates(lat1, lon1, lat2, lon2):
"""Distance between two coordinate pairs (in km)
Based on:
http://cyberpython.wordpress.com/2010/03/31/python-calculate-the-distance-between-2-points-given-their-coordinates/
"""
lat1_r = math.radians(lat1)
lat2_r = math.radians(lat2)
lon_diff = math.radians(lon2 - lon1)
x = math.sin(lat1_r) * math.sin(lat2_r) + math.cos(lat1_r) * math.cos(lat2_r) * math.cos(lon_diff)
return math.degrees(math.acos(x)) * 60 * 1.852
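if __name__ == '__main__':
  # Smoke-test sketch for the helpers above (illustrative values only).
  print DrawTextBar(25, 100, max_width=20)    # a quarter-width bar: '#####'
  print SplitSequence(range(10), 3)           # three near-equal chunks
  # Great-circle distance (km) between two hypothetical coordinate pairs:
  print DistanceBetweenCoordinates(37.77, -122.42, 40.71, -74.01)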
|
project-lovelace/lovelace-engine
|
refs/heads/dependabot/pip/urllib3-1.26.5
|
tests/conftest.py
|
1
|
import base64
import json
import os
import time
import pytest
import requests
@pytest.fixture(scope="session")
def engine_uri():
uri = os.environ.get("LOVELACE_ENGINE_URI", "http://localhost:14714")
err_msg = (
"Cannot connect to lovelace engine at {}. Is it running? "
"Check if the env var LOVELACE_ENGINE_URI is set properly. ".format(uri)
)
try:
resp = requests.get(uri)
except requests.exceptions.ConnectionError:
raise ValueError(err_msg)
    if not resp.ok:
raise ValueError(err_msg)
return uri
@pytest.fixture()
def engine_submit_uri(engine_uri):
return engine_uri + "/submit"
@pytest.fixture()
def submit_solution(engine_submit_uri):
def _submit_solution(file_path):
with open(file_path, "r") as solution_file:
code = solution_file.read()
code_b64 = base64.b64encode(code.encode("utf-8")).decode("utf-8")
problem_name, extension = os.path.basename(file_path).split(sep=".")
language = {"py": "python", "js": "javascript", "jl": "julia", "c": "c"}.get(extension)
if not language:
raise ValueError("Solution file has unrecognized extension: {}".format(file_path))
payload_dict = {"problem": problem_name, "language": language, "code": code_b64}
payload_json = json.dumps(payload_dict)
t1 = time.perf_counter()
response = requests.post(engine_submit_uri, data=payload_json)
t2 = time.perf_counter()
print(f"{t2 - t1 : .6f} seconds ", end='')
return response.json()
return _submit_solution
@pytest.fixture()
def submit_file(engine_submit_uri):
def _submit_solution(file_path, problem, language):
with open(file_path, "r") as solution_file:
code = solution_file.read()
code_b64 = base64.b64encode(code.encode("utf-8")).decode("utf-8")
payload_dict = {"problem": problem, "language": language, "code": code_b64}
payload_json = json.dumps(payload_dict)
t1 = time.perf_counter()
response = requests.post(engine_submit_uri, data=payload_json)
t2 = time.perf_counter()
print(f"{t2 - t1 : .6f} seconds ", end='')
return response.json()
return _submit_solution
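# Example of how these fixtures compose in a test module (a sketch; the
# solution path and response keys are hypothetical):
#
#   def test_rot13_python(submit_solution):
#       result = submit_solution("solutions/rot13.py")
#       assert result["success"] is True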
|
for811/Ubuntu_kernel_golfu
|
refs/heads/master
|
scripts/tracing/draw_functrace.py
|
14679
|
#!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulting trace is processed into a tree to produce a more human-readable
view of the call stack by drawing a textual but hierarchical tree of
calls. Only the functions' names and the call times are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait some time, but not too much; the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
""" This class provides a tree representation of the functions
call stack. If a function has no parent in the kernel (interrupt,
syscall, kernel thread...) then it is attached to a virtual parent
called ROOT.
"""
ROOT = None
def __init__(self, func, time = None, parent = None):
self._func = func
self._time = time
if parent is None:
self._parent = CallTree.ROOT
else:
self._parent = parent
self._children = []
def calls(self, func, calltime):
""" If a function calls another one, call this method to insert it
into the tree at the appropriate place.
@return: A reference to the newly created child node.
"""
child = CallTree(func, calltime, self)
self._children.append(child)
return child
def getParent(self, func):
""" Retrieve the last parent of the current node that
has the name given by func. If this function is not
on a parent, then create it as new child of root
@return: A reference to the parent.
"""
tree = self
while tree != CallTree.ROOT and tree._func != func:
tree = tree._parent
if tree == CallTree.ROOT:
child = CallTree.ROOT.calls(func, None)
return child
return tree
def __repr__(self):
return self.__toString("", True)
def __toString(self, branch, lastChild):
if self._time is not None:
s = "%s----%s (%s)\n" % (branch, self._func, self._time)
else:
s = "%s----%s\n" % (branch, self._func)
i = 0
if lastChild:
branch = branch[:-1] + " "
while i < len(self._children):
if i != len(self._children) - 1:
s += "%s" % self._children[i].__toString(branch +\
" |", False)
else:
s += "%s" % self._children[i].__toString(branch +\
" |", True)
i += 1
return s
class BrokenLineException(Exception):
"""If the last line is not complete because of the pipe breakage,
we want to stop the processing and ignore this line.
"""
pass
class CommentLineException(Exception):
""" If the line is a comment (as in the beginning of the trace file),
just ignore it.
"""
pass
def parseLine(line):
line = line.strip()
if line.startswith("#"):
raise CommentLineException
m = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", line)
if m is None:
raise BrokenLineException
return (m.group(1), m.group(2), m.group(3))
def main():
CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
tree = CallTree.ROOT
for line in sys.stdin:
try:
calltime, callee, caller = parseLine(line)
except BrokenLineException:
break
except CommentLineException:
continue
tree = tree.getParent(caller)
tree = tree.calls(callee, calltime)
print CallTree.ROOT
if __name__ == "__main__":
main()
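# Input/output sketch: a function-tracer line such as
#   "bash-1234  [000]  6935.486181: f_op <-f_caller"
# parses to ('6935.486181', 'f_op', 'f_caller') and is drawn roughly as
#   ----f_caller
#       ----f_op (6935.486181)
# (values illustrative).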
|
MagicSolutions/django-cms
|
refs/heads/develop
|
cms/tests/static_placeholder.py
|
6
|
# -*- coding: utf-8 -*-
from __future__ import with_statement
import json
from cms.api import add_plugin
from cms.constants import PLUGIN_MOVE_ACTION, PLUGIN_COPY_ACTION
from cms.models import StaticPlaceholder, Placeholder, CMSPlugin
from cms.tests.plugins import PluginsTestBaseCase
from cms.utils.compat.dj import force_unicode
from cms.utils.urlutils import admin_reverse
from django.contrib.admin.sites import site
from django.core.urlresolvers import reverse
from django.template import Context
from django.template.base import Template
URL_CMS_MOVE_PLUGIN = u'/en/admin/cms/page/%d/move-plugin/'
class StaticPlaceholderTestCase(PluginsTestBaseCase):
@property
def admin_class(self):
return site._registry[StaticPlaceholder]
def fill_placeholder(self, placeholder=None):
if placeholder is None:
placeholder = Placeholder(slot=u"some_slot")
placeholder.save() # a good idea, if not strictly necessary
# plugin in placeholder
plugin_1 = add_plugin(placeholder, u"TextPlugin", u"en",
body=u"01",
)
plugin_1.save()
# IMPORTANT: plugins must be reloaded, before they can be assigned
# as a parent. Otherwise, the MPTT structure doesn't seem to rebuild
# properly.
# child of plugin_1
plugin_2 = add_plugin(placeholder, u"TextPlugin", u"en",
body=u"02",
)
plugin_1 = self.reload(plugin_1)
plugin_2.parent = plugin_1
plugin_2.save()
return placeholder
def get_admin(self):
usr = self._create_user("admin", True, True)
return usr
def test_template_creation(self):
self.assertObjectDoesNotExist(StaticPlaceholder.objects.all(), code='foobar')
self.assertObjectDoesNotExist(Placeholder.objects.all(), slot='foobar')
t = Template('{% load cms_tags %}{% static_placeholder "foobar" %}')
t.render(self.get_context('/'))
self.assertObjectExist(StaticPlaceholder.objects.all(), code='foobar',
creation_method=StaticPlaceholder.CREATION_BY_TEMPLATE)
self.assertEqual(Placeholder.objects.filter(slot='foobar').count(), 2)
def test_empty(self):
self.assertObjectDoesNotExist(StaticPlaceholder.objects.all(), code='foobar')
self.assertObjectDoesNotExist(Placeholder.objects.all(), slot='foobar')
t = Template('{% load cms_tags %}{% static_placeholder "foobar" or %}No Content{% endstatic_placeholder %}')
rendered = t.render(self.get_context('/'))
self.assertIn("No Content", rendered)
t = Template('{% load cms_tags %}{% static_placeholder "" %}')
rendered = t.render(self.get_context('/'))
self.assertEqual("", rendered)
t = Template('{% load cms_tags %}{% static_placeholder code or %}No Content{% endstatic_placeholder %}')
rendered = t.render(Context({'code': StaticPlaceholder.objects.all()[0]}))
self.assertIn("No Content", rendered)
for p in Placeholder.objects.all():
add_plugin(p, 'TextPlugin', 'en', body='test')
t = Template('{% load cms_tags %}{% static_placeholder "foobar" or %}No Content{% endstatic_placeholder %}')
rendered = t.render(self.get_context('/'))
self.assertNotIn("No Content", rendered)
self.assertEqual(StaticPlaceholder.objects.filter(site_id__isnull=True, code='foobar').count(), 1)
def test_local(self):
self.assertObjectDoesNotExist(StaticPlaceholder.objects.all(), code='foobar')
self.assertObjectDoesNotExist(Placeholder.objects.all(), slot='foobar')
t = Template('{% load cms_tags %}{% static_placeholder "foobar" site or %}No Content{% endstatic_placeholder %}')
rendered = t.render(self.get_context('/'))
self.assertIn("No Content", rendered)
for p in Placeholder.objects.all():
add_plugin(p, 'TextPlugin', 'en', body='test')
rendered = t.render(self.get_context('/'))
self.assertNotIn("No Content", rendered)
self.assertEqual(StaticPlaceholder.objects.filter(site_id__isnull=False, code='foobar').count(), 1)
def test_publish_stack(self):
static_placeholder = StaticPlaceholder.objects.create(name='foo', code='bar', site_id=1)
self.fill_placeholder(static_placeholder.draft)
static_placeholder.dirty = True
static_placeholder.save()
self.assertEqual(static_placeholder.draft.cmsplugin_set.all().count(), 2)
self.assertEqual(static_placeholder.public.cmsplugin_set.all().count(), 0)
with self.login_user_context(self.get_superuser()):
response = self.client.get(reverse("admin:cms_page_publish_page", args=[1, 'en']), {'statics':[static_placeholder.pk]})
self.assertEqual(response.status_code, 302)
def test_permissions(self):
static_placeholder = StaticPlaceholder.objects.create(name='foo', code='bar', site_id=1)
request = self.get_request()
request.user = self._create_user('user_a', is_staff=True, is_superuser=False, permissions=['change_staticplaceholder'])
self.assertTrue( static_placeholder.has_change_permission(request) )
self.assertFalse( static_placeholder.has_publish_permission(request) )
request.user = self._create_user('user_b', is_staff=True, is_superuser=False, permissions=['change_staticplaceholder', 'publish_page'])
self.assertTrue( static_placeholder.has_change_permission(request) )
self.assertTrue( static_placeholder.has_publish_permission(request) )
request.user = self.get_superuser()
self.assertTrue( static_placeholder.has_change_permission(request) )
self.assertTrue( static_placeholder.has_publish_permission(request) )
def test_move_plugin(self):
static_placeholder_source = StaticPlaceholder.objects.create(name='foobar', code='foobar', site_id=1)
static_placeholder_target = StaticPlaceholder.objects.create(name='foofoo', code='foofoo', site_id=1)
sourceplugin = add_plugin(static_placeholder_source.draft, 'TextPlugin', 'en', body='test')
plugin_class = sourceplugin.get_plugin_class_instance()
expected = {'reload': plugin_class.requires_reload(PLUGIN_MOVE_ACTION)}
admin = self.get_admin()
with self.login_user_context(admin):
request = self.get_request(post_data={'plugin_id': sourceplugin.pk,
'placeholder_id': static_placeholder_target.draft.id,
'plugin_parent': '', 'plugin_language': 'en'})
response = self.admin_class.move_plugin(request)
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content.decode('utf8')), expected)
source = StaticPlaceholder.objects.get(pk=static_placeholder_source.pk)
target = StaticPlaceholder.objects.get(pk=static_placeholder_target.pk)
self.assertTrue(source.dirty)
self.assertTrue(target.dirty)
def test_copy_plugin(self):
static_placeholder_source = StaticPlaceholder.objects.create(name='foobar', code='foobar', site_id=1)
static_placeholder_target = StaticPlaceholder.objects.create(name='foofoo', code='foofoo', site_id=1)
sourceplugin = add_plugin(static_placeholder_source.draft, 'TextPlugin', 'en', body='test source')
targetplugin = add_plugin(static_placeholder_target.draft, 'TextPlugin', 'en', body='test dest')
StaticPlaceholder.objects.filter(pk=static_placeholder_source.pk).update(dirty=False)
plugin_class = sourceplugin.get_plugin_class_instance()
admin = self.get_admin()
with self.login_user_context(admin):
request = self.get_request(post_data={
'source_language': 'en',
'source_placeholder_id': static_placeholder_source.draft.pk,
'source_plugin_id': sourceplugin.pk,
'target_language': 'en',
'target_placeholder_id': static_placeholder_target.draft.pk,
'targetplugin_id': targetplugin.pk,
})
response = self.admin_class.copy_plugins(request)
# generate the expected response
plugin_list = CMSPlugin.objects.filter(
language='en', placeholder_id=static_placeholder_target.draft.pk).order_by(
'depth', 'position')
reduced_list = []
for plugin in plugin_list:
reduced_list.append(
{
'id': plugin.pk, 'type': plugin.plugin_type, 'parent': plugin.parent_id,
'position': plugin.position, 'desc': force_unicode(plugin.get_short_description()),
'language': plugin.language, 'placeholder_id': static_placeholder_target.draft.pk
}
)
expected = json.loads(
json.dumps({'plugin_list': reduced_list, 'reload': plugin_class.requires_reload(PLUGIN_COPY_ACTION)}))
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content.decode('utf8')), expected)
# Check dirty bit
source = StaticPlaceholder.objects.get(pk=static_placeholder_source.pk)
target = StaticPlaceholder.objects.get(pk=static_placeholder_target.pk)
self.assertFalse(source.dirty)
self.assertTrue(target.dirty)
def test_create_by_admin(self):
url = admin_reverse("cms_staticplaceholder_add")
with self.login_user_context(self.get_superuser()):
response = self.client.post(url, data={'name': 'Name', 'code': 'content'})
self.assertEqual(response.status_code, 302)
|
Jumpscale/web
|
refs/heads/master
|
pythonlib/gdata/apps/groups/service.py
|
94
|
#!/usr/bin/python
#
# Copyright (C) 2008 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Allow Google Apps domain administrators to manage groups, group members and group owners.
GroupsService: Provides methods to manage groups, members and owners.
"""
__author__ = 'google-apps-apis@googlegroups.com'
import urllib
import gdata.apps
import gdata.apps.service
import gdata.service
API_VER = '2.0'
BASE_URL = '/a/feeds/group/' + API_VER + '/%s'
GROUP_MEMBER_URL = BASE_URL + '?member=%s'
GROUP_MEMBER_DIRECT_URL = GROUP_MEMBER_URL + '&directOnly=%s'
GROUP_ID_URL = BASE_URL + '/%s'
MEMBER_URL = BASE_URL + '/%s/member'
MEMBER_WITH_SUSPENDED_URL = MEMBER_URL + '?includeSuspendedUsers=%s'
MEMBER_ID_URL = MEMBER_URL + '/%s'
OWNER_URL = BASE_URL + '/%s/owner'
OWNER_WITH_SUSPENDED_URL = OWNER_URL + '?includeSuspendedUsers=%s'
OWNER_ID_URL = OWNER_URL + '/%s'
PERMISSION_OWNER = 'Owner'
PERMISSION_MEMBER = 'Member'
PERMISSION_DOMAIN = 'Domain'
PERMISSION_ANYONE = 'Anyone'
class GroupsService(gdata.apps.service.PropertyService):
"""Client for the Google Apps Groups service."""
def _ServiceUrl(self, service_type, is_existed, group_id, member_id, owner_email,
direct_only=False, domain=None, suspended_users=False):
if domain is None:
domain = self.domain
if service_type == 'group':
if group_id != '' and is_existed:
return GROUP_ID_URL % (domain, group_id)
elif member_id != '':
if direct_only:
return GROUP_MEMBER_DIRECT_URL % (domain, urllib.quote_plus(member_id),
self._Bool2Str(direct_only))
else:
return GROUP_MEMBER_URL % (domain, urllib.quote_plus(member_id))
else:
return BASE_URL % (domain)
if service_type == 'member':
if member_id != '' and is_existed:
return MEMBER_ID_URL % (domain, group_id, urllib.quote_plus(member_id))
elif suspended_users:
return MEMBER_WITH_SUSPENDED_URL % (domain, group_id,
self._Bool2Str(suspended_users))
else:
return MEMBER_URL % (domain, group_id)
if service_type == 'owner':
if owner_email != '' and is_existed:
return OWNER_ID_URL % (domain, group_id, urllib.quote_plus(owner_email))
elif suspended_users:
return OWNER_WITH_SUSPENDED_URL % (domain, group_id,
self._Bool2Str(suspended_users))
else:
return OWNER_URL % (domain, group_id)
def _Bool2Str(self, b):
if b is None:
return None
return str(b is True).lower()
def _IsExisted(self, uri):
try:
self._GetProperties(uri)
return True
except gdata.apps.service.AppsForYourDomainException, e:
if e.error_code == gdata.apps.service.ENTITY_DOES_NOT_EXIST:
return False
else:
raise e
def CreateGroup(self, group_id, group_name, description, email_permission):
"""Create a group.
Args:
group_id: The ID of the group (e.g. us-sales).
group_name: The name of the group.
description: A description of the group
email_permission: The subscription permission of the group.
Returns:
A dict containing the result of the create operation.
"""
uri = self._ServiceUrl('group', False, group_id, '', '')
properties = {}
properties['groupId'] = group_id
properties['groupName'] = group_name
properties['description'] = description
properties['emailPermission'] = email_permission
return self._PostProperties(uri, properties)
def UpdateGroup(self, group_id, group_name, description, email_permission):
"""Update a group's name, description and/or permission.
Args:
group_id: The ID of the group (e.g. us-sales).
group_name: The name of the group.
description: A description of the group
email_permission: The subscription permission of the group.
Returns:
A dict containing the result of the update operation.
"""
uri = self._ServiceUrl('group', True, group_id, '', '')
properties = {}
properties['groupId'] = group_id
properties['groupName'] = group_name
properties['description'] = description
properties['emailPermission'] = email_permission
return self._PutProperties(uri, properties)
def RetrieveGroup(self, group_id):
"""Retrieve a group based on its ID.
Args:
group_id: The ID of the group (e.g. us-sales).
Returns:
A dict containing the result of the retrieve operation.
"""
uri = self._ServiceUrl('group', True, group_id, '', '')
return self._GetProperties(uri)
def RetrieveAllGroups(self):
"""Retrieve all groups in the domain.
Args:
None
Returns:
A list containing the result of the retrieve operation.
"""
uri = self._ServiceUrl('group', True, '', '', '')
return self._GetPropertiesList(uri)
def RetrievePageOfGroups(self, start_group=None):
"""Retrieve one page of groups in the domain.
Args:
start_group: The key to continue for pagination through all groups.
Returns:
A feed object containing the result of the retrieve operation.
"""
uri = self._ServiceUrl('group', True, '', '', '')
if start_group is not None:
uri += "?start="+start_group
property_feed = self._GetPropertyFeed(uri)
return property_feed
def RetrieveGroups(self, member_id, direct_only=False):
"""Retrieve all groups that belong to the given member_id.
Args:
member_id: The member's email address (e.g. member@example.com).
      direct_only: Boolean; whether to return only groups that this member
        directly belongs to.
Returns:
A list containing the result of the retrieve operation.
"""
uri = self._ServiceUrl('group', True, '', member_id, '', direct_only=direct_only)
return self._GetPropertiesList(uri)
def DeleteGroup(self, group_id):
"""Delete a group based on its ID.
Args:
group_id: The ID of the group (e.g. us-sales).
Returns:
A dict containing the result of the delete operation.
"""
uri = self._ServiceUrl('group', True, group_id, '', '')
return self._DeleteProperties(uri)
def AddMemberToGroup(self, member_id, group_id):
"""Add a member to a group.
Args:
member_id: The member's email address (e.g. member@example.com).
group_id: The ID of the group (e.g. us-sales).
Returns:
A dict containing the result of the add operation.
"""
uri = self._ServiceUrl('member', False, group_id, member_id, '')
properties = {}
properties['memberId'] = member_id
return self._PostProperties(uri, properties)
def IsMember(self, member_id, group_id):
"""Check whether the given member already exists in the given group.
Args:
member_id: The member's email address (e.g. member@example.com).
group_id: The ID of the group (e.g. us-sales).
Returns:
True if the member exists in the group. False otherwise.
"""
uri = self._ServiceUrl('member', True, group_id, member_id, '')
return self._IsExisted(uri)
def RetrieveMember(self, member_id, group_id):
"""Retrieve the given member in the given group.
Args:
member_id: The member's email address (e.g. member@example.com).
group_id: The ID of the group (e.g. us-sales).
Returns:
A dict containing the result of the retrieve operation.
"""
uri = self._ServiceUrl('member', True, group_id, member_id, '')
return self._GetProperties(uri)
def RetrieveAllMembers(self, group_id, suspended_users=False):
"""Retrieve all members in the given group.
Args:
group_id: The ID of the group (e.g. us-sales).
suspended_users: A boolean; should we include any suspended users in
the membership list returned?
Returns:
A list containing the result of the retrieve operation.
"""
uri = self._ServiceUrl('member', True, group_id, '', '',
suspended_users=suspended_users)
return self._GetPropertiesList(uri)
def RetrievePageOfMembers(self, group_id, suspended_users=False, start=None):
"""Retrieve one page of members of a given group.
Args:
group_id: The ID of the group (e.g. us-sales).
suspended_users: A boolean; should we include any suspended users in
the membership list returned?
start: The key to continue for pagination through all members.
Returns:
A feed object containing the result of the retrieve operation.
"""
uri = self._ServiceUrl('member', True, group_id, '', '',
suspended_users=suspended_users)
if start is not None:
if suspended_users:
uri += "&start="+start
else:
uri += "?start="+start
property_feed = self._GetPropertyFeed(uri)
return property_feed
def RemoveMemberFromGroup(self, member_id, group_id):
"""Remove the given member from the given group.
Args:
member_id: The member's email address (e.g. member@example.com).
group_id: The ID of the group (e.g. us-sales).
Returns:
A dict containing the result of the remove operation.
"""
uri = self._ServiceUrl('member', True, group_id, member_id, '')
return self._DeleteProperties(uri)
def AddOwnerToGroup(self, owner_email, group_id):
"""Add an owner to a group.
Args:
owner_email: The email address of a group owner.
group_id: The ID of the group (e.g. us-sales).
Returns:
A dict containing the result of the add operation.
"""
uri = self._ServiceUrl('owner', False, group_id, '', owner_email)
properties = {}
properties['email'] = owner_email
return self._PostProperties(uri, properties)
def IsOwner(self, owner_email, group_id):
"""Check whether the given member an owner of the given group.
Args:
owner_email: The email address of a group owner.
group_id: The ID of the group (e.g. us-sales).
Returns:
True if the member is an owner of the given group. False otherwise.
"""
uri = self._ServiceUrl('owner', True, group_id, '', owner_email)
return self._IsExisted(uri)
def RetrieveOwner(self, owner_email, group_id):
"""Retrieve the given owner in the given group.
Args:
owner_email: The email address of a group owner.
group_id: The ID of the group (e.g. us-sales).
Returns:
A dict containing the result of the retrieve operation.
"""
uri = self._ServiceUrl('owner', True, group_id, '', owner_email)
return self._GetProperties(uri)
def RetrieveAllOwners(self, group_id, suspended_users=False):
"""Retrieve all owners of the given group.
Args:
group_id: The ID of the group (e.g. us-sales).
suspended_users: A boolean; should we include any suspended users in
the ownership list returned?
Returns:
A list containing the result of the retrieve operation.
"""
uri = self._ServiceUrl('owner', True, group_id, '', '',
suspended_users=suspended_users)
return self._GetPropertiesList(uri)
def RetrievePageOfOwners(self, group_id, suspended_users=False, start=None):
"""Retrieve one page of owners of the given group.
Args:
group_id: The ID of the group (e.g. us-sales).
suspended_users: A boolean; should we include any suspended users in
the ownership list returned?
start: The key to continue for pagination through all owners.
Returns:
A feed object containing the result of the retrieve operation.
"""
uri = self._ServiceUrl('owner', True, group_id, '', '',
suspended_users=suspended_users)
if start is not None:
if suspended_users:
uri += "&start="+start
else:
uri += "?start="+start
property_feed = self._GetPropertyFeed(uri)
return property_feed
def RemoveOwnerFromGroup(self, owner_email, group_id):
"""Remove the given owner from the given group.
Args:
owner_email: The email address of a group owner.
group_id: The ID of the group (e.g. us-sales).
Returns:
A dict containing the result of the remove operation.
"""
uri = self._ServiceUrl('owner', True, group_id, '', owner_email)
return self._DeleteProperties(uri)
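# Usage sketch (hypothetical domain and credentials; assumes a successful
# ProgrammaticLogin() before any API call):
#
#   service = GroupsService(email='admin@example.com', password='...',
#                           domain='example.com')
#   service.ProgrammaticLogin()
#   service.CreateGroup('us-sales', 'US Sales', 'Sales team', PERMISSION_MEMBER)
#   service.AddMemberToGroup('member@example.com', 'us-sales')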
|
benob/chainer
|
refs/heads/master
|
chainer/testing/condition.py
|
39
|
import functools
import unittest
import six
class QuietTestRunner(object):
def run(self, suite):
result = unittest.TestResult()
suite(result)
return result
def repeat_with_success_at_least(times, min_success):
"""Decorator for multiple trial of the test case.
The decorated test case is launched multiple times.
The case is judged as passed at least specified number of trials.
If the number of successful trials exceeds `min_success`,
the remaining trials are skipped.
Args:
times(int): The number of trials.
min_success(int): Threshold that the decorated test
case is regarded as passed.
"""
assert times >= min_success
def _repeat_with_success_at_least(f):
@functools.wraps(f)
def wrapper(*args, **kwargs):
assert len(args) > 0
instance = args[0]
assert isinstance(instance, unittest.TestCase)
success_counter = 0
failure_counter = 0
results = []
def fail():
msg = '\nFail: {0}, Success: {1}'.format(
failure_counter, success_counter)
if len(results) > 0:
first = results[0]
errs = first.failures + first.errors
if len(errs) > 0:
err_msg = '\n'.join(fail[1] for fail in errs)
msg += '\n\nThe first error message:\n' + err_msg
instance.fail(msg)
for _ in six.moves.range(times):
suite = unittest.TestSuite()
suite.addTest(
unittest.FunctionTestCase(
lambda: f(*args, **kwargs),
setUp=instance.setUp,
tearDown=instance.tearDown))
result = QuietTestRunner().run(suite)
if result.wasSuccessful():
success_counter += 1
else:
results.append(result)
failure_counter += 1
if success_counter >= min_success:
instance.assertTrue(True)
return
if failure_counter > times - min_success:
fail()
return
fail()
return wrapper
return _repeat_with_success_at_least
def repeat(times):
"""Decorator that imposes the test to be successful in a row.
Decorated test case is launched multiple times.
The case is regarded as passed only if it is successful
specified times in a row.
.. note::
In current implementation, this decorator grasps the
failure information of each trial.
Args:
times(int): The number of trials.
"""
return repeat_with_success_at_least(times, times)
def retry(times):
"""Decorator that imposes the test to be successful at least once.
Decorated test case is launched multiple times.
The case is regarded as passed if it is successful
at least once.
.. note::
In current implementation, this decorator grasps the
failure information of each trial.
Args:
times(int): The number of trials.
"""
return repeat_with_success_at_least(times, 1)
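# Usage sketch for the decorators above (a minimal unittest example; the
# test bodies are illustrative):
#
#   class FlakyTest(unittest.TestCase):
#       @retry(3)     # passes if any of up to 3 trials succeeds
#       def test_probabilistic(self):
#           self.assertLess(random.random(), 0.9)
#
#       @repeat(5)    # passes only if all 5 trials succeed
#       def test_deterministic(self):
#           self.assertEqual(1 + 1, 2)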
|
rghe/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/slxos/slxos_interface.py
|
91
|
#!/usr/bin/python
#
# (c) 2018 Extreme Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: slxos_interface
version_added: "2.6"
author: "Lindsay Hill (@LindsayHill)"
short_description: Manage Interfaces on Extreme SLX-OS network devices
description:
- This module provides declarative management of Interfaces
on Extreme SLX-OS network devices.
notes:
- Tested against SLX-OS 17s.1.02
options:
name:
description:
- Name of the Interface.
required: true
description:
description:
- Description of Interface.
enabled:
description:
- Interface link status.
default: True
type: bool
speed:
description:
- Interface link speed.
mtu:
description:
- Maximum size of transmit packet.
tx_rate:
description:
- Transmit rate in bits per second (bps).
rx_rate:
description:
- Receiver rate in bits per second (bps).
neighbors:
description:
      - Check the operational state of the given interface C(name) for LLDP neighbor.
- The following suboptions are available.
suboptions:
host:
description:
- "LLDP neighbor host for given interface C(name)."
port:
description:
- "LLDP neighbor port to which given interface C(name) is connected."
aggregate:
description: List of Interfaces definitions.
delay:
description:
      - Time in seconds to wait before checking for the operational state on the
        remote device. This wait applies to the operational state arguments, which
        are I(state) with values C(up)/C(down), I(tx_rate) and I(rx_rate).
default: 10
state:
description:
      - State of the Interface configuration, C(up) means present and
        operationally up, and C(down) means present and operationally down.
default: present
choices: ['present', 'absent', 'up', 'down']
"""
EXAMPLES = """
- name: configure interface
slxos_interface:
name: Ethernet 0/2
description: test-interface
speed: 1000
mtu: 9216
- name: remove interface
slxos_interface:
name: Loopback 9
state: absent
- name: make interface up
slxos_interface:
name: Ethernet 0/2
enabled: True
- name: make interface down
slxos_interface:
name: Ethernet 0/2
enabled: False
- name: Check intent arguments
slxos_interface:
name: Ethernet 0/2
state: up
tx_rate: ge(0)
rx_rate: le(0)
- name: Check neighbors intent arguments
slxos_interface:
name: Ethernet 0/41
neighbors:
- port: Ethernet 0/41
host: SLX
- name: Config + intent
slxos_interface:
name: Ethernet 0/2
enabled: False
state: down
- name: Add interface using aggregate
slxos_interface:
aggregate:
- { name: Ethernet 0/1, mtu: 1548, description: test-interface-1 }
- { name: Ethernet 0/2, mtu: 1548, description: test-interface-2 }
speed: 10000
state: present
- name: Delete interface using aggregate
slxos_interface:
aggregate:
- name: Loopback 9
- name: Loopback 10
state: absent
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device.
returned: always, except for the platforms that use Netconf transport to manage the device.
type: list
sample:
- interface Ethernet 0/2
- description test-interface
- mtu 1548
"""
import re
from copy import deepcopy
from time import sleep
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import exec_command
from ansible.module_utils.network.slxos.slxos import get_config, load_config
from ansible.module_utils.network.common.config import NetworkConfig
from ansible.module_utils.network.common.utils import conditional, remove_default_spec
def validate_mtu(value, module):
if value and not 1548 <= int(value) <= 9216:
module.fail_json(msg='mtu must be between 1548 and 9216')
def validate_param_values(module, obj, param=None):
if param is None:
param = module.params
for key in obj:
# validate the param value (if validator func exists)
validator = globals().get('validate_%s' % key)
if callable(validator):
validator(param.get(key), module)
def parse_shutdown(configobj, name):
cfg = configobj['interface %s' % name]
cfg = '\n'.join(cfg.children)
match = re.search(r'^shutdown', cfg, re.M)
if match:
return True
else:
return False
def parse_config_argument(configobj, name, arg=None):
cfg = configobj['interface %s' % name]
cfg = '\n'.join(cfg.children)
match = re.search(r'%s (.+)$' % arg, cfg, re.M)
if match:
return match.group(1)
def search_obj_in_list(name, lst):
for o in lst:
if o['name'] == name:
return o
return None
def add_command_to_interface(interface, cmd, commands):
if interface not in commands:
commands.append(interface)
commands.append(cmd)
def map_config_to_obj(module):
config = get_config(module)
configobj = NetworkConfig(indent=1, contents=config)
match = re.findall(r'^interface (\S+ \S+)', config, re.M)
if not match:
return list()
instances = list()
for item in set(match):
obj = {
'name': item,
'description': parse_config_argument(configobj, item, 'description'),
'speed': parse_config_argument(configobj, item, 'speed'),
'mtu': parse_config_argument(configobj, item, 'mtu'),
'disable': True if parse_shutdown(configobj, item) else False,
'state': 'present'
}
instances.append(obj)
return instances
def map_params_to_obj(module):
obj = []
aggregate = module.params.get('aggregate')
if aggregate:
for item in aggregate:
for key in item:
if item.get(key) is None:
item[key] = module.params[key]
validate_param_values(module, item, item)
d = item.copy()
if d['enabled']:
d['disable'] = False
else:
d['disable'] = True
obj.append(d)
else:
params = {
'name': module.params['name'],
'description': module.params['description'],
'speed': module.params['speed'],
'mtu': module.params['mtu'],
'state': module.params['state'],
'delay': module.params['delay'],
'tx_rate': module.params['tx_rate'],
'rx_rate': module.params['rx_rate'],
'neighbors': module.params['neighbors']
}
validate_param_values(module, params)
if module.params['enabled']:
params.update({'disable': False})
else:
params.update({'disable': True})
obj.append(params)
return obj
def map_obj_to_commands(updates):
commands = list()
want, have = updates
args = ('speed', 'description', 'mtu')
for w in want:
name = w['name']
disable = w['disable']
state = w['state']
obj_in_have = search_obj_in_list(name, have)
interface = 'interface ' + name
if state == 'absent' and obj_in_have:
commands.append('no ' + interface)
elif state in ('present', 'up', 'down'):
if obj_in_have:
for item in args:
candidate = w.get(item)
running = obj_in_have.get(item)
if candidate != running:
if candidate:
cmd = item + ' ' + str(candidate)
add_command_to_interface(interface, cmd, commands)
if disable and not obj_in_have.get('disable', False):
add_command_to_interface(interface, 'shutdown', commands)
elif not disable and obj_in_have.get('disable', False):
add_command_to_interface(interface, 'no shutdown', commands)
else:
commands.append(interface)
for item in args:
value = w.get(item)
if value:
commands.append(item + ' ' + str(value))
if disable:
commands.append('no shutdown')
return commands
def check_declarative_intent_params(module, want, result):
failed_conditions = []
have_neighbors = None
for w in want:
want_state = w.get('state')
want_tx_rate = w.get('tx_rate')
want_rx_rate = w.get('rx_rate')
want_neighbors = w.get('neighbors')
if want_state not in ('up', 'down') and not want_tx_rate and not want_rx_rate and not want_neighbors:
continue
if result['changed']:
sleep(w['delay'])
command = 'show interface %s' % w['name']
rc, out, err = exec_command(module, command)
if rc != 0:
module.fail_json(msg=to_text(err, errors='surrogate_then_replace'), command=command, rc=rc)
if want_state in ('up', 'down'):
match = re.search(r'%s (\w+)' % 'line protocol is', out, re.M)
have_state = None
if match:
have_state = match.group(1)
if have_state is None or not conditional(want_state, have_state.strip()):
failed_conditions.append('state ' + 'eq(%s)' % want_state)
if want_tx_rate:
match = re.search(r'%s (\d+)' % 'Output', out, re.M)
have_tx_rate = None
if match:
have_tx_rate = match.group(1)
if have_tx_rate is None or not conditional(want_tx_rate, have_tx_rate.strip(), cast=int):
failed_conditions.append('tx_rate ' + want_tx_rate)
if want_rx_rate:
match = re.search(r'%s (\d+)' % 'Input', out, re.M)
have_rx_rate = None
if match:
have_rx_rate = match.group(1)
if have_rx_rate is None or not conditional(want_rx_rate, have_rx_rate.strip(), cast=int):
failed_conditions.append('rx_rate ' + want_rx_rate)
if want_neighbors:
have_host = []
have_port = []
if have_neighbors is None:
rc, have_neighbors, err = exec_command(module, 'show lldp neighbors detail')
if rc != 0:
module.fail_json(msg=to_text(err, errors='surrogate_then_replace'), command=command, rc=rc)
if have_neighbors:
lines = have_neighbors.strip().split('Local Interface: ')
short_name = w['name'].replace('Ethernet', 'Eth')
for line in lines:
field = line.split('\n')
if field[0].split('(')[0].strip() == short_name:
for item in field:
if item.startswith('System Name:'):
have_host.append(item.split(':')[1].strip())
if item.startswith('Remote Interface:'):
have_port.append(item.split(':')[1].split('(')[0].strip())
for item in want_neighbors:
host = item.get('host')
port = item.get('port')
if host and host not in have_host:
failed_conditions.append('host ' + host)
if port and port not in have_port:
failed_conditions.append('port ' + port)
return failed_conditions
def main():
""" main entry point for module execution
"""
neighbors_spec = dict(
host=dict(),
port=dict()
)
element_spec = dict(
name=dict(),
description=dict(),
speed=dict(),
mtu=dict(),
enabled=dict(default=True, type='bool'),
tx_rate=dict(),
rx_rate=dict(),
neighbors=dict(type='list', elements='dict', options=neighbors_spec),
delay=dict(default=10, type='int'),
state=dict(default='present',
choices=['present', 'absent', 'up', 'down'])
)
aggregate_spec = deepcopy(element_spec)
aggregate_spec['name'] = dict(required=True)
# remove default in aggregate spec, to handle common arguments
remove_default_spec(aggregate_spec)
argument_spec = dict(
aggregate=dict(type='list', elements='dict', options=aggregate_spec),
)
argument_spec.update(element_spec)
required_one_of = [['name', 'aggregate']]
mutually_exclusive = [['name', 'aggregate']]
module = AnsibleModule(argument_spec=argument_spec,
required_one_of=required_one_of,
mutually_exclusive=mutually_exclusive,
supports_check_mode=True)
warnings = list()
result = {'changed': False}
if warnings:
result['warnings'] = warnings
want = map_params_to_obj(module)
have = map_config_to_obj(module)
commands = map_obj_to_commands((want, have))
result['commands'] = commands
if commands:
if not module.check_mode:
load_config(module, commands)
result['changed'] = True
failed_conditions = check_declarative_intent_params(module, want, result)
if failed_conditions:
msg = 'One or more conditional statements have not been satisfied'
module.fail_json(msg=msg, failed_conditions=failed_conditions, changed=result['changed'])
module.exit_json(**result)
if __name__ == '__main__':
main()
|
YuxuanLing/trunk
|
refs/heads/master
|
trunk/code/study/python/Fluent-Python-example-code/20-descriptor/bulkfood/model_v5.py
|
1
|
# BEGIN MODEL_V5
import abc
class AutoStorage: # <1>
__counter = 0
def __init__(self):
cls = self.__class__
prefix = cls.__name__
index = cls.__counter
self.storage_name = '_{}#{}'.format(prefix, index)
cls.__counter += 1
def __get__(self, instance, owner):
if instance is None:
return self
else:
return getattr(instance, self.storage_name)
def __set__(self, instance, value):
setattr(instance, self.storage_name, value) # <2>
class Validated(abc.ABC, AutoStorage): # <3>
def __set__(self, instance, value):
value = self.validate(instance, value) # <4>
super().__set__(instance, value) # <5>
@abc.abstractmethod
def validate(self, instance, value): # <6>
"""return validated value or raise ValueError"""
class Quantity(Validated): # <7>
"""a number greater than zero"""
def validate(self, instance, value):
if value <= 0:
raise ValueError('value must be > 0')
return value
class NonBlank(Validated):
"""a string with at least one non-space character"""
def validate(self, instance, value):
value = value.strip()
if len(value) == 0:
raise ValueError('value cannot be empty or blank')
return value # <8>
# END MODEL_V5
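# Usage sketch (an addition, not part of the book listing above): wiring
# the Validated descriptors into a model class. Attribute assignment is
# routed through validate(), while the values land in the per-instance
# '_Quantity#N' / '_NonBlank#N' names generated by AutoStorage.
class LineItem:
    description = NonBlank()
    weight = Quantity()
    price = Quantity()

    def __init__(self, description, weight, price):
        self.description = description
        self.weight = weight
        self.price = price

    def subtotal(self):
        return self.weight * self.price

if __name__ == '__main__':
    raisins = LineItem('Golden raisins', 10, 6.95)
    print(raisins.subtotal())  # 69.5
    # LineItem('', 10, 6.95) would raise ValueError: the description
    # cannot be empty or blank.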
|
greent2008/os_lab
|
refs/heads/master
|
related_info/lab7/ipc/pipe-ex1.py
|
48
|
#!/usr/bin/env python
# -*- encoding: utf8 -*-
import os, sys
print "I'm going to fork now - the child will write something to a pipe, and the parent will read it back"
r, w = os.pipe() # r and w are file descriptors, not file objects
pid = os.fork()
if pid:
    # parent process
    os.close(w) # close the unused write-end descriptor
    r = os.fdopen(r) # wrap the descriptor r in a file object
    print "parent: reading"
    txt = r.read()
    os.waitpid(pid, 0) # make sure the child process is reaped
else:
    # child process
    os.close(r)
w = os.fdopen(w, 'w')
print "child: writing"
w.write("here's some text from the child")
w.close()
print "child: closing"
sys.exit(0)
print "parent: got it; text =", txt
|
litong01/python-monasca
|
refs/heads/master
|
kiloeyes/openstack/common/strutils.py
|
1
|
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
System-level utilities and helper functions.
"""
import math
import re
import sys
import unicodedata
import six
from kiloeyes.openstack.common.gettextutils import _
UNIT_PREFIX_EXPONENT = {
'k': 1,
'K': 1,
'Ki': 1,
'M': 2,
'Mi': 2,
'G': 3,
'Gi': 3,
'T': 4,
'Ti': 4,
}
UNIT_SYSTEM_INFO = {
'IEC': (1024, re.compile(r'(^[-+]?\d*\.?\d+)([KMGT]i?)?(b|bit|B)$')),
'SI': (1000, re.compile(r'(^[-+]?\d*\.?\d+)([kMGT])?(b|bit|B)$')),
}
TRUE_STRINGS = ('1', 't', 'true', 'on', 'y', 'yes')
FALSE_STRINGS = ('0', 'f', 'false', 'off', 'n', 'no')
SLUGIFY_STRIP_RE = re.compile(r"[^\w\s-]")
SLUGIFY_HYPHENATE_RE = re.compile(r"[-\s]+")
# NOTE(flaper87): The following globals are used by `mask_password`
_SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password', 'admin_password']
# NOTE(ldbragst): Let's build a list of regex objects using the list of
# _SANITIZE_KEYS we already have. This way, we only have to add the new key
# to the list of _SANITIZE_KEYS and we can generate regular expressions
# for XML and JSON automatically.
_SANITIZE_PATTERNS_2 = []
_SANITIZE_PATTERNS_1 = []
# NOTE(amrith): Some regular expressions have only one parameter, some
# have two parameters. Use different lists of patterns here.
_FORMAT_PATTERNS_1 = [r'(%(key)s\s*[=]\s*)[^\s^\'^\"]+']
_FORMAT_PATTERNS_2 = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])',
r'(%(key)s\s+[\"\']).*?([\"\'])',
r'([-]{2}%(key)s\s+)[^\'^\"^=^\s]+([\s]*)',
r'(<%(key)s>).*?(</%(key)s>)',
r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])',
r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])',
r'([\'"].*?%(key)s[\'"]\s*,\s*\'--?[A-z]+\'\s*,\s*u?'
'[\'"]).*?([\'"])',
r'(%(key)s\s*--?[A-z]+\s*)\S+(\s*)']
for key in _SANITIZE_KEYS:
for pattern in _FORMAT_PATTERNS_2:
reg_ex = re.compile(pattern % {'key': key}, re.DOTALL)
_SANITIZE_PATTERNS_2.append(reg_ex)
for pattern in _FORMAT_PATTERNS_1:
reg_ex = re.compile(pattern % {'key': key}, re.DOTALL)
_SANITIZE_PATTERNS_1.append(reg_ex)
def int_from_bool_as_string(subject):
"""Interpret a string as a boolean and return either 1 or 0.
Any string value in:
('True', 'true', 'On', 'on', '1')
is interpreted as a boolean True.
Useful for JSON-decoded stuff and config file parsing
"""
return bool_from_string(subject) and 1 or 0
def bool_from_string(subject, strict=False, default=False):
"""Interpret a string as a boolean.
A case-insensitive match is performed such that strings matching 't',
'true', 'on', 'y', 'yes', or '1' are considered True and, when
`strict=False`, anything else returns the value specified by 'default'.
Useful for JSON-decoded stuff and config file parsing.
If `strict=True`, unrecognized values, including None, will raise a
ValueError which is useful when parsing values passed in from an API call.
Strings yielding False are 'f', 'false', 'off', 'n', 'no', or '0'.
"""
if not isinstance(subject, six.string_types):
subject = six.text_type(subject)
lowered = subject.strip().lower()
if lowered in TRUE_STRINGS:
return True
elif lowered in FALSE_STRINGS:
return False
elif strict:
acceptable = ', '.join(
"'%s'" % s for s in sorted(TRUE_STRINGS + FALSE_STRINGS))
msg = _("Unrecognized value '%(val)s', acceptable values are:"
" %(acceptable)s") % {'val': subject,
'acceptable': acceptable}
raise ValueError(msg)
else:
return default
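# Illustrative calls (an addition, not part of the original module):
#
#     bool_from_string('YES')                   # True
#     bool_from_string('banana')                # False (the default)
#     bool_from_string('banana', default=True)  # True
#     bool_from_string('banana', strict=True)   # raises ValueError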
def safe_decode(text, incoming=None, errors='strict'):
"""Decodes incoming text/bytes string using `incoming` if they're not
already unicode.
:param incoming: Text's current encoding
:param errors: Errors handling policy. See here for valid
values http://docs.python.org/2/library/codecs.html
:returns: text or a unicode `incoming` encoded
representation of it.
:raises TypeError: If text is not an instance of str
"""
if not isinstance(text, (six.string_types, six.binary_type)):
raise TypeError("%s can't be decoded" % type(text))
if isinstance(text, six.text_type):
return text
if not incoming:
incoming = (sys.stdin.encoding or
sys.getdefaultencoding())
try:
return text.decode(incoming, errors)
except UnicodeDecodeError:
# Note(flaper87) If we get here, it means that
# sys.stdin.encoding / sys.getdefaultencoding
# didn't return a suitable encoding to decode
# text. This happens mostly when global LANG
# var is not set correctly and there's no
# default encoding. In this case, most likely
# python will use ASCII or ANSI encoders as
# default encodings but they won't be capable
# of decoding non-ASCII characters.
#
# Also, UTF-8 is being used since it's an ASCII
# extension.
return text.decode('utf-8', errors)
def safe_encode(text, incoming=None,
encoding='utf-8', errors='strict'):
"""Encodes incoming text/bytes string using `encoding`.
If incoming is not specified, text is expected to be encoded with
current python's default encoding. (`sys.getdefaultencoding`)
:param incoming: Text's current encoding
:param encoding: Expected encoding for text (Default UTF-8)
:param errors: Errors handling policy. See here for valid
values http://docs.python.org/2/library/codecs.html
:returns: text or a bytestring `encoding` encoded
representation of it.
:raises TypeError: If text is not an instance of str
"""
if not isinstance(text, (six.string_types, six.binary_type)):
raise TypeError("%s can't be encoded" % type(text))
if not incoming:
incoming = (sys.stdin.encoding or
sys.getdefaultencoding())
if isinstance(text, six.text_type):
return text.encode(encoding, errors)
elif text and encoding != incoming:
# Decode text before encoding it with `encoding`
text = safe_decode(text, incoming, errors)
return text.encode(encoding, errors)
else:
return text
def string_to_bytes(text, unit_system='IEC', return_int=False):
"""Converts a string into an float representation of bytes.
The units supported for IEC ::
Kb(it), Kib(it), Mb(it), Mib(it), Gb(it), Gib(it), Tb(it), Tib(it)
KB, KiB, MB, MiB, GB, GiB, TB, TiB
The units supported for SI ::
kb(it), Mb(it), Gb(it), Tb(it)
kB, MB, GB, TB
Note that the SI unit system does not support capital letter 'K'
:param text: String input for bytes size conversion.
:param unit_system: Unit system for byte size conversion.
:param return_int: If True, returns integer representation of text
in bytes. (default: decimal)
:returns: Numerical representation of text in bytes.
:raises ValueError: If text has an invalid value.
"""
try:
base, reg_ex = UNIT_SYSTEM_INFO[unit_system]
except KeyError:
msg = _('Invalid unit system: "%s"') % unit_system
raise ValueError(msg)
match = reg_ex.match(text)
if match:
magnitude = float(match.group(1))
unit_prefix = match.group(2)
if match.group(3) in ['b', 'bit']:
magnitude /= 8
else:
msg = _('Invalid string format: %s') % text
raise ValueError(msg)
if not unit_prefix:
res = magnitude
else:
res = magnitude * pow(base, UNIT_PREFIX_EXPONENT[unit_prefix])
if return_int:
return int(math.ceil(res))
return res
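# Illustrative conversions (an addition, not part of the original
# module):
#
#     string_to_bytes('1KB')                      # 1024.0 (IEC: K == 2**10)
#     string_to_bytes('1kB', unit_system='SI')    # 1000.0
#     string_to_bytes('8Kb')                      # 1024.0 (bits / 8)
#     string_to_bytes('1.5MiB', return_int=True)  # 1572864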
def to_slug(value, incoming=None, errors="strict"):
"""Normalize string.
Convert to lowercase, remove non-word characters, and convert spaces
to hyphens.
Inspired by Django's `slugify` filter.
:param value: Text to slugify
:param incoming: Text's current encoding
:param errors: Errors handling policy. See here for valid
values http://docs.python.org/2/library/codecs.html
:returns: slugified unicode representation of `value`
:raises TypeError: If text is not an instance of str
"""
value = safe_decode(value, incoming, errors)
# NOTE(aababilov): no need to use safe_(encode|decode) here:
# encodings are always "ascii", error handling is always "ignore"
# and types are always known (first: unicode; second: str)
value = unicodedata.normalize("NFKD", value).encode(
"ascii", "ignore").decode("ascii")
value = SLUGIFY_STRIP_RE.sub("", value).strip().lower()
return SLUGIFY_HYPHENATE_RE.sub("-", value)
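# Illustrative call (an addition, not part of the original module):
# accents fold to ASCII, punctuation is stripped, and whitespace becomes
# hyphens:
#
#     to_slug(u'H\xe9llo W\xf6rld!')  # u'hello-world'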
def mask_password(message, secret="***"):
"""Replace password with 'secret' in message.
:param message: The string which includes security information.
:param secret: value with which to replace passwords.
:returns: The unicode value of message with the password fields masked.
For example:
>>> mask_password("'adminPass' : 'aaaaa'")
"'adminPass' : '***'"
>>> mask_password("'admin_pass' : 'aaaaa'")
"'admin_pass' : '***'"
>>> mask_password('"password" : "aaaaa"')
'"password" : "***"'
>>> mask_password("'original_password' : 'aaaaa'")
"'original_password' : '***'"
>>> mask_password("u'original_password' : u'aaaaa'")
"u'original_password' : u'***'"
"""
message = six.text_type(message)
# NOTE(ldbragst): Check to see if anything in message contains any key
# specified in _SANITIZE_KEYS, if not then just return the message since
# we don't have to mask any passwords.
if not any(key in message for key in _SANITIZE_KEYS):
return message
substitute = r'\g<1>' + secret + r'\g<2>'
for pattern in _SANITIZE_PATTERNS_2:
message = re.sub(pattern, substitute, message)
substitute = r'\g<1>' + secret
for pattern in _SANITIZE_PATTERNS_1:
message = re.sub(pattern, substitute, message)
return message
|
aequitas/home-assistant
|
refs/heads/dev
|
homeassistant/components/unifi/const.py
|
1
|
"""Constants for the UniFi component."""
import logging
LOGGER = logging.getLogger(__package__)
DOMAIN = 'unifi'
CONTROLLER_ID = '{host}-{site}'
CONF_CONTROLLER = 'controller'
CONF_POE_CONTROL = 'poe_control'
CONF_SITE_ID = 'site'
|
meletakis/collato
|
refs/heads/master
|
lib/python2.7/site-packages/PIL/GbrImagePlugin.py
|
40
|
#
# The Python Imaging Library
# $Id$
#
# load a GIMP brush file
#
# History:
# 96-03-14 fl Created
#
# Copyright (c) Secret Labs AB 1997.
# Copyright (c) Fredrik Lundh 1996.
#
# See the README file for information on usage and redistribution.
#
import Image, ImageFile
def i32(c):
return ord(c[3]) + (ord(c[2])<<8) + (ord(c[1])<<16) + (ord(c[0])<<24L)
def _accept(prefix):
return i32(prefix) >= 20 and i32(prefix[4:8]) == 1
##
# Image plugin for the GIMP brush format.
class GbrImageFile(ImageFile.ImageFile):
format = "GBR"
format_description = "GIMP brush file"
def _open(self):
header_size = i32(self.fp.read(4))
version = i32(self.fp.read(4))
if header_size < 20 or version != 1:
raise SyntaxError, "not a GIMP brush"
width = i32(self.fp.read(4))
height = i32(self.fp.read(4))
bytes = i32(self.fp.read(4))
if width <= 0 or height <= 0 or bytes != 1:
raise SyntaxError, "not a GIMP brush"
comment = self.fp.read(header_size - 20)[:-1]
self.mode = "L"
self.size = width, height
self.info["comment"] = comment
# Since the brush is so small, we read the data immediately
self.data = self.fp.read(width * height)
def load(self):
if not self.data:
return
# create an image out of the brush data block
self.im = Image.core.new(self.mode, self.size)
self.im.fromstring(self.data)
self.data = ""
#
# registry
Image.register_open("GBR", GbrImageFile, _accept)
Image.register_extension("GBR", ".gbr")
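#
# usage sketch (an addition, not part of the original plugin): once this
# module is imported, the registry calls above make Image.open()
# recognize the format; "brush.gbr" is a placeholder filename.
#
#   import Image, GbrImagePlugin
#   im = Image.open("brush.gbr")
#   print im.format, im.size, im.info["comment"]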
|
entropy1337/infernal-twin
|
refs/heads/master
|
Modules/build/reportlab/build/lib.linux-i686-2.7/reportlab/lib/textsplit.py
|
32
|
#Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/lib/textsplit.py
"""Helpers for text wrapping, hyphenation, Asian text splitting and kinsoku shori.
How to split a 'big word' depends on the language and the writing system. This module
works on a Unicode string. It ought to grow by allowing more algorithms to be plugged
in based on possible knowledge of the language and desirable 'niceness' of the algorithm.
"""
__version__=''' $Id$ '''
from unicodedata import category
from reportlab.pdfbase.pdfmetrics import stringWidth
from reportlab.rl_config import _FUZZ
from reportlab.lib.utils import isUnicode
CANNOT_START_LINE = [
#strongly prohibited e.g. end brackets, stop, exclamation...
u'!\',.:;?!")]\u3001\u3002\u300d\u300f\u3011\u3015\uff3d\u3011\uff09',
#middle priority e.g. continuation small vowels - wrapped on two lines but one string...
u'\u3005\u2015\u3041\u3043\u3045\u3047\u3049\u3063\u3083\u3085\u3087\u308e\u30a1\u30a3'
u'\u30a5\u30a7\u30a9\u30c3\u30e3\u30e5\u30e7\u30ee\u30fc\u30f5\u30f6',
#weakly prohibited - continuations, celsius symbol etc.
u'\u309b\u309c\u30fb\u30fd\u30fe\u309d\u309e\u2015\u2010\xb0\u2032\u2033\u2103\uffe0\uff05\u2030'
]
ALL_CANNOT_START = u''.join(CANNOT_START_LINE)
CANNOT_END_LINE = [
#strongly prohibited
u'\u2018\u201c\uff08[{\uff08\u3014\uff3b\uff5b\u3008\u300a\u300c\u300e\u3010',
#weaker - currency symbols, hash, postcode - prefixes
u'$\u00a3@#\uffe5\uff04\uffe1\uff20\u3012\u00a7'
]
ALL_CANNOT_END = u''.join(CANNOT_END_LINE)
def is_multi_byte(ch):
"Is this an Asian character?"
return (ord(ch) >= 0x3000)
def getCharWidths(word, fontName, fontSize):
"""Returns a list of glyph widths.
>>> getCharWidths('Hello', 'Courier', 10)
[6.0, 6.0, 6.0, 6.0, 6.0]
>>> from reportlab.pdfbase.cidfonts import UnicodeCIDFont
>>> from reportlab.pdfbase.pdfmetrics import registerFont
>>> registerFont(UnicodeCIDFont('HeiseiMin-W3'))
>>> getCharWidths(u'\u6771\u4EAC', 'HeiseiMin-W3', 10) #most kanji are 100 ems
[10.0, 10.0]
"""
#character-level function call; the performance is going to SUCK
return [stringWidth(uChar, fontName, fontSize) for uChar in word]
def wordSplit(word, maxWidths, fontName, fontSize, encoding='utf8'):
"""Attempts to break a word which lacks spaces into two parts, the first of which
fits in the remaining space. It is allowed to add hyphens or whatever it wishes.
This is intended as a wrapper for some language- and user-choice-specific splitting
algorithms. It should only be called after line breaking on spaces, which covers western
languages and is highly optimised already. It works on the 'last unsplit word'.
Presumably with further study one could write a Unicode splitting algorithm for text
    fragments which was much faster.
Courier characters should be 6 points wide.
>>> wordSplit('HelloWorld', 30, 'Courier', 10)
[[0.0, 'Hello'], [0.0, 'World']]
>>> wordSplit('HelloWorld', 31, 'Courier', 10)
[[1.0, 'Hello'], [1.0, 'World']]
"""
if not isUnicode(word):
uword = word.decode(encoding)
else:
uword = word
charWidths = getCharWidths(uword, fontName, fontSize)
lines = dumbSplit(uword, charWidths, maxWidths)
if not isUnicode(word):
lines2 = []
#convert back
for (extraSpace, text) in lines:
lines2.append([extraSpace, text.encode(encoding)])
lines = lines2
return lines
def dumbSplit(word, widths, maxWidths):
"""This function attempts to fit as many characters as possible into the available
space, cutting "like a knife" between characters. This would do for Chinese.
    It returns a list of [extraSpace, text] items, where text is a Unicode string
    and extraSpace is the number of points of unused space left on the line. This is a
structure which is fairly easy to display, and supports 'backtracking' approaches
after the fact.
Test cases assume each character is ten points wide...
>>> dumbSplit(u'Hello', [10]*5, 60)
[[10, u'Hello']]
>>> dumbSplit(u'Hello', [10]*5, 50)
[[0, u'Hello']]
>>> dumbSplit(u'Hello', [10]*5, 40)
[[0, u'Hell'], [30, u'o']]
"""
_more = """
#>>> dumbSplit(u'Hello', [10]*5, 4) # less than one character
#(u'', u'Hello')
# this says 'Nihongo wa muzukashii desu ne!' (Japanese is difficult isn't it?) in 12 characters
>>> jtext = u'\u65e5\u672c\u8a9e\u306f\u96e3\u3057\u3044\u3067\u3059\u306d\uff01'
>>> dumbSplit(jtext, [10]*11, 30) #
(u'\u65e5\u672c\u8a9e', u'\u306f\u96e3\u3057\u3044\u3067\u3059\u306d\uff01')
"""
if not isinstance(maxWidths,(list,tuple)): maxWidths = [maxWidths]
assert isUnicode(word)
lines = []
i = widthUsed = lineStartPos = 0
maxWidth = maxWidths[0]
nW = len(word)
while i<nW:
w = widths[i]
c = word[i]
widthUsed += w
i += 1
if widthUsed > maxWidth + _FUZZ and widthUsed>0:
extraSpace = maxWidth - widthUsed
if ord(c)<0x3000:
# we appear to be inside a non-Asian script section.
# (this is a very crude test but quick to compute).
# This is likely to be quite rare so the speed of the
# code below is hopefully not a big issue. The main
# situation requiring this is that a document title
# with an english product name in it got cut.
# we count back and look for
# - a space-like character
# - reversion to Kanji (which would be a good split point)
# - in the worst case, roughly half way back along the line
limitCheck = (lineStartPos+i)>>1 #(arbitrary taste issue)
for j in range(i-1,limitCheck,-1):
cj = word[j]
if category(cj)=='Zs' or ord(cj)>=0x3000:
k = j+1
if k<i:
j = k+1
extraSpace += sum(widths[j:i])
w = widths[k]
c = word[k]
i = j
break
#end of English-within-Asian special case
#we are pushing this character back, but
#the most important of the Japanese typography rules
#if this character cannot start a line, wrap it up to this line so it hangs
#in the right margin. We won't do two or more though - that's unlikely and
#would result in growing ugliness.
#and increase the extra space
#bug fix contributed by Alexander Vasilenko <alexs.vasilenko@gmail.com>
if c not in ALL_CANNOT_START and i>lineStartPos+1:
#otherwise we need to push the character back
#the i>lineStart+1 condition ensures progress
i -= 1
extraSpace += w
#lines.append([maxWidth-sum(widths[lineStartPos:i]), word[lineStartPos:i].strip()])
lines.append([extraSpace, word[lineStartPos:i].strip()])
try:
maxWidth = maxWidths[len(lines)]
except IndexError:
maxWidth = maxWidths[-1] # use the last one
lineStartPos = i
widthUsed = 0
#any characters left?
if widthUsed > 0:
lines.append([maxWidth - widthUsed, word[lineStartPos:]])
return lines
def kinsokuShoriSplit(word, widths, availWidth):
#NOT USED OR FINISHED YET!
"""Split according to Japanese rules according to CJKV (Lunde).
Essentially look for "nice splits" so that we don't end a line
with an open bracket, or start one with a full stop, or stuff like
that. There is no attempt to try to split compound words into
constituent kanji. It currently uses wrap-down: packs as much
on a line as possible, then backtracks if needed
This returns a number of words each of which should just about fit
on a line. If you give it a whole paragraph at once, it will
do all the splits.
It's possible we might slightly step over the width limit
if we do hanging punctuation marks in future (e.g. dangle a Japanese
full stop in the right margin rather than using a whole character
    box).
"""
lines = []
assert len(word) == len(widths)
curWidth = 0.0
curLine = []
i = 0 #character index - we backtrack at times so cannot use for loop
while 1:
ch = word[i]
w = widths[i]
if curWidth + w < availWidth:
curLine.append(ch)
curWidth += w
else:
#end of line. check legality
if ch in CANNOT_END_LINE[0]:
pass
#to be completed
# This recipe refers:
#
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/148061
import re
rx=re.compile("([\u2e80-\uffff])", re.UNICODE)
def cjkwrap(text, width, encoding="utf8"):
return reduce(lambda line, word, width=width: '%s%s%s' %
(line,
[' ','\n', ''][(len(line)-line.rfind('\n')-1
+ len(word.split('\n',1)[0] ) >= width) or
line[-1:] == '\0' and 2],
word),
rx.sub(r'\1\0 ', str(text,encoding)).split(' ')
).replace('\0', '').encode(encoding)
if __name__=='__main__':
import doctest
from reportlab.lib import textsplit
doctest.testmod(textsplit)
|
igord-daynix/virt-test
|
refs/heads/master
|
virttest/propcan_unittest.py
|
15
|
#!/usr/bin/python
import unittest
import logging
import common
import propcan
class TestPropCanBase(unittest.TestCase):
def test_empty_init(self):
self.assertRaises(NotImplementedError, propcan.PropCanBase)
def test_empty_params_init(self):
self.assertRaises(NotImplementedError,
propcan.PropCanBase,
{'foo': 'bar'})
def test_single_init(self):
# Pylint false negative
# pylint: disable=E1001
class FooBar(propcan.PropCanBase):
__slots__ = ('foo',)
testcan = FooBar(foo='bar')
self.assertEqual(len(testcan), 1)
self.assertEqual(testcan['foo'], 'bar')
self.assertEqual(testcan.foo, 'bar')
def test_double_init(self):
# Pylint false negative
# pylint: disable=E1001
class FooBar(propcan.PropCanBase):
__slots__ = ('foo', 'bar')
testcan = FooBar(foo='bar', bar='foo')
self.assertEqual(len(testcan), 2)
self.assertEqual(testcan['foo'], 'bar')
self.assertEqual(testcan['bar'], 'foo')
self.assertEqual(len(testcan), 2)
self.assertEqual(testcan.foo, 'bar')
self.assertEqual(testcan.bar, 'foo')
def test_slots_restrict(self):
# Pylint false negative
# pylint: disable=E1001
class FooBar(propcan.PropCanBase):
__slots__ = ('foo',)
testcan = FooBar(foo='bar')
self.assertEqual(len(testcan), 1)
self.assertEqual(testcan['foo'], 'bar')
self.assertEqual(testcan.foo, 'bar')
self.assertRaises(AttributeError, setattr, testcan, 'bar', 'foo')
self.assertRaises(KeyError, testcan.__setitem__, 'bar', 'foo')
def test_mixed_init(self):
# Pylint false negative
# pylint: disable=E1001
class FooBar(propcan.PropCanBase):
__slots__ = ('foo', 'bar')
testcan = FooBar({'foo': 'bar'})
self.assertEqual(len(testcan), 1)
self.assertEqual(testcan['foo'], 'bar')
self.assertEqual(len(testcan), 1)
self.assertEqual(testcan.foo, 'bar')
self.assertRaises(KeyError, testcan.__getitem__, 'bar')
self.assertRaises(AttributeError, getattr, testcan, 'bar')
self.assertRaises(KeyError, testcan.__delitem__, 'bar')
self.assertRaises(AttributeError, delattr, testcan, 'bar')
def test_subclass_single_init_setter(self):
# Pylint false negative
# pylint: disable=E1001
class FooBar(propcan.PropCanBase):
__slots__ = ('foo', )
it_works = False
def set_foo(self, value):
self.__dict_set__('foo', value)
if value == 'bar':
self.__super_set__('it_works', True)
testcan = FooBar()
self.assertEqual(len(testcan), 0)
self.assertFalse(testcan.it_works)
self.assertRaises(KeyError, testcan.__getitem__, 'foo')
self.assertRaises(AttributeError, getattr, testcan, 'foo')
testcan['foo'] = 'bar'
self.assertEqual(len(testcan), 1)
self.assertTrue(testcan.it_works)
def test_subclass_single_init_getter(self):
# Pylint false negative
# pylint: disable=E1001
class FooBar(propcan.PropCanBase):
__slots__ = ('foo', )
it_works = False
def get_foo(self):
value = self.__dict_get__('foo')
if value == 'bar':
self.__super_set__('it_works', True)
return value
testcan = FooBar()
self.assertFalse(testcan.it_works)
self.assertEqual(len(testcan), 0)
testcan['foo'] = 'bar'
self.assertEqual(len(testcan), 1)
# verify __super_set__() doesn't call getter
self.assertFalse(testcan.it_works)
self.assertEqual(testcan['foo'], 'bar')
self.assertEqual(testcan.foo, 'bar')
self.assertTrue(testcan.it_works)
    def test_subclass_single_init_deleter(self):
# Pylint false negative
# pylint: disable=E1001
class FooBar(propcan.PropCanBase):
__slots__ = ('foo', )
it_works = False
def del_foo(self):
value = self.__dict_get__('foo')
if value == 'bar':
self.__super_set__('it_works', True)
self.__dict_del__('foo')
testcan = FooBar()
self.assertEqual(len(testcan), 0)
self.assertFalse(testcan.it_works)
self.assertFalse(hasattr(testcan, 'foo'))
self.assertFalse(testcan.has_key('foo'))
testcan['foo'] = 'bar'
self.assertEqual(len(testcan), 1)
self.assertEqual(testcan['foo'], 'bar')
self.assertEqual(testcan.foo, 'bar')
del testcan['foo']
self.assertEqual(len(testcan), 0)
self.assertTrue(testcan.it_works)
def test_subclass_no_mask_attributeerror(self):
# Pylint false negative
# pylint: disable=E1001
class FooBar(propcan.PropCanBase):
__slots__ = ('foo', )
def del_foo(self):
raise AttributeError("Del Test")
def set_foo(self, value):
raise AttributeError("Set Test")
def get_foo(self):
raise AttributeError("Get Test")
testcan = FooBar()
self.assertRaises(AttributeError, testcan.__getitem__, 'foo')
self.assertRaises(AttributeError, testcan.__setitem__, 'foo', None)
self.assertRaises(AttributeError, testcan.__delitem__, 'foo')
self.assertRaises(AttributeError, testcan.__getattr__, 'foo')
self.assertRaises(AttributeError, testcan.__setattr__, 'foo', None)
self.assertRaises(AttributeError, testcan.__delattr__, 'foo')
def test_dict_methods_1(self):
# Pylint false negative
# pylint: disable=E1001
class FooBar(propcan.PropCanBase):
__slots__ = ('foo', 'bar')
testcan = FooBar(foo='bar', bar='foo')
testdict = {}
for key, value in testcan.items():
testdict[key] = value
self.assertEqual(testcan, testdict)
def test_dict_methods_2(self):
# Pylint false negative
# pylint: disable=E1001
class FooBar(propcan.PropCanBase):
__slots__ = ('foo', 'bar')
testcan = FooBar(foo='bar', bar='foo')
testdict = testcan.copy()
self.assertEqual(testcan, testdict)
testcan['foo'] = 'foo'
testcan['bar'] = 'bar'
self.assertTrue(testcan.foo != testdict.foo)
self.assertTrue(testcan.bar != testdict.bar)
testdict['foo'] = 'foo'
testdict['bar'] = 'bar'
self.assertTrue(testcan.foo == testdict.foo)
self.assertTrue(testcan.bar == testdict.bar)
def test_update(self):
class FooBar(propcan.PropCanBase):
__slots__ = ('foo', 'bar')
testdict = FooBar()
other = {'foo': 1, 'bar': 2}
testdict.update(other)
self.assertEqual(testdict, other)
other = 'string'
self.assertRaises(ValueError, testdict.update, other)
other = {'foo': 1, 'bar': 2, 'v3': 3}
self.assertRaises(KeyError, testdict.update, other)
kwargs = {'foo': "foo", 'bar': "bar"}
testdict.update(**kwargs)
self.assertEqual(testdict, kwargs)
class TestPropCan(unittest.TestCase):
def setUp(self):
logging.disable(logging.INFO)
    def test_extraneous_init(self):
# Pylint false negative
# pylint: disable=E1001
class FooBar(propcan.PropCan):
__slots__ = ('foo', )
testcan = FooBar((('foo', 'bar'), ('bar', 'foo'),))
self.assertEqual(len(testcan), 1)
testcan = FooBar(bar='foo')
self.assertEqual(len(testcan), 0)
def test_init_None_value(self):
# Pylint false negative
# pylint: disable=E1001
class FooBar(propcan.PropCan):
__slots__ = ('foo', )
testcan = FooBar(foo=None)
self.assertEqual(len(testcan), 0)
self.assertEqual(testcan['foo'], None)
self.assertEqual(testcan.foo, None)
def test_compare(self):
# Pylint false negative
# pylint: disable=E1001
class FooBar(propcan.PropCan):
__slots__ = ('foo', 'bar')
testcan = FooBar(foo=None, bar='foo')
self.assertEqual(len(testcan), 1)
self.assertTrue(testcan == {'bar': 'foo'})
testcan.foo = 'bar'
self.assertEqual(len(testcan), 2)
self.assertTrue(testcan == {'bar': 'foo', 'foo': 'bar'})
self.assertTrue(testcan == {'foo': 'bar', 'bar': 'foo'})
testcan.bar = None
self.assertEqual(len(testcan), 1)
self.assertTrue(testcan == {'foo': 'bar'})
def test_odd_values(self):
# Pylint false negative
# pylint: disable=E1001
class FooBar(propcan.PropCan):
__slots__ = ('foo', 'bar', 'baz')
testcan = FooBar()
self.assertEqual(len(testcan), 0)
testcan.foo = type('blah', (), {})
self.assertEqual(len(testcan), 1)
testcan['bar'] = testcan
self.assertEqual(len(testcan), 2)
setattr(testcan, 'baz', lambda self: str(self))
self.assertEqual(len(testcan), 3)
def test_printables(self):
# Pylint false negative
# pylint: disable=E1001
class FooBar(propcan.PropCan):
__slots__ = ('foo', 'bar', 'baz')
testcan = FooBar()
self.assertEqual(len(testcan), 0)
for value in ('foobar', u'foobar', 1, 1.1, 12345L, ):
setattr(testcan, 'bar', value)
self.assertEqual(len(testcan), 1)
self.assertTrue(testcan == {'bar': value})
self.assertEqual(str(testcan), str({'bar': value}))
if __name__ == '__main__':
unittest.main()
|
renatofilho/QtCreator
|
refs/heads/master
|
src/libs/3rdparty/botan/wrappers/boost-python/botan/__init__.py
|
14
|
from _botan import *
# Initialize the library when the module is imported
init = LibraryInitializer()
class SymmetricKey(OctetString):
pass
class InitializationVector(OctetString):
pass
def Filter(name, key = None, iv = None, dir = None):
    if key is not None and iv is not None and dir is not None:
        return make_filter(name, key, iv, dir)
    elif key is not None and dir is not None:
        return make_filter(name, key, dir)
    elif key is not None:
return make_filter(name, key)
else:
return make_filter(name)
def Pipe(*filters):
pipe = PipeObj()
for filter in filters:
if filter:
pipe.append(filter)
return pipe
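# Usage sketch (an addition, not part of the original wrapper). It
# assumes the PipeObj binding exposes Botan's usual process_msg() and
# read_all() methods and that make_filter() accepts the C++ filter name
# 'Hex_Encoder'; the actual names may differ in this wrapper:
#
#     pipe = Pipe(Filter('Hex_Encoder'))
#     pipe.process_msg('some bytes')
#     print pipe.read_all()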
|
johnkeepmoving/oss-ftp
|
refs/heads/master
|
python27/win32/Lib/email/errors.py
|
468
|
# Copyright (C) 2001-2006 Python Software Foundation
# Author: Barry Warsaw
# Contact: email-sig@python.org
"""email package exception classes."""
class MessageError(Exception):
"""Base class for errors in the email package."""
class MessageParseError(MessageError):
"""Base class for message parsing errors."""
class HeaderParseError(MessageParseError):
"""Error while parsing headers."""
class BoundaryError(MessageParseError):
"""Couldn't find terminating boundary."""
class MultipartConversionError(MessageError, TypeError):
"""Conversion to a multipart is prohibited."""
class CharsetError(MessageError):
"""An illegal charset was given."""
# These are parsing defects which the parser was able to work around.
class MessageDefect:
"""Base class for a message defect."""
def __init__(self, line=None):
self.line = line
class NoBoundaryInMultipartDefect(MessageDefect):
"""A message claimed to be a multipart but had no boundary parameter."""
class StartBoundaryNotFoundDefect(MessageDefect):
"""The claimed start boundary was never found."""
class FirstHeaderLineIsContinuationDefect(MessageDefect):
"""A message had a continuation line as its first header line."""
class MisplacedEnvelopeHeaderDefect(MessageDefect):
"""A 'Unix-from' header was found in the middle of a header block."""
class MalformedHeaderDefect(MessageDefect):
"""Found a header that was missing a colon, or was otherwise malformed."""
class MultipartInvariantViolationDefect(MessageDefect):
"""A message claimed to be a multipart but no subparts were found."""
|
ferabra/edx-platform
|
refs/heads/master
|
lms/djangoapps/shoppingcart/migrations/0019_auto__add_donationconfiguration.py
|
120
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'DonationConfiguration'
db.create_table('shoppingcart_donationconfiguration', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('change_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('changed_by', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, on_delete=models.PROTECT)),
('enabled', self.gf('django.db.models.fields.BooleanField')(default=False)),
))
db.send_create_signal('shoppingcart', ['DonationConfiguration'])
def backwards(self, orm):
# Deleting model 'DonationConfiguration'
db.delete_table('shoppingcart_donationconfiguration')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'shoppingcart.certificateitem': {
'Meta': {'object_name': 'CertificateItem', '_ormbases': ['shoppingcart.OrderItem']},
'course_enrollment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['student.CourseEnrollment']"}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '128', 'db_index': 'True'}),
'mode': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
},
'shoppingcart.coupon': {
'Meta': {'object_name': 'Coupon'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 10, 3, 0, 0)'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'percentage_discount': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'shoppingcart.couponredemption': {
'Meta': {'object_name': 'CouponRedemption'},
'coupon': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Coupon']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'shoppingcart.courseregistrationcode': {
'Meta': {'object_name': 'CourseRegistrationCode'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 10, 3, 0, 0)'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_by_user'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invoice': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Invoice']", 'null': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'purchase_order'", 'null': 'True', 'to': "orm['shoppingcart.Order']"})
},
'shoppingcart.donation': {
'Meta': {'object_name': 'Donation', '_ormbases': ['shoppingcart.OrderItem']},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'donation_type': ('django.db.models.fields.CharField', [], {'default': "'general'", 'max_length': '32'}),
'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
},
'shoppingcart.donationconfiguration': {
'Meta': {'object_name': 'DonationConfiguration'},
'change_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'shoppingcart.invoice': {
'Meta': {'object_name': 'Invoice'},
'address_line_1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'address_line_2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'address_line_3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'company_contact_email': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'company_contact_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'company_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'customer_reference_number': ('django.db.models.fields.CharField', [], {'max_length': '63', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'internal_reference': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'is_valid': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'recipient_email': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'recipient_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'total_amount': ('django.db.models.fields.FloatField', [], {}),
'zip': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True'})
},
'shoppingcart.order': {
'Meta': {'object_name': 'Order'},
'bill_to_cardtype': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'bill_to_ccnum': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
'bill_to_city': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_country': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_first': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_last': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_postalcode': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'bill_to_state': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
'bill_to_street1': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'bill_to_street2': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'processor_reply_dump': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'purchase_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'refunded_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'shoppingcart.orderitem': {
'Meta': {'object_name': 'OrderItem'},
'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
'fulfilled_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_desc': ('django.db.models.fields.CharField', [], {'default': "'Misc. Item'", 'max_length': '1024'}),
'list_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '30', 'decimal_places': '2'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']"}),
'qty': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'refund_requested_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'report_comments': ('django.db.models.fields.TextField', [], {'default': "''"}),
'service_fee': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '30', 'decimal_places': '2'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32', 'db_index': 'True'}),
'unit_cost': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '30', 'decimal_places': '2'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'shoppingcart.paidcourseregistration': {
'Meta': {'object_name': 'PaidCourseRegistration', '_ormbases': ['shoppingcart.OrderItem']},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '128', 'db_index': 'True'}),
'mode': ('django.db.models.fields.SlugField', [], {'default': "'honor'", 'max_length': '50'}),
'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
},
'shoppingcart.paidcourseregistrationannotation': {
'Meta': {'object_name': 'PaidCourseRegistrationAnnotation'},
'annotation': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'shoppingcart.registrationcoderedemption': {
'Meta': {'object_name': 'RegistrationCodeRedemption'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']", 'null': 'True'}),
'redeemed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 10, 3, 0, 0)', 'null': 'True'}),
'redeemed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'registration_code': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.CourseRegistrationCode']"})
},
'student.courseenrollment': {
'Meta': {'ordering': "('user', 'course_id')", 'unique_together': "(('user', 'course_id'),)", 'object_name': 'CourseEnrollment'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'mode': ('django.db.models.fields.CharField', [], {'default': "'honor'", 'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
}
}
complete_apps = ['shoppingcart']
|
Samuel789/MediPi
|
refs/heads/master
|
MedManagementWeb/env/lib/python3.5/site-packages/django/contrib/auth/views.py
|
10
|
import functools
import warnings
from django.conf import settings
# Avoid shadowing the login() and logout() views below.
from django.contrib.auth import (
REDIRECT_FIELD_NAME, get_user_model, login as auth_login,
logout as auth_logout, update_session_auth_hash,
)
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import (
AuthenticationForm, PasswordChangeForm, PasswordResetForm, SetPasswordForm,
)
from django.contrib.auth.tokens import default_token_generator
from django.contrib.sites.shortcuts import get_current_site
from django.http import HttpResponseRedirect, QueryDict
from django.shortcuts import resolve_url
from django.template.response import TemplateResponse
from django.urls import reverse, reverse_lazy
from django.utils.decorators import method_decorator
from django.utils.deprecation import (
RemovedInDjango20Warning, RemovedInDjango21Warning,
)
from django.utils.encoding import force_text
from django.utils.http import is_safe_url, urlsafe_base64_decode
from django.utils.six.moves.urllib.parse import urlparse, urlunparse
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
from django.views.decorators.debug import sensitive_post_parameters
from django.views.generic.base import TemplateView
from django.views.generic.edit import FormView
UserModel = get_user_model()
def deprecate_current_app(func):
"""
Handle deprecation of the current_app parameter of the views.
"""
@functools.wraps(func)
def inner(*args, **kwargs):
if 'current_app' in kwargs:
warnings.warn(
"Passing `current_app` as a keyword argument is deprecated. "
"Instead the caller of `{0}` should set "
"`request.current_app`.".format(func.__name__),
RemovedInDjango20Warning
)
current_app = kwargs.pop('current_app')
request = kwargs.get('request', None)
if request and current_app is not None:
request.current_app = current_app
return func(*args, **kwargs)
return inner
class SuccessURLAllowedHostsMixin(object):
success_url_allowed_hosts = set()
def get_success_url_allowed_hosts(self):
allowed_hosts = {self.request.get_host()}
allowed_hosts.update(self.success_url_allowed_hosts)
return allowed_hosts
class LoginView(SuccessURLAllowedHostsMixin, FormView):
"""
Displays the login form and handles the login action.
"""
form_class = AuthenticationForm
authentication_form = None
redirect_field_name = REDIRECT_FIELD_NAME
template_name = 'registration/login.html'
redirect_authenticated_user = False
extra_context = None
@method_decorator(sensitive_post_parameters())
@method_decorator(csrf_protect)
@method_decorator(never_cache)
def dispatch(self, request, *args, **kwargs):
if self.redirect_authenticated_user and self.request.user.is_authenticated:
redirect_to = self.get_success_url()
if redirect_to == self.request.path:
raise ValueError(
"Redirection loop for authenticated user detected. Check that "
"your LOGIN_REDIRECT_URL doesn't point to a login page."
)
return HttpResponseRedirect(redirect_to)
return super(LoginView, self).dispatch(request, *args, **kwargs)
def get_success_url(self):
url = self.get_redirect_url()
return url or resolve_url(settings.LOGIN_REDIRECT_URL)
def get_redirect_url(self):
"""Return the user-originating redirect URL if it's safe."""
redirect_to = self.request.POST.get(
self.redirect_field_name,
self.request.GET.get(self.redirect_field_name, '')
)
url_is_safe = is_safe_url(
url=redirect_to,
allowed_hosts=self.get_success_url_allowed_hosts(),
require_https=self.request.is_secure(),
)
return redirect_to if url_is_safe else ''
def get_form_class(self):
return self.authentication_form or self.form_class
def get_form_kwargs(self):
kwargs = super(LoginView, self).get_form_kwargs()
kwargs['request'] = self.request
return kwargs
def form_valid(self, form):
"""Security check complete. Log the user in."""
auth_login(self.request, form.get_user())
return HttpResponseRedirect(self.get_success_url())
def get_context_data(self, **kwargs):
context = super(LoginView, self).get_context_data(**kwargs)
current_site = get_current_site(self.request)
context.update({
self.redirect_field_name: self.get_redirect_url(),
'site': current_site,
'site_name': current_site.name,
})
if self.extra_context is not None:
context.update(self.extra_context)
return context
@deprecate_current_app
def login(request, *args, **kwargs):
warnings.warn(
'The login() view is superseded by the class-based LoginView().',
RemovedInDjango21Warning, stacklevel=2
)
return LoginView.as_view(**kwargs)(request, *args, **kwargs)
class LogoutView(SuccessURLAllowedHostsMixin, TemplateView):
"""
    Logs out the user and displays a 'You are logged out' message.
"""
next_page = None
redirect_field_name = REDIRECT_FIELD_NAME
template_name = 'registration/logged_out.html'
extra_context = None
@method_decorator(never_cache)
def dispatch(self, request, *args, **kwargs):
auth_logout(request)
next_page = self.get_next_page()
if next_page:
# Redirect to this page until the session has been cleared.
return HttpResponseRedirect(next_page)
return super(LogoutView, self).dispatch(request, *args, **kwargs)
def get_next_page(self):
if self.next_page is not None:
next_page = resolve_url(self.next_page)
elif settings.LOGOUT_REDIRECT_URL:
next_page = resolve_url(settings.LOGOUT_REDIRECT_URL)
else:
next_page = self.next_page
if (self.redirect_field_name in self.request.POST or
self.redirect_field_name in self.request.GET):
next_page = self.request.POST.get(
self.redirect_field_name,
self.request.GET.get(self.redirect_field_name)
)
url_is_safe = is_safe_url(
url=next_page,
allowed_hosts=self.get_success_url_allowed_hosts(),
require_https=self.request.is_secure(),
)
# Security check -- Ensure the user-originating redirection URL is
# safe.
if not url_is_safe:
next_page = self.request.path
return next_page
def get_context_data(self, **kwargs):
context = super(LogoutView, self).get_context_data(**kwargs)
current_site = get_current_site(self.request)
context.update({
'site': current_site,
'site_name': current_site.name,
'title': _('Logged out'),
})
if self.extra_context is not None:
context.update(self.extra_context)
return context
@deprecate_current_app
def logout(request, *args, **kwargs):
warnings.warn(
'The logout() view is superseded by the class-based LogoutView().',
RemovedInDjango21Warning, stacklevel=2
)
return LogoutView.as_view(**kwargs)(request, *args, **kwargs)
_sentinel = object()
@deprecate_current_app
def logout_then_login(request, login_url=None, extra_context=_sentinel):
"""
Logs out the user if they are logged in. Then redirects to the log-in page.
"""
if extra_context is not _sentinel:
warnings.warn(
"The unused `extra_context` parameter to `logout_then_login` "
"is deprecated.", RemovedInDjango21Warning
)
if not login_url:
login_url = settings.LOGIN_URL
login_url = resolve_url(login_url)
return LogoutView.as_view(next_page=login_url)(request)
def redirect_to_login(next, login_url=None,
redirect_field_name=REDIRECT_FIELD_NAME):
"""
Redirects the user to the login page, passing the given 'next' page
"""
resolved_url = resolve_url(login_url or settings.LOGIN_URL)
login_url_parts = list(urlparse(resolved_url))
if redirect_field_name:
querystring = QueryDict(login_url_parts[4], mutable=True)
querystring[redirect_field_name] = next
login_url_parts[4] = querystring.urlencode(safe='/')
return HttpResponseRedirect(urlunparse(login_url_parts))
# 4 views for password reset:
# - password_reset sends the mail
# - password_reset_done shows a success message for the above
# - password_reset_confirm checks the link the user clicked and
# prompts for a new password
# - password_reset_complete shows a success message for the above
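#
# A minimal URLconf sketch (illustrative only; the URL names below are the
# ones these views reverse() by default, but the patterns themselves are an
# assumption, not part of this module):
#
#   from django.conf.urls import url
#   from django.contrib.auth import views as auth_views
#
#   urlpatterns = [
#       url(r'^password_reset/$', auth_views.password_reset,
#           name='password_reset'),
#       url(r'^password_reset/done/$', auth_views.password_reset_done,
#           name='password_reset_done'),
#       url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
#           auth_views.password_reset_confirm, name='password_reset_confirm'),
#       url(r'^reset/done/$', auth_views.password_reset_complete,
#           name='password_reset_complete'),
#   ]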
@deprecate_current_app
@csrf_protect
def password_reset(request,
template_name='registration/password_reset_form.html',
email_template_name='registration/password_reset_email.html',
subject_template_name='registration/password_reset_subject.txt',
password_reset_form=PasswordResetForm,
token_generator=default_token_generator,
post_reset_redirect=None,
from_email=None,
extra_context=None,
html_email_template_name=None,
extra_email_context=None):
warnings.warn("The password_reset() view is superseded by the "
"class-based PasswordResetView().",
RemovedInDjango21Warning, stacklevel=2)
if post_reset_redirect is None:
post_reset_redirect = reverse('password_reset_done')
else:
post_reset_redirect = resolve_url(post_reset_redirect)
if request.method == "POST":
form = password_reset_form(request.POST)
if form.is_valid():
opts = {
'use_https': request.is_secure(),
'token_generator': token_generator,
'from_email': from_email,
'email_template_name': email_template_name,
'subject_template_name': subject_template_name,
'request': request,
'html_email_template_name': html_email_template_name,
'extra_email_context': extra_email_context,
}
form.save(**opts)
return HttpResponseRedirect(post_reset_redirect)
else:
form = password_reset_form()
context = {
'form': form,
'title': _('Password reset'),
}
if extra_context is not None:
context.update(extra_context)
return TemplateResponse(request, template_name, context)
@deprecate_current_app
def password_reset_done(request,
template_name='registration/password_reset_done.html',
extra_context=None):
warnings.warn("The password_reset_done() view is superseded by the "
"class-based PasswordResetDoneView().",
RemovedInDjango21Warning, stacklevel=2)
context = {
'title': _('Password reset sent'),
}
if extra_context is not None:
context.update(extra_context)
return TemplateResponse(request, template_name, context)
# Doesn't need csrf_protect since no-one can guess the URL
@sensitive_post_parameters()
@never_cache
@deprecate_current_app
def password_reset_confirm(request, uidb64=None, token=None,
template_name='registration/password_reset_confirm.html',
token_generator=default_token_generator,
set_password_form=SetPasswordForm,
post_reset_redirect=None,
extra_context=None):
"""
View that checks the hash in a password reset link and presents a
form for entering a new password.
"""
warnings.warn("The password_reset_confirm() view is superseded by the "
"class-based PasswordResetConfirmView().",
RemovedInDjango21Warning, stacklevel=2)
assert uidb64 is not None and token is not None # checked by URLconf
if post_reset_redirect is None:
post_reset_redirect = reverse('password_reset_complete')
else:
post_reset_redirect = resolve_url(post_reset_redirect)
try:
# urlsafe_base64_decode() decodes to bytestring on Python 3
uid = force_text(urlsafe_base64_decode(uidb64))
user = UserModel._default_manager.get(pk=uid)
except (TypeError, ValueError, OverflowError, UserModel.DoesNotExist):
user = None
if user is not None and token_generator.check_token(user, token):
validlink = True
title = _('Enter new password')
if request.method == 'POST':
form = set_password_form(user, request.POST)
if form.is_valid():
form.save()
return HttpResponseRedirect(post_reset_redirect)
else:
form = set_password_form(user)
else:
validlink = False
form = None
title = _('Password reset unsuccessful')
context = {
'form': form,
'title': title,
'validlink': validlink,
}
if extra_context is not None:
context.update(extra_context)
return TemplateResponse(request, template_name, context)
@deprecate_current_app
def password_reset_complete(request,
template_name='registration/password_reset_complete.html',
extra_context=None):
warnings.warn("The password_reset_complete() view is superseded by the "
"class-based PasswordResetCompleteView().",
RemovedInDjango21Warning, stacklevel=2)
context = {
'login_url': resolve_url(settings.LOGIN_URL),
'title': _('Password reset complete'),
}
if extra_context is not None:
context.update(extra_context)
return TemplateResponse(request, template_name, context)
# Class-based password reset views
# - PasswordResetView sends the mail
# - PasswordResetDoneView shows a success message for the above
# - PasswordResetConfirmView checks the link the user clicked and
# prompts for a new password
# - PasswordResetCompleteView shows a success message for the above
class PasswordContextMixin(object):
extra_context = None
def get_context_data(self, **kwargs):
context = super(PasswordContextMixin, self).get_context_data(**kwargs)
context['title'] = self.title
if self.extra_context is not None:
context.update(self.extra_context)
return context
class PasswordResetView(PasswordContextMixin, FormView):
email_template_name = 'registration/password_reset_email.html'
extra_email_context = None
form_class = PasswordResetForm
from_email = None
html_email_template_name = None
subject_template_name = 'registration/password_reset_subject.txt'
success_url = reverse_lazy('password_reset_done')
template_name = 'registration/password_reset_form.html'
title = _('Password reset')
token_generator = default_token_generator
@method_decorator(csrf_protect)
def dispatch(self, *args, **kwargs):
return super(PasswordResetView, self).dispatch(*args, **kwargs)
def form_valid(self, form):
opts = {
'use_https': self.request.is_secure(),
'token_generator': self.token_generator,
'from_email': self.from_email,
'email_template_name': self.email_template_name,
'subject_template_name': self.subject_template_name,
'request': self.request,
'html_email_template_name': self.html_email_template_name,
'extra_email_context': self.extra_email_context,
}
form.save(**opts)
return super(PasswordResetView, self).form_valid(form)
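# Sentinels for PasswordResetConfirmView below: the real token from the
# emailed link is stashed in the session under INTERNAL_RESET_SESSION_TOKEN,
# and the URL is rewritten to end in INTERNAL_RESET_URL_TOKEN so the token
# itself never leaks through the HTTP Referer header.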
INTERNAL_RESET_URL_TOKEN = 'set-password'
INTERNAL_RESET_SESSION_TOKEN = '_password_reset_token'
class PasswordResetDoneView(PasswordContextMixin, TemplateView):
template_name = 'registration/password_reset_done.html'
title = _('Password reset sent')
class PasswordResetConfirmView(PasswordContextMixin, FormView):
form_class = SetPasswordForm
post_reset_login = False
post_reset_login_backend = None
success_url = reverse_lazy('password_reset_complete')
template_name = 'registration/password_reset_confirm.html'
title = _('Enter new password')
token_generator = default_token_generator
@method_decorator(sensitive_post_parameters())
@method_decorator(never_cache)
def dispatch(self, *args, **kwargs):
assert 'uidb64' in kwargs and 'token' in kwargs
self.validlink = False
self.user = self.get_user(kwargs['uidb64'])
if self.user is not None:
token = kwargs['token']
if token == INTERNAL_RESET_URL_TOKEN:
session_token = self.request.session.get(INTERNAL_RESET_SESSION_TOKEN)
if self.token_generator.check_token(self.user, session_token):
# If the token is valid, display the password reset form.
self.validlink = True
return super(PasswordResetConfirmView, self).dispatch(*args, **kwargs)
else:
if self.token_generator.check_token(self.user, token):
# Store the token in the session and redirect to the
# password reset form at a URL without the token. That
# avoids the possibility of leaking the token in the
# HTTP Referer header.
self.request.session[INTERNAL_RESET_SESSION_TOKEN] = token
redirect_url = self.request.path.replace(token, INTERNAL_RESET_URL_TOKEN)
return HttpResponseRedirect(redirect_url)
# Display the "Password reset unsuccessful" page.
return self.render_to_response(self.get_context_data())
def get_user(self, uidb64):
try:
# urlsafe_base64_decode() decodes to bytestring on Python 3
uid = force_text(urlsafe_base64_decode(uidb64))
user = UserModel._default_manager.get(pk=uid)
except (TypeError, ValueError, OverflowError, UserModel.DoesNotExist):
user = None
return user
def get_form_kwargs(self):
kwargs = super(PasswordResetConfirmView, self).get_form_kwargs()
kwargs['user'] = self.user
return kwargs
def form_valid(self, form):
user = form.save()
del self.request.session[INTERNAL_RESET_SESSION_TOKEN]
if self.post_reset_login:
auth_login(self.request, user, self.post_reset_login_backend)
return super(PasswordResetConfirmView, self).form_valid(form)
def get_context_data(self, **kwargs):
context = super(PasswordResetConfirmView, self).get_context_data(**kwargs)
if self.validlink:
context['validlink'] = True
else:
context.update({
'form': None,
'title': _('Password reset unsuccessful'),
'validlink': False,
})
return context
class PasswordResetCompleteView(PasswordContextMixin, TemplateView):
template_name = 'registration/password_reset_complete.html'
title = _('Password reset complete')
def get_context_data(self, **kwargs):
context = super(PasswordResetCompleteView, self).get_context_data(**kwargs)
context['login_url'] = resolve_url(settings.LOGIN_URL)
return context
@sensitive_post_parameters()
@csrf_protect
@login_required
@deprecate_current_app
def password_change(request,
template_name='registration/password_change_form.html',
post_change_redirect=None,
password_change_form=PasswordChangeForm,
extra_context=None):
warnings.warn("The password_change() view is superseded by the "
"class-based PasswordChangeView().",
RemovedInDjango21Warning, stacklevel=2)
if post_change_redirect is None:
post_change_redirect = reverse('password_change_done')
else:
post_change_redirect = resolve_url(post_change_redirect)
if request.method == "POST":
form = password_change_form(user=request.user, data=request.POST)
if form.is_valid():
form.save()
# Updating the password logs out all other sessions for the user
# except the current one.
update_session_auth_hash(request, form.user)
return HttpResponseRedirect(post_change_redirect)
else:
form = password_change_form(user=request.user)
context = {
'form': form,
'title': _('Password change'),
}
if extra_context is not None:
context.update(extra_context)
return TemplateResponse(request, template_name, context)
@login_required
@deprecate_current_app
def password_change_done(request,
template_name='registration/password_change_done.html',
extra_context=None):
warnings.warn("The password_change_done() view is superseded by the "
"class-based PasswordChangeDoneView().",
RemovedInDjango21Warning, stacklevel=2)
context = {
'title': _('Password change successful'),
}
if extra_context is not None:
context.update(extra_context)
return TemplateResponse(request, template_name, context)
class PasswordChangeView(PasswordContextMixin, FormView):
form_class = PasswordChangeForm
success_url = reverse_lazy('password_change_done')
template_name = 'registration/password_change_form.html'
title = _('Password change')
@method_decorator(sensitive_post_parameters())
@method_decorator(csrf_protect)
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(PasswordChangeView, self).dispatch(*args, **kwargs)
def get_form_kwargs(self):
kwargs = super(PasswordChangeView, self).get_form_kwargs()
kwargs['user'] = self.request.user
return kwargs
def form_valid(self, form):
form.save()
# Updating the password logs out all other sessions for the user
# except the current one.
update_session_auth_hash(self.request, form.user)
return super(PasswordChangeView, self).form_valid(form)
class PasswordChangeDoneView(PasswordContextMixin, TemplateView):
template_name = 'registration/password_change_done.html'
title = _('Password change successful')
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(PasswordChangeDoneView, self).dispatch(*args, **kwargs)
|
antiagainst/shaderc
|
refs/heads/master
|
glslc/test/option_dash_E.py
|
8
|
# Copyright 2015 The Shaderc Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import expect
from glslc_test_framework import inside_glslc_testsuite
from placeholder import FileShader, StdinShader
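# Each test below is a class registered with @inside_glslc_testsuite; as used
# here, the framework runs glslc with glslc_args (FileShader/StdinShader
# placeholders expand to a temp file or stdin input) and matches the result
# against expected_stdout, expected_error, or expected_file_contents.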
@inside_glslc_testsuite('OptionCapE')
class TestDashCapENoDefs(expect.StdoutMatch):
"""Tests -E without any defines."""
shader = FileShader('#version 140\nvoid main(){}', '.vert')
expected_stdout = '#version 140\nvoid main(){ }\n'
glslc_args = ['-E', shader]
@inside_glslc_testsuite('OptionCapE')
class TestDashCapEGlslFileAccepted(expect.StdoutMatch):
"""Tests -E if we provide a .glsl file without an explicit stage."""
shader = FileShader('#version 140\nvoid main(){}', '.glsl')
expected_stdout = '#version 140\nvoid main(){ }\n'
glslc_args = ['-E', shader]
@inside_glslc_testsuite('OptionCapE')
class TestDashCapESingleDefine(expect.StdoutMatch):
"""Tests -E with command-line define."""
shader = FileShader('#version 140\nvoid main(){ int a = X; }', '.vert')
expected_stdout = '#version 140\nvoid main(){ int a = 4;}\n'
glslc_args = ['-DX=4', '-E', shader]
@inside_glslc_testsuite('OptionCapE')
class TestDashCapEExpansion(expect.StdoutMatch):
"""Tests -E with macro expansion."""
shader = FileShader('''#version 140
#define X 4
void main() {
int a = X;
}
''', '.vert')
expected_stdout = '''#version 140
void main(){
int a = 4;
}
'''
glslc_args = ['-E', shader]
@inside_glslc_testsuite('OptionCapE')
class TestDashCapEFunctionMacro(expect.StdoutMatch):
"""Tests -E with function-style macro expansion."""
shader = FileShader('''#version 140
#define X(A) 4+A
void main() {
int a = X(1);
}
''', '.vert')
expected_stdout = '''#version 140
void main(){
int a = 4 + 1;
}
'''
glslc_args = ['-E', shader]
@inside_glslc_testsuite('OptionCapE')
class TestDashCapEPragma(expect.StdoutMatch):
"""Tests -E to make sure pragmas get retained."""
shader = FileShader('''#version 140
#pragma optimize(off)
void main() {
}
''', '.vert')
expected_stdout = '''#version 140
#pragma optimize(off)
void main(){
}
'''
glslc_args = ['-E', shader]
@inside_glslc_testsuite('OptionCapE')
class TestDashCapEExtension(expect.StdoutMatch):
"""Tests -E to make sure extensions get retained."""
shader = FileShader('''#version 140
#extension foo: require
void main() {
}
''', '.vert')
expected_stdout = '''#version 140
#extension foo : require
void main(){
}
'''
glslc_args = ['-E', shader]
@inside_glslc_testsuite('OptionCapE')
class TestDashCapELine(expect.StdoutMatch):
"""Tests -E to make sure line numbers get retained."""
shader = FileShader('''#version 140
#define X 4
#line X
#line 2 3
void main() {
}
''', '.vert')
expected_stdout = '''#version 140
#line 4
#line 2 3
void main(){
}
'''
glslc_args = ['-E', shader]
@inside_glslc_testsuite('OptionCapE')
class TestDashCapEError(expect.ErrorMessage):
"""Tests -E to make sure #errors get retained."""
shader = FileShader('''#version 140
#if 1
#error This is an error
#endif
void main() {
}
''', '.vert')
expected_error = [
shader, ':3: error: \'#error\' : This is an error\n',
'1 error generated.\n']
glslc_args = ['-E', shader]
@inside_glslc_testsuite('OptionCapE')
class TestDashCapEStdin(expect.StdoutMatch):
"""Tests to make sure -E works with stdin."""
shader = StdinShader('''#version 140
void main() {
}
''')
expected_stdout = '''#version 140
void main(){
}
'''
glslc_args = ['-E', '-fshader-stage=vertex', shader]
@inside_glslc_testsuite('OptionCapE')
class TestDashCapEStdinDoesNotRequireShaderStage(expect.StdoutMatch):
"""Tests to make sure -E works with stdin even when no shader-stage
is specified."""
shader = StdinShader('''#version 140
void main() {
}
''')
expected_stdout = '''#version 140
void main(){
}
'''
glslc_args = ['-E', shader]
@inside_glslc_testsuite('OptionCapE')
class TestDashCapEMultipleFiles(expect.StdoutMatch):
"""Tests to make sure -E works with multiple files."""
shader = StdinShader('''#version 140
void main() {
}
''')
shader2 = FileShader('''#version 140
void function() {
}
''', '.vert')
expected_stdout = '''#version 140
void main(){
}
#version 140
void function(){
}
'''
glslc_args = ['-E', '-fshader-stage=vertex', shader, shader2]
@inside_glslc_testsuite('OptionCapE')
class TestDashCapEMultipleFilesWithoutStage(expect.StdoutMatch):
"""Tests to make sure -E works with multiple files even if we do not
specify a stage."""
shader = StdinShader('''#version 140
void main() {
}
''')
shader2 = FileShader('''#version 140
void function() {
}
''', '.glsl')
expected_stdout = '''#version 140
void main(){
}
#version 140
void function(){
}
'''
glslc_args = ['-E', shader, shader2]
@inside_glslc_testsuite('OptionCapE')
class TestDashCapEOutputFile(expect.SuccessfulReturn, expect.ValidFileContents):
"""Tests to make sure -E works with output files."""
shader = FileShader('''#version 140
void function() {
}
''', '.vert')
expected_file_contents = '''#version 140
void function(){
}
'''
target_filename = 'foo'
glslc_args = ['-E', shader, '-ofoo']
@inside_glslc_testsuite('OptionCapE')
class TestDashCapEWithS(expect.StdoutMatch):
"""Tests -E in the presence of -S."""
shader = FileShader('#version 140\nvoid main(){}', '.vert')
expected_stdout = '#version 140\nvoid main(){ }\n'
glslc_args = ['-E', '-S', shader]
@inside_glslc_testsuite('OptionCapE')
class TestMultipleDashCapE(expect.StdoutMatch):
"""Tests that using -E multiple times works."""
shader = FileShader('#version 140\nvoid main(){}', '.vert')
expected_stdout = '#version 140\nvoid main(){ }\n'
glslc_args = ['-E', '-E', shader, '-E']
@inside_glslc_testsuite('OptionCapE')
class TestDashCapEAfterFile(expect.StdoutMatch):
"""Tests that using -E after the filename also works."""
shader = FileShader('#version 140\nvoid main(){}', '.vert')
expected_stdout = '#version 140\nvoid main(){ }\n'
glslc_args = [shader, '-E']
@inside_glslc_testsuite('OptionCapE')
class TestDashCapEWithDashC(expect.StdoutMatch):
"""Tests to make sure -E works in the presence of -c."""
shader = FileShader('''#version 140
void main() {
}
''', '.vert')
shader2 = FileShader('''#version 140
void function() {
}
''', '.vert')
expected_stdout = '''#version 140
void main(){
}
#version 140
void function(){
}
'''
glslc_args = ['-E', '-c', shader, shader2]
@inside_glslc_testsuite('OptionCapE')
class TestDashCapEWithPPErrors(expect.ErrorMessage):
"""Tests to make sure -E outputs error messages for preprocessing errors."""
shader = FileShader('''#version 310 es
#extension s enable // missing :
#defin A // Bad define
#if X // In glsl X must be defined for X to work.
// Lack of endif.
void main() {
}
''', '.vert')
expected_error = [
shader, ':2: error: \'#extension\' : \':\' missing after extension',
' name\n',
shader, ':3: error: \'#\' : invalid directive: defin\n',
shader, ':4: error: \'preprocessor evaluation\' : undefined macro in',
' expression not allowed in es profile X\n',
shader, ':8: error: \'\' : missing #endif\n',
'4 errors generated.\n']
glslc_args = ['-E', shader]
@inside_glslc_testsuite('OptionCapE')
class TestDashCapEStdinErrors(expect.ErrorMessage):
"""Tests that -E outputs error messages correctly for stdin input."""
shader = StdinShader('''#version 310 es
#extension s enable // missing :
void main() {
}
''')
expected_error = [
'<stdin>:2: error: \'#extension\' : \':\' missing after extension',
' name\n',
'1 error generated.\n']
glslc_args = ['-E', shader]
# OpenGL compatibility fragment shader. Can be compiled to SPIR-V successfully
# when target environment is set to opengl_compat. Compilation will fail when
# target environment is set to other values. (gl_FragColor is predefined only
# in the compatibility profile.) But preprocessing should succeed with any
# target environment values.
def opengl_compat_frag_shader():
return '''#version 330
uniform highp sampler2D tex;
void main(){
gl_FragColor = texture2D(tex, vec2(0.0, 0.0));
}\n'''
@inside_glslc_testsuite('OptionCapE')
class TestDashCapEIgnoresTargetEnvOpengl(expect.StdoutMatch):
"""Tests that --target-env=opengl is ignored when -E is set."""
shader = FileShader(opengl_compat_frag_shader(), '.frag')
expected_stdout = opengl_compat_frag_shader()
glslc_args = ['-E', '--target-env=opengl', shader]
@inside_glslc_testsuite('OptionCapE')
class TestDashCapEIgnoresTargetEnvOpenglCompat(expect.StdoutMatch):
"""Tests that --target-env=opengl_compat is ignored when -E is set."""
shader = FileShader(opengl_compat_frag_shader(), '.frag')
expected_stdout = opengl_compat_frag_shader()
glslc_args = ['-E', '--target-env=opengl_compat', shader]
|
Eloston/YAMCL
|
refs/heads/master
|
yamcl/libraries.py
|
1
|
'''
YAMCL is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
YAMCL is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with YAMCL. If not, see <http://www.gnu.org/licenses/>.
'''
import copy
import hashlib
import pathlib
from yamcl.globals import URL
from yamcl.tools import FileTools
class LibraryManager:
def __init__(self, launcher_obj):
self.Launcher = launcher_obj
self.download_exclusive = True
self.BASE_PATH = self.Launcher.ROOT_PATH.joinpath("lib")
self.index_path = str(self.BASE_PATH.joinpath("index.json"))
self.index = FileTools.read_json(self.index_path)
def _flush_index(self):
FileTools.write_json(self.index_path, self.index)
def get_all_library_ids(self):
return list(self.index.keys())
def is_download_exclusive(self):
return self.download_exclusive
def set_download_exclusive(self, value):
self.download_exclusive = value
def is_library_existant(self, library_metadata):
return library_metadata.get_id() in self.index
def is_natives_existant(self, library_metadata, natives_extension):
natives_directory = self.BASE_PATH.joinpath(*self.index[library_metadata.get_id()]["path"]).joinpath(natives_extension)
return natives_directory.exists()
def get_library_path(self, library_id):
return pathlib.Path(*self.index[library_id]["path"])
def _download_library(self, library_metadata):
if self.download_exclusive and not library_metadata.current_system_supported():
return
if library_metadata.is_natives():
if self.download_exclusive:
all_extensions = [library_metadata.get_current_system_natives_extension()]
else:
all_extensions = library_metadata.get_all_natives_extensions()
natives_list = all_extensions
if self.is_library_existant(library_metadata):
if library_metadata.is_natives():
natives_list = list()
for current_extension in all_extensions:
if not self.is_natives_existant(library_metadata, current_extension):
natives_list.append(current_extension)
if not natives_list:
return # Natives already exists
else:
return # Library already exists
if library_metadata.is_natives():
download_list = library_metadata.get_download_list(natives_list)
else:
download_list = library_metadata.get_download_list()
for current_library in download_list:
current_tries = 1
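# Up to three attempts per file: fetch the published .sha1 for the jar,
# download the jar, and keep it only if the SHA-1 digest matches; natives
# jars are additionally extracted and the jar itself deleted.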
while current_tries <= 3:
correct_hash = current_library["hash"].url_object().read().decode("UTF-8")
FileTools.write_object(str(self.BASE_PATH.joinpath(current_library["path"])), current_library["url"].url_object())
hasher = hashlib.sha1()
hasher.update(open(str(self.BASE_PATH.joinpath(current_library["path"])), mode="rb").read())
if hasher.hexdigest() == correct_hash:
if library_metadata.is_natives():
natives_directory = self.BASE_PATH.joinpath(current_library["path"].parent.joinpath(current_library["natives_extension"]))
jar_path = str(self.BASE_PATH.joinpath(current_library["path"]))
FileTools.extract_jar_files(FileTools.get_jar_object(jar_path), str(natives_directory), library_metadata.get_natives_exclude())
FileTools.delete_and_clean(jar_path)
break
else:
current_tries += 1
if current_tries > 3: # all three download attempts failed the hash check
raise Exception("Failed to download library " + library_metadata.get_id()) # TODO: More appropriate exception
self.index[library_metadata.get_id()] = dict()
if library_metadata.is_natives():
self.index[library_metadata.get_id()]["path"] = download_list[0]["path"].parent.parts
else:
self.index[library_metadata.get_id()]["path"] = download_list[0]["path"].parts
def download_missing(self, library_metadata_list, progress_function=None):
if progress_function is not None:
downloaded_count = 0
progress_function("Downloading libraries", 0)
for current_metadata in library_metadata_list:
self._download_library(current_metadata)
if progress_function is not None:
downloaded_count += 1
progress_function("Downloading libraries", downloaded_count/len(library_metadata_list))
self._flush_index()
def get_platform_paths(self, library_metadata_list):
libraries_dict = dict()
libraries_dict["jars"] = list()
libraries_dict["natives"] = list()
for current_metadata in library_metadata_list:
if current_metadata.current_system_supported():
if not self.is_library_existant(current_metadata):
raise Exception("Library", current_metadata.get_id(), "does not exist") # TODO: More appropriate exception
base_path = self.BASE_PATH.joinpath(self.get_library_path(current_metadata.get_id()))
if current_metadata.is_natives():
current_extension = current_metadata.get_current_system_natives_extension()
if not self.is_natives_existant(current_metadata, current_extension):
raise Exception("Natives", current_extension, "for library", current_metadata.get_id(), "does not exist") # TODO: More appropriate exception
libraries_dict["natives"].append(str(base_path.joinpath(current_extension)))
else:
libraries_dict["jars"].append(str(base_path))
return libraries_dict
def add_local(self, library_id, is_natives, source_paths, destination_path):
'''
source_paths is a list:
- for a regular library, it should contain a single path string to a jar
- for natives, it should contain paths to every natives directory
destination_path points to a jar or a directory (for regular or natives,
respectively), given either as a list of path components relative to the
library directory or as an absolute path string
'''
if library_id in self.index:
raise Exception("Library already exists") # TODO: More appropriate exception
if isinstance(destination_path, list):
final_path = self.BASE_PATH.joinpath(*destination_path)
else:
final_path = pathlib.Path(destination_path) # Assuming absolute
if is_natives:
for current_source in source_paths:
try:
FileTools.copy(current_source, str(final_path.joinpath(FileTools.get_file_name(current_source))))
except FileExistsError:
pass
else:
FileTools.copy(source_paths[0], str(final_path))
self.index[library_id] = dict()
self.index[library_id]["path"] = list(final_path.relative_to(self.BASE_PATH).parts)
self._flush_index()
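# Illustrative usage sketch (the identifiers and paths are made up, not part
# of YAMCL): registering a plain jar under the managed library directory:
#
#   manager.add_local("com.example:mylib:1.0", False,
#                     ["/tmp/mylib-1.0.jar"],
#                     ["com", "example", "mylib", "1.0", "mylib-1.0.jar"])
#
# copies the jar beneath BASE_PATH and records its relative path in the index.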
def delete(self, library_id):
if library_id not in self.index:
raise Exception("Library does not exist") # TODO: More appropriate exception
FileTools.delete_and_clean(str(self.BASE_PATH.joinpath(self.get_library_path(library_id))))
del self.index[library_id]
self._flush_index()
def rename(self, current_library_id, new_library_id):
if current_library_id not in self.index:
raise Exception("Cannot rename library: " + current_library_id + " does not exist")
if new_library_id in self.index:
raise Exception("Cannot rename library: " + new_library_id + " already exists")
self.index[new_library_id] = self.index[current_library_id]
del self.index[current_library_id]
self._flush_index()
def get_unused_libraries(self, binary_metadata_list):
'''
Returns a list of the installed library IDs that are not used by any binary metadata in 'binary_metadata_list'
'''
used_library_ids = list()
for binary_metadata in binary_metadata_list:
for library_metadata in binary_metadata.get_library_metadatas():
if self.is_library_existant(library_metadata) and not (library_metadata.get_id() in used_library_ids):
used_library_ids.append(library_metadata.get_id())
junk_library_ids = list()
for installed_id in self.index:
if installed_id not in used_library_ids:
junk_library_ids.append(installed_id)
return junk_library_ids
class LibraryMetadata:
def __init__(self, launcher_obj, metadata_dict):
self.Launcher = launcher_obj
self.library_info = metadata_dict
self.ARCH_KEY = "${arch}"
def get_metadata_dict(self):
return self.library_info
def get_rules(self):
'''
Gets the platform rules (allow/disallow) for this library's metadata dictionary
'''
tmp_rules = dict()
for current in self.Launcher.PLATFORM_LIST:
tmp_rules[current] = True
if ("rules" in self.library_info):
for current in self.Launcher.PLATFORM_LIST:
tmp_rules[current] = False
rules_list = self.library_info["rules"]
for current_rule in rules_list:
is_allow = current_rule["action"] == "allow"
if ("os" in current_rule):
tmp_rules[current_rule["os"]["name"]] = is_allow
else:
for current in self.Launcher.PLATFORM_LIST:
tmp_rules[current] = is_allow
return tmp_rules
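# For example, a "rules" list of
#   [{"action": "allow"}, {"action": "disallow", "os": {"name": "osx"}}]
# yields True for every platform in PLATFORM_LIST except "osx"; with no
# "rules" key at all, every platform is allowed.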
def set_rules(self, os_rules):
self.library_info["rules"] = list()
for current_platform in os_rules:
new_platform_dict = dict()
if os_rules[current_platform] is True:
new_platform_dict["action"] = "allow"
elif os_rules[current_platform] is False:
new_platform_dict["action"] = "disallow"
else:
raise Exception("Invalid OS rule for " + str(current_platform))
new_platform_dict["os"] = dict()
new_platform_dict["os"]["name"] = current_platform
self.library_info["rules"].append(new_platform_dict)
def current_system_supported(self):
rules_dict = self.get_rules()
current_family = self.Launcher.PlatformTools.get_os_family()
return rules_dict[current_family]
def is_natives(self):
return "natives" in self.library_info
def get_natives_exclude(self):
return self.library_info["extract"]["exclude"]
def get_id(self):
return self.library_info["name"]
def _path_by_id(self, natives_extension=str()):
'''
Works only for official library IDs
Returns a list for a relative path
'''
parts = self.get_id().split(":")
new_path = parts[0].split(".")
del parts[0]
new_path += parts
if (len(natives_extension) > 0):
new_path.append("-".join(parts) + "-" + natives_extension + ".jar")
else:
new_path.append("-".join(parts) + ".jar")
return new_path
def set_natives_extensions(self, natives_extensions_dict):
self.library_info["natives"] = natives_extensions_dict
def get_raw_natives_extensions(self):
return self.library_info["natives"]
def get_current_system_natives_extension(self):
if not self.is_natives():
raise Exception("Current library is not natives") # TODO: More appropriate exception
if not self.current_system_supported():
raise Exception("Current system not supported") # TODO: More appropriate exception
current_family = self.Launcher.PlatformTools.get_os_family()
current_natives_extension = self.library_info["natives"][current_family]
if self.ARCH_KEY in current_natives_extension:
current_natives_extension = current_natives_extension.replace(self.ARCH_KEY, self.Launcher.PlatformTools.get_java_arch())
return current_natives_extension
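# e.g. a raw natives extension of "natives-windows-${arch}" becomes
# "natives-windows-64" when the running JVM reports a 64-bit architecture.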
def get_all_natives_extensions(self):
if not self.is_natives():
raise Exception("Current library is not natives") # TODO: More appropriate exception
natives_extension_list = list()
for current_platform in self.get_raw_natives_extensions().keys():
if self.get_rules()[current_platform]:
current_extension = self.get_raw_natives_extensions()[current_platform]
if self.ARCH_KEY in current_extension:
natives_extension_list.append(current_extension.replace(self.ARCH_KEY, "32"))
natives_extension_list.append(current_extension.replace(self.ARCH_KEY, "64"))
else:
natives_extension_list.append(current_extension)
return natives_extension_list
def get_download_list(self, natives_extension_list=None):
'''
Returns a list containing dictionaries in the form of:
[
{
"url": URL(), # The JAR file
"hash": URL(), # The SHA-1 sum file for the JAR
"path": Path() # The relative path
},
...
]
There can be more than one entry if the library is natives
'''
download_list = list()
if self.is_natives():
if natives_extension_list is None:
raise Exception("No natives extensions specified for a natives library") # TODO: More appropriate exception
else:
natives_extension_list = [str()]
for current_extension in natives_extension_list:
relative_path = self._path_by_id(current_extension)
info_dict = dict()
info_dict["natives_extension"] = current_extension
info_dict["url"] = URL(relative_path, URL.LIBRARIES)
info_dict["path"] = pathlib.Path(*relative_path)
hash_path = copy.copy(relative_path)
hash_path[-1] += ".sha1"
info_dict["hash"] = URL(hash_path, URL.LIBRARIES)
download_list.append(info_dict)
return download_list
|
newyork167/volatility
|
refs/heads/master
|
volatility/plugins/overlays/windows/xp_sp3_x86_vtypes.py
|
58
|
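# Volatility vtype definitions for the Windows XP SP3 x86 kernel (ntkrnlmp),
# typically generated from debug symbols. Each entry maps a structure name to
# [total_size, {member_name: [offset, type_descriptor]}]; members that share
# an offset (e.g. 'u' and 'QuadPart' in _LARGE_INTEGER, both at 0x0) model C
# unions, and 'BitField'/'Enumeration' descriptors carry their parameters in
# a dict.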
ntkrnlmp_types = {
'LIST_ENTRY64' : [ 0x10, {
'Flink' : [ 0x0, ['unsigned long long']],
'Blink' : [ 0x8, ['unsigned long long']],
} ],
'LIST_ENTRY32' : [ 0x8, {
'Flink' : [ 0x0, ['unsigned long']],
'Blink' : [ 0x4, ['unsigned long']],
} ],
'_LIST_ENTRY' : [ 0x8, {
'Flink' : [ 0x0, ['pointer', ['_LIST_ENTRY']]],
'Blink' : [ 0x4, ['pointer', ['_LIST_ENTRY']]],
} ],
'_IMAGE_NT_HEADERS' : [ 0xf8, {
'Signature' : [ 0x0, ['unsigned long']],
'FileHeader' : [ 0x4, ['_IMAGE_FILE_HEADER']],
'OptionalHeader' : [ 0x18, ['_IMAGE_OPTIONAL_HEADER']],
} ],
'__unnamed_1016' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
} ],
'_LARGE_INTEGER' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
'u' : [ 0x0, ['__unnamed_1016']],
'QuadPart' : [ 0x0, ['long long']],
} ],
'__unnamed_101b' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['unsigned long']],
} ],
'_ULARGE_INTEGER' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['unsigned long']],
'u' : [ 0x0, ['__unnamed_101b']],
'QuadPart' : [ 0x0, ['unsigned long long']],
} ],
'_LUID' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
} ],
'_KAPC' : [ 0x30, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'Spare0' : [ 0x4, ['unsigned long']],
'Thread' : [ 0x8, ['pointer', ['_KTHREAD']]],
'ApcListEntry' : [ 0xc, ['_LIST_ENTRY']],
'KernelRoutine' : [ 0x14, ['pointer', ['void']]],
'RundownRoutine' : [ 0x18, ['pointer', ['void']]],
'NormalRoutine' : [ 0x1c, ['pointer', ['void']]],
'NormalContext' : [ 0x20, ['pointer', ['void']]],
'SystemArgument1' : [ 0x24, ['pointer', ['void']]],
'SystemArgument2' : [ 0x28, ['pointer', ['void']]],
'ApcStateIndex' : [ 0x2c, ['unsigned char']],
'ApcMode' : [ 0x2d, ['unsigned char']],
'Inserted' : [ 0x2e, ['unsigned char']],
} ],
'_SINGLE_LIST_ENTRY' : [ 0x4, {
'Next' : [ 0x0, ['pointer', ['_SINGLE_LIST_ENTRY']]],
} ],
'_KPRCB' : [ 0xc50, {
'MinorVersion' : [ 0x0, ['unsigned short']],
'MajorVersion' : [ 0x2, ['unsigned short']],
'CurrentThread' : [ 0x4, ['pointer', ['_KTHREAD']]],
'NextThread' : [ 0x8, ['pointer', ['_KTHREAD']]],
'IdleThread' : [ 0xc, ['pointer', ['_KTHREAD']]],
'Number' : [ 0x10, ['unsigned char']],
'Reserved' : [ 0x11, ['unsigned char']],
'BuildType' : [ 0x12, ['unsigned short']],
'SetMember' : [ 0x14, ['unsigned long']],
'CpuType' : [ 0x18, ['unsigned char']],
'CpuID' : [ 0x19, ['unsigned char']],
'CpuStep' : [ 0x1a, ['unsigned short']],
'ProcessorState' : [ 0x1c, ['_KPROCESSOR_STATE']],
'KernelReserved' : [ 0x33c, ['array', 16, ['unsigned long']]],
'HalReserved' : [ 0x37c, ['array', 16, ['unsigned long']]],
'PrcbPad0' : [ 0x3bc, ['array', 92, ['unsigned char']]],
'LockQueue' : [ 0x418, ['array', 16, ['_KSPIN_LOCK_QUEUE']]],
'PrcbPad1' : [ 0x498, ['array', 8, ['unsigned char']]],
'NpxThread' : [ 0x4a0, ['pointer', ['_KTHREAD']]],
'InterruptCount' : [ 0x4a4, ['unsigned long']],
'KernelTime' : [ 0x4a8, ['unsigned long']],
'UserTime' : [ 0x4ac, ['unsigned long']],
'DpcTime' : [ 0x4b0, ['unsigned long']],
'DebugDpcTime' : [ 0x4b4, ['unsigned long']],
'InterruptTime' : [ 0x4b8, ['unsigned long']],
'AdjustDpcThreshold' : [ 0x4bc, ['unsigned long']],
'PageColor' : [ 0x4c0, ['unsigned long']],
'SkipTick' : [ 0x4c4, ['unsigned long']],
'MultiThreadSetBusy' : [ 0x4c8, ['unsigned char']],
'Spare2' : [ 0x4c9, ['array', 3, ['unsigned char']]],
'ParentNode' : [ 0x4cc, ['pointer', ['_KNODE']]],
'MultiThreadProcessorSet' : [ 0x4d0, ['unsigned long']],
'MultiThreadSetMaster' : [ 0x4d4, ['pointer', ['_KPRCB']]],
'ThreadStartCount' : [ 0x4d8, ['array', 2, ['unsigned long']]],
'CcFastReadNoWait' : [ 0x4e0, ['unsigned long']],
'CcFastReadWait' : [ 0x4e4, ['unsigned long']],
'CcFastReadNotPossible' : [ 0x4e8, ['unsigned long']],
'CcCopyReadNoWait' : [ 0x4ec, ['unsigned long']],
'CcCopyReadWait' : [ 0x4f0, ['unsigned long']],
'CcCopyReadNoWaitMiss' : [ 0x4f4, ['unsigned long']],
'KeAlignmentFixupCount' : [ 0x4f8, ['unsigned long']],
'KeContextSwitches' : [ 0x4fc, ['unsigned long']],
'KeDcacheFlushCount' : [ 0x500, ['unsigned long']],
'KeExceptionDispatchCount' : [ 0x504, ['unsigned long']],
'KeFirstLevelTbFills' : [ 0x508, ['unsigned long']],
'KeFloatingEmulationCount' : [ 0x50c, ['unsigned long']],
'KeIcacheFlushCount' : [ 0x510, ['unsigned long']],
'KeSecondLevelTbFills' : [ 0x514, ['unsigned long']],
'KeSystemCalls' : [ 0x518, ['unsigned long']],
'SpareCounter0' : [ 0x51c, ['array', 1, ['unsigned long']]],
'PPLookasideList' : [ 0x520, ['array', 16, ['_PP_LOOKASIDE_LIST']]],
'PPNPagedLookasideList' : [ 0x5a0, ['array', 32, ['_PP_LOOKASIDE_LIST']]],
'PPPagedLookasideList' : [ 0x6a0, ['array', 32, ['_PP_LOOKASIDE_LIST']]],
'PacketBarrier' : [ 0x7a0, ['unsigned long']],
'ReverseStall' : [ 0x7a4, ['unsigned long']],
'IpiFrame' : [ 0x7a8, ['pointer', ['void']]],
'PrcbPad2' : [ 0x7ac, ['array', 52, ['unsigned char']]],
'CurrentPacket' : [ 0x7e0, ['array', 3, ['pointer', ['void']]]],
'TargetSet' : [ 0x7ec, ['unsigned long']],
'WorkerRoutine' : [ 0x7f0, ['pointer', ['void']]],
'IpiFrozen' : [ 0x7f4, ['unsigned long']],
'PrcbPad3' : [ 0x7f8, ['array', 40, ['unsigned char']]],
'RequestSummary' : [ 0x820, ['unsigned long']],
'SignalDone' : [ 0x824, ['pointer', ['_KPRCB']]],
'PrcbPad4' : [ 0x828, ['array', 56, ['unsigned char']]],
'DpcListHead' : [ 0x860, ['_LIST_ENTRY']],
'DpcStack' : [ 0x868, ['pointer', ['void']]],
'DpcCount' : [ 0x86c, ['unsigned long']],
'DpcQueueDepth' : [ 0x870, ['unsigned long']],
'DpcRoutineActive' : [ 0x874, ['unsigned long']],
'DpcInterruptRequested' : [ 0x878, ['unsigned long']],
'DpcLastCount' : [ 0x87c, ['unsigned long']],
'DpcRequestRate' : [ 0x880, ['unsigned long']],
'MaximumDpcQueueDepth' : [ 0x884, ['unsigned long']],
'MinimumDpcRate' : [ 0x888, ['unsigned long']],
'QuantumEnd' : [ 0x88c, ['unsigned long']],
'PrcbPad5' : [ 0x890, ['array', 16, ['unsigned char']]],
'DpcLock' : [ 0x8a0, ['unsigned long']],
'PrcbPad6' : [ 0x8a4, ['array', 28, ['unsigned char']]],
'CallDpc' : [ 0x8c0, ['_KDPC']],
'ChainedInterruptList' : [ 0x8e0, ['pointer', ['void']]],
'LookasideIrpFloat' : [ 0x8e4, ['long']],
'SpareFields0' : [ 0x8e8, ['array', 6, ['unsigned long']]],
'VendorString' : [ 0x900, ['array', 13, ['unsigned char']]],
'InitialApicId' : [ 0x90d, ['unsigned char']],
'LogicalProcessorsPerPhysicalProcessor' : [ 0x90e, ['unsigned char']],
'MHz' : [ 0x910, ['unsigned long']],
'FeatureBits' : [ 0x914, ['unsigned long']],
'UpdateSignature' : [ 0x918, ['_LARGE_INTEGER']],
'NpxSaveArea' : [ 0x920, ['_FX_SAVE_AREA']],
'PowerState' : [ 0xb30, ['_PROCESSOR_POWER_STATE']],
} ],
'_SLIST_HEADER' : [ 0x8, {
'Alignment' : [ 0x0, ['unsigned long long']],
'Next' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Depth' : [ 0x4, ['unsigned short']],
'Sequence' : [ 0x6, ['unsigned short']],
} ],
'_NPAGED_LOOKASIDE_LIST' : [ 0x100, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE']],
'Lock__ObsoleteButDoNotDelete' : [ 0x80, ['unsigned long']],
} ],
'_PAGED_LOOKASIDE_LIST' : [ 0x100, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE']],
'Lock__ObsoleteButDoNotDelete' : [ 0x80, ['_FAST_MUTEX']],
} ],
'_GENERAL_LOOKASIDE' : [ 0x80, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'Depth' : [ 0x8, ['unsigned short']],
'MaximumDepth' : [ 0xa, ['unsigned short']],
'TotalAllocates' : [ 0xc, ['unsigned long']],
'AllocateMisses' : [ 0x10, ['unsigned long']],
'AllocateHits' : [ 0x10, ['unsigned long']],
'TotalFrees' : [ 0x14, ['unsigned long']],
'FreeMisses' : [ 0x18, ['unsigned long']],
'FreeHits' : [ 0x18, ['unsigned long']],
'Type' : [ 0x1c, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'Tag' : [ 0x20, ['unsigned long']],
'Size' : [ 0x24, ['unsigned long']],
'Allocate' : [ 0x28, ['pointer', ['void']]],
'Free' : [ 0x2c, ['pointer', ['void']]],
'ListEntry' : [ 0x30, ['_LIST_ENTRY']],
'LastTotalAllocates' : [ 0x38, ['unsigned long']],
'LastAllocateMisses' : [ 0x3c, ['unsigned long']],
'LastAllocateHits' : [ 0x3c, ['unsigned long']],
'Future' : [ 0x40, ['array', 2, ['unsigned long']]],
} ],
'_EX_RUNDOWN_REF' : [ 0x4, {
'Count' : [ 0x0, ['unsigned long']],
'Ptr' : [ 0x0, ['pointer', ['void']]],
} ],
'_EX_FAST_REF' : [ 0x4, {
'Object' : [ 0x0, ['pointer', ['void']]],
'RefCnt' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned long')]],
'Value' : [ 0x0, ['unsigned long']],
} ],
'_EX_PUSH_LOCK' : [ 0x4, {
'Waiting' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Exclusive' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Shared' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
'Value' : [ 0x0, ['unsigned long']],
'Ptr' : [ 0x0, ['pointer', ['void']]],
} ],
'_EX_PUSH_LOCK_WAIT_BLOCK' : [ 0x1c, {
'WakeEvent' : [ 0x0, ['_KEVENT']],
'Next' : [ 0x10, ['pointer', ['_EX_PUSH_LOCK_WAIT_BLOCK']]],
'ShareCount' : [ 0x14, ['unsigned long']],
'Exclusive' : [ 0x18, ['unsigned char']],
} ],
'_EX_PUSH_LOCK_CACHE_AWARE' : [ 0x80, {
'Locks' : [ 0x0, ['array', 32, ['pointer', ['_EX_PUSH_LOCK']]]],
} ],
'_ETHREAD' : [ 0x258, {
'Tcb' : [ 0x0, ['_KTHREAD']],
'CreateTime' : [ 0x1c0, ['_LARGE_INTEGER']],
'NestedFaultCount' : [ 0x1c0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long')]],
'ApcNeeded' : [ 0x1c0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ExitTime' : [ 0x1c8, ['_LARGE_INTEGER']],
'LpcReplyChain' : [ 0x1c8, ['_LIST_ENTRY']],
'KeyedWaitChain' : [ 0x1c8, ['_LIST_ENTRY']],
'ExitStatus' : [ 0x1d0, ['long']],
'OfsChain' : [ 0x1d0, ['pointer', ['void']]],
'PostBlockList' : [ 0x1d4, ['_LIST_ENTRY']],
'TerminationPort' : [ 0x1dc, ['pointer', ['_TERMINATION_PORT']]],
'ReaperLink' : [ 0x1dc, ['pointer', ['_ETHREAD']]],
'KeyedWaitValue' : [ 0x1dc, ['pointer', ['void']]],
'ActiveTimerListLock' : [ 0x1e0, ['unsigned long']],
'ActiveTimerListHead' : [ 0x1e4, ['_LIST_ENTRY']],
'Cid' : [ 0x1ec, ['_CLIENT_ID']],
'LpcReplySemaphore' : [ 0x1f4, ['_KSEMAPHORE']],
'KeyedWaitSemaphore' : [ 0x1f4, ['_KSEMAPHORE']],
'LpcReplyMessage' : [ 0x208, ['pointer', ['void']]],
'LpcWaitingOnPort' : [ 0x208, ['pointer', ['void']]],
'ImpersonationInfo' : [ 0x20c, ['pointer', ['_PS_IMPERSONATION_INFORMATION']]],
'IrpList' : [ 0x210, ['_LIST_ENTRY']],
'TopLevelIrp' : [ 0x218, ['unsigned long']],
'DeviceToVerify' : [ 0x21c, ['pointer', ['_DEVICE_OBJECT']]],
'ThreadsProcess' : [ 0x220, ['pointer', ['_EPROCESS']]],
'StartAddress' : [ 0x224, ['pointer', ['void']]],
'Win32StartAddress' : [ 0x228, ['pointer', ['void']]],
'LpcReceivedMessageId' : [ 0x228, ['unsigned long']],
'ThreadListEntry' : [ 0x22c, ['_LIST_ENTRY']],
'RundownProtect' : [ 0x234, ['_EX_RUNDOWN_REF']],
'ThreadLock' : [ 0x238, ['_EX_PUSH_LOCK']],
'LpcReplyMessageId' : [ 0x23c, ['unsigned long']],
'ReadClusterSize' : [ 0x240, ['unsigned long']],
'GrantedAccess' : [ 0x244, ['unsigned long']],
'CrossThreadFlags' : [ 0x248, ['unsigned long']],
'Terminated' : [ 0x248, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DeadThread' : [ 0x248, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'HideFromDebugger' : [ 0x248, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ActiveImpersonationInfo' : [ 0x248, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'SystemThread' : [ 0x248, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'HardErrorsAreDisabled' : [ 0x248, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'BreakOnTermination' : [ 0x248, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SkipCreationMsg' : [ 0x248, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'SkipTerminationMsg' : [ 0x248, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'SameThreadPassiveFlags' : [ 0x24c, ['unsigned long']],
'ActiveExWorker' : [ 0x24c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ExWorkerCanWaitUser' : [ 0x24c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'MemoryMaker' : [ 0x24c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'SameThreadApcFlags' : [ 0x250, ['unsigned long']],
'LpcReceivedMsgIdValid' : [ 0x250, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'LpcExitThreadCalled' : [ 0x250, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'AddressSpaceOwner' : [ 0x250, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'ForwardClusterOnly' : [ 0x254, ['unsigned char']],
'DisablePageFaultClustering' : [ 0x255, ['unsigned char']],
} ],
'_EPROCESS' : [ 0x260, {
'Pcb' : [ 0x0, ['_KPROCESS']],
'ProcessLock' : [ 0x6c, ['_EX_PUSH_LOCK']],
'CreateTime' : [ 0x70, ['_LARGE_INTEGER']],
'ExitTime' : [ 0x78, ['_LARGE_INTEGER']],
'RundownProtect' : [ 0x80, ['_EX_RUNDOWN_REF']],
'UniqueProcessId' : [ 0x84, ['pointer', ['void']]],
'ActiveProcessLinks' : [ 0x88, ['_LIST_ENTRY']],
'QuotaUsage' : [ 0x90, ['array', 3, ['unsigned long']]],
'QuotaPeak' : [ 0x9c, ['array', 3, ['unsigned long']]],
'CommitCharge' : [ 0xa8, ['unsigned long']],
'PeakVirtualSize' : [ 0xac, ['unsigned long']],
'VirtualSize' : [ 0xb0, ['unsigned long']],
'SessionProcessLinks' : [ 0xb4, ['_LIST_ENTRY']],
'DebugPort' : [ 0xbc, ['pointer', ['void']]],
'ExceptionPort' : [ 0xc0, ['pointer', ['void']]],
'ObjectTable' : [ 0xc4, ['pointer', ['_HANDLE_TABLE']]],
'Token' : [ 0xc8, ['_EX_FAST_REF']],
'WorkingSetLock' : [ 0xcc, ['_FAST_MUTEX']],
'WorkingSetPage' : [ 0xec, ['unsigned long']],
'AddressCreationLock' : [ 0xf0, ['_FAST_MUTEX']],
'HyperSpaceLock' : [ 0x110, ['unsigned long']],
'ForkInProgress' : [ 0x114, ['pointer', ['_ETHREAD']]],
'HardwareTrigger' : [ 0x118, ['unsigned long']],
'VadRoot' : [ 0x11c, ['pointer', ['void']]],
'VadHint' : [ 0x120, ['pointer', ['void']]],
'CloneRoot' : [ 0x124, ['pointer', ['void']]],
'NumberOfPrivatePages' : [ 0x128, ['unsigned long']],
'NumberOfLockedPages' : [ 0x12c, ['unsigned long']],
'Win32Process' : [ 0x130, ['pointer', ['void']]],
'Job' : [ 0x134, ['pointer', ['_EJOB']]],
'SectionObject' : [ 0x138, ['pointer', ['void']]],
'SectionBaseAddress' : [ 0x13c, ['pointer', ['void']]],
'QuotaBlock' : [ 0x140, ['pointer', ['_EPROCESS_QUOTA_BLOCK']]],
'WorkingSetWatch' : [ 0x144, ['pointer', ['_PAGEFAULT_HISTORY']]],
'Win32WindowStation' : [ 0x148, ['pointer', ['void']]],
'InheritedFromUniqueProcessId' : [ 0x14c, ['pointer', ['void']]],
'LdtInformation' : [ 0x150, ['pointer', ['void']]],
'VadFreeHint' : [ 0x154, ['pointer', ['void']]],
'VdmObjects' : [ 0x158, ['pointer', ['void']]],
'DeviceMap' : [ 0x15c, ['pointer', ['void']]],
'PhysicalVadList' : [ 0x160, ['_LIST_ENTRY']],
'PageDirectoryPte' : [ 0x168, ['_HARDWARE_PTE']],
'Filler' : [ 0x168, ['unsigned long long']],
'Session' : [ 0x170, ['pointer', ['void']]],
'ImageFileName' : [ 0x174, ['array', 16, ['unsigned char']]],
'JobLinks' : [ 0x184, ['_LIST_ENTRY']],
'LockedPagesList' : [ 0x18c, ['pointer', ['void']]],
'ThreadListHead' : [ 0x190, ['_LIST_ENTRY']],
'SecurityPort' : [ 0x198, ['pointer', ['void']]],
'PaeTop' : [ 0x19c, ['pointer', ['void']]],
'ActiveThreads' : [ 0x1a0, ['unsigned long']],
'GrantedAccess' : [ 0x1a4, ['unsigned long']],
'DefaultHardErrorProcessing' : [ 0x1a8, ['unsigned long']],
'LastThreadExitStatus' : [ 0x1ac, ['long']],
'Peb' : [ 0x1b0, ['pointer', ['_PEB']]],
'PrefetchTrace' : [ 0x1b4, ['_EX_FAST_REF']],
'ReadOperationCount' : [ 0x1b8, ['_LARGE_INTEGER']],
'WriteOperationCount' : [ 0x1c0, ['_LARGE_INTEGER']],
'OtherOperationCount' : [ 0x1c8, ['_LARGE_INTEGER']],
'ReadTransferCount' : [ 0x1d0, ['_LARGE_INTEGER']],
'WriteTransferCount' : [ 0x1d8, ['_LARGE_INTEGER']],
'OtherTransferCount' : [ 0x1e0, ['_LARGE_INTEGER']],
'CommitChargeLimit' : [ 0x1e8, ['unsigned long']],
'CommitChargePeak' : [ 0x1ec, ['unsigned long']],
'AweInfo' : [ 0x1f0, ['pointer', ['void']]],
'SeAuditProcessCreationInfo' : [ 0x1f4, ['_SE_AUDIT_PROCESS_CREATION_INFO']],
'Vm' : [ 0x1f8, ['_MMSUPPORT']],
'LastFaultCount' : [ 0x238, ['unsigned long']],
'ModifiedPageCount' : [ 0x23c, ['unsigned long']],
'NumberOfVads' : [ 0x240, ['unsigned long']],
'JobStatus' : [ 0x244, ['unsigned long']],
'Flags' : [ 0x248, ['unsigned long']],
'CreateReported' : [ 0x248, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'NoDebugInherit' : [ 0x248, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessExiting' : [ 0x248, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessDelete' : [ 0x248, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Wow64SplitPages' : [ 0x248, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'VmDeleted' : [ 0x248, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'OutswapEnabled' : [ 0x248, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Outswapped' : [ 0x248, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ForkFailed' : [ 0x248, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'HasPhysicalVad' : [ 0x248, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'AddressSpaceInitialized' : [ 0x248, ['BitField', dict(start_bit = 10, end_bit = 12, native_type='unsigned long')]],
'SetTimerResolution' : [ 0x248, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'BreakOnTermination' : [ 0x248, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'SessionCreationUnderway' : [ 0x248, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'WriteWatch' : [ 0x248, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'ProcessInSession' : [ 0x248, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'OverrideAddressSpace' : [ 0x248, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'HasAddressSpace' : [ 0x248, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'LaunchPrefetched' : [ 0x248, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'InjectInpageErrors' : [ 0x248, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'VmTopDown' : [ 0x248, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'Unused3' : [ 0x248, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'Unused4' : [ 0x248, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'VdmAllowed' : [ 0x248, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'Unused' : [ 0x248, ['BitField', dict(start_bit = 25, end_bit = 30, native_type='unsigned long')]],
'Unused1' : [ 0x248, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'Unused2' : [ 0x248, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'ExitStatus' : [ 0x24c, ['long']],
'NextPageColor' : [ 0x250, ['unsigned short']],
'SubSystemMinorVersion' : [ 0x252, ['unsigned char']],
'SubSystemMajorVersion' : [ 0x253, ['unsigned char']],
'SubSystemVersion' : [ 0x252, ['unsigned short']],
'PriorityClass' : [ 0x254, ['unsigned char']],
'WorkingSetAcquiredUnsafe' : [ 0x255, ['unsigned char']],
'Cookie' : [ 0x258, ['unsigned long']],
} ],
'_OBJECT_ATTRIBUTES' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'RootDirectory' : [ 0x4, ['pointer', ['void']]],
'ObjectName' : [ 0x8, ['pointer', ['_UNICODE_STRING']]],
'Attributes' : [ 0xc, ['unsigned long']],
'SecurityDescriptor' : [ 0x10, ['pointer', ['void']]],
'SecurityQualityOfService' : [ 0x14, ['pointer', ['void']]],
} ],
'_OBJECT_TYPE' : [ 0x190, {
'Mutex' : [ 0x0, ['_ERESOURCE']],
'TypeList' : [ 0x38, ['_LIST_ENTRY']],
'Name' : [ 0x40, ['_UNICODE_STRING']],
'DefaultObject' : [ 0x48, ['pointer', ['void']]],
'Index' : [ 0x4c, ['unsigned long']],
'TotalNumberOfObjects' : [ 0x50, ['unsigned long']],
'TotalNumberOfHandles' : [ 0x54, ['unsigned long']],
'HighWaterNumberOfObjects' : [ 0x58, ['unsigned long']],
'HighWaterNumberOfHandles' : [ 0x5c, ['unsigned long']],
'TypeInfo' : [ 0x60, ['_OBJECT_TYPE_INITIALIZER']],
'Key' : [ 0xac, ['unsigned long']],
'ObjectLocks' : [ 0xb0, ['array', 4, ['_ERESOURCE']]],
} ],
'_OBJECT_HANDLE_INFORMATION' : [ 0x8, {
'HandleAttributes' : [ 0x0, ['unsigned long']],
'GrantedAccess' : [ 0x4, ['unsigned long']],
} ],
'_KTHREAD' : [ 0x1c0, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'MutantListHead' : [ 0x10, ['_LIST_ENTRY']],
'InitialStack' : [ 0x18, ['pointer', ['void']]],
'StackLimit' : [ 0x1c, ['pointer', ['void']]],
'Teb' : [ 0x20, ['pointer', ['void']]],
'TlsArray' : [ 0x24, ['pointer', ['void']]],
'KernelStack' : [ 0x28, ['pointer', ['void']]],
'DebugActive' : [ 0x2c, ['unsigned char']],
'State' : [ 0x2d, ['unsigned char']],
'Alerted' : [ 0x2e, ['array', 2, ['unsigned char']]],
'Iopl' : [ 0x30, ['unsigned char']],
'NpxState' : [ 0x31, ['unsigned char']],
'Saturation' : [ 0x32, ['unsigned char']],
'Priority' : [ 0x33, ['unsigned char']],
'ApcState' : [ 0x34, ['_KAPC_STATE']],
'ContextSwitches' : [ 0x4c, ['unsigned long']],
'IdleSwapBlock' : [ 0x50, ['unsigned char']],
'Spare0' : [ 0x51, ['array', 3, ['unsigned char']]],
'WaitStatus' : [ 0x54, ['long']],
'WaitIrql' : [ 0x58, ['unsigned char']],
'WaitMode' : [ 0x59, ['unsigned char']],
'WaitNext' : [ 0x5a, ['unsigned char']],
'WaitReason' : [ 0x5b, ['unsigned char']],
'WaitBlockList' : [ 0x5c, ['pointer', ['_KWAIT_BLOCK']]],
'WaitListEntry' : [ 0x60, ['_LIST_ENTRY']],
'SwapListEntry' : [ 0x60, ['_SINGLE_LIST_ENTRY']],
'WaitTime' : [ 0x68, ['unsigned long']],
'BasePriority' : [ 0x6c, ['unsigned char']],
'DecrementCount' : [ 0x6d, ['unsigned char']],
'PriorityDecrement' : [ 0x6e, ['unsigned char']],
'Quantum' : [ 0x6f, ['unsigned char']],
'WaitBlock' : [ 0x70, ['array', 4, ['_KWAIT_BLOCK']]],
'LegoData' : [ 0xd0, ['pointer', ['void']]],
'KernelApcDisable' : [ 0xd4, ['unsigned long']],
'UserAffinity' : [ 0xd8, ['unsigned long']],
'SystemAffinityActive' : [ 0xdc, ['unsigned char']],
'PowerState' : [ 0xdd, ['unsigned char']],
'NpxIrql' : [ 0xde, ['unsigned char']],
'InitialNode' : [ 0xdf, ['unsigned char']],
'ServiceTable' : [ 0xe0, ['pointer', ['void']]],
'Queue' : [ 0xe4, ['pointer', ['_KQUEUE']]],
'ApcQueueLock' : [ 0xe8, ['unsigned long']],
'Timer' : [ 0xf0, ['_KTIMER']],
'QueueListEntry' : [ 0x118, ['_LIST_ENTRY']],
'SoftAffinity' : [ 0x120, ['unsigned long']],
'Affinity' : [ 0x124, ['unsigned long']],
'Preempted' : [ 0x128, ['unsigned char']],
'ProcessReadyQueue' : [ 0x129, ['unsigned char']],
'KernelStackResident' : [ 0x12a, ['unsigned char']],
'NextProcessor' : [ 0x12b, ['unsigned char']],
'CallbackStack' : [ 0x12c, ['pointer', ['void']]],
'Win32Thread' : [ 0x130, ['pointer', ['void']]],
'TrapFrame' : [ 0x134, ['pointer', ['_KTRAP_FRAME']]],
'ApcStatePointer' : [ 0x138, ['array', 2, ['pointer', ['_KAPC_STATE']]]],
'PreviousMode' : [ 0x140, ['unsigned char']],
'EnableStackSwap' : [ 0x141, ['unsigned char']],
'LargeStack' : [ 0x142, ['unsigned char']],
'ResourceIndex' : [ 0x143, ['unsigned char']],
'KernelTime' : [ 0x144, ['unsigned long']],
'UserTime' : [ 0x148, ['unsigned long']],
'SavedApcState' : [ 0x14c, ['_KAPC_STATE']],
'Alertable' : [ 0x164, ['unsigned char']],
'ApcStateIndex' : [ 0x165, ['unsigned char']],
'ApcQueueable' : [ 0x166, ['unsigned char']],
'AutoAlignment' : [ 0x167, ['unsigned char']],
'StackBase' : [ 0x168, ['pointer', ['void']]],
'SuspendApc' : [ 0x16c, ['_KAPC']],
'SuspendSemaphore' : [ 0x19c, ['_KSEMAPHORE']],
'ThreadListEntry' : [ 0x1b0, ['_LIST_ENTRY']],
'FreezeCount' : [ 0x1b8, ['unsigned char']],
'SuspendCount' : [ 0x1b9, ['unsigned char']],
'IdealProcessor' : [ 0x1ba, ['unsigned char']],
'DisableBoost' : [ 0x1bb, ['unsigned char']],
} ],
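# Usage sketch (illustrative names, not part of the generated profile): each
# entry maps a member to [offset, type], so a member's address is simply
# base + offset. For a hypothetical kthread_base on this x86 layout:
#   teb_ptr_addr   = kthread_base + 0x20   # 'Teb', 4-byte pointer
#   kernel_time_va = kthread_base + 0x144  # 'KernelTime', unsigned long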
'__unnamed_10f2' : [ 0x208, {
'FnArea' : [ 0x0, ['_FNSAVE_FORMAT']],
'FxArea' : [ 0x0, ['_FXSAVE_FORMAT']],
} ],
'_FX_SAVE_AREA' : [ 0x210, {
'U' : [ 0x0, ['__unnamed_10f2']],
'NpxSavedCpu' : [ 0x208, ['unsigned long']],
'Cr0NpxState' : [ 0x20c, ['unsigned long']],
} ],
'__unnamed_10fe' : [ 0x4, {
'Long' : [ 0x0, ['unsigned long']],
'Hard' : [ 0x0, ['_MMPTE_HARDWARE']],
'Flush' : [ 0x0, ['_HARDWARE_PTE']],
'Proto' : [ 0x0, ['_MMPTE_PROTOTYPE']],
'Soft' : [ 0x0, ['_MMPTE_SOFTWARE']],
'Trans' : [ 0x0, ['_MMPTE_TRANSITION']],
'Subsect' : [ 0x0, ['_MMPTE_SUBSECTION']],
'List' : [ 0x0, ['_MMPTE_LIST']],
} ],
'_MMPTE' : [ 0x4, {
'u' : [ 0x0, ['__unnamed_10fe']],
} ],
'_EXCEPTION_RECORD64' : [ 0x98, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['unsigned long long']],
'ExceptionAddress' : [ 0x10, ['unsigned long long']],
'NumberParameters' : [ 0x18, ['unsigned long']],
'__unusedAlignment' : [ 0x1c, ['unsigned long']],
'ExceptionInformation' : [ 0x20, ['array', 15, ['unsigned long long']]],
} ],
'_EXCEPTION_RECORD32' : [ 0x50, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['unsigned long']],
'ExceptionAddress' : [ 0xc, ['unsigned long']],
'NumberParameters' : [ 0x10, ['unsigned long']],
'ExceptionInformation' : [ 0x14, ['array', 15, ['unsigned long']]],
} ],
'_DBGKM_EXCEPTION64' : [ 0xa0, {
'ExceptionRecord' : [ 0x0, ['_EXCEPTION_RECORD64']],
'FirstChance' : [ 0x98, ['unsigned long']],
} ],
'_DBGKM_EXCEPTION32' : [ 0x54, {
'ExceptionRecord' : [ 0x0, ['_EXCEPTION_RECORD32']],
'FirstChance' : [ 0x50, ['unsigned long']],
} ],
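# Size check for the two exception-record layouts above: 0x98 = 0x20 + 15*8
# (ExceptionInformation as unsigned long long) and 0x50 = 0x14 + 15*4 (the
# 32-bit variant), so the layouts differ only in pointer/ULONG_PTR width.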
'_DBGKD_LOAD_SYMBOLS64' : [ 0x28, {
'PathNameLength' : [ 0x0, ['unsigned long']],
'BaseOfDll' : [ 0x8, ['unsigned long long']],
'ProcessId' : [ 0x10, ['unsigned long long']],
'CheckSum' : [ 0x18, ['unsigned long']],
'SizeOfImage' : [ 0x1c, ['unsigned long']],
'UnloadSymbols' : [ 0x20, ['unsigned char']],
} ],
'_DBGKD_LOAD_SYMBOLS32' : [ 0x18, {
'PathNameLength' : [ 0x0, ['unsigned long']],
'BaseOfDll' : [ 0x4, ['unsigned long']],
'ProcessId' : [ 0x8, ['unsigned long']],
'CheckSum' : [ 0xc, ['unsigned long']],
'SizeOfImage' : [ 0x10, ['unsigned long']],
'UnloadSymbols' : [ 0x14, ['unsigned char']],
} ],
'_DBGKD_READ_MEMORY64' : [ 0x10, {
'TargetBaseAddress' : [ 0x0, ['unsigned long long']],
'TransferCount' : [ 0x8, ['unsigned long']],
'ActualBytesRead' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_READ_MEMORY32' : [ 0xc, {
'TargetBaseAddress' : [ 0x0, ['unsigned long']],
'TransferCount' : [ 0x4, ['unsigned long']],
'ActualBytesRead' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_MEMORY64' : [ 0x10, {
'TargetBaseAddress' : [ 0x0, ['unsigned long long']],
'TransferCount' : [ 0x8, ['unsigned long']],
'ActualBytesWritten' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_WRITE_MEMORY32' : [ 0xc, {
'TargetBaseAddress' : [ 0x0, ['unsigned long']],
'TransferCount' : [ 0x4, ['unsigned long']],
'ActualBytesWritten' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_BREAKPOINT64' : [ 0x10, {
'BreakPointAddress' : [ 0x0, ['unsigned long long']],
'BreakPointHandle' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_BREAKPOINT32' : [ 0x8, {
'BreakPointAddress' : [ 0x0, ['unsigned long']],
'BreakPointHandle' : [ 0x4, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO64' : [ 0x10, {
'IoAddress' : [ 0x0, ['unsigned long long']],
'DataSize' : [ 0x8, ['unsigned long']],
'DataValue' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO32' : [ 0xc, {
'DataSize' : [ 0x0, ['unsigned long']],
'IoAddress' : [ 0x4, ['unsigned long']],
'DataValue' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO_EXTENDED64' : [ 0x20, {
'DataSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['unsigned long']],
'BusNumber' : [ 0x8, ['unsigned long']],
'AddressSpace' : [ 0xc, ['unsigned long']],
'IoAddress' : [ 0x10, ['unsigned long long']],
'DataValue' : [ 0x18, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO_EXTENDED32' : [ 0x18, {
'DataSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['unsigned long']],
'BusNumber' : [ 0x8, ['unsigned long']],
'AddressSpace' : [ 0xc, ['unsigned long']],
'IoAddress' : [ 0x10, ['unsigned long']],
'DataValue' : [ 0x14, ['unsigned long']],
} ],
'_DBGKD_SET_SPECIAL_CALL32' : [ 0x4, {
'SpecialCall' : [ 0x0, ['unsigned long']],
} ],
'_DBGKD_SET_SPECIAL_CALL64' : [ 0x8, {
'SpecialCall' : [ 0x0, ['unsigned long long']],
} ],
'_DBGKD_SET_INTERNAL_BREAKPOINT32' : [ 0x8, {
'BreakpointAddress' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
} ],
'_DBGKD_SET_INTERNAL_BREAKPOINT64' : [ 0x10, {
'BreakpointAddress' : [ 0x0, ['unsigned long long']],
'Flags' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_GET_INTERNAL_BREAKPOINT64' : [ 0x20, {
'BreakpointAddress' : [ 0x0, ['unsigned long long']],
'Flags' : [ 0x8, ['unsigned long']],
'Calls' : [ 0xc, ['unsigned long']],
'MaxCallsPerPeriod' : [ 0x10, ['unsigned long']],
'MinInstructions' : [ 0x14, ['unsigned long']],
'MaxInstructions' : [ 0x18, ['unsigned long']],
'TotalInstructions' : [ 0x1c, ['unsigned long']],
} ],
'_DBGKD_GET_INTERNAL_BREAKPOINT32' : [ 0x1c, {
'BreakpointAddress' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
'Calls' : [ 0x8, ['unsigned long']],
'MaxCallsPerPeriod' : [ 0xc, ['unsigned long']],
'MinInstructions' : [ 0x10, ['unsigned long']],
'MaxInstructions' : [ 0x14, ['unsigned long']],
'TotalInstructions' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_116f' : [ 0x28, {
'ReadMemory' : [ 0x0, ['_DBGKD_READ_MEMORY64']],
'WriteMemory' : [ 0x0, ['_DBGKD_WRITE_MEMORY64']],
'GetContext' : [ 0x0, ['_DBGKD_GET_CONTEXT']],
'SetContext' : [ 0x0, ['_DBGKD_SET_CONTEXT']],
'WriteBreakPoint' : [ 0x0, ['_DBGKD_WRITE_BREAKPOINT64']],
'RestoreBreakPoint' : [ 0x0, ['_DBGKD_RESTORE_BREAKPOINT']],
'Continue' : [ 0x0, ['_DBGKD_CONTINUE']],
'Continue2' : [ 0x0, ['_DBGKD_CONTINUE2']],
'ReadWriteIo' : [ 0x0, ['_DBGKD_READ_WRITE_IO64']],
'ReadWriteIoExtended' : [ 0x0, ['_DBGKD_READ_WRITE_IO_EXTENDED64']],
'QuerySpecialCalls' : [ 0x0, ['_DBGKD_QUERY_SPECIAL_CALLS']],
'SetSpecialCall' : [ 0x0, ['_DBGKD_SET_SPECIAL_CALL64']],
'SetInternalBreakpoint' : [ 0x0, ['_DBGKD_SET_INTERNAL_BREAKPOINT64']],
'GetInternalBreakpoint' : [ 0x0, ['_DBGKD_GET_INTERNAL_BREAKPOINT64']],
'GetVersion64' : [ 0x0, ['_DBGKD_GET_VERSION64']],
'BreakPointEx' : [ 0x0, ['_DBGKD_BREAKPOINTEX']],
'ReadWriteMsr' : [ 0x0, ['_DBGKD_READ_WRITE_MSR']],
'SearchMemory' : [ 0x0, ['_DBGKD_SEARCH_MEMORY']],
'GetSetBusData' : [ 0x0, ['_DBGKD_GET_SET_BUS_DATA']],
'FillMemory' : [ 0x0, ['_DBGKD_FILL_MEMORY']],
'QueryMemory' : [ 0x0, ['_DBGKD_QUERY_MEMORY']],
} ],
'_DBGKD_MANIPULATE_STATE64' : [ 0x38, {
'ApiNumber' : [ 0x0, ['unsigned long']],
'ProcessorLevel' : [ 0x4, ['unsigned short']],
'Processor' : [ 0x6, ['unsigned short']],
'ReturnStatus' : [ 0x8, ['long']],
'u' : [ 0x10, ['__unnamed_116f']],
} ],
'__unnamed_1176' : [ 0x28, {
'ReadMemory' : [ 0x0, ['_DBGKD_READ_MEMORY32']],
'WriteMemory' : [ 0x0, ['_DBGKD_WRITE_MEMORY32']],
'ReadMemory64' : [ 0x0, ['_DBGKD_READ_MEMORY64']],
'WriteMemory64' : [ 0x0, ['_DBGKD_WRITE_MEMORY64']],
'GetContext' : [ 0x0, ['_DBGKD_GET_CONTEXT']],
'SetContext' : [ 0x0, ['_DBGKD_SET_CONTEXT']],
'WriteBreakPoint' : [ 0x0, ['_DBGKD_WRITE_BREAKPOINT32']],
'RestoreBreakPoint' : [ 0x0, ['_DBGKD_RESTORE_BREAKPOINT']],
'Continue' : [ 0x0, ['_DBGKD_CONTINUE']],
'Continue2' : [ 0x0, ['_DBGKD_CONTINUE2']],
'ReadWriteIo' : [ 0x0, ['_DBGKD_READ_WRITE_IO32']],
'ReadWriteIoExtended' : [ 0x0, ['_DBGKD_READ_WRITE_IO_EXTENDED32']],
'QuerySpecialCalls' : [ 0x0, ['_DBGKD_QUERY_SPECIAL_CALLS']],
'SetSpecialCall' : [ 0x0, ['_DBGKD_SET_SPECIAL_CALL32']],
'SetInternalBreakpoint' : [ 0x0, ['_DBGKD_SET_INTERNAL_BREAKPOINT32']],
'GetInternalBreakpoint' : [ 0x0, ['_DBGKD_GET_INTERNAL_BREAKPOINT32']],
'GetVersion32' : [ 0x0, ['_DBGKD_GET_VERSION32']],
'BreakPointEx' : [ 0x0, ['_DBGKD_BREAKPOINTEX']],
'ReadWriteMsr' : [ 0x0, ['_DBGKD_READ_WRITE_MSR']],
'SearchMemory' : [ 0x0, ['_DBGKD_SEARCH_MEMORY']],
} ],
'_DBGKD_MANIPULATE_STATE32' : [ 0x34, {
'ApiNumber' : [ 0x0, ['unsigned long']],
'ProcessorLevel' : [ 0x4, ['unsigned short']],
'Processor' : [ 0x6, ['unsigned short']],
'ReturnStatus' : [ 0x8, ['long']],
'u' : [ 0xc, ['__unnamed_1176']],
} ],
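# Note (an inference from the offsets above): the 64-bit manipulate-state
# payload 'u' sits at 0x10 rather than 0xc, consistent with 8-byte alignment
# of the unsigned long long members inside __unnamed_116f; the 32-bit
# variant packs 'u' immediately after ReturnStatus.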
'__unnamed_117f' : [ 0x8, {
'FileOffset' : [ 0x0, ['_LARGE_INTEGER']],
'ActiveCount' : [ 0x0, ['unsigned short']],
} ],
'_VACB' : [ 0x18, {
'BaseAddress' : [ 0x0, ['pointer', ['void']]],
'SharedCacheMap' : [ 0x4, ['pointer', ['_SHARED_CACHE_MAP']]],
'Overlay' : [ 0x8, ['__unnamed_117f']],
'LruList' : [ 0x10, ['_LIST_ENTRY']],
} ],
'_SHARED_CACHE_MAP' : [ 0x130, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'OpenCount' : [ 0x4, ['unsigned long']],
'FileSize' : [ 0x8, ['_LARGE_INTEGER']],
'BcbList' : [ 0x10, ['_LIST_ENTRY']],
'SectionSize' : [ 0x18, ['_LARGE_INTEGER']],
'ValidDataLength' : [ 0x20, ['_LARGE_INTEGER']],
'ValidDataGoal' : [ 0x28, ['_LARGE_INTEGER']],
'InitialVacbs' : [ 0x30, ['array', 4, ['pointer', ['_VACB']]]],
'Vacbs' : [ 0x40, ['pointer', ['pointer', ['_VACB']]]],
'FileObject' : [ 0x44, ['pointer', ['_FILE_OBJECT']]],
'ActiveVacb' : [ 0x48, ['pointer', ['_VACB']]],
'NeedToZero' : [ 0x4c, ['pointer', ['void']]],
'ActivePage' : [ 0x50, ['unsigned long']],
'NeedToZeroPage' : [ 0x54, ['unsigned long']],
'ActiveVacbSpinLock' : [ 0x58, ['unsigned long']],
'VacbActiveCount' : [ 0x5c, ['unsigned long']],
'DirtyPages' : [ 0x60, ['unsigned long']],
'SharedCacheMapLinks' : [ 0x64, ['_LIST_ENTRY']],
'Flags' : [ 0x6c, ['unsigned long']],
'Status' : [ 0x70, ['long']],
'Mbcb' : [ 0x74, ['pointer', ['_MBCB']]],
'Section' : [ 0x78, ['pointer', ['void']]],
'CreateEvent' : [ 0x7c, ['pointer', ['_KEVENT']]],
'WaitOnActiveCount' : [ 0x80, ['pointer', ['_KEVENT']]],
'PagesToWrite' : [ 0x84, ['unsigned long']],
'BeyondLastFlush' : [ 0x88, ['long long']],
'Callbacks' : [ 0x90, ['pointer', ['_CACHE_MANAGER_CALLBACKS']]],
'LazyWriteContext' : [ 0x94, ['pointer', ['void']]],
'PrivateList' : [ 0x98, ['_LIST_ENTRY']],
'LogHandle' : [ 0xa0, ['pointer', ['void']]],
'FlushToLsnRoutine' : [ 0xa4, ['pointer', ['void']]],
'DirtyPageThreshold' : [ 0xa8, ['unsigned long']],
'LazyWritePassCount' : [ 0xac, ['unsigned long']],
'UninitializeEvent' : [ 0xb0, ['pointer', ['_CACHE_UNINITIALIZE_EVENT']]],
'NeedToZeroVacb' : [ 0xb4, ['pointer', ['_VACB']]],
'BcbSpinLock' : [ 0xb8, ['unsigned long']],
'Reserved' : [ 0xbc, ['pointer', ['void']]],
'Event' : [ 0xc0, ['_KEVENT']],
'VacbPushLock' : [ 0xd0, ['_EX_PUSH_LOCK']],
'PrivateCacheMap' : [ 0xd8, ['_PRIVATE_CACHE_MAP']],
} ],
'_VACB_LEVEL_REFERENCE' : [ 0x8, {
'Reference' : [ 0x0, ['long']],
'SpecialReference' : [ 0x4, ['long']],
} ],
'_HEAP_ENTRY' : [ 0x8, {
'Size' : [ 0x0, ['unsigned short']],
'PreviousSize' : [ 0x2, ['unsigned short']],
'SubSegmentCode' : [ 0x0, ['pointer', ['void']]],
'SmallTagIndex' : [ 0x4, ['unsigned char']],
'Flags' : [ 0x5, ['unsigned char']],
'UnusedBytes' : [ 0x6, ['unsigned char']],
'SegmentIndex' : [ 0x7, ['unsigned char']],
} ],
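# Members that share an offset (e.g. Size/PreviousSize vs. SubSegmentCode at
# 0x0 in _HEAP_ENTRY) are flattened unions: the generator emits every union
# arm at its common offset and the consumer picks whichever view applies.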
'__unnamed_11a9' : [ 0x10, {
'FreeListsInUseUlong' : [ 0x0, ['array', 4, ['unsigned long']]],
'FreeListsInUseBytes' : [ 0x0, ['array', 16, ['unsigned char']]],
} ],
'__unnamed_11ab' : [ 0x2, {
'FreeListsInUseTerminate' : [ 0x0, ['unsigned short']],
'DecommitCount' : [ 0x0, ['unsigned short']],
} ],
'_HEAP' : [ 0x588, {
'Entry' : [ 0x0, ['_HEAP_ENTRY']],
'Signature' : [ 0x8, ['unsigned long']],
'Flags' : [ 0xc, ['unsigned long']],
'ForceFlags' : [ 0x10, ['unsigned long']],
'VirtualMemoryThreshold' : [ 0x14, ['unsigned long']],
'SegmentReserve' : [ 0x18, ['unsigned long']],
'SegmentCommit' : [ 0x1c, ['unsigned long']],
'DeCommitFreeBlockThreshold' : [ 0x20, ['unsigned long']],
'DeCommitTotalFreeThreshold' : [ 0x24, ['unsigned long']],
'TotalFreeSize' : [ 0x28, ['unsigned long']],
'MaximumAllocationSize' : [ 0x2c, ['unsigned long']],
'ProcessHeapsListIndex' : [ 0x30, ['unsigned short']],
'HeaderValidateLength' : [ 0x32, ['unsigned short']],
'HeaderValidateCopy' : [ 0x34, ['pointer', ['void']]],
'NextAvailableTagIndex' : [ 0x38, ['unsigned short']],
'MaximumTagIndex' : [ 0x3a, ['unsigned short']],
'TagEntries' : [ 0x3c, ['pointer', ['_HEAP_TAG_ENTRY']]],
'UCRSegments' : [ 0x40, ['pointer', ['_HEAP_UCR_SEGMENT']]],
'UnusedUnCommittedRanges' : [ 0x44, ['pointer', ['_HEAP_UNCOMMMTTED_RANGE']]],
'AlignRound' : [ 0x48, ['unsigned long']],
'AlignMask' : [ 0x4c, ['unsigned long']],
'VirtualAllocdBlocks' : [ 0x50, ['_LIST_ENTRY']],
'Segments' : [ 0x58, ['array', 64, ['pointer', ['_HEAP_SEGMENT']]]],
'u' : [ 0x158, ['__unnamed_11a9']],
'u2' : [ 0x168, ['__unnamed_11ab']],
'AllocatorBackTraceIndex' : [ 0x16a, ['unsigned short']],
'NonDedicatedListLength' : [ 0x16c, ['unsigned long']],
'LargeBlocksIndex' : [ 0x170, ['pointer', ['void']]],
'PseudoTagEntries' : [ 0x174, ['pointer', ['_HEAP_PSEUDO_TAG_ENTRY']]],
'FreeLists' : [ 0x178, ['array', 128, ['_LIST_ENTRY']]],
'LockVariable' : [ 0x578, ['pointer', ['_HEAP_LOCK']]],
'CommitRoutine' : [ 0x57c, ['pointer', ['void']]],
'FrontEndHeap' : [ 0x580, ['pointer', ['void']]],
'FrontHeapLockCount' : [ 0x584, ['unsigned short']],
'FrontEndHeapType' : [ 0x586, ['unsigned char']],
'LastSegmentIndex' : [ 0x587, ['unsigned char']],
} ],
'_HEAP_SEGMENT' : [ 0x3c, {
'Entry' : [ 0x0, ['_HEAP_ENTRY']],
'Signature' : [ 0x8, ['unsigned long']],
'Flags' : [ 0xc, ['unsigned long']],
'Heap' : [ 0x10, ['pointer', ['_HEAP']]],
'LargestUnCommittedRange' : [ 0x14, ['unsigned long']],
'BaseAddress' : [ 0x18, ['pointer', ['void']]],
'NumberOfPages' : [ 0x1c, ['unsigned long']],
'FirstEntry' : [ 0x20, ['pointer', ['_HEAP_ENTRY']]],
'LastValidEntry' : [ 0x24, ['pointer', ['_HEAP_ENTRY']]],
'NumberOfUnCommittedPages' : [ 0x28, ['unsigned long']],
'NumberOfUnCommittedRanges' : [ 0x2c, ['unsigned long']],
'UnCommittedRanges' : [ 0x30, ['pointer', ['_HEAP_UNCOMMMTTED_RANGE']]],
'AllocatorBackTraceIndex' : [ 0x34, ['unsigned short']],
'Reserved' : [ 0x36, ['unsigned short']],
'LastEntryInSegment' : [ 0x38, ['pointer', ['_HEAP_ENTRY']]],
} ],
'_HEAP_SUBSEGMENT' : [ 0x20, {
'Bucket' : [ 0x0, ['pointer', ['void']]],
'UserBlocks' : [ 0x4, ['pointer', ['_HEAP_USERDATA_HEADER']]],
'AggregateExchg' : [ 0x8, ['_INTERLOCK_SEQ']],
'BlockSize' : [ 0x10, ['unsigned short']],
'FreeThreshold' : [ 0x12, ['unsigned short']],
'BlockCount' : [ 0x14, ['unsigned short']],
'SizeIndex' : [ 0x16, ['unsigned char']],
'AffinityIndex' : [ 0x17, ['unsigned char']],
'Alignment' : [ 0x10, ['array', 2, ['unsigned long']]],
'SFreeListEntry' : [ 0x18, ['_SINGLE_LIST_ENTRY']],
'Lock' : [ 0x1c, ['unsigned long']],
} ],
'_HEAP_UCR_SEGMENT' : [ 0x10, {
'Next' : [ 0x0, ['pointer', ['_HEAP_UCR_SEGMENT']]],
'ReservedSize' : [ 0x4, ['unsigned long']],
'CommittedSize' : [ 0x8, ['unsigned long']],
'filler' : [ 0xc, ['unsigned long']],
} ],
'_HMAP_TABLE' : [ 0x2000, {
'Table' : [ 0x0, ['array', 512, ['_HMAP_ENTRY']]],
} ],
'_OBJECT_SYMBOLIC_LINK' : [ 0x20, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LinkTarget' : [ 0x8, ['_UNICODE_STRING']],
'LinkTargetRemaining' : [ 0x10, ['_UNICODE_STRING']],
'LinkTargetObject' : [ 0x18, ['pointer', ['void']]],
'DosDeviceDriveIndex' : [ 0x1c, ['unsigned long']],
} ],
'_POOL_BLOCK_HEAD' : [ 0x10, {
'Header' : [ 0x0, ['_POOL_HEADER']],
'List' : [ 0x8, ['_LIST_ENTRY']],
} ],
'_DISPATCHER_HEADER' : [ 0x10, {
'Type' : [ 0x0, ['unsigned char']],
'Absolute' : [ 0x1, ['unsigned char']],
'Size' : [ 0x2, ['unsigned char']],
'Inserted' : [ 0x3, ['unsigned char']],
'SignalState' : [ 0x4, ['long']],
'WaitListHead' : [ 0x8, ['_LIST_ENTRY']],
} ],
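# _DISPATCHER_HEADER prefixes every waitable kernel object in this profile
# (see _KEVENT, _KMUTANT, _KTHREAD): SignalState and WaitListHead are the
# fields the dispatcher updates when the object is signaled or waited on.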
'_LDR_DATA_TABLE_ENTRY' : [ 0x50, {
'InLoadOrderLinks' : [ 0x0, ['_LIST_ENTRY']],
'InMemoryOrderLinks' : [ 0x8, ['_LIST_ENTRY']],
'InInitializationOrderLinks' : [ 0x10, ['_LIST_ENTRY']],
'DllBase' : [ 0x18, ['pointer', ['void']]],
'EntryPoint' : [ 0x1c, ['pointer', ['void']]],
'SizeOfImage' : [ 0x20, ['unsigned long']],
'FullDllName' : [ 0x24, ['_UNICODE_STRING']],
'BaseDllName' : [ 0x2c, ['_UNICODE_STRING']],
'Flags' : [ 0x34, ['unsigned long']],
'LoadCount' : [ 0x38, ['unsigned short']],
'TlsIndex' : [ 0x3a, ['unsigned short']],
'HashLinks' : [ 0x3c, ['_LIST_ENTRY']],
'SectionPointer' : [ 0x3c, ['pointer', ['void']]],
'CheckSum' : [ 0x40, ['unsigned long']],
'TimeDateStamp' : [ 0x44, ['unsigned long']],
'LoadedImports' : [ 0x44, ['pointer', ['void']]],
'EntryPointActivationContext' : [ 0x48, ['pointer', ['void']]],
'PatchInformation' : [ 0x4c, ['pointer', ['void']]],
} ],
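# List-walking sketch (hypothetical read_ptr helper): a _LIST_ENTRY link
# points at the embedded list field, so recover the record base by
# subtracting that field's offset, CONTAINING_RECORD-style. For a flink
# taken from InMemoryOrderLinks of _LDR_DATA_TABLE_ENTRY:
#   entry_base = flink - 0x8                  # offset of InMemoryOrderLinks
#   dll_base   = read_ptr(entry_base + 0x18)  # 'DllBase'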
'_HEAP_UNCOMMMTTED_RANGE' : [ 0x10, {
'Next' : [ 0x0, ['pointer', ['_HEAP_UNCOMMMTTED_RANGE']]],
'Address' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long']],
'filler' : [ 0xc, ['unsigned long']],
} ],
'_VI_DEADLOCK_GLOBALS' : [ 0x110, {
'Nodes' : [ 0x0, ['array', 2, ['unsigned long']]],
'Resources' : [ 0x8, ['array', 2, ['unsigned long']]],
'Threads' : [ 0x10, ['array', 2, ['unsigned long']]],
'TimeAcquire' : [ 0x18, ['long long']],
'TimeRelease' : [ 0x20, ['long long']],
'BytesAllocated' : [ 0x28, ['unsigned long']],
'ResourceDatabase' : [ 0x2c, ['pointer', ['_LIST_ENTRY']]],
'ThreadDatabase' : [ 0x30, ['pointer', ['_LIST_ENTRY']]],
'AllocationFailures' : [ 0x34, ['unsigned long']],
'NodesTrimmedBasedOnAge' : [ 0x38, ['unsigned long']],
'NodesTrimmedBasedOnCount' : [ 0x3c, ['unsigned long']],
'NodesSearched' : [ 0x40, ['unsigned long']],
'MaxNodesSearched' : [ 0x44, ['unsigned long']],
'SequenceNumber' : [ 0x48, ['unsigned long']],
'RecursionDepthLimit' : [ 0x4c, ['unsigned long']],
'SearchedNodesLimit' : [ 0x50, ['unsigned long']],
'DepthLimitHits' : [ 0x54, ['unsigned long']],
'SearchLimitHits' : [ 0x58, ['unsigned long']],
'ABC_ACB_Skipped' : [ 0x5c, ['unsigned long']],
'FreeResourceList' : [ 0x60, ['_LIST_ENTRY']],
'FreeThreadList' : [ 0x68, ['_LIST_ENTRY']],
'FreeNodeList' : [ 0x70, ['_LIST_ENTRY']],
'FreeResourceCount' : [ 0x78, ['unsigned long']],
'FreeThreadCount' : [ 0x7c, ['unsigned long']],
'FreeNodeCount' : [ 0x80, ['unsigned long']],
'Instigator' : [ 0x84, ['pointer', ['void']]],
'NumberOfParticipants' : [ 0x88, ['unsigned long']],
'Participant' : [ 0x8c, ['array', 32, ['pointer', ['_VI_DEADLOCK_NODE']]]],
'CacheReductionInProgress' : [ 0x10c, ['unsigned long']],
} ],
'_THERMAL_INFORMATION' : [ 0x4c, {
'ThermalStamp' : [ 0x0, ['unsigned long']],
'ThermalConstant1' : [ 0x4, ['unsigned long']],
'ThermalConstant2' : [ 0x8, ['unsigned long']],
'Processors' : [ 0xc, ['unsigned long']],
'SamplingPeriod' : [ 0x10, ['unsigned long']],
'CurrentTemperature' : [ 0x14, ['unsigned long']],
'PassiveTripPoint' : [ 0x18, ['unsigned long']],
'CriticalTripPoint' : [ 0x1c, ['unsigned long']],
'ActiveTripPointCount' : [ 0x20, ['unsigned char']],
'ActiveTripPoint' : [ 0x24, ['array', 10, ['unsigned long']]],
} ],
'_DBGKD_SEARCH_MEMORY' : [ 0x18, {
'SearchAddress' : [ 0x0, ['unsigned long long']],
'FoundAddress' : [ 0x0, ['unsigned long long']],
'SearchLength' : [ 0x8, ['unsigned long long']],
'PatternLength' : [ 0x10, ['unsigned long']],
} ],
'_SECTION_OBJECT' : [ 0x18, {
'StartingVa' : [ 0x0, ['pointer', ['void']]],
'EndingVa' : [ 0x4, ['pointer', ['void']]],
'Parent' : [ 0x8, ['pointer', ['void']]],
'LeftChild' : [ 0xc, ['pointer', ['void']]],
'RightChild' : [ 0x10, ['pointer', ['void']]],
'Segment' : [ 0x14, ['pointer', ['_SEGMENT_OBJECT']]],
} ],
'_POWER_STATE' : [ 0x4, {
'SystemState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DeviceState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
} ],
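# 'Enumeration' descriptors, as in _POWER_STATE above, wrap a raw integer
# ('target') and translate it through 'choices'. SystemState and DeviceState
# share offset 0x0 because the underlying C type is a union of the two enums.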
'_WMI_LOGGER_CONTEXT' : [ 0x1c8, {
'BufferSpinLock' : [ 0x0, ['unsigned long']],
'StartTime' : [ 0x8, ['_LARGE_INTEGER']],
'LogFileHandle' : [ 0x10, ['pointer', ['void']]],
'LoggerSemaphore' : [ 0x14, ['_KSEMAPHORE']],
'LoggerThread' : [ 0x28, ['pointer', ['_ETHREAD']]],
'LoggerEvent' : [ 0x2c, ['_KEVENT']],
'FlushEvent' : [ 0x3c, ['_KEVENT']],
'LoggerStatus' : [ 0x4c, ['long']],
'LoggerId' : [ 0x50, ['unsigned long']],
'BuffersAvailable' : [ 0x54, ['long']],
'UsePerfClock' : [ 0x58, ['unsigned long']],
'WriteFailureLimit' : [ 0x5c, ['unsigned long']],
'BuffersDirty' : [ 0x60, ['unsigned long']],
'BuffersInUse' : [ 0x64, ['unsigned long']],
'SwitchingInProgress' : [ 0x68, ['unsigned long']],
'FreeList' : [ 0x70, ['_SLIST_HEADER']],
'FlushList' : [ 0x78, ['_SLIST_HEADER']],
'GlobalList' : [ 0x80, ['_SLIST_HEADER']],
'ProcessorBuffers' : [ 0x88, ['pointer', ['_SLIST_HEADER']]],
'LoggerName' : [ 0x8c, ['_UNICODE_STRING']],
'LogFileName' : [ 0x94, ['_UNICODE_STRING']],
'LogFilePattern' : [ 0x9c, ['_UNICODE_STRING']],
'NewLogFileName' : [ 0xa4, ['_UNICODE_STRING']],
'EndPageMarker' : [ 0xac, ['pointer', ['unsigned char']]],
'CollectionOn' : [ 0xb0, ['long']],
'KernelTraceOn' : [ 0xb4, ['unsigned long']],
'PerfLogInTransition' : [ 0xb8, ['long']],
'RequestFlag' : [ 0xbc, ['unsigned long']],
'EnableFlags' : [ 0xc0, ['unsigned long']],
'MaximumFileSize' : [ 0xc4, ['unsigned long']],
'LoggerMode' : [ 0xc8, ['unsigned long']],
'LoggerModeFlags' : [ 0xc8, ['_WMI_LOGGER_MODE']],
'LastFlushedBuffer' : [ 0xcc, ['unsigned long']],
'RefCount' : [ 0xd0, ['unsigned long']],
'FlushTimer' : [ 0xd4, ['unsigned long']],
'FirstBufferOffset' : [ 0xd8, ['_LARGE_INTEGER']],
'ByteOffset' : [ 0xe0, ['_LARGE_INTEGER']],
'BufferAgeLimit' : [ 0xe8, ['_LARGE_INTEGER']],
'MaximumBuffers' : [ 0xf0, ['unsigned long']],
'MinimumBuffers' : [ 0xf4, ['unsigned long']],
'EventsLost' : [ 0xf8, ['unsigned long']],
'BuffersWritten' : [ 0xfc, ['unsigned long']],
'LogBuffersLost' : [ 0x100, ['unsigned long']],
'RealTimeBuffersLost' : [ 0x104, ['unsigned long']],
'BufferSize' : [ 0x108, ['unsigned long']],
'NumberOfBuffers' : [ 0x10c, ['long']],
'SequencePtr' : [ 0x110, ['pointer', ['long']]],
'InstanceGuid' : [ 0x114, ['_GUID']],
'LoggerHeader' : [ 0x124, ['pointer', ['void']]],
'GetCpuClock' : [ 0x128, ['pointer', ['void']]],
'ClientSecurityContext' : [ 0x12c, ['_SECURITY_CLIENT_CONTEXT']],
'LoggerExtension' : [ 0x168, ['pointer', ['void']]],
'ReleaseQueue' : [ 0x16c, ['long']],
'EnableFlagExtension' : [ 0x170, ['_TRACE_ENABLE_FLAG_EXTENSION']],
'LocalSequence' : [ 0x174, ['unsigned long']],
'MaximumIrql' : [ 0x178, ['unsigned long']],
'EnableFlagArray' : [ 0x17c, ['pointer', ['unsigned long']]],
'LoggerMutex' : [ 0x180, ['_KMUTANT']],
'MutexCount' : [ 0x1a0, ['long']],
'FileCounter' : [ 0x1a4, ['unsigned long']],
'BufferCallback' : [ 0x1a8, ['pointer', ['void']]],
'CallbackContext' : [ 0x1ac, ['pointer', ['void']]],
'PoolType' : [ 0x1b0, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'ReferenceSystemTime' : [ 0x1b8, ['_LARGE_INTEGER']],
'ReferenceTimeStamp' : [ 0x1c0, ['_LARGE_INTEGER']],
} ],
'_SEGMENT_OBJECT' : [ 0x30, {
'BaseAddress' : [ 0x0, ['pointer', ['void']]],
'TotalNumberOfPtes' : [ 0x4, ['unsigned long']],
'SizeOfSegment' : [ 0x8, ['_LARGE_INTEGER']],
'NonExtendedPtes' : [ 0x10, ['unsigned long']],
'ImageCommitment' : [ 0x14, ['unsigned long']],
'ControlArea' : [ 0x18, ['pointer', ['_CONTROL_AREA']]],
'Subsection' : [ 0x1c, ['pointer', ['_SUBSECTION']]],
'LargeControlArea' : [ 0x20, ['pointer', ['_LARGE_CONTROL_AREA']]],
'MmSectionFlags' : [ 0x24, ['pointer', ['_MMSECTION_FLAGS']]],
'MmSubSectionFlags' : [ 0x28, ['pointer', ['_MMSUBSECTION_FLAGS']]],
} ],
'__unnamed_123f' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x0, ['_MMSECTION_FLAGS']],
} ],
'_CONTROL_AREA' : [ 0x30, {
'Segment' : [ 0x0, ['pointer', ['_SEGMENT']]],
'DereferenceList' : [ 0x4, ['_LIST_ENTRY']],
'NumberOfSectionReferences' : [ 0xc, ['unsigned long']],
'NumberOfPfnReferences' : [ 0x10, ['unsigned long']],
'NumberOfMappedViews' : [ 0x14, ['unsigned long']],
'NumberOfSubsections' : [ 0x18, ['unsigned short']],
'FlushInProgressCount' : [ 0x1a, ['unsigned short']],
'NumberOfUserReferences' : [ 0x1c, ['unsigned long']],
'u' : [ 0x20, ['__unnamed_123f']],
'FilePointer' : [ 0x24, ['pointer', ['_FILE_OBJECT']]],
'WaitingForDeletion' : [ 0x28, ['pointer', ['_EVENT_COUNTER']]],
'ModifiedWriteCount' : [ 0x2c, ['unsigned short']],
'NumberOfSystemCacheViews' : [ 0x2e, ['unsigned short']],
} ],
'_HANDLE_TABLE' : [ 0x44, {
'TableCode' : [ 0x0, ['unsigned long']],
'QuotaProcess' : [ 0x4, ['pointer', ['_EPROCESS']]],
'UniqueProcessId' : [ 0x8, ['pointer', ['void']]],
'HandleTableLock' : [ 0xc, ['array', 4, ['_EX_PUSH_LOCK']]],
'HandleTableList' : [ 0x1c, ['_LIST_ENTRY']],
'HandleContentionEvent' : [ 0x24, ['_EX_PUSH_LOCK']],
'DebugInfo' : [ 0x28, ['pointer', ['_HANDLE_TRACE_DEBUG_INFO']]],
'ExtraInfoPages' : [ 0x2c, ['long']],
'FirstFree' : [ 0x30, ['unsigned long']],
'LastFree' : [ 0x34, ['unsigned long']],
'NextHandleNeedingPool' : [ 0x38, ['unsigned long']],
'HandleCount' : [ 0x3c, ['long']],
'Flags' : [ 0x40, ['unsigned long']],
'StrictFIFO' : [ 0x40, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
} ],
'_POOL_HEADER' : [ 0x8, {
'PreviousSize' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 9, native_type='unsigned short')]],
'PoolIndex' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 16, native_type='unsigned short')]],
'BlockSize' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 9, native_type='unsigned short')]],
'PoolType' : [ 0x2, ['BitField', dict(start_bit = 9, end_bit = 16, native_type='unsigned short')]],
'Ulong1' : [ 0x0, ['unsigned long']],
'ProcessBilled' : [ 0x4, ['pointer', ['_EPROCESS']]],
'PoolTag' : [ 0x4, ['unsigned long']],
'AllocatorBackTraceIndex' : [ 0x4, ['unsigned short']],
'PoolTagHash' : [ 0x6, ['unsigned short']],
} ],
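# BitField descriptors count bits from the least-significant end of the
# native type (PreviousSize occupies the low 9 bits). Decoding sketch with a
# hypothetical read_u16 helper:
#   w = read_u16(pool_base + 0x0)
#   previous_size = w & 0x1ff        # bits 0..8
#   pool_index    = (w >> 9) & 0x7f  # bits 9..15
# PoolTag overlays ProcessBilled at 0x4, another flattened union.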
'_KWAIT_BLOCK' : [ 0x18, {
'WaitListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Thread' : [ 0x8, ['pointer', ['_KTHREAD']]],
'Object' : [ 0xc, ['pointer', ['void']]],
'NextWaitBlock' : [ 0x10, ['pointer', ['_KWAIT_BLOCK']]],
'WaitKey' : [ 0x14, ['unsigned short']],
'WaitType' : [ 0x16, ['unsigned short']],
} ],
'_MMPTE_PROTOTYPE' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ProtoAddressLow' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned long')]],
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'WhichPool' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'ProtoAddressHigh' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 32, native_type='unsigned long')]],
} ],
'_MMSUPPORT' : [ 0x40, {
'LastTrimTime' : [ 0x0, ['_LARGE_INTEGER']],
'Flags' : [ 0x8, ['_MMSUPPORT_FLAGS']],
'PageFaultCount' : [ 0xc, ['unsigned long']],
'PeakWorkingSetSize' : [ 0x10, ['unsigned long']],
'WorkingSetSize' : [ 0x14, ['unsigned long']],
'MinimumWorkingSetSize' : [ 0x18, ['unsigned long']],
'MaximumWorkingSetSize' : [ 0x1c, ['unsigned long']],
'VmWorkingSetList' : [ 0x20, ['pointer', ['_MMWSL']]],
'WorkingSetExpansionLinks' : [ 0x24, ['_LIST_ENTRY']],
'Claim' : [ 0x2c, ['unsigned long']],
'NextEstimationSlot' : [ 0x30, ['unsigned long']],
'NextAgingSlot' : [ 0x34, ['unsigned long']],
'EstimatedAvailable' : [ 0x38, ['unsigned long']],
'GrowthSinceLastEstimate' : [ 0x3c, ['unsigned long']],
} ],
'_EX_WORK_QUEUE' : [ 0x3c, {
'WorkerQueue' : [ 0x0, ['_KQUEUE']],
'DynamicThreadCount' : [ 0x28, ['unsigned long']],
'WorkItemsProcessed' : [ 0x2c, ['unsigned long']],
'WorkItemsProcessedLastPass' : [ 0x30, ['unsigned long']],
'QueueDepthLastPass' : [ 0x34, ['unsigned long']],
'Info' : [ 0x38, ['EX_QUEUE_WORKER_INFO']],
} ],
'_MMSUBSECTION_FLAGS' : [ 0x4, {
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ReadWrite' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'SubsectionStatic' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'GlobalMemory' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 9, native_type='unsigned long')]],
'LargePages' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'StartingSector4132' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 20, native_type='unsigned long')]],
'SectorEndOffset' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 32, native_type='unsigned long')]],
} ],
'_KMUTANT' : [ 0x20, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'MutantListEntry' : [ 0x10, ['_LIST_ENTRY']],
'OwnerThread' : [ 0x18, ['pointer', ['_KTHREAD']]],
'Abandoned' : [ 0x1c, ['unsigned char']],
'ApcDisable' : [ 0x1d, ['unsigned char']],
} ],
'_HEAP_TAG_ENTRY' : [ 0x40, {
'Allocs' : [ 0x0, ['unsigned long']],
'Frees' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long']],
'TagIndex' : [ 0xc, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0xe, ['unsigned short']],
'TagName' : [ 0x10, ['array', 24, ['unsigned short']]],
} ],
'_KEVENT' : [ 0x10, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
} ],
'_EPROCESS_QUOTA_BLOCK' : [ 0x40, {
'QuotaEntry' : [ 0x0, ['array', 3, ['_EPROCESS_QUOTA_ENTRY']]],
'QuotaList' : [ 0x30, ['_LIST_ENTRY']],
'ReferenceCount' : [ 0x38, ['unsigned long']],
'ProcessCount' : [ 0x3c, ['unsigned long']],
} ],
'_UNICODE_STRING' : [ 0x8, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x4, ['pointer', ['unsigned short']]],
} ],
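# Reading a _UNICODE_STRING (illustrative): Length is a byte count, not a
# character count, and Buffer points at UTF-16LE data:
#   raw  = read(buffer_ptr, length)  # hypothetical reader
#   text = raw.decode('utf-16-le')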
'_EVENT_COUNTER' : [ 0x18, {
'ListEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'RefCount' : [ 0x4, ['unsigned long']],
'Event' : [ 0x8, ['_KEVENT']],
} ],
'_EJOB' : [ 0x180, {
'Event' : [ 0x0, ['_KEVENT']],
'JobLinks' : [ 0x10, ['_LIST_ENTRY']],
'ProcessListHead' : [ 0x18, ['_LIST_ENTRY']],
'JobLock' : [ 0x20, ['_ERESOURCE']],
'TotalUserTime' : [ 0x58, ['_LARGE_INTEGER']],
'TotalKernelTime' : [ 0x60, ['_LARGE_INTEGER']],
'ThisPeriodTotalUserTime' : [ 0x68, ['_LARGE_INTEGER']],
'ThisPeriodTotalKernelTime' : [ 0x70, ['_LARGE_INTEGER']],
'TotalPageFaultCount' : [ 0x78, ['unsigned long']],
'TotalProcesses' : [ 0x7c, ['unsigned long']],
'ActiveProcesses' : [ 0x80, ['unsigned long']],
'TotalTerminatedProcesses' : [ 0x84, ['unsigned long']],
'PerProcessUserTimeLimit' : [ 0x88, ['_LARGE_INTEGER']],
'PerJobUserTimeLimit' : [ 0x90, ['_LARGE_INTEGER']],
'LimitFlags' : [ 0x98, ['unsigned long']],
'MinimumWorkingSetSize' : [ 0x9c, ['unsigned long']],
'MaximumWorkingSetSize' : [ 0xa0, ['unsigned long']],
'ActiveProcessLimit' : [ 0xa4, ['unsigned long']],
'Affinity' : [ 0xa8, ['unsigned long']],
'PriorityClass' : [ 0xac, ['unsigned char']],
'UIRestrictionsClass' : [ 0xb0, ['unsigned long']],
'SecurityLimitFlags' : [ 0xb4, ['unsigned long']],
'Token' : [ 0xb8, ['pointer', ['void']]],
'Filter' : [ 0xbc, ['pointer', ['_PS_JOB_TOKEN_FILTER']]],
'EndOfJobTimeAction' : [ 0xc0, ['unsigned long']],
'CompletionPort' : [ 0xc4, ['pointer', ['void']]],
'CompletionKey' : [ 0xc8, ['pointer', ['void']]],
'SessionId' : [ 0xcc, ['unsigned long']],
'SchedulingClass' : [ 0xd0, ['unsigned long']],
'ReadOperationCount' : [ 0xd8, ['unsigned long long']],
'WriteOperationCount' : [ 0xe0, ['unsigned long long']],
'OtherOperationCount' : [ 0xe8, ['unsigned long long']],
'ReadTransferCount' : [ 0xf0, ['unsigned long long']],
'WriteTransferCount' : [ 0xf8, ['unsigned long long']],
'OtherTransferCount' : [ 0x100, ['unsigned long long']],
'IoInfo' : [ 0x108, ['_IO_COUNTERS']],
'ProcessMemoryLimit' : [ 0x138, ['unsigned long']],
'JobMemoryLimit' : [ 0x13c, ['unsigned long']],
'PeakProcessMemoryUsed' : [ 0x140, ['unsigned long']],
'PeakJobMemoryUsed' : [ 0x144, ['unsigned long']],
'CurrentJobMemoryUsed' : [ 0x148, ['unsigned long']],
'MemoryLimitsLock' : [ 0x14c, ['_FAST_MUTEX']],
'JobSetLinks' : [ 0x16c, ['_LIST_ENTRY']],
'MemberLevel' : [ 0x174, ['unsigned long']],
'JobFlags' : [ 0x178, ['unsigned long']],
} ],
'_LARGE_CONTROL_AREA' : [ 0x40, {
'Segment' : [ 0x0, ['pointer', ['_SEGMENT']]],
'DereferenceList' : [ 0x4, ['_LIST_ENTRY']],
'NumberOfSectionReferences' : [ 0xc, ['unsigned long']],
'NumberOfPfnReferences' : [ 0x10, ['unsigned long']],
'NumberOfMappedViews' : [ 0x14, ['unsigned long']],
'NumberOfSubsections' : [ 0x18, ['unsigned short']],
'FlushInProgressCount' : [ 0x1a, ['unsigned short']],
'NumberOfUserReferences' : [ 0x1c, ['unsigned long']],
'u' : [ 0x20, ['__unnamed_123f']],
'FilePointer' : [ 0x24, ['pointer', ['_FILE_OBJECT']]],
'WaitingForDeletion' : [ 0x28, ['pointer', ['_EVENT_COUNTER']]],
'ModifiedWriteCount' : [ 0x2c, ['unsigned short']],
'NumberOfSystemCacheViews' : [ 0x2e, ['unsigned short']],
'StartingFrame' : [ 0x30, ['unsigned long']],
'UserGlobalList' : [ 0x34, ['_LIST_ENTRY']],
'SessionId' : [ 0x3c, ['unsigned long']],
} ],
'_GUID' : [ 0x10, {
'Data1' : [ 0x0, ['unsigned long']],
'Data2' : [ 0x4, ['unsigned short']],
'Data3' : [ 0x6, ['unsigned short']],
'Data4' : [ 0x8, ['array', 8, ['unsigned char']]],
} ],
'_PS_JOB_TOKEN_FILTER' : [ 0x24, {
'CapturedSidCount' : [ 0x0, ['unsigned long']],
'CapturedSids' : [ 0x4, ['pointer', ['_SID_AND_ATTRIBUTES']]],
'CapturedSidsLength' : [ 0x8, ['unsigned long']],
'CapturedGroupCount' : [ 0xc, ['unsigned long']],
'CapturedGroups' : [ 0x10, ['pointer', ['_SID_AND_ATTRIBUTES']]],
'CapturedGroupsLength' : [ 0x14, ['unsigned long']],
'CapturedPrivilegeCount' : [ 0x18, ['unsigned long']],
'CapturedPrivileges' : [ 0x1c, ['pointer', ['_LUID_AND_ATTRIBUTES']]],
'CapturedPrivilegesLength' : [ 0x20, ['unsigned long']],
} ],
'_FAST_MUTEX' : [ 0x20, {
'Count' : [ 0x0, ['long']],
'Owner' : [ 0x4, ['pointer', ['_KTHREAD']]],
'Contention' : [ 0x8, ['unsigned long']],
'Event' : [ 0xc, ['_KEVENT']],
'OldIrql' : [ 0x1c, ['unsigned long']],
} ],
'_MM_DRIVER_VERIFIER_DATA' : [ 0x70, {
'Level' : [ 0x0, ['unsigned long']],
'RaiseIrqls' : [ 0x4, ['unsigned long']],
'AcquireSpinLocks' : [ 0x8, ['unsigned long']],
'SynchronizeExecutions' : [ 0xc, ['unsigned long']],
'AllocationsAttempted' : [ 0x10, ['unsigned long']],
'AllocationsSucceeded' : [ 0x14, ['unsigned long']],
'AllocationsSucceededSpecialPool' : [ 0x18, ['unsigned long']],
'AllocationsWithNoTag' : [ 0x1c, ['unsigned long']],
'TrimRequests' : [ 0x20, ['unsigned long']],
'Trims' : [ 0x24, ['unsigned long']],
'AllocationsFailed' : [ 0x28, ['unsigned long']],
'AllocationsFailedDeliberately' : [ 0x2c, ['unsigned long']],
'Loads' : [ 0x30, ['unsigned long']],
'Unloads' : [ 0x34, ['unsigned long']],
'UnTrackedPool' : [ 0x38, ['unsigned long']],
'UserTrims' : [ 0x3c, ['unsigned long']],
'CurrentPagedPoolAllocations' : [ 0x40, ['unsigned long']],
'CurrentNonPagedPoolAllocations' : [ 0x44, ['unsigned long']],
'PeakPagedPoolAllocations' : [ 0x48, ['unsigned long']],
'PeakNonPagedPoolAllocations' : [ 0x4c, ['unsigned long']],
'PagedBytes' : [ 0x50, ['unsigned long']],
'NonPagedBytes' : [ 0x54, ['unsigned long']],
'PeakPagedBytes' : [ 0x58, ['unsigned long']],
'PeakNonPagedBytes' : [ 0x5c, ['unsigned long']],
'BurstAllocationsFailedDeliberately' : [ 0x60, ['unsigned long']],
'SessionTrims' : [ 0x64, ['unsigned long']],
'Reserved' : [ 0x68, ['array', 2, ['unsigned long']]],
} ],
'_IMAGE_FILE_HEADER' : [ 0x14, {
'Machine' : [ 0x0, ['unsigned short']],
'NumberOfSections' : [ 0x2, ['unsigned short']],
'TimeDateStamp' : [ 0x4, ['unsigned long']],
'PointerToSymbolTable' : [ 0x8, ['unsigned long']],
'NumberOfSymbols' : [ 0xc, ['unsigned long']],
'SizeOfOptionalHeader' : [ 0x10, ['unsigned short']],
'Characteristics' : [ 0x12, ['unsigned short']],
} ],
'_FILE_OBJECT' : [ 0x70, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceObject' : [ 0x4, ['pointer', ['_DEVICE_OBJECT']]],
'Vpb' : [ 0x8, ['pointer', ['_VPB']]],
'FsContext' : [ 0xc, ['pointer', ['void']]],
'FsContext2' : [ 0x10, ['pointer', ['void']]],
'SectionObjectPointer' : [ 0x14, ['pointer', ['_SECTION_OBJECT_POINTERS']]],
'PrivateCacheMap' : [ 0x18, ['pointer', ['void']]],
'FinalStatus' : [ 0x1c, ['long']],
'RelatedFileObject' : [ 0x20, ['pointer', ['_FILE_OBJECT']]],
'LockOperation' : [ 0x24, ['unsigned char']],
'DeletePending' : [ 0x25, ['unsigned char']],
'ReadAccess' : [ 0x26, ['unsigned char']],
'WriteAccess' : [ 0x27, ['unsigned char']],
'DeleteAccess' : [ 0x28, ['unsigned char']],
'SharedRead' : [ 0x29, ['unsigned char']],
'SharedWrite' : [ 0x2a, ['unsigned char']],
'SharedDelete' : [ 0x2b, ['unsigned char']],
'Flags' : [ 0x2c, ['unsigned long']],
'FileName' : [ 0x30, ['_UNICODE_STRING']],
'CurrentByteOffset' : [ 0x38, ['_LARGE_INTEGER']],
'Waiters' : [ 0x40, ['unsigned long']],
'Busy' : [ 0x44, ['unsigned long']],
'LastLock' : [ 0x48, ['pointer', ['void']]],
'Lock' : [ 0x4c, ['_KEVENT']],
'Event' : [ 0x5c, ['_KEVENT']],
'CompletionContext' : [ 0x6c, ['pointer', ['_IO_COMPLETION_CONTEXT']]],
} ],
'_MMPTE_HARDWARE' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Writable' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Dirty' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'LargePage' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'Global' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
} ],
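# For a valid, non-large _MMPTE_HARDWARE (Valid == 1, LargePage == 0) on
# this non-PAE x86 layout, address translation reduces to
#   phys = (PageFrameNumber << 12) | (va & 0xfff)
# since bits 12..31 of the PTE carry the page frame number directly.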
'_IO_COMPLETION_CONTEXT' : [ 0x8, {
'Port' : [ 0x0, ['pointer', ['void']]],
'Key' : [ 0x4, ['pointer', ['void']]],
} ],
'_CALL_HASH_ENTRY' : [ 0x14, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'CallersAddress' : [ 0x8, ['pointer', ['void']]],
'CallersCaller' : [ 0xc, ['pointer', ['void']]],
'CallCount' : [ 0x10, ['unsigned long']],
} ],
'_HMAP_ENTRY' : [ 0x10, {
'BlockAddress' : [ 0x0, ['unsigned long']],
'BinAddress' : [ 0x4, ['unsigned long']],
'CmView' : [ 0x8, ['pointer', ['_CM_VIEW_OF_FILE']]],
'MemAlloc' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_SET_CONTEXT' : [ 0x4, {
'ContextFlags' : [ 0x0, ['unsigned long']],
} ],
'_KLOCK_QUEUE_HANDLE' : [ 0xc, {
'LockQueue' : [ 0x0, ['_KSPIN_LOCK_QUEUE']],
'OldIrql' : [ 0x8, ['unsigned char']],
} ],
'_MMSECTION_FLAGS' : [ 0x4, {
'BeingDeleted' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'BeingCreated' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'BeingPurged' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'NoModifiedWriting' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'FailAllIo' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Image' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Based' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'File' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'Networked' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'NoCache' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'PhysicalMemory' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'Reserve' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'Commit' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'FloppyMedia' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'WasPurged' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'UserReference' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'GlobalMemory' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'DeleteOnClose' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'FilePointerNull' : [ 0x0, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'DebugSymbolsLoaded' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'SetMappedFileIoComplete' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'CollidedFlush' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'NoChange' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'HadUserReference' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'ImageMappedInSystemSpace' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'UserWritable' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 27, end_bit = 28, native_type='unsigned long')]],
'GlobalOnlyPerSession' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 29, native_type='unsigned long')]],
'Rom' : [ 0x0, ['BitField', dict(start_bit = 29, end_bit = 30, native_type='unsigned long')]],
'filler' : [ 0x0, ['BitField', dict(start_bit = 30, end_bit = 32, native_type='unsigned long')]],
} ],
'_DEFERRED_WRITE' : [ 0x28, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'FileObject' : [ 0x4, ['pointer', ['_FILE_OBJECT']]],
'BytesToWrite' : [ 0x8, ['unsigned long']],
'DeferredWriteLinks' : [ 0xc, ['_LIST_ENTRY']],
'Event' : [ 0x14, ['pointer', ['_KEVENT']]],
'PostRoutine' : [ 0x18, ['pointer', ['void']]],
'Context1' : [ 0x1c, ['pointer', ['void']]],
'Context2' : [ 0x20, ['pointer', ['void']]],
'LimitModifiedPages' : [ 0x24, ['unsigned char']],
} ],
'_TRACE_ENABLE_FLAG_EXTENSION' : [ 0x4, {
'Offset' : [ 0x0, ['unsigned short']],
'Length' : [ 0x2, ['unsigned char']],
'Flag' : [ 0x3, ['unsigned char']],
} ],
'_SID_AND_ATTRIBUTES' : [ 0x8, {
'Sid' : [ 0x0, ['pointer', ['void']]],
'Attributes' : [ 0x4, ['unsigned long']],
} ],
'_HIVE_LIST_ENTRY' : [ 0x18, {
'Name' : [ 0x0, ['pointer', ['unsigned short']]],
'BaseName' : [ 0x4, ['pointer', ['unsigned short']]],
'CmHive' : [ 0x8, ['pointer', ['_CMHIVE']]],
'Flags' : [ 0xc, ['unsigned long']],
'CmHive2' : [ 0x10, ['pointer', ['_CMHIVE']]],
'ThreadFinished' : [ 0x14, ['unsigned char']],
'ThreadStarted' : [ 0x15, ['unsigned char']],
'Allocate' : [ 0x16, ['unsigned char']],
} ],
'_KSPIN_LOCK_QUEUE' : [ 0x8, {
'Next' : [ 0x0, ['pointer', ['_KSPIN_LOCK_QUEUE']]],
'Lock' : [ 0x4, ['pointer', ['unsigned long']]],
} ],
'_PS_IMPERSONATION_INFORMATION' : [ 0xc, {
'Token' : [ 0x0, ['pointer', ['void']]],
'CopyOnOpen' : [ 0x4, ['unsigned char']],
'EffectiveOnly' : [ 0x5, ['unsigned char']],
'ImpersonationLevel' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
} ],
'__unnamed_12ed' : [ 0x4, {
'LegacyDeviceNode' : [ 0x0, ['pointer', ['_DEVICE_NODE']]],
'PendingDeviceRelations' : [ 0x0, ['pointer', ['_DEVICE_RELATIONS']]],
} ],
'__unnamed_12ef' : [ 0x4, {
'NextResourceDeviceNode' : [ 0x0, ['pointer', ['_DEVICE_NODE']]],
} ],
'__unnamed_12f3' : [ 0x10, {
'DockStatus' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'DOCK_NOTDOCKDEVICE', 1: 'DOCK_QUIESCENT', 2: 'DOCK_ARRIVING', 3: 'DOCK_DEPARTING', 4: 'DOCK_EJECTIRP_COMPLETED'})]],
'ListEntry' : [ 0x4, ['_LIST_ENTRY']],
'SerialNumber' : [ 0xc, ['pointer', ['unsigned short']]],
} ],
'_DEVICE_NODE' : [ 0x118, {
'Sibling' : [ 0x0, ['pointer', ['_DEVICE_NODE']]],
'Child' : [ 0x4, ['pointer', ['_DEVICE_NODE']]],
'Parent' : [ 0x8, ['pointer', ['_DEVICE_NODE']]],
'LastChild' : [ 0xc, ['pointer', ['_DEVICE_NODE']]],
'Level' : [ 0x10, ['unsigned long']],
'Notify' : [ 0x14, ['pointer', ['_PO_DEVICE_NOTIFY']]],
'State' : [ 0x18, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted'})]],
'PreviousState' : [ 0x1c, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted'})]],
'StateHistory' : [ 0x20, ['array', 20, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted'})]]],  # array count was emitted as -80 (the span's byte size, negated); 0x70 - 0x20 = 0x50 bytes / 4-byte elements = 20 entries, matching StateHistoryEntry at 0x70
'StateHistoryEntry' : [ 0x70, ['unsigned long']],
'CompletionStatus' : [ 0x74, ['long']],
'PendingIrp' : [ 0x78, ['pointer', ['_IRP']]],
'Flags' : [ 0x7c, ['unsigned long']],
'UserFlags' : [ 0x80, ['unsigned long']],
'Problem' : [ 0x84, ['unsigned long']],
'PhysicalDeviceObject' : [ 0x88, ['pointer', ['_DEVICE_OBJECT']]],
'ResourceList' : [ 0x8c, ['pointer', ['_CM_RESOURCE_LIST']]],
'ResourceListTranslated' : [ 0x90, ['pointer', ['_CM_RESOURCE_LIST']]],
'InstancePath' : [ 0x94, ['_UNICODE_STRING']],
'ServiceName' : [ 0x9c, ['_UNICODE_STRING']],
'DuplicatePDO' : [ 0xa4, ['pointer', ['_DEVICE_OBJECT']]],
'ResourceRequirements' : [ 0xa8, ['pointer', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
'InterfaceType' : [ 0xac, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0xb0, ['unsigned long']],
'ChildInterfaceType' : [ 0xb4, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'ChildBusNumber' : [ 0xb8, ['unsigned long']],
'ChildBusTypeIndex' : [ 0xbc, ['unsigned short']],
'RemovalPolicy' : [ 0xbe, ['unsigned char']],
'HardwareRemovalPolicy' : [ 0xbf, ['unsigned char']],
'TargetDeviceNotify' : [ 0xc0, ['_LIST_ENTRY']],
'DeviceArbiterList' : [ 0xc8, ['_LIST_ENTRY']],
'DeviceTranslatorList' : [ 0xd0, ['_LIST_ENTRY']],
'NoTranslatorMask' : [ 0xd8, ['unsigned short']],
'QueryTranslatorMask' : [ 0xda, ['unsigned short']],
'NoArbiterMask' : [ 0xdc, ['unsigned short']],
'QueryArbiterMask' : [ 0xde, ['unsigned short']],
'OverUsed1' : [ 0xe0, ['__unnamed_12ed']],
'OverUsed2' : [ 0xe4, ['__unnamed_12ef']],
'BootResources' : [ 0xe8, ['pointer', ['_CM_RESOURCE_LIST']]],
'CapabilityFlags' : [ 0xec, ['unsigned long']],
'DockInfo' : [ 0xf0, ['__unnamed_12f3']],
'DisableableDepends' : [ 0x100, ['unsigned long']],
'PendedSetInterfaceState' : [ 0x104, ['_LIST_ENTRY']],
'LegacyBusListEntry' : [ 0x10c, ['_LIST_ENTRY']],
'DriverUnloadRetryCount' : [ 0x114, ['unsigned long']],
} ],
'__unnamed_12f8' : [ 0x38, {
'CriticalSection' : [ 0x0, ['_RTL_CRITICAL_SECTION']],
'Resource' : [ 0x0, ['_ERESOURCE']],
} ],
'_HEAP_LOCK' : [ 0x38, {
'Lock' : [ 0x0, ['__unnamed_12f8']],
} ],
'_KPCR' : [ 0xd70, {
'NtTib' : [ 0x0, ['_NT_TIB']],
'SelfPcr' : [ 0x1c, ['pointer', ['_KPCR']]],
'Prcb' : [ 0x20, ['pointer', ['_KPRCB']]],
'Irql' : [ 0x24, ['unsigned char']],
'IRR' : [ 0x28, ['unsigned long']],
'IrrActive' : [ 0x2c, ['unsigned long']],
'IDR' : [ 0x30, ['unsigned long']],
'KdVersionBlock' : [ 0x34, ['pointer', ['void']]],
'IDT' : [ 0x38, ['pointer', ['_KIDTENTRY']]],
'GDT' : [ 0x3c, ['pointer', ['_KGDTENTRY']]],
'TSS' : [ 0x40, ['pointer', ['_KTSS']]],
'MajorVersion' : [ 0x44, ['unsigned short']],
'MinorVersion' : [ 0x46, ['unsigned short']],
'SetMember' : [ 0x48, ['unsigned long']],
'StallScaleFactor' : [ 0x4c, ['unsigned long']],
'DebugActive' : [ 0x50, ['unsigned char']],
'Number' : [ 0x51, ['unsigned char']],
'Spare0' : [ 0x52, ['unsigned char']],
'SecondLevelCacheAssociativity' : [ 0x53, ['unsigned char']],
'VdmAlert' : [ 0x54, ['unsigned long']],
'KernelReserved' : [ 0x58, ['array', 14, ['unsigned long']]],
'SecondLevelCacheSize' : [ 0x90, ['unsigned long']],
'HalReserved' : [ 0x94, ['array', 16, ['unsigned long']]],
'InterruptMode' : [ 0xd4, ['unsigned long']],
'Spare1' : [ 0xd8, ['unsigned char']],
'KernelReserved2' : [ 0xdc, ['array', 17, ['unsigned long']]],
'PrcbData' : [ 0x120, ['_KPRCB']],
} ],
'_MMCOLOR_TABLES' : [ 0xc, {
'Flink' : [ 0x0, ['unsigned long']],
'Blink' : [ 0x4, ['pointer', ['void']]],
'Count' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_FILL_MEMORY' : [ 0x10, {
'Address' : [ 0x0, ['unsigned long long']],
'Length' : [ 0x8, ['unsigned long']],
'Flags' : [ 0xc, ['unsigned short']],
'PatternLength' : [ 0xe, ['unsigned short']],
} ],
'_PP_LOOKASIDE_LIST' : [ 0x8, {
'P' : [ 0x0, ['pointer', ['_GENERAL_LOOKASIDE']]],
'L' : [ 0x4, ['pointer', ['_GENERAL_LOOKASIDE']]],
} ],
'_PHYSICAL_MEMORY_RUN' : [ 0x8, {
'BasePage' : [ 0x0, ['unsigned long']],
'PageCount' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1317' : [ 0x4, {
'Flink' : [ 0x0, ['unsigned long']],
'WsIndex' : [ 0x0, ['unsigned long']],
'Event' : [ 0x0, ['pointer', ['_KEVENT']]],
'ReadStatus' : [ 0x0, ['long']],
'NextStackPfn' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
} ],
'__unnamed_1319' : [ 0x4, {
'Blink' : [ 0x0, ['unsigned long']],
'ShareCount' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_131c' : [ 0x4, {
'ShortFlags' : [ 0x0, ['unsigned short']],
'ReferenceCount' : [ 0x2, ['unsigned short']],
} ],
'__unnamed_131e' : [ 0x4, {
'e1' : [ 0x0, ['_MMPFNENTRY']],
'e2' : [ 0x0, ['__unnamed_131c']],
} ],
'__unnamed_1325' : [ 0x4, {
'EntireFrame' : [ 0x0, ['unsigned long']],
'PteFrame' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 26, native_type='unsigned long')]],
'InPageError' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'VerifierAllocation' : [ 0x0, ['BitField', dict(start_bit = 27, end_bit = 28, native_type='unsigned long')]],
'AweAllocation' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 29, native_type='unsigned long')]],
'LockCharged' : [ 0x0, ['BitField', dict(start_bit = 29, end_bit = 30, native_type='unsigned long')]],
'KernelStack' : [ 0x0, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
} ],
'_MMPFN' : [ 0x18, {
'u1' : [ 0x0, ['__unnamed_1317']],
'PteAddress' : [ 0x4, ['pointer', ['_MMPTE']]],
'u2' : [ 0x8, ['__unnamed_1319']],
'u3' : [ 0xc, ['__unnamed_131e']],
'OriginalPte' : [ 0x10, ['_MMPTE']],
'u4' : [ 0x14, ['__unnamed_1325']],
} ],
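# The PFN database is an array of these 0x18-byte _MMPFN records, one per
# physical page; u1/u2 overlay list links (Flink/Blink) with state-specific
# fields such as WsIndex and ShareCount, so which view is meaningful depends
# on the page's current state.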
'__unnamed_132b' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x0, ['_MM_SESSION_SPACE_FLAGS']],
} ],
'_MM_SESSION_SPACE' : [ 0x1278, {
'ReferenceCount' : [ 0x0, ['unsigned long']],
'u' : [ 0x4, ['__unnamed_132b']],
'SessionId' : [ 0x8, ['unsigned long']],
'SessionPageDirectoryIndex' : [ 0xc, ['unsigned long']],
'GlobalVirtualAddress' : [ 0x10, ['pointer', ['_MM_SESSION_SPACE']]],
'ProcessList' : [ 0x14, ['_LIST_ENTRY']],
'NonPagedPoolBytes' : [ 0x1c, ['unsigned long']],
'PagedPoolBytes' : [ 0x20, ['unsigned long']],
'NonPagedPoolAllocations' : [ 0x24, ['unsigned long']],
'PagedPoolAllocations' : [ 0x28, ['unsigned long']],
'NonPagablePages' : [ 0x2c, ['unsigned long']],
'CommittedPages' : [ 0x30, ['unsigned long']],
'LastProcessSwappedOutTime' : [ 0x38, ['_LARGE_INTEGER']],
'PageTables' : [ 0x40, ['pointer', ['_MMPTE']]],
'PagedPoolMutex' : [ 0x44, ['_FAST_MUTEX']],
'PagedPoolStart' : [ 0x64, ['pointer', ['void']]],
'PagedPoolEnd' : [ 0x68, ['pointer', ['void']]],
'PagedPoolBasePde' : [ 0x6c, ['pointer', ['_MMPTE']]],
'PagedPoolInfo' : [ 0x70, ['_MM_PAGED_POOL_INFO']],
'Color' : [ 0x94, ['unsigned long']],
'ProcessOutSwapCount' : [ 0x98, ['unsigned long']],
'ImageList' : [ 0x9c, ['_LIST_ENTRY']],
'GlobalPteEntry' : [ 0xa4, ['pointer', ['_MMPTE']]],
'CopyOnWriteCount' : [ 0xa8, ['unsigned long']],
'SessionPoolAllocationFailures' : [ 0xac, ['array', 4, ['unsigned long']]],
'AttachCount' : [ 0xbc, ['unsigned long']],
'AttachEvent' : [ 0xc0, ['_KEVENT']],
'LastProcess' : [ 0xd0, ['pointer', ['_EPROCESS']]],
'Vm' : [ 0xd8, ['_MMSUPPORT']],
'Wsle' : [ 0x118, ['pointer', ['_MMWSLE']]],
'WsLock' : [ 0x11c, ['_ERESOURCE']],
'WsListEntry' : [ 0x154, ['_LIST_ENTRY']],
'Session' : [ 0x15c, ['_MMSESSION']],
'Win32KDriverObject' : [ 0x198, ['_DRIVER_OBJECT']],
'WorkingSetLockOwner' : [ 0x240, ['pointer', ['_ETHREAD']]],
'PagedPool' : [ 0x244, ['_POOL_DESCRIPTOR']],
'ProcessReferenceToSession' : [ 0x126c, ['long']],
'LocaleId' : [ 0x1270, ['unsigned long']],
} ],
'_PEB' : [ 0x210, {
'InheritedAddressSpace' : [ 0x0, ['unsigned char']],
'ReadImageFileExecOptions' : [ 0x1, ['unsigned char']],
'BeingDebugged' : [ 0x2, ['unsigned char']],
'SpareBool' : [ 0x3, ['unsigned char']],
'Mutant' : [ 0x4, ['pointer', ['void']]],
'ImageBaseAddress' : [ 0x8, ['pointer', ['void']]],
'Ldr' : [ 0xc, ['pointer', ['_PEB_LDR_DATA']]],
'ProcessParameters' : [ 0x10, ['pointer', ['_RTL_USER_PROCESS_PARAMETERS']]],
'SubSystemData' : [ 0x14, ['pointer', ['void']]],
'ProcessHeap' : [ 0x18, ['pointer', ['void']]],
'FastPebLock' : [ 0x1c, ['pointer', ['_RTL_CRITICAL_SECTION']]],
'FastPebLockRoutine' : [ 0x20, ['pointer', ['void']]],
'FastPebUnlockRoutine' : [ 0x24, ['pointer', ['void']]],
'EnvironmentUpdateCount' : [ 0x28, ['unsigned long']],
'KernelCallbackTable' : [ 0x2c, ['pointer', ['void']]],
'SystemReserved' : [ 0x30, ['array', 1, ['unsigned long']]],
'AtlThunkSListPtr32' : [ 0x34, ['unsigned long']],
'FreeList' : [ 0x38, ['pointer', ['_PEB_FREE_BLOCK']]],
'TlsExpansionCounter' : [ 0x3c, ['unsigned long']],
'TlsBitmap' : [ 0x40, ['pointer', ['void']]],
'TlsBitmapBits' : [ 0x44, ['array', 2, ['unsigned long']]],
'ReadOnlySharedMemoryBase' : [ 0x4c, ['pointer', ['void']]],
'ReadOnlySharedMemoryHeap' : [ 0x50, ['pointer', ['void']]],
'ReadOnlyStaticServerData' : [ 0x54, ['pointer', ['pointer', ['void']]]],
'AnsiCodePageData' : [ 0x58, ['pointer', ['void']]],
'OemCodePageData' : [ 0x5c, ['pointer', ['void']]],
'UnicodeCaseTableData' : [ 0x60, ['pointer', ['void']]],
'NumberOfProcessors' : [ 0x64, ['unsigned long']],
'NtGlobalFlag' : [ 0x68, ['unsigned long']],
'CriticalSectionTimeout' : [ 0x70, ['_LARGE_INTEGER']],
'HeapSegmentReserve' : [ 0x78, ['unsigned long']],
'HeapSegmentCommit' : [ 0x7c, ['unsigned long']],
'HeapDeCommitTotalFreeThreshold' : [ 0x80, ['unsigned long']],
'HeapDeCommitFreeBlockThreshold' : [ 0x84, ['unsigned long']],
'NumberOfHeaps' : [ 0x88, ['unsigned long']],
'MaximumNumberOfHeaps' : [ 0x8c, ['unsigned long']],
'ProcessHeaps' : [ 0x90, ['pointer', ['pointer', ['void']]]],
'GdiSharedHandleTable' : [ 0x94, ['pointer', ['void']]],
'ProcessStarterHelper' : [ 0x98, ['pointer', ['void']]],
'GdiDCAttributeList' : [ 0x9c, ['unsigned long']],
'LoaderLock' : [ 0xa0, ['pointer', ['void']]],
'OSMajorVersion' : [ 0xa4, ['unsigned long']],
'OSMinorVersion' : [ 0xa8, ['unsigned long']],
'OSBuildNumber' : [ 0xac, ['unsigned short']],
'OSCSDVersion' : [ 0xae, ['unsigned short']],
'OSPlatformId' : [ 0xb0, ['unsigned long']],
'ImageSubsystem' : [ 0xb4, ['unsigned long']],
'ImageSubsystemMajorVersion' : [ 0xb8, ['unsigned long']],
'ImageSubsystemMinorVersion' : [ 0xbc, ['unsigned long']],
'ImageProcessAffinityMask' : [ 0xc0, ['unsigned long']],
'GdiHandleBuffer' : [ 0xc4, ['array', 34, ['unsigned long']]],
'PostProcessInitRoutine' : [ 0x14c, ['pointer', ['void']]],
'TlsExpansionBitmap' : [ 0x150, ['pointer', ['void']]],
'TlsExpansionBitmapBits' : [ 0x154, ['array', 32, ['unsigned long']]],
'SessionId' : [ 0x1d4, ['unsigned long']],
'AppCompatFlags' : [ 0x1d8, ['_ULARGE_INTEGER']],
'AppCompatFlagsUser' : [ 0x1e0, ['_ULARGE_INTEGER']],
'pShimData' : [ 0x1e8, ['pointer', ['void']]],
'AppCompatInfo' : [ 0x1ec, ['pointer', ['void']]],
'CSDVersion' : [ 0x1f0, ['_UNICODE_STRING']],
'ActivationContextData' : [ 0x1f8, ['pointer', ['void']]],
'ProcessAssemblyStorageMap' : [ 0x1fc, ['pointer', ['void']]],
'SystemDefaultActivationContextData' : [ 0x200, ['pointer', ['void']]],
'SystemAssemblyStorageMap' : [ 0x204, ['pointer', ['void']]],
'MinimumStackCommit' : [ 0x208, ['unsigned long']],
} ],
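# _HEAP_FREE_ENTRY reuses the _HEAP_ENTRY header: Size/PreviousSize overlap
# SubSegmentCode at offset 0x0 (a union), and FreeList links the free block
# into the heap's free lists.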
'_HEAP_FREE_ENTRY' : [ 0x10, {
'Size' : [ 0x0, ['unsigned short']],
'PreviousSize' : [ 0x2, ['unsigned short']],
'SubSegmentCode' : [ 0x0, ['pointer', ['void']]],
'SmallTagIndex' : [ 0x4, ['unsigned char']],
'Flags' : [ 0x5, ['unsigned char']],
'UnusedBytes' : [ 0x6, ['unsigned char']],
'SegmentIndex' : [ 0x7, ['unsigned char']],
'FreeList' : [ 0x8, ['_LIST_ENTRY']],
} ],
'_ERESOURCE' : [ 0x38, {
'SystemResourcesList' : [ 0x0, ['_LIST_ENTRY']],
'OwnerTable' : [ 0x8, ['pointer', ['_OWNER_ENTRY']]],
'ActiveCount' : [ 0xc, ['short']],
'Flag' : [ 0xe, ['unsigned short']],
'SharedWaiters' : [ 0x10, ['pointer', ['_KSEMAPHORE']]],
'ExclusiveWaiters' : [ 0x14, ['pointer', ['_KEVENT']]],
'OwnerThreads' : [ 0x18, ['array', 2, ['_OWNER_ENTRY']]],
'ContentionCount' : [ 0x28, ['unsigned long']],
'NumberOfSharedWaiters' : [ 0x2c, ['unsigned short']],
'NumberOfExclusiveWaiters' : [ 0x2e, ['unsigned short']],
'Address' : [ 0x30, ['pointer', ['void']]],
'CreatorBackTraceIndex' : [ 0x30, ['unsigned long']],
'SpinLock' : [ 0x34, ['unsigned long']],
} ],
'_DBGKD_GET_CONTEXT' : [ 0x4, {
'Unused' : [ 0x0, ['unsigned long']],
} ],
'_MMPTE_SOFTWARE' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'PageFileLow' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type='unsigned long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'PageFileHigh' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
} ],
'_IO_RESOURCE_REQUIREMENTS_LIST' : [ 0x48, {
'ListSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0x8, ['unsigned long']],
'SlotNumber' : [ 0xc, ['unsigned long']],
'Reserved' : [ 0x10, ['array', 3, ['unsigned long']]],
'AlternativeLists' : [ 0x1c, ['unsigned long']],
'List' : [ 0x20, ['array', 1, ['_IO_RESOURCE_LIST']]],
} ],
'_CACHE_UNINITIALIZE_EVENT' : [ 0x14, {
'Next' : [ 0x0, ['pointer', ['_CACHE_UNINITIALIZE_EVENT']]],
'Event' : [ 0x4, ['_KEVENT']],
} ],
'_CM_RESOURCE_LIST' : [ 0x24, {
'Count' : [ 0x0, ['unsigned long']],
'List' : [ 0x4, ['array', 1, ['_CM_FULL_RESOURCE_DESCRIPTOR']]],
} ],
'_CM_FULL_RESOURCE_DESCRIPTOR' : [ 0x20, {
'InterfaceType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0x4, ['unsigned long']],
'PartialResourceList' : [ 0x8, ['_CM_PARTIAL_RESOURCE_LIST']],
} ],
'_EPROCESS_QUOTA_ENTRY' : [ 0x10, {
'Usage' : [ 0x0, ['unsigned long']],
'Limit' : [ 0x4, ['unsigned long']],
'Peak' : [ 0x8, ['unsigned long']],
'Return' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_1362' : [ 0x50, {
'CellData' : [ 0x0, ['_CELL_DATA']],
'List' : [ 0x0, ['array', 1, ['unsigned long']]],
} ],
'_CM_CACHED_VALUE_INDEX' : [ 0x54, {
'CellIndex' : [ 0x0, ['unsigned long']],
'Data' : [ 0x4, ['__unnamed_1362']],
} ],
'_WMI_BUFFER_HEADER' : [ 0x48, {
'Wnode' : [ 0x0, ['_WNODE_HEADER']],
'Reserved1' : [ 0x0, ['unsigned long long']],
'Reserved2' : [ 0x8, ['unsigned long long']],
'Reserved3' : [ 0x10, ['_LARGE_INTEGER']],
'Alignment' : [ 0x18, ['pointer', ['void']]],
'SlistEntry' : [ 0x1c, ['_SINGLE_LIST_ENTRY']],
'Entry' : [ 0x18, ['_LIST_ENTRY']],
'ReferenceCount' : [ 0x0, ['long']],
'SavedOffset' : [ 0x4, ['unsigned long']],
'CurrentOffset' : [ 0x8, ['unsigned long']],
'UsePerfClock' : [ 0xc, ['unsigned long']],
'TimeStamp' : [ 0x10, ['_LARGE_INTEGER']],
'Guid' : [ 0x18, ['_GUID']],
'ClientContext' : [ 0x28, ['_WMI_CLIENT_CONTEXT']],
'State' : [ 0x2c, ['_WMI_BUFFER_STATE']],
'Flags' : [ 0x2c, ['unsigned long']],
'Offset' : [ 0x30, ['unsigned long']],
'EventsLost' : [ 0x34, ['unsigned long']],
'InstanceGuid' : [ 0x38, ['_GUID']],
'LoggerContext' : [ 0x38, ['pointer', ['void']]],
'GlobalEntry' : [ 0x3c, ['_SINGLE_LIST_ENTRY']],
} ],
'_KSEMAPHORE' : [ 0x14, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'Limit' : [ 0x10, ['long']],
} ],
'_PROCESSOR_POWER_STATE' : [ 0x120, {
'IdleFunction' : [ 0x0, ['pointer', ['void']]],
'Idle0KernelTimeLimit' : [ 0x4, ['unsigned long']],
'Idle0LastTime' : [ 0x8, ['unsigned long']],
'IdleHandlers' : [ 0xc, ['pointer', ['void']]],
'IdleState' : [ 0x10, ['pointer', ['void']]],
'IdleHandlersCount' : [ 0x14, ['unsigned long']],
'LastCheck' : [ 0x18, ['unsigned long long']],
'IdleTimes' : [ 0x20, ['PROCESSOR_IDLE_TIMES']],
'IdleTime1' : [ 0x40, ['unsigned long']],
'PromotionCheck' : [ 0x44, ['unsigned long']],
'IdleTime2' : [ 0x48, ['unsigned long']],
'CurrentThrottle' : [ 0x4c, ['unsigned char']],
'ThermalThrottleLimit' : [ 0x4d, ['unsigned char']],
'CurrentThrottleIndex' : [ 0x4e, ['unsigned char']],
'ThermalThrottleIndex' : [ 0x4f, ['unsigned char']],
'LastKernelUserTime' : [ 0x50, ['unsigned long']],
'LastIdleThreadKernelTime' : [ 0x54, ['unsigned long']],
'PackageIdleStartTime' : [ 0x58, ['unsigned long']],
'PackageIdleTime' : [ 0x5c, ['unsigned long']],
'DebugCount' : [ 0x60, ['unsigned long']],
'LastSysTime' : [ 0x64, ['unsigned long']],
'TotalIdleStateTime' : [ 0x68, ['array', 3, ['unsigned long long']]],
'TotalIdleTransitions' : [ 0x80, ['array', 3, ['unsigned long']]],
'PreviousC3StateTime' : [ 0x90, ['unsigned long long']],
'KneeThrottleIndex' : [ 0x98, ['unsigned char']],
'ThrottleLimitIndex' : [ 0x99, ['unsigned char']],
'PerfStatesCount' : [ 0x9a, ['unsigned char']],
'ProcessorMinThrottle' : [ 0x9b, ['unsigned char']],
'ProcessorMaxThrottle' : [ 0x9c, ['unsigned char']],
'EnableIdleAccounting' : [ 0x9d, ['unsigned char']],
'LastC3Percentage' : [ 0x9e, ['unsigned char']],
'LastAdjustedBusyPercentage' : [ 0x9f, ['unsigned char']],
'PromotionCount' : [ 0xa0, ['unsigned long']],
'DemotionCount' : [ 0xa4, ['unsigned long']],
'ErrorCount' : [ 0xa8, ['unsigned long']],
'RetryCount' : [ 0xac, ['unsigned long']],
'Flags' : [ 0xb0, ['unsigned long']],
'PerfCounterFrequency' : [ 0xb8, ['_LARGE_INTEGER']],
'PerfTickCount' : [ 0xc0, ['unsigned long']],
'PerfTimer' : [ 0xc8, ['_KTIMER']],
'PerfDpc' : [ 0xf0, ['_KDPC']],
'PerfStates' : [ 0x110, ['pointer', ['PROCESSOR_PERF_STATE']]],
'PerfSetThrottle' : [ 0x114, ['pointer', ['void']]],
'LastC3KernelUserTime' : [ 0x118, ['unsigned long']],
'LastPackageIdleTime' : [ 0x11c, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_MSR' : [ 0xc, {
'Msr' : [ 0x0, ['unsigned long']],
'DataValueLow' : [ 0x4, ['unsigned long']],
'DataValueHigh' : [ 0x8, ['unsigned long']],
} ],
'_MMPFNENTRY' : [ 0x4, {
'Modified' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ReadInProgress' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'WriteInProgress' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'PrototypePte' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'PageColor' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 7, native_type='unsigned long')]],
'ParityError' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'PageLocation' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 11, native_type='unsigned long')]],
'RemovalRequested' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'CacheAttribute' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 14, native_type='unsigned long')]],
'Rom' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'LockCharged' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'DontUse' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
} ],
'_IO_COUNTERS' : [ 0x30, {
'ReadOperationCount' : [ 0x0, ['unsigned long long']],
'WriteOperationCount' : [ 0x8, ['unsigned long long']],
'OtherOperationCount' : [ 0x10, ['unsigned long long']],
'ReadTransferCount' : [ 0x18, ['unsigned long long']],
'WriteTransferCount' : [ 0x20, ['unsigned long long']],
'OtherTransferCount' : [ 0x28, ['unsigned long long']],
} ],
'_KTSS' : [ 0x20ac, {
'Backlink' : [ 0x0, ['unsigned short']],
'Reserved0' : [ 0x2, ['unsigned short']],
'Esp0' : [ 0x4, ['unsigned long']],
'Ss0' : [ 0x8, ['unsigned short']],
'Reserved1' : [ 0xa, ['unsigned short']],
'NotUsed1' : [ 0xc, ['array', 4, ['unsigned long']]],
'CR3' : [ 0x1c, ['unsigned long']],
'Eip' : [ 0x20, ['unsigned long']],
'EFlags' : [ 0x24, ['unsigned long']],
'Eax' : [ 0x28, ['unsigned long']],
'Ecx' : [ 0x2c, ['unsigned long']],
'Edx' : [ 0x30, ['unsigned long']],
'Ebx' : [ 0x34, ['unsigned long']],
'Esp' : [ 0x38, ['unsigned long']],
'Ebp' : [ 0x3c, ['unsigned long']],
'Esi' : [ 0x40, ['unsigned long']],
'Edi' : [ 0x44, ['unsigned long']],
'Es' : [ 0x48, ['unsigned short']],
'Reserved2' : [ 0x4a, ['unsigned short']],
'Cs' : [ 0x4c, ['unsigned short']],
'Reserved3' : [ 0x4e, ['unsigned short']],
'Ss' : [ 0x50, ['unsigned short']],
'Reserved4' : [ 0x52, ['unsigned short']],
'Ds' : [ 0x54, ['unsigned short']],
'Reserved5' : [ 0x56, ['unsigned short']],
'Fs' : [ 0x58, ['unsigned short']],
'Reserved6' : [ 0x5a, ['unsigned short']],
'Gs' : [ 0x5c, ['unsigned short']],
'Reserved7' : [ 0x5e, ['unsigned short']],
'LDT' : [ 0x60, ['unsigned short']],
'Reserved8' : [ 0x62, ['unsigned short']],
'Flags' : [ 0x64, ['unsigned short']],
'IoMapBase' : [ 0x66, ['unsigned short']],
'IoMaps' : [ 0x68, ['array', 1, ['_KiIoAccessMap']]],
'IntDirectionMap' : [ 0x208c, ['array', 32, ['unsigned char']]],
} ],
'_DBGKD_QUERY_MEMORY' : [ 0x18, {
'Address' : [ 0x0, ['unsigned long long']],
'Reserved' : [ 0x8, ['unsigned long long']],
'AddressSpace' : [ 0x10, ['unsigned long']],
'Flags' : [ 0x14, ['unsigned long']],
} ],
'_KIDTENTRY' : [ 0x8, {
'Offset' : [ 0x0, ['unsigned short']],
'Selector' : [ 0x2, ['unsigned short']],
'Access' : [ 0x4, ['unsigned short']],
'ExtendedOffset' : [ 0x6, ['unsigned short']],
} ],
'_DEVICE_OBJECT_POWER_EXTENSION' : [ 0x4c, {
'IdleCount' : [ 0x0, ['unsigned long']],
'ConservationIdleTime' : [ 0x4, ['unsigned long']],
'PerformanceIdleTime' : [ 0x8, ['unsigned long']],
'DeviceObject' : [ 0xc, ['pointer', ['_DEVICE_OBJECT']]],
'IdleList' : [ 0x10, ['_LIST_ENTRY']],
'DeviceType' : [ 0x18, ['unsigned char']],
'State' : [ 0x1c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'NotifySourceList' : [ 0x20, ['_LIST_ENTRY']],
'NotifyTargetList' : [ 0x28, ['_LIST_ENTRY']],
'PowerChannelSummary' : [ 0x30, ['_POWER_CHANNEL_SUMMARY']],
'Volume' : [ 0x44, ['_LIST_ENTRY']],
} ],
'_MMSUPPORT_FLAGS' : [ 0x4, {
'SessionSpace' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'BeingTrimmed' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'SessionLeader' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'TrimHard' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'WorkingSetHard' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'AddressSpaceBeingDeleted' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Available' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 16, native_type='unsigned long')]],
'AllowWorkingSetAdjustment' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 24, native_type='unsigned long')]],
'MemoryPriority' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'_TERMINATION_PORT' : [ 0x8, {
'Next' : [ 0x0, ['pointer', ['_TERMINATION_PORT']]],
'Port' : [ 0x4, ['pointer', ['void']]],
} ],
'_SYSTEM_POWER_POLICY' : [ 0xe8, {
'Revision' : [ 0x0, ['unsigned long']],
'PowerButton' : [ 0x4, ['POWER_ACTION_POLICY']],
'SleepButton' : [ 0x10, ['POWER_ACTION_POLICY']],
'LidClose' : [ 0x1c, ['POWER_ACTION_POLICY']],
'LidOpenWake' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'Reserved' : [ 0x2c, ['unsigned long']],
'Idle' : [ 0x30, ['POWER_ACTION_POLICY']],
'IdleTimeout' : [ 0x3c, ['unsigned long']],
'IdleSensitivity' : [ 0x40, ['unsigned char']],
'DynamicThrottle' : [ 0x41, ['unsigned char']],
'Spare2' : [ 0x42, ['array', 2, ['unsigned char']]],
'MinSleep' : [ 0x44, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'MaxSleep' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'ReducedLatencySleep' : [ 0x4c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'WinLogonFlags' : [ 0x50, ['unsigned long']],
'Spare3' : [ 0x54, ['unsigned long']],
'DozeS4Timeout' : [ 0x58, ['unsigned long']],
'BroadcastCapacityResolution' : [ 0x5c, ['unsigned long']],
'DischargePolicy' : [ 0x60, ['array', 4, ['SYSTEM_POWER_LEVEL']]],
'VideoTimeout' : [ 0xc0, ['unsigned long']],
'VideoDimDisplay' : [ 0xc4, ['unsigned char']],
'VideoReserved' : [ 0xc8, ['array', 3, ['unsigned long']]],
'SpindownTimeout' : [ 0xd4, ['unsigned long']],
'OptimizeForPower' : [ 0xd8, ['unsigned char']],
'FanThrottleTolerance' : [ 0xd9, ['unsigned char']],
'ForcedThrottle' : [ 0xda, ['unsigned char']],
'MinThrottle' : [ 0xdb, ['unsigned char']],
'OverThrottled' : [ 0xdc, ['POWER_ACTION_POLICY']],
} ],
'_POP_THERMAL_ZONE' : [ 0xd0, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'State' : [ 0x8, ['unsigned char']],
'Flags' : [ 0x9, ['unsigned char']],
'Mode' : [ 0xa, ['unsigned char']],
'PendingMode' : [ 0xb, ['unsigned char']],
'ActivePoint' : [ 0xc, ['unsigned char']],
'PendingActivePoint' : [ 0xd, ['unsigned char']],
'Throttle' : [ 0x10, ['long']],
'LastTime' : [ 0x18, ['unsigned long long']],
'SampleRate' : [ 0x20, ['unsigned long']],
'LastTemp' : [ 0x24, ['unsigned long']],
'PassiveTimer' : [ 0x28, ['_KTIMER']],
'PassiveDpc' : [ 0x50, ['_KDPC']],
'OverThrottled' : [ 0x70, ['_POP_ACTION_TRIGGER']],
'Irp' : [ 0x7c, ['pointer', ['_IRP']]],
'Info' : [ 0x80, ['_THERMAL_INFORMATION']],
} ],
'_DBGKD_CONTINUE2' : [ 0x20, {
'ContinueStatus' : [ 0x0, ['long']],
'ControlSet' : [ 0x4, ['_X86_DBGKD_CONTROL_SET']],
'AnyControlSet' : [ 0x4, ['_DBGKD_ANY_CONTROL_SET']],
} ],
'_PROCESSOR_POWER_POLICY' : [ 0x4c, {
'Revision' : [ 0x0, ['unsigned long']],
'DynamicThrottle' : [ 0x4, ['unsigned char']],
'Spare' : [ 0x5, ['array', 3, ['unsigned char']]],
'DisableCStates' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Reserved' : [ 0x8, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
'PolicyCount' : [ 0xc, ['unsigned long']],
'Policy' : [ 0x10, ['array', 3, ['_PROCESSOR_POWER_POLICY_INFO']]],
} ],
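# _IMAGE_DOS_HEADER is the classic DOS stub header at the start of a PE
# image: e_magic holds the 'MZ' signature (0x5a4d) and e_lfanew (0x3c) is
# the file offset of the PE ("NT") headers.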
'_IMAGE_DOS_HEADER' : [ 0x40, {
'e_magic' : [ 0x0, ['unsigned short']],
'e_cblp' : [ 0x2, ['unsigned short']],
'e_cp' : [ 0x4, ['unsigned short']],
'e_crlc' : [ 0x6, ['unsigned short']],
'e_cparhdr' : [ 0x8, ['unsigned short']],
'e_minalloc' : [ 0xa, ['unsigned short']],
'e_maxalloc' : [ 0xc, ['unsigned short']],
'e_ss' : [ 0xe, ['unsigned short']],
'e_sp' : [ 0x10, ['unsigned short']],
'e_csum' : [ 0x12, ['unsigned short']],
'e_ip' : [ 0x14, ['unsigned short']],
'e_cs' : [ 0x16, ['unsigned short']],
'e_lfarlc' : [ 0x18, ['unsigned short']],
'e_ovno' : [ 0x1a, ['unsigned short']],
'e_res' : [ 0x1c, ['array', 4, ['unsigned short']]],
'e_oemid' : [ 0x24, ['unsigned short']],
'e_oeminfo' : [ 0x26, ['unsigned short']],
'e_res2' : [ 0x28, ['array', 10, ['unsigned short']]],
'e_lfanew' : [ 0x3c, ['long']],
} ],
'_OWNER_ENTRY' : [ 0x8, {
'OwnerThread' : [ 0x0, ['unsigned long']],
'OwnerCount' : [ 0x4, ['long']],
'TableSize' : [ 0x4, ['unsigned long']],
} ],
'_HEAP_VIRTUAL_ALLOC_ENTRY' : [ 0x20, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'ExtraStuff' : [ 0x8, ['_HEAP_ENTRY_EXTRA']],
'CommitSize' : [ 0x10, ['unsigned long']],
'ReserveSize' : [ 0x14, ['unsigned long']],
'BusyBlock' : [ 0x18, ['_HEAP_ENTRY']],
} ],
'_RTL_ATOM_TABLE' : [ 0x44, {
'Signature' : [ 0x0, ['unsigned long']],
'CriticalSection' : [ 0x4, ['_RTL_CRITICAL_SECTION']],
'RtlHandleTable' : [ 0x1c, ['_RTL_HANDLE_TABLE']],
'NumberOfBuckets' : [ 0x3c, ['unsigned long']],
'Buckets' : [ 0x40, ['array', 1, ['pointer', ['_RTL_ATOM_TABLE_ENTRY']]]],
} ],
'_FNSAVE_FORMAT' : [ 0x6c, {
'ControlWord' : [ 0x0, ['unsigned long']],
'StatusWord' : [ 0x4, ['unsigned long']],
'TagWord' : [ 0x8, ['unsigned long']],
'ErrorOffset' : [ 0xc, ['unsigned long']],
'ErrorSelector' : [ 0x10, ['unsigned long']],
'DataOffset' : [ 0x14, ['unsigned long']],
'DataSelector' : [ 0x18, ['unsigned long']],
'RegisterArea' : [ 0x1c, ['array', 80, ['unsigned char']]],
} ],
'EX_QUEUE_WORKER_INFO' : [ 0x4, {
'QueueDisabled' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'MakeThreadsAsNecessary' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'WaitMode' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'WorkerCount' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'QueueWorkerInfo' : [ 0x0, ['long']],
} ],
'SYSTEM_POWER_LEVEL' : [ 0x18, {
'Enable' : [ 0x0, ['unsigned char']],
'Spare' : [ 0x1, ['array', 3, ['unsigned char']]],
'BatteryLevel' : [ 0x4, ['unsigned long']],
'PowerPolicy' : [ 0x8, ['POWER_ACTION_POLICY']],
'MinSystemState' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'POWER_ACTION_POLICY' : [ 0xc, {
'Action' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
'Flags' : [ 0x4, ['unsigned long']],
'EventCode' : [ 0x8, ['unsigned long']],
} ],
'PROCESSOR_PERF_STATE' : [ 0x20, {
'PercentFrequency' : [ 0x0, ['unsigned char']],
'MinCapacity' : [ 0x1, ['unsigned char']],
'Power' : [ 0x2, ['unsigned short']],
'IncreaseLevel' : [ 0x4, ['unsigned char']],
'DecreaseLevel' : [ 0x5, ['unsigned char']],
'Flags' : [ 0x6, ['unsigned short']],
'IncreaseTime' : [ 0x8, ['unsigned long']],
'DecreaseTime' : [ 0xc, ['unsigned long']],
'IncreaseCount' : [ 0x10, ['unsigned long']],
'DecreaseCount' : [ 0x14, ['unsigned long']],
'PerformanceTime' : [ 0x18, ['unsigned long long']],
} ],
'PROCESSOR_IDLE_TIMES' : [ 0x20, {
'StartTime' : [ 0x0, ['unsigned long long']],
'EndTime' : [ 0x8, ['unsigned long long']],
'IdleHandlerReserved' : [ 0x10, ['array', 4, ['unsigned long']]],
} ],
'_IMAGE_ROM_OPTIONAL_HEADER' : [ 0x38, {
'Magic' : [ 0x0, ['unsigned short']],
'MajorLinkerVersion' : [ 0x2, ['unsigned char']],
'MinorLinkerVersion' : [ 0x3, ['unsigned char']],
'SizeOfCode' : [ 0x4, ['unsigned long']],
'SizeOfInitializedData' : [ 0x8, ['unsigned long']],
'SizeOfUninitializedData' : [ 0xc, ['unsigned long']],
'AddressOfEntryPoint' : [ 0x10, ['unsigned long']],
'BaseOfCode' : [ 0x14, ['unsigned long']],
'BaseOfData' : [ 0x18, ['unsigned long']],
'BaseOfBss' : [ 0x1c, ['unsigned long']],
'GprMask' : [ 0x20, ['unsigned long']],
'CprMask' : [ 0x24, ['array', 4, ['unsigned long']]],
'GpValue' : [ 0x34, ['unsigned long']],
} ],
'_MMPTE_LIST' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'OneEntry' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'filler0' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 10, native_type='unsigned long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'filler1' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'NextEntry' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
} ],
'_CMHIVE' : [ 0x49c, {
'Hive' : [ 0x0, ['_HHIVE']],
'FileHandles' : [ 0x210, ['array', 3, ['pointer', ['void']]]],
'NotifyList' : [ 0x21c, ['_LIST_ENTRY']],
'HiveList' : [ 0x224, ['_LIST_ENTRY']],
'HiveLock' : [ 0x22c, ['pointer', ['_FAST_MUTEX']]],
'ViewLock' : [ 0x230, ['pointer', ['_FAST_MUTEX']]],
'LRUViewListHead' : [ 0x234, ['_LIST_ENTRY']],
'PinViewListHead' : [ 0x23c, ['_LIST_ENTRY']],
'FileObject' : [ 0x244, ['pointer', ['_FILE_OBJECT']]],
'FileFullPath' : [ 0x248, ['_UNICODE_STRING']],
'FileUserName' : [ 0x250, ['_UNICODE_STRING']],
'MappedViews' : [ 0x258, ['unsigned short']],
'PinnedViews' : [ 0x25a, ['unsigned short']],
'UseCount' : [ 0x25c, ['unsigned long']],
'SecurityCount' : [ 0x260, ['unsigned long']],
'SecurityCacheSize' : [ 0x264, ['unsigned long']],
'SecurityHitHint' : [ 0x268, ['long']],
'SecurityCache' : [ 0x26c, ['pointer', ['_CM_KEY_SECURITY_CACHE_ENTRY']]],
'SecurityHash' : [ 0x270, ['array', 64, ['_LIST_ENTRY']]],
'UnloadEvent' : [ 0x470, ['pointer', ['_KEVENT']]],
'RootKcb' : [ 0x474, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
'Frozen' : [ 0x478, ['unsigned char']],
'UnloadWorkItem' : [ 0x47c, ['pointer', ['_WORK_QUEUE_ITEM']]],
'GrowOnlyMode' : [ 0x480, ['unsigned char']],
'GrowOffset' : [ 0x484, ['unsigned long']],
'KcbConvertListHead' : [ 0x488, ['_LIST_ENTRY']],
'KnodeConvertListHead' : [ 0x490, ['_LIST_ENTRY']],
'CellRemapArray' : [ 0x498, ['pointer', ['_CM_CELL_REMAP_BLOCK']]],
} ],
'_HANDLE_TRACE_DEBUG_INFO' : [ 0x50004, {
'CurrentStackIndex' : [ 0x0, ['unsigned long']],
'TraceDb' : [ 0x4, ['array', 4096, ['_HANDLE_TRACE_DB_ENTRY']]],
} ],
'_HHIVE' : [ 0x210, {
'Signature' : [ 0x0, ['unsigned long']],
'GetCellRoutine' : [ 0x4, ['pointer', ['void']]],
'ReleaseCellRoutine' : [ 0x8, ['pointer', ['void']]],
'Allocate' : [ 0xc, ['pointer', ['void']]],
'Free' : [ 0x10, ['pointer', ['void']]],
'FileSetSize' : [ 0x14, ['pointer', ['void']]],
'FileWrite' : [ 0x18, ['pointer', ['void']]],
'FileRead' : [ 0x1c, ['pointer', ['void']]],
'FileFlush' : [ 0x20, ['pointer', ['void']]],
'BaseBlock' : [ 0x24, ['pointer', ['_HBASE_BLOCK']]],
'DirtyVector' : [ 0x28, ['_RTL_BITMAP']],
'DirtyCount' : [ 0x30, ['unsigned long']],
'DirtyAlloc' : [ 0x34, ['unsigned long']],
'RealWrites' : [ 0x38, ['unsigned char']],
'Cluster' : [ 0x3c, ['unsigned long']],
'Flat' : [ 0x40, ['unsigned char']],
'ReadOnly' : [ 0x41, ['unsigned char']],
'Log' : [ 0x42, ['unsigned char']],
'HiveFlags' : [ 0x44, ['unsigned long']],
'LogSize' : [ 0x48, ['unsigned long']],
'RefreshCount' : [ 0x4c, ['unsigned long']],
'StorageTypeCount' : [ 0x50, ['unsigned long']],
'Version' : [ 0x54, ['unsigned long']],
'Storage' : [ 0x58, ['array', 2, ['_DUAL']]],
} ],
'_PAGEFAULT_HISTORY' : [ 0x18, {
'CurrentIndex' : [ 0x0, ['unsigned long']],
'MaxIndex' : [ 0x4, ['unsigned long']],
'SpinLock' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['pointer', ['void']]],
'WatchInfo' : [ 0x10, ['array', 1, ['_PROCESS_WS_WATCH_INFORMATION']]],
} ],
'_RTL_ATOM_TABLE_ENTRY' : [ 0x10, {
'HashLink' : [ 0x0, ['pointer', ['_RTL_ATOM_TABLE_ENTRY']]],
'HandleIndex' : [ 0x4, ['unsigned short']],
'Atom' : [ 0x6, ['unsigned short']],
'ReferenceCount' : [ 0x8, ['unsigned short']],
'Flags' : [ 0xa, ['unsigned char']],
'NameLength' : [ 0xb, ['unsigned char']],
'Name' : [ 0xc, ['array', 1, ['unsigned short']]],
} ],
'_MM_SESSION_SPACE_FLAGS' : [ 0x4, {
'Initialized' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Filler0' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 4, native_type='unsigned long')]],
'HasWsLock' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'DeletePending' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Filler' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 32, native_type='unsigned long')]],
} ],
'_CM_PARTIAL_RESOURCE_LIST' : [ 0x18, {
'Version' : [ 0x0, ['unsigned short']],
'Revision' : [ 0x2, ['unsigned short']],
'Count' : [ 0x4, ['unsigned long']],
'PartialDescriptors' : [ 0x8, ['array', 1, ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
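# _DRIVER_OBJECT describes a loaded kernel driver: DriverStart/DriverSize
# bound the driver image in memory, and MajorFunction is the dispatch table
# of IRP_MJ_MAXIMUM_FUNCTION + 1 == 28 IRP handler pointers.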
'_DRIVER_OBJECT' : [ 0xa8, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceObject' : [ 0x4, ['pointer', ['_DEVICE_OBJECT']]],
'Flags' : [ 0x8, ['unsigned long']],
'DriverStart' : [ 0xc, ['pointer', ['void']]],
'DriverSize' : [ 0x10, ['unsigned long']],
'DriverSection' : [ 0x14, ['pointer', ['void']]],
'DriverExtension' : [ 0x18, ['pointer', ['_DRIVER_EXTENSION']]],
'DriverName' : [ 0x1c, ['_UNICODE_STRING']],
'HardwareDatabase' : [ 0x24, ['pointer', ['_UNICODE_STRING']]],
'FastIoDispatch' : [ 0x28, ['pointer', ['_FAST_IO_DISPATCH']]],
'DriverInit' : [ 0x2c, ['pointer', ['void']]],
'DriverStartIo' : [ 0x30, ['pointer', ['void']]],
'DriverUnload' : [ 0x34, ['pointer', ['void']]],
'MajorFunction' : [ 0x38, ['array', 28, ['pointer', ['void']]]],
} ],
'_WMI_BUFFER_STATE' : [ 0x4, {
'Free' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'InUse' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Flush' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
} ],
'_MMFREE_POOL_ENTRY' : [ 0x14, {
'List' : [ 0x0, ['_LIST_ENTRY']],
'Size' : [ 0x8, ['unsigned long']],
'Signature' : [ 0xc, ['unsigned long']],
'Owner' : [ 0x10, ['pointer', ['_MMFREE_POOL_ENTRY']]],
} ],
'__unnamed_143b' : [ 0x28, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Wcb' : [ 0x0, ['_WAIT_CONTEXT_BLOCK']],
} ],
'_DEVICE_OBJECT' : [ 0xb8, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'ReferenceCount' : [ 0x4, ['long']],
'DriverObject' : [ 0x8, ['pointer', ['_DRIVER_OBJECT']]],
'NextDevice' : [ 0xc, ['pointer', ['_DEVICE_OBJECT']]],
'AttachedDevice' : [ 0x10, ['pointer', ['_DEVICE_OBJECT']]],
'CurrentIrp' : [ 0x14, ['pointer', ['_IRP']]],
'Timer' : [ 0x18, ['pointer', ['_IO_TIMER']]],
'Flags' : [ 0x1c, ['unsigned long']],
'Characteristics' : [ 0x20, ['unsigned long']],
'Vpb' : [ 0x24, ['pointer', ['_VPB']]],
'DeviceExtension' : [ 0x28, ['pointer', ['void']]],
'DeviceType' : [ 0x2c, ['unsigned long']],
'StackSize' : [ 0x30, ['unsigned char']],
'Queue' : [ 0x34, ['__unnamed_143b']],
'AlignmentRequirement' : [ 0x5c, ['unsigned long']],
'DeviceQueue' : [ 0x60, ['_KDEVICE_QUEUE']],
'Dpc' : [ 0x74, ['_KDPC']],
'ActiveThreadCount' : [ 0x94, ['unsigned long']],
'SecurityDescriptor' : [ 0x98, ['pointer', ['void']]],
'DeviceLock' : [ 0x9c, ['_KEVENT']],
'SectorSize' : [ 0xac, ['unsigned short']],
'Spare1' : [ 0xae, ['unsigned short']],
'DeviceObjectExtension' : [ 0xb0, ['pointer', ['_DEVOBJ_EXTENSION']]],
'Reserved' : [ 0xb4, ['pointer', ['void']]],
} ],
'_SECTION_OBJECT_POINTERS' : [ 0xc, {
'DataSectionObject' : [ 0x0, ['pointer', ['void']]],
'SharedCacheMap' : [ 0x4, ['pointer', ['void']]],
'ImageSectionObject' : [ 0x8, ['pointer', ['void']]],
} ],
'_RTL_BITMAP' : [ 0x8, {
'SizeOfBitMap' : [ 0x0, ['unsigned long']],
'Buffer' : [ 0x4, ['pointer', ['unsigned long']]],
} ],
'_MBCB' : [ 0x80, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeIsInZone' : [ 0x2, ['short']],
'PagesToWrite' : [ 0x4, ['unsigned long']],
'DirtyPages' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
'BitmapRanges' : [ 0x10, ['_LIST_ENTRY']],
'ResumeWritePage' : [ 0x18, ['long long']],
'BitmapRange1' : [ 0x20, ['_BITMAP_RANGE']],
'BitmapRange2' : [ 0x40, ['_BITMAP_RANGE']],
'BitmapRange3' : [ 0x60, ['_BITMAP_RANGE']],
} ],
'_POWER_CHANNEL_SUMMARY' : [ 0x14, {
'Signature' : [ 0x0, ['unsigned long']],
'TotalCount' : [ 0x4, ['unsigned long']],
'D0Count' : [ 0x8, ['unsigned long']],
'NotifyList' : [ 0xc, ['_LIST_ENTRY']],
} ],
'_CM_VIEW_OF_FILE' : [ 0x24, {
'LRUViewList' : [ 0x0, ['_LIST_ENTRY']],
'PinViewList' : [ 0x8, ['_LIST_ENTRY']],
'FileOffset' : [ 0x10, ['unsigned long']],
'Size' : [ 0x14, ['unsigned long']],
'ViewAddress' : [ 0x18, ['pointer', ['unsigned long']]],
'Bcb' : [ 0x1c, ['pointer', ['void']]],
'UseCount' : [ 0x20, ['unsigned long']],
} ],
'_KDEVICE_QUEUE' : [ 0x14, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceListHead' : [ 0x4, ['_LIST_ENTRY']],
'Lock' : [ 0xc, ['unsigned long']],
'Busy' : [ 0x10, ['unsigned char']],
} ],
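# _KUSER_SHARED_DATA is the page the kernel maps read-only into every
# process (at 0x7ffe0000 in 32-bit user space), exposing tick counts,
# system time, NtSystemRoot and version fields without a system call.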
'_KUSER_SHARED_DATA' : [ 0x338, {
'TickCountLow' : [ 0x0, ['unsigned long']],
'TickCountMultiplier' : [ 0x4, ['unsigned long']],
'InterruptTime' : [ 0x8, ['_KSYSTEM_TIME']],
'SystemTime' : [ 0x14, ['_KSYSTEM_TIME']],
'TimeZoneBias' : [ 0x20, ['_KSYSTEM_TIME']],
'ImageNumberLow' : [ 0x2c, ['unsigned short']],
'ImageNumberHigh' : [ 0x2e, ['unsigned short']],
'NtSystemRoot' : [ 0x30, ['array', 260, ['unsigned short']]],
'MaxStackTraceDepth' : [ 0x238, ['unsigned long']],
'CryptoExponent' : [ 0x23c, ['unsigned long']],
'TimeZoneId' : [ 0x240, ['unsigned long']],
'Reserved2' : [ 0x244, ['array', 8, ['unsigned long']]],
'NtProductType' : [ 0x264, ['Enumeration', dict(target = 'long', choices = {1: 'NtProductWinNt', 2: 'NtProductLanManNt', 3: 'NtProductServer'})]],
'ProductTypeIsValid' : [ 0x268, ['unsigned char']],
'NtMajorVersion' : [ 0x26c, ['unsigned long']],
'NtMinorVersion' : [ 0x270, ['unsigned long']],
'ProcessorFeatures' : [ 0x274, ['array', 64, ['unsigned char']]],
'Reserved1' : [ 0x2b4, ['unsigned long']],
'Reserved3' : [ 0x2b8, ['unsigned long']],
'TimeSlip' : [ 0x2bc, ['unsigned long']],
'AlternativeArchitecture' : [ 0x2c0, ['Enumeration', dict(target = 'long', choices = {0: 'StandardDesign', 1: 'NEC98x86', 2: 'EndAlternatives'})]],
'SystemExpirationDate' : [ 0x2c8, ['_LARGE_INTEGER']],
'SuiteMask' : [ 0x2d0, ['unsigned long']],
'KdDebuggerEnabled' : [ 0x2d4, ['unsigned char']],
'NXSupportPolicy' : [ 0x2d5, ['unsigned char']],
'ActiveConsoleId' : [ 0x2d8, ['unsigned long']],
'DismountCount' : [ 0x2dc, ['unsigned long']],
'ComPlusPackage' : [ 0x2e0, ['unsigned long']],
'LastSystemRITEventTickCount' : [ 0x2e4, ['unsigned long']],
'NumberOfPhysicalPages' : [ 0x2e8, ['unsigned long']],
'SafeBootMode' : [ 0x2ec, ['unsigned char']],
'TraceLogging' : [ 0x2f0, ['unsigned long']],
'TestRetInstruction' : [ 0x2f8, ['unsigned long long']],
'SystemCall' : [ 0x300, ['unsigned long']],
'SystemCallReturn' : [ 0x304, ['unsigned long']],
'SystemCallPad' : [ 0x308, ['array', 3, ['unsigned long long']]],
'TickCount' : [ 0x320, ['_KSYSTEM_TIME']],
'TickCountQuad' : [ 0x320, ['unsigned long long']],
'Cookie' : [ 0x330, ['unsigned long']],
} ],
'_OBJECT_TYPE_INITIALIZER' : [ 0x4c, {
'Length' : [ 0x0, ['unsigned short']],
'UseDefaultObject' : [ 0x2, ['unsigned char']],
'CaseInsensitive' : [ 0x3, ['unsigned char']],
'InvalidAttributes' : [ 0x4, ['unsigned long']],
'GenericMapping' : [ 0x8, ['_GENERIC_MAPPING']],
'ValidAccessMask' : [ 0x18, ['unsigned long']],
'SecurityRequired' : [ 0x1c, ['unsigned char']],
'MaintainHandleCount' : [ 0x1d, ['unsigned char']],
'MaintainTypeList' : [ 0x1e, ['unsigned char']],
'PoolType' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'DefaultPagedPoolCharge' : [ 0x24, ['unsigned long']],
'DefaultNonPagedPoolCharge' : [ 0x28, ['unsigned long']],
'DumpProcedure' : [ 0x2c, ['pointer', ['void']]],
'OpenProcedure' : [ 0x30, ['pointer', ['void']]],
'CloseProcedure' : [ 0x34, ['pointer', ['void']]],
'DeleteProcedure' : [ 0x38, ['pointer', ['void']]],
'ParseProcedure' : [ 0x3c, ['pointer', ['void']]],
'SecurityProcedure' : [ 0x40, ['pointer', ['void']]],
'QueryNameProcedure' : [ 0x44, ['pointer', ['void']]],
'OkayToCloseProcedure' : [ 0x48, ['pointer', ['void']]],
} ],
'__unnamed_1481' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'SubsectionFlags' : [ 0x0, ['_MMSUBSECTION_FLAGS']],
} ],
'_SUBSECTION' : [ 0x20, {
'ControlArea' : [ 0x0, ['pointer', ['_CONTROL_AREA']]],
'u' : [ 0x4, ['__unnamed_1481']],
'StartingSector' : [ 0x8, ['unsigned long']],
'NumberOfFullSectors' : [ 0xc, ['unsigned long']],
'SubsectionBase' : [ 0x10, ['pointer', ['_MMPTE']]],
'UnusedPtes' : [ 0x14, ['unsigned long']],
'PtesInSubsection' : [ 0x18, ['unsigned long']],
'NextSubsection' : [ 0x1c, ['pointer', ['_SUBSECTION']]],
} ],
'_WMI_LOGGER_MODE' : [ 0x4, {
'SequentialFile' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'CircularFile' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'AppendFile' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Unused1' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned long')]],
'RealTime' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'DelayOpenFile' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'BufferOnly' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'PrivateLogger' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'AddHeader' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'UseExisting' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'UseGlobalSequence' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'UseLocalSequence' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'Unused2' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
} ],
'_NT_TIB' : [ 0x1c, {
'ExceptionList' : [ 0x0, ['pointer', ['_EXCEPTION_REGISTRATION_RECORD']]],
'StackBase' : [ 0x4, ['pointer', ['void']]],
'StackLimit' : [ 0x8, ['pointer', ['void']]],
'SubSystemTib' : [ 0xc, ['pointer', ['void']]],
'FiberData' : [ 0x10, ['pointer', ['void']]],
'Version' : [ 0x10, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x14, ['pointer', ['void']]],
'Self' : [ 0x18, ['pointer', ['_NT_TIB']]],
} ],
'__unnamed_1492' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'VadFlags' : [ 0x0, ['_MMVAD_FLAGS']],
} ],
'__unnamed_1495' : [ 0x4, {
'LongFlags2' : [ 0x0, ['unsigned long']],
'VadFlags2' : [ 0x0, ['_MMVAD_FLAGS2']],
} ],
'__unnamed_1498' : [ 0x8, {
'List' : [ 0x0, ['_LIST_ENTRY']],
'Secured' : [ 0x0, ['_MMADDRESS_LIST']],
} ],
'__unnamed_149e' : [ 0x4, {
'Banked' : [ 0x0, ['pointer', ['_MMBANKED_SECTION']]],
'ExtendedInfo' : [ 0x0, ['pointer', ['_MMEXTEND_INFO']]],
} ],
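# _MMVAD_LONG is the extended form of a Virtual Address Descriptor (VAD)
# tree node: StartingVpn/EndingVpn are virtual page numbers (address >> 12),
# and Parent/LeftChild/RightChild link the per-process VAD binary tree.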
'_MMVAD_LONG' : [ 0x34, {
'StartingVpn' : [ 0x0, ['unsigned long']],
'EndingVpn' : [ 0x4, ['unsigned long']],
'Parent' : [ 0x8, ['pointer', ['_MMVAD']]],
'LeftChild' : [ 0xc, ['pointer', ['_MMVAD']]],
'RightChild' : [ 0x10, ['pointer', ['_MMVAD']]],
'u' : [ 0x14, ['__unnamed_1492']],
'ControlArea' : [ 0x18, ['pointer', ['_CONTROL_AREA']]],
'FirstPrototypePte' : [ 0x1c, ['pointer', ['_MMPTE']]],
'LastContiguousPte' : [ 0x20, ['pointer', ['_MMPTE']]],
'u2' : [ 0x24, ['__unnamed_1495']],
'u3' : [ 0x28, ['__unnamed_1498']],
'u4' : [ 0x30, ['__unnamed_149e']],
} ],
'_MMVAD_FLAGS' : [ 0x4, {
'CommitCharge' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 19, native_type='unsigned long')]],
'PhysicalMapping' : [ 0x0, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'ImageMap' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'UserPhysicalPages' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'NoChange' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'WriteWatch' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 29, native_type='unsigned long')]],
'LargePages' : [ 0x0, ['BitField', dict(start_bit = 29, end_bit = 30, native_type='unsigned long')]],
'MemCommit' : [ 0x0, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'PrivateMemory' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
} ],
'_POOL_DESCRIPTOR' : [ 0x1028, {
'PoolType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'PoolIndex' : [ 0x4, ['unsigned long']],
'RunningAllocs' : [ 0x8, ['unsigned long']],
'RunningDeAllocs' : [ 0xc, ['unsigned long']],
'TotalPages' : [ 0x10, ['unsigned long']],
'TotalBigPages' : [ 0x14, ['unsigned long']],
'Threshold' : [ 0x18, ['unsigned long']],
'LockAddress' : [ 0x1c, ['pointer', ['void']]],
'PendingFrees' : [ 0x20, ['pointer', ['void']]],
'PendingFreeDepth' : [ 0x24, ['long']],
'ListHeads' : [ 0x28, ['array', 512, ['_LIST_ENTRY']]],
} ],
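# _HARDWARE_PTE is the x86 non-PAE page table entry: 4 bytes, with the
# physical page frame number in bits 12..31 and Valid/Write/Owner/etc. as
# the standard i386 paging control bits.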
'_HARDWARE_PTE' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Dirty' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'LargePage' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'Global' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'reserved' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
} ],
'_PEB_LDR_DATA' : [ 0x28, {
'Length' : [ 0x0, ['unsigned long']],
'Initialized' : [ 0x4, ['unsigned char']],
'SsHandle' : [ 0x8, ['pointer', ['void']]],
'InLoadOrderModuleList' : [ 0xc, ['_LIST_ENTRY']],
'InMemoryOrderModuleList' : [ 0x14, ['_LIST_ENTRY']],
'InInitializationOrderModuleList' : [ 0x1c, ['_LIST_ENTRY']],
'EntryInProgress' : [ 0x24, ['pointer', ['void']]],
} ],
'_DBGKD_GET_VERSION32' : [ 0x28, {
'MajorVersion' : [ 0x0, ['unsigned short']],
'MinorVersion' : [ 0x2, ['unsigned short']],
'ProtocolVersion' : [ 0x4, ['unsigned short']],
'Flags' : [ 0x6, ['unsigned short']],
'KernBase' : [ 0x8, ['unsigned long']],
'PsLoadedModuleList' : [ 0xc, ['unsigned long']],
'MachineType' : [ 0x10, ['unsigned short']],
'ThCallbackStack' : [ 0x12, ['unsigned short']],
'NextCallback' : [ 0x14, ['unsigned short']],
'FramePointer' : [ 0x16, ['unsigned short']],
'KiCallUserMode' : [ 0x18, ['unsigned long']],
'KeUserCallbackDispatcher' : [ 0x1c, ['unsigned long']],
'BreakpointWithStatus' : [ 0x20, ['unsigned long']],
'DebuggerDataList' : [ 0x24, ['unsigned long']],
} ],
'_MM_PAGED_POOL_INFO' : [ 0x24, {
'PagedPoolAllocationMap' : [ 0x0, ['pointer', ['_RTL_BITMAP']]],
'EndOfPagedPoolBitmap' : [ 0x4, ['pointer', ['_RTL_BITMAP']]],
'PagedPoolLargeSessionAllocationMap' : [ 0x8, ['pointer', ['_RTL_BITMAP']]],
'FirstPteForPagedPool' : [ 0xc, ['pointer', ['_MMPTE']]],
'LastPteForPagedPool' : [ 0x10, ['pointer', ['_MMPTE']]],
'NextPdeForPagedPoolExpansion' : [ 0x14, ['pointer', ['_MMPTE']]],
'PagedPoolHint' : [ 0x18, ['unsigned long']],
'PagedPoolCommit' : [ 0x1c, ['unsigned long']],
'AllocatedPagedPool' : [ 0x20, ['unsigned long']],
} ],
'_INTERLOCK_SEQ' : [ 0x8, {
'Depth' : [ 0x0, ['unsigned short']],
'FreeEntryOffset' : [ 0x2, ['unsigned short']],
'OffsetAndDepth' : [ 0x0, ['unsigned long']],
'Sequence' : [ 0x4, ['unsigned long']],
'Exchg' : [ 0x0, ['long long']],
} ],
'_VPB' : [ 0x58, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'Flags' : [ 0x4, ['unsigned short']],
'VolumeLabelLength' : [ 0x6, ['unsigned short']],
'DeviceObject' : [ 0x8, ['pointer', ['_DEVICE_OBJECT']]],
'RealDevice' : [ 0xc, ['pointer', ['_DEVICE_OBJECT']]],
'SerialNumber' : [ 0x10, ['unsigned long']],
'ReferenceCount' : [ 0x14, ['unsigned long']],
'VolumeLabel' : [ 0x18, ['array', 32, ['unsigned short']]],
} ],
'_MMSESSION' : [ 0x3c, {
'SystemSpaceViewLock' : [ 0x0, ['_FAST_MUTEX']],
'SystemSpaceViewLockPointer' : [ 0x20, ['pointer', ['_FAST_MUTEX']]],
'SystemSpaceViewStart' : [ 0x24, ['pointer', ['unsigned char']]],
'SystemSpaceViewTable' : [ 0x28, ['pointer', ['_MMVIEW']]],
'SystemSpaceHashSize' : [ 0x2c, ['unsigned long']],
'SystemSpaceHashEntries' : [ 0x30, ['unsigned long']],
'SystemSpaceHashKey' : [ 0x34, ['unsigned long']],
'SystemSpaceBitMap' : [ 0x38, ['pointer', ['_RTL_BITMAP']]],
} ],
'_GENERIC_MAPPING' : [ 0x10, {
'GenericRead' : [ 0x0, ['unsigned long']],
'GenericWrite' : [ 0x4, ['unsigned long']],
'GenericExecute' : [ 0x8, ['unsigned long']],
'GenericAll' : [ 0xc, ['unsigned long']],
} ],
'_KiIoAccessMap' : [ 0x2024, {
'DirectionMap' : [ 0x0, ['array', 32, ['unsigned char']]],
'IoMap' : [ 0x20, ['array', 8196, ['unsigned char']]],
} ],
'_DBGKD_RESTORE_BREAKPOINT' : [ 0x4, {
'BreakPointHandle' : [ 0x0, ['unsigned long']],
} ],
'_EXCEPTION_REGISTRATION_RECORD' : [ 0x8, {
'Next' : [ 0x0, ['pointer', ['_EXCEPTION_REGISTRATION_RECORD']]],
'Handler' : [ 0x4, ['pointer', ['void']]],
} ],
'_POOL_TRACKER_BIG_PAGES' : [ 0xc, {
'Va' : [ 0x0, ['pointer', ['void']]],
'Key' : [ 0x4, ['unsigned long']],
'NumberOfPages' : [ 0x8, ['unsigned long']],
} ],
'_PROCESS_WS_WATCH_INFORMATION' : [ 0x8, {
'FaultingPc' : [ 0x0, ['pointer', ['void']]],
'FaultingVa' : [ 0x4, ['pointer', ['void']]],
} ],
'_MMPTE_SUBSECTION' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'SubsectionAddressLow' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type='unsigned long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'SubsectionAddressHigh' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 31, native_type='unsigned long')]],
'WhichPool' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
} ],
'_VI_DEADLOCK_NODE' : [ 0x68, {
'Parent' : [ 0x0, ['pointer', ['_VI_DEADLOCK_NODE']]],
'ChildrenList' : [ 0x4, ['_LIST_ENTRY']],
'SiblingsList' : [ 0xc, ['_LIST_ENTRY']],
'ResourceList' : [ 0x14, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x14, ['_LIST_ENTRY']],
'Root' : [ 0x1c, ['pointer', ['_VI_DEADLOCK_RESOURCE']]],
'ThreadEntry' : [ 0x20, ['pointer', ['_VI_DEADLOCK_THREAD']]],
'Active' : [ 0x24, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'OnlyTryAcquireUsed' : [ 0x24, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'SequenceNumber' : [ 0x24, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
'StackTrace' : [ 0x28, ['array', 8, ['pointer', ['void']]]],
'ParentStackTrace' : [ 0x48, ['array', 8, ['pointer', ['void']]]],
} ],
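# _CONTEXT is the x86 register context used for exception dispatch and
# debugging; which groups of fields are valid (control, integer, segment,
# floating point, extended) is selected by ContextFlags.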
'_CONTEXT' : [ 0x2cc, {
'ContextFlags' : [ 0x0, ['unsigned long']],
'Dr0' : [ 0x4, ['unsigned long']],
'Dr1' : [ 0x8, ['unsigned long']],
'Dr2' : [ 0xc, ['unsigned long']],
'Dr3' : [ 0x10, ['unsigned long']],
'Dr6' : [ 0x14, ['unsigned long']],
'Dr7' : [ 0x18, ['unsigned long']],
'FloatSave' : [ 0x1c, ['_FLOATING_SAVE_AREA']],
'SegGs' : [ 0x8c, ['unsigned long']],
'SegFs' : [ 0x90, ['unsigned long']],
'SegEs' : [ 0x94, ['unsigned long']],
'SegDs' : [ 0x98, ['unsigned long']],
'Edi' : [ 0x9c, ['unsigned long']],
'Esi' : [ 0xa0, ['unsigned long']],
'Ebx' : [ 0xa4, ['unsigned long']],
'Edx' : [ 0xa8, ['unsigned long']],
'Ecx' : [ 0xac, ['unsigned long']],
'Eax' : [ 0xb0, ['unsigned long']],
'Ebp' : [ 0xb4, ['unsigned long']],
'Eip' : [ 0xb8, ['unsigned long']],
'SegCs' : [ 0xbc, ['unsigned long']],
'EFlags' : [ 0xc0, ['unsigned long']],
'Esp' : [ 0xc4, ['unsigned long']],
'SegSs' : [ 0xc8, ['unsigned long']],
'ExtendedRegisters' : [ 0xcc, ['array', 512, ['unsigned char']]],
} ],
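# _IMAGE_OPTIONAL_HEADER is the PE32 optional header (Magic 0x10b);
# DataDirectory holds the 16 standard RVA/size pairs (exports, imports,
# relocations, and so on).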
'_IMAGE_OPTIONAL_HEADER' : [ 0xe0, {
'Magic' : [ 0x0, ['unsigned short']],
'MajorLinkerVersion' : [ 0x2, ['unsigned char']],
'MinorLinkerVersion' : [ 0x3, ['unsigned char']],
'SizeOfCode' : [ 0x4, ['unsigned long']],
'SizeOfInitializedData' : [ 0x8, ['unsigned long']],
'SizeOfUninitializedData' : [ 0xc, ['unsigned long']],
'AddressOfEntryPoint' : [ 0x10, ['unsigned long']],
'BaseOfCode' : [ 0x14, ['unsigned long']],
'BaseOfData' : [ 0x18, ['unsigned long']],
'ImageBase' : [ 0x1c, ['unsigned long']],
'SectionAlignment' : [ 0x20, ['unsigned long']],
'FileAlignment' : [ 0x24, ['unsigned long']],
'MajorOperatingSystemVersion' : [ 0x28, ['unsigned short']],
'MinorOperatingSystemVersion' : [ 0x2a, ['unsigned short']],
'MajorImageVersion' : [ 0x2c, ['unsigned short']],
'MinorImageVersion' : [ 0x2e, ['unsigned short']],
'MajorSubsystemVersion' : [ 0x30, ['unsigned short']],
'MinorSubsystemVersion' : [ 0x32, ['unsigned short']],
'Win32VersionValue' : [ 0x34, ['unsigned long']],
'SizeOfImage' : [ 0x38, ['unsigned long']],
'SizeOfHeaders' : [ 0x3c, ['unsigned long']],
'CheckSum' : [ 0x40, ['unsigned long']],
'Subsystem' : [ 0x44, ['unsigned short']],
'DllCharacteristics' : [ 0x46, ['unsigned short']],
'SizeOfStackReserve' : [ 0x48, ['unsigned long']],
'SizeOfStackCommit' : [ 0x4c, ['unsigned long']],
'SizeOfHeapReserve' : [ 0x50, ['unsigned long']],
'SizeOfHeapCommit' : [ 0x54, ['unsigned long']],
'LoaderFlags' : [ 0x58, ['unsigned long']],
'NumberOfRvaAndSizes' : [ 0x5c, ['unsigned long']],
'DataDirectory' : [ 0x60, ['array', 16, ['_IMAGE_DATA_DIRECTORY']]],
} ],
'_DBGKD_QUERY_SPECIAL_CALLS' : [ 0x4, {
'NumberOfSpecialCalls' : [ 0x0, ['unsigned long']],
} ],
'CMP_OFFSET_ARRAY' : [ 0xc, {
'FileOffset' : [ 0x0, ['unsigned long']],
'DataBuffer' : [ 0x4, ['pointer', ['void']]],
'DataLength' : [ 0x8, ['unsigned long']],
} ],
'_PCI_PDO_EXTENSION' : [ 0xc8, {
'Next' : [ 0x0, ['pointer', ['_PCI_PDO_EXTENSION']]],
'ExtensionType' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {1768116272: 'PciPdoExtensionType', 1768116273: 'PciFdoExtensionType', 1768116274: 'PciArb_Io', 1768116275: 'PciArb_Memory', 1768116276: 'PciArb_Interrupt', 1768116277: 'PciArb_BusNumber', 1768116278: 'PciTrans_Interrupt', 1768116279: 'PciInterface_BusHandler', 1768116280: 'PciInterface_IntRouteHandler', 1768116281: 'PciInterface_PciCb', 1768116282: 'PciInterface_LegacyDeviceDetection', 1768116283: 'PciInterface_PmeHandler', 1768116284: 'PciInterface_DevicePresent', 1768116285: 'PciInterface_NativeIde', 1768116286: 'PciInterface_AgpTarget'})]],
'IrpDispatchTable' : [ 0x8, ['pointer', ['_PCI_MJ_DISPATCH_TABLE']]],
'DeviceState' : [ 0xc, ['unsigned char']],
'TentativeNextState' : [ 0xd, ['unsigned char']],
'SecondaryExtLock' : [ 0x10, ['_KEVENT']],
'Slot' : [ 0x20, ['_PCI_SLOT_NUMBER']],
'PhysicalDeviceObject' : [ 0x24, ['pointer', ['_DEVICE_OBJECT']]],
'ParentFdoExtension' : [ 0x28, ['pointer', ['_PCI_FDO_EXTENSION']]],
'SecondaryExtension' : [ 0x2c, ['_SINGLE_LIST_ENTRY']],
'BusInterfaceReferenceCount' : [ 0x30, ['unsigned long']],
'AgpInterfaceReferenceCount' : [ 0x34, ['unsigned long']],
'VendorId' : [ 0x38, ['unsigned short']],
'DeviceId' : [ 0x3a, ['unsigned short']],
'SubsystemVendorId' : [ 0x3c, ['unsigned short']],
'SubsystemId' : [ 0x3e, ['unsigned short']],
'RevisionId' : [ 0x40, ['unsigned char']],
'ProgIf' : [ 0x41, ['unsigned char']],
'SubClass' : [ 0x42, ['unsigned char']],
'BaseClass' : [ 0x43, ['unsigned char']],
'AdditionalResourceCount' : [ 0x44, ['unsigned char']],
'AdjustedInterruptLine' : [ 0x45, ['unsigned char']],
'InterruptPin' : [ 0x46, ['unsigned char']],
'RawInterruptLine' : [ 0x47, ['unsigned char']],
'CapabilitiesPtr' : [ 0x48, ['unsigned char']],
'SavedLatencyTimer' : [ 0x49, ['unsigned char']],
'SavedCacheLineSize' : [ 0x4a, ['unsigned char']],
'HeaderType' : [ 0x4b, ['unsigned char']],
'NotPresent' : [ 0x4c, ['unsigned char']],
'ReportedMissing' : [ 0x4d, ['unsigned char']],
'ExpectedWritebackFailure' : [ 0x4e, ['unsigned char']],
'NoTouchPmeEnable' : [ 0x4f, ['unsigned char']],
'LegacyDriver' : [ 0x50, ['unsigned char']],
'UpdateHardware' : [ 0x51, ['unsigned char']],
'MovedDevice' : [ 0x52, ['unsigned char']],
'DisablePowerDown' : [ 0x53, ['unsigned char']],
'NeedsHotPlugConfiguration' : [ 0x54, ['unsigned char']],
'SwitchedIDEToNativeMode' : [ 0x55, ['unsigned char']],
'BIOSAllowsIDESwitchToNativeMode' : [ 0x56, ['unsigned char']],
'IoSpaceUnderNativeIdeControl' : [ 0x57, ['unsigned char']],
'OnDebugPath' : [ 0x58, ['unsigned char']],
'PowerState' : [ 0x5c, ['PCI_POWER_STATE']],
'Dependent' : [ 0x9c, ['PCI_HEADER_TYPE_DEPENDENT']],
'HackFlags' : [ 0xa0, ['unsigned long long']],
'Resources' : [ 0xa8, ['pointer', ['PCI_FUNCTION_RESOURCES']]],
'BridgeFdoExtension' : [ 0xac, ['pointer', ['_PCI_FDO_EXTENSION']]],
'NextBridge' : [ 0xb0, ['pointer', ['_PCI_PDO_EXTENSION']]],
'NextHashEntry' : [ 0xb4, ['pointer', ['_PCI_PDO_EXTENSION']]],
'Lock' : [ 0xb8, ['_PCI_LOCK']],
'PowerCapabilities' : [ 0xc0, ['_PCI_PMC']],
'TargetAgpCapabilityId' : [ 0xc2, ['unsigned char']],
'CommandEnables' : [ 0xc4, ['unsigned short']],
'InitialCommand' : [ 0xc6, ['unsigned short']],
} ],
'_HMAP_DIRECTORY' : [ 0x1000, {
'Directory' : [ 0x0, ['array', 1024, ['pointer', ['_HMAP_TABLE']]]],
} ],
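# _OBJECT_HEADER precedes every executive object: Body (0x18) is where the
# object itself begins, and the *InfoOffset bytes locate the optional
# headers stored at negative offsets before this header.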
'_OBJECT_HEADER' : [ 0x20, {
'PointerCount' : [ 0x0, ['long']],
'HandleCount' : [ 0x4, ['long']],
'NextToFree' : [ 0x4, ['pointer', ['void']]],
'Type' : [ 0x8, ['pointer', ['_OBJECT_TYPE']]],
'NameInfoOffset' : [ 0xc, ['unsigned char']],
'HandleInfoOffset' : [ 0xd, ['unsigned char']],
'QuotaInfoOffset' : [ 0xe, ['unsigned char']],
'Flags' : [ 0xf, ['unsigned char']],
'ObjectCreateInfo' : [ 0x10, ['pointer', ['_OBJECT_CREATE_INFORMATION']]],
'QuotaBlockCharged' : [ 0x10, ['pointer', ['void']]],
'SecurityDescriptor' : [ 0x14, ['pointer', ['void']]],
'Body' : [ 0x18, ['_QUAD']],
} ],
'_QUAD' : [ 0x8, {
'DoNotUseThisField' : [ 0x0, ['double']],
} ],
'_SECURITY_DESCRIPTOR' : [ 0x14, {
'Revision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'Control' : [ 0x2, ['unsigned short']],
'Owner' : [ 0x4, ['pointer', ['void']]],
'Group' : [ 0x8, ['pointer', ['void']]],
'Sacl' : [ 0xc, ['pointer', ['_ACL']]],
'Dacl' : [ 0x10, ['pointer', ['_ACL']]],
} ],
'__unnamed_150f' : [ 0x8, {
'UserData' : [ 0x0, ['pointer', ['void']]],
'Owner' : [ 0x4, ['pointer', ['void']]],
} ],
'__unnamed_1511' : [ 0x8, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
} ],
'_RTLP_RANGE_LIST_ENTRY' : [ 0x28, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'Allocated' : [ 0x10, ['__unnamed_150f']],
'Merged' : [ 0x10, ['__unnamed_1511']],
'Attributes' : [ 0x18, ['unsigned char']],
'PublicFlags' : [ 0x19, ['unsigned char']],
'PrivateFlags' : [ 0x1a, ['unsigned short']],
'ListEntry' : [ 0x1c, ['_LIST_ENTRY']],
} ],
'_KAPC_STATE' : [ 0x18, {
'ApcListHead' : [ 0x0, ['array', 2, ['_LIST_ENTRY']]],
'Process' : [ 0x10, ['pointer', ['_KPROCESS']]],
'KernelApcInProgress' : [ 0x14, ['unsigned char']],
'KernelApcPending' : [ 0x15, ['unsigned char']],
'UserApcPending' : [ 0x16, ['unsigned char']],
} ],
'_OBJECT_HEADER_CREATOR_INFO' : [ 0x10, {
'TypeList' : [ 0x0, ['_LIST_ENTRY']],
'CreatorUniqueProcess' : [ 0x8, ['pointer', ['void']]],
'CreatorBackTraceIndex' : [ 0xc, ['unsigned short']],
'Reserved' : [ 0xe, ['unsigned short']],
} ],
'_HEAP_STOP_ON_VALUES' : [ 0x18, {
'AllocAddress' : [ 0x0, ['unsigned long']],
'AllocTag' : [ 0x4, ['_HEAP_STOP_ON_TAG']],
'ReAllocAddress' : [ 0x8, ['unsigned long']],
'ReAllocTag' : [ 0xc, ['_HEAP_STOP_ON_TAG']],
'FreeAddress' : [ 0x10, ['unsigned long']],
'FreeTag' : [ 0x14, ['_HEAP_STOP_ON_TAG']],
} ],
'_DEVICE_RELATIONS' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'Objects' : [ 0x4, ['array', 1, ['pointer', ['_DEVICE_OBJECT']]]],
} ],
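  # _KPROCESS is the kernel/scheduler portion embedded at the start of
  # _EPROCESS; on x86, DirectoryTableBase[0] holds the CR3 page-directory
  # base for the process address space.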
'_KPROCESS' : [ 0x6c, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'ProfileListHead' : [ 0x10, ['_LIST_ENTRY']],
'DirectoryTableBase' : [ 0x18, ['array', 2, ['unsigned long']]],
'LdtDescriptor' : [ 0x20, ['_KGDTENTRY']],
'Int21Descriptor' : [ 0x28, ['_KIDTENTRY']],
'IopmOffset' : [ 0x30, ['unsigned short']],
'Iopl' : [ 0x32, ['unsigned char']],
'Unused' : [ 0x33, ['unsigned char']],
'ActiveProcessors' : [ 0x34, ['unsigned long']],
'KernelTime' : [ 0x38, ['unsigned long']],
'UserTime' : [ 0x3c, ['unsigned long']],
'ReadyListHead' : [ 0x40, ['_LIST_ENTRY']],
'SwapListEntry' : [ 0x48, ['_SINGLE_LIST_ENTRY']],
'VdmTrapcHandler' : [ 0x4c, ['pointer', ['void']]],
'ThreadListHead' : [ 0x50, ['_LIST_ENTRY']],
'ProcessLock' : [ 0x58, ['unsigned long']],
'Affinity' : [ 0x5c, ['unsigned long']],
'StackCount' : [ 0x60, ['unsigned short']],
'BasePriority' : [ 0x62, ['unsigned char']],
'ThreadQuantum' : [ 0x63, ['unsigned char']],
'AutoAlignment' : [ 0x64, ['unsigned char']],
'State' : [ 0x65, ['unsigned char']],
'ThreadSeed' : [ 0x66, ['unsigned char']],
'DisableBoost' : [ 0x67, ['unsigned char']],
'PowerState' : [ 0x68, ['unsigned char']],
'DisableQuantum' : [ 0x69, ['unsigned char']],
'IdealNode' : [ 0x6a, ['unsigned char']],
'Flags' : [ 0x6b, ['_KEXECUTE_OPTIONS']],
'ExecuteOptions' : [ 0x6b, ['unsigned char']],
} ],
'_HEAP_PSEUDO_TAG_ENTRY' : [ 0xc, {
'Allocs' : [ 0x0, ['unsigned long']],
'Frees' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long']],
} ],
'_IO_RESOURCE_LIST' : [ 0x28, {
'Version' : [ 0x0, ['unsigned short']],
'Revision' : [ 0x2, ['unsigned short']],
'Count' : [ 0x4, ['unsigned long']],
'Descriptors' : [ 0x8, ['array', 1, ['_IO_RESOURCE_DESCRIPTOR']]],
} ],
'_MMBANKED_SECTION' : [ 0x20, {
'BasePhysicalPage' : [ 0x0, ['unsigned long']],
'BasedPte' : [ 0x4, ['pointer', ['_MMPTE']]],
'BankSize' : [ 0x8, ['unsigned long']],
'BankShift' : [ 0xc, ['unsigned long']],
'BankedRoutine' : [ 0x10, ['pointer', ['void']]],
'Context' : [ 0x14, ['pointer', ['void']]],
'CurrentMappedPte' : [ 0x18, ['pointer', ['_MMPTE']]],
'BankTemplate' : [ 0x1c, ['array', 1, ['_MMPTE']]],
} ],
'_RTL_CRITICAL_SECTION' : [ 0x18, {
'DebugInfo' : [ 0x0, ['pointer', ['_RTL_CRITICAL_SECTION_DEBUG']]],
'LockCount' : [ 0x4, ['long']],
'RecursionCount' : [ 0x8, ['long']],
'OwningThread' : [ 0xc, ['pointer', ['void']]],
'LockSemaphore' : [ 0x10, ['pointer', ['void']]],
'SpinCount' : [ 0x14, ['unsigned long']],
} ],
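  # _KTRAP_FRAME is the x86 trap frame: the register state saved on entry to
  # the kernel for interrupts, exceptions, and system calls. The Dbg*/Temp*
  # fields are debugger scratch copies, and the V86* segment slots are only
  # meaningful for traps taken out of virtual-8086 mode.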
'_KTRAP_FRAME' : [ 0x8c, {
'DbgEbp' : [ 0x0, ['unsigned long']],
'DbgEip' : [ 0x4, ['unsigned long']],
'DbgArgMark' : [ 0x8, ['unsigned long']],
'DbgArgPointer' : [ 0xc, ['unsigned long']],
'TempSegCs' : [ 0x10, ['unsigned long']],
'TempEsp' : [ 0x14, ['unsigned long']],
'Dr0' : [ 0x18, ['unsigned long']],
'Dr1' : [ 0x1c, ['unsigned long']],
'Dr2' : [ 0x20, ['unsigned long']],
'Dr3' : [ 0x24, ['unsigned long']],
'Dr6' : [ 0x28, ['unsigned long']],
'Dr7' : [ 0x2c, ['unsigned long']],
'SegGs' : [ 0x30, ['unsigned long']],
'SegEs' : [ 0x34, ['unsigned long']],
'SegDs' : [ 0x38, ['unsigned long']],
'Edx' : [ 0x3c, ['unsigned long']],
'Ecx' : [ 0x40, ['unsigned long']],
'Eax' : [ 0x44, ['unsigned long']],
'PreviousPreviousMode' : [ 0x48, ['unsigned long']],
'ExceptionList' : [ 0x4c, ['pointer', ['_EXCEPTION_REGISTRATION_RECORD']]],
'SegFs' : [ 0x50, ['unsigned long']],
'Edi' : [ 0x54, ['unsigned long']],
'Esi' : [ 0x58, ['unsigned long']],
'Ebx' : [ 0x5c, ['unsigned long']],
'Ebp' : [ 0x60, ['unsigned long']],
'ErrCode' : [ 0x64, ['unsigned long']],
'Eip' : [ 0x68, ['unsigned long']],
'SegCs' : [ 0x6c, ['unsigned long']],
'EFlags' : [ 0x70, ['unsigned long']],
'HardwareEsp' : [ 0x74, ['unsigned long']],
'HardwareSegSs' : [ 0x78, ['unsigned long']],
'V86Es' : [ 0x7c, ['unsigned long']],
'V86Ds' : [ 0x80, ['unsigned long']],
'V86Fs' : [ 0x84, ['unsigned long']],
'V86Gs' : [ 0x88, ['unsigned long']],
} ],
'__unnamed_153a' : [ 0x4, {
'BaseMid' : [ 0x0, ['unsigned char']],
'Flags1' : [ 0x1, ['unsigned char']],
'Flags2' : [ 0x2, ['unsigned char']],
'BaseHi' : [ 0x3, ['unsigned char']],
} ],
'__unnamed_1541' : [ 0x4, {
'BaseMid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'Type' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 13, native_type='unsigned long')]],
'Dpl' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 15, native_type='unsigned long')]],
'Pres' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'LimitHi' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'Sys' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'Reserved_0' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'Default_Big' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'Granularity' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'BaseHi' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_1543' : [ 0x4, {
'Bytes' : [ 0x0, ['__unnamed_153a']],
'Bits' : [ 0x0, ['__unnamed_1541']],
} ],
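  # _KGDTENTRY below is an x86 GDT segment descriptor; the HighWord union
  # exposes the high dword both as raw bytes and as decoded bitfields
  # (type, DPL, present, granularity, base/limit high bits).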
'_KGDTENTRY' : [ 0x8, {
'LimitLow' : [ 0x0, ['unsigned short']],
'BaseLow' : [ 0x2, ['unsigned short']],
'HighWord' : [ 0x4, ['__unnamed_1543']],
} ],
'__unnamed_154d' : [ 0x5, {
'Acquired' : [ 0x0, ['unsigned char']],
'CacheLineSize' : [ 0x1, ['unsigned char']],
'LatencyTimer' : [ 0x2, ['unsigned char']],
'EnablePERR' : [ 0x3, ['unsigned char']],
'EnableSERR' : [ 0x4, ['unsigned char']],
} ],
'_PCI_FDO_EXTENSION' : [ 0xc0, {
'List' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'ExtensionType' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {1768116272: 'PciPdoExtensionType', 1768116273: 'PciFdoExtensionType', 1768116274: 'PciArb_Io', 1768116275: 'PciArb_Memory', 1768116276: 'PciArb_Interrupt', 1768116277: 'PciArb_BusNumber', 1768116278: 'PciTrans_Interrupt', 1768116279: 'PciInterface_BusHandler', 1768116280: 'PciInterface_IntRouteHandler', 1768116281: 'PciInterface_PciCb', 1768116282: 'PciInterface_LegacyDeviceDetection', 1768116283: 'PciInterface_PmeHandler', 1768116284: 'PciInterface_DevicePresent', 1768116285: 'PciInterface_NativeIde', 1768116286: 'PciInterface_AgpTarget'})]],
'IrpDispatchTable' : [ 0x8, ['pointer', ['_PCI_MJ_DISPATCH_TABLE']]],
'DeviceState' : [ 0xc, ['unsigned char']],
'TentativeNextState' : [ 0xd, ['unsigned char']],
'SecondaryExtLock' : [ 0x10, ['_KEVENT']],
'PhysicalDeviceObject' : [ 0x20, ['pointer', ['_DEVICE_OBJECT']]],
'FunctionalDeviceObject' : [ 0x24, ['pointer', ['_DEVICE_OBJECT']]],
'AttachedDeviceObject' : [ 0x28, ['pointer', ['_DEVICE_OBJECT']]],
'ChildListLock' : [ 0x2c, ['_KEVENT']],
'ChildPdoList' : [ 0x3c, ['pointer', ['_PCI_PDO_EXTENSION']]],
'BusRootFdoExtension' : [ 0x40, ['pointer', ['_PCI_FDO_EXTENSION']]],
'ParentFdoExtension' : [ 0x44, ['pointer', ['_PCI_FDO_EXTENSION']]],
'ChildBridgePdoList' : [ 0x48, ['pointer', ['_PCI_PDO_EXTENSION']]],
'PciBusInterface' : [ 0x4c, ['pointer', ['_PCI_BUS_INTERFACE_STANDARD']]],
'MaxSubordinateBus' : [ 0x50, ['unsigned char']],
'BusHandler' : [ 0x54, ['pointer', ['_BUS_HANDLER']]],
'BaseBus' : [ 0x58, ['unsigned char']],
'Fake' : [ 0x59, ['unsigned char']],
'ChildDelete' : [ 0x5a, ['unsigned char']],
'Scanned' : [ 0x5b, ['unsigned char']],
'ArbitersInitialized' : [ 0x5c, ['unsigned char']],
'BrokenVideoHackApplied' : [ 0x5d, ['unsigned char']],
'Hibernated' : [ 0x5e, ['unsigned char']],
'PowerState' : [ 0x60, ['PCI_POWER_STATE']],
'SecondaryExtension' : [ 0xa0, ['_SINGLE_LIST_ENTRY']],
'ChildWaitWakeCount' : [ 0xa4, ['unsigned long']],
'PreservedConfig' : [ 0xa8, ['pointer', ['_PCI_COMMON_CONFIG']]],
'Lock' : [ 0xac, ['_PCI_LOCK']],
'HotPlugParameters' : [ 0xb4, ['__unnamed_154d']],
'BusHackFlags' : [ 0xbc, ['unsigned long']],
} ],
'__unnamed_1551' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1553' : [ 0xc, {
'Level' : [ 0x0, ['unsigned long']],
'Vector' : [ 0x4, ['unsigned long']],
'Affinity' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1555' : [ 0xc, {
'Channel' : [ 0x0, ['unsigned long']],
'Port' : [ 0x4, ['unsigned long']],
'Reserved1' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1557' : [ 0xc, {
'Data' : [ 0x0, ['array', 3, ['unsigned long']]],
} ],
'__unnamed_1559' : [ 0xc, {
'Start' : [ 0x0, ['unsigned long']],
'Length' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_155b' : [ 0xc, {
'DataSize' : [ 0x0, ['unsigned long']],
'Reserved1' : [ 0x4, ['unsigned long']],
'Reserved2' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_155d' : [ 0xc, {
'Generic' : [ 0x0, ['__unnamed_1551']],
'Port' : [ 0x0, ['__unnamed_1551']],
'Interrupt' : [ 0x0, ['__unnamed_1553']],
'Memory' : [ 0x0, ['__unnamed_1551']],
'Dma' : [ 0x0, ['__unnamed_1555']],
'DevicePrivate' : [ 0x0, ['__unnamed_1557']],
'BusNumber' : [ 0x0, ['__unnamed_1559']],
'DeviceSpecificData' : [ 0x0, ['__unnamed_155b']],
} ],
'_CM_PARTIAL_RESOURCE_DESCRIPTOR' : [ 0x10, {
'Type' : [ 0x0, ['unsigned char']],
'ShareDisposition' : [ 0x1, ['unsigned char']],
'Flags' : [ 0x2, ['unsigned short']],
'u' : [ 0x4, ['__unnamed_155d']],
} ],
'_SYSPTES_HEADER' : [ 0xc, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'Count' : [ 0x8, ['unsigned long']],
} ],
'_WAIT_CONTEXT_BLOCK' : [ 0x28, {
'WaitQueueEntry' : [ 0x0, ['_KDEVICE_QUEUE_ENTRY']],
'DeviceRoutine' : [ 0x10, ['pointer', ['void']]],
'DeviceContext' : [ 0x14, ['pointer', ['void']]],
'NumberOfMapRegisters' : [ 0x18, ['unsigned long']],
'DeviceObject' : [ 0x1c, ['pointer', ['void']]],
'CurrentIrp' : [ 0x20, ['pointer', ['void']]],
'BufferChainingDpc' : [ 0x24, ['pointer', ['_KDPC']]],
} ],
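  # _CM_KEY_CONTROL_BLOCK is the configuration manager's in-memory cache of a
  # registry key. KeyHash and ConvKey overlap at 0x8 because ConvKey is the
  # first member of the embedded _CM_KEY_HASH; the repeated 0x2c and 0x30
  # offsets are likewise union views.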
'_CM_KEY_CONTROL_BLOCK' : [ 0x50, {
'RefCount' : [ 0x0, ['unsigned long']],
'ExtFlags' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'PrivateAlloc' : [ 0x4, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Delete' : [ 0x4, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'DelayedCloseIndex' : [ 0x4, ['BitField', dict(start_bit = 10, end_bit = 22, native_type='unsigned long')]],
'TotalLevels' : [ 0x4, ['BitField', dict(start_bit = 22, end_bit = 32, native_type='unsigned long')]],
'KeyHash' : [ 0x8, ['_CM_KEY_HASH']],
'ConvKey' : [ 0x8, ['unsigned long']],
'NextHash' : [ 0xc, ['pointer', ['_CM_KEY_HASH']]],
'KeyHive' : [ 0x10, ['pointer', ['_HHIVE']]],
'KeyCell' : [ 0x14, ['unsigned long']],
'ParentKcb' : [ 0x18, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
'NameBlock' : [ 0x1c, ['pointer', ['_CM_NAME_CONTROL_BLOCK']]],
'CachedSecurity' : [ 0x20, ['pointer', ['_CM_KEY_SECURITY_CACHE']]],
'ValueCache' : [ 0x24, ['_CACHED_CHILD_LIST']],
'IndexHint' : [ 0x2c, ['pointer', ['_CM_INDEX_HINT_BLOCK']]],
'HashKey' : [ 0x2c, ['unsigned long']],
'SubKeyCount' : [ 0x2c, ['unsigned long']],
'KeyBodyListHead' : [ 0x30, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x30, ['_LIST_ENTRY']],
'KcbLastWriteTime' : [ 0x38, ['_LARGE_INTEGER']],
'KcbMaxNameLen' : [ 0x40, ['unsigned short']],
'KcbMaxValueNameLen' : [ 0x42, ['unsigned short']],
'KcbMaxValueDataLen' : [ 0x44, ['unsigned long']],
'KcbUserFlags' : [ 0x48, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long')]],
'KcbVirtControlFlags' : [ 0x48, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned long')]],
'KcbDebug' : [ 0x48, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long')]],
'Flags' : [ 0x48, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
} ],
'_KDPC' : [ 0x20, {
'Type' : [ 0x0, ['short']],
'Number' : [ 0x2, ['unsigned char']],
'Importance' : [ 0x3, ['unsigned char']],
'DpcListEntry' : [ 0x4, ['_LIST_ENTRY']],
'DeferredRoutine' : [ 0xc, ['pointer', ['void']]],
'DeferredContext' : [ 0x10, ['pointer', ['void']]],
'SystemArgument1' : [ 0x14, ['pointer', ['void']]],
'SystemArgument2' : [ 0x18, ['pointer', ['void']]],
'Lock' : [ 0x1c, ['pointer', ['unsigned long']]],
} ],
'_PCI_BUS_INTERFACE_STANDARD' : [ 0x20, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x4, ['pointer', ['void']]],
'InterfaceReference' : [ 0x8, ['pointer', ['void']]],
'InterfaceDereference' : [ 0xc, ['pointer', ['void']]],
'ReadConfig' : [ 0x10, ['pointer', ['void']]],
'WriteConfig' : [ 0x14, ['pointer', ['void']]],
'PinToLine' : [ 0x18, ['pointer', ['void']]],
'LineToPin' : [ 0x1c, ['pointer', ['void']]],
} ],
'_WORK_QUEUE_ITEM' : [ 0x10, {
'List' : [ 0x0, ['_LIST_ENTRY']],
'WorkerRoutine' : [ 0x8, ['pointer', ['void']]],
'Parameter' : [ 0xc, ['pointer', ['void']]],
} ],
'_PI_RESOURCE_ARBITER_ENTRY' : [ 0x38, {
'DeviceArbiterList' : [ 0x0, ['_LIST_ENTRY']],
'ResourceType' : [ 0x8, ['unsigned char']],
'ArbiterInterface' : [ 0xc, ['pointer', ['_ARBITER_INTERFACE']]],
'Level' : [ 0x10, ['unsigned long']],
'ResourceList' : [ 0x14, ['_LIST_ENTRY']],
'BestResourceList' : [ 0x1c, ['_LIST_ENTRY']],
'BestConfig' : [ 0x24, ['_LIST_ENTRY']],
'ActiveArbiterList' : [ 0x2c, ['_LIST_ENTRY']],
'State' : [ 0x34, ['unsigned char']],
'ResourcesChanged' : [ 0x35, ['unsigned char']],
} ],
'_KTIMER' : [ 0x28, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'DueTime' : [ 0x10, ['_ULARGE_INTEGER']],
'TimerListEntry' : [ 0x18, ['_LIST_ENTRY']],
'Dpc' : [ 0x20, ['pointer', ['_KDPC']]],
'Period' : [ 0x24, ['long']],
} ],
'_CM_KEY_HASH' : [ 0x10, {
'ConvKey' : [ 0x0, ['unsigned long']],
'NextHash' : [ 0x4, ['pointer', ['_CM_KEY_HASH']]],
'KeyHive' : [ 0x8, ['pointer', ['_HHIVE']]],
'KeyCell' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_159b' : [ 0x4, {
'MasterIrp' : [ 0x0, ['pointer', ['_IRP']]],
'IrpCount' : [ 0x0, ['long']],
'SystemBuffer' : [ 0x0, ['pointer', ['void']]],
} ],
'__unnamed_15a2' : [ 0x8, {
'UserApcRoutine' : [ 0x0, ['pointer', ['void']]],
'UserApcContext' : [ 0x4, ['pointer', ['void']]],
} ],
'__unnamed_15a4' : [ 0x8, {
'AsynchronousParameters' : [ 0x0, ['__unnamed_15a2']],
'AllocationSize' : [ 0x0, ['_LARGE_INTEGER']],
} ],
'__unnamed_15a9' : [ 0x28, {
'DeviceQueueEntry' : [ 0x0, ['_KDEVICE_QUEUE_ENTRY']],
'DriverContext' : [ 0x0, ['array', 4, ['pointer', ['void']]]],
'Thread' : [ 0x10, ['pointer', ['_ETHREAD']]],
'AuxiliaryBuffer' : [ 0x14, ['pointer', ['unsigned char']]],
'ListEntry' : [ 0x18, ['_LIST_ENTRY']],
'CurrentStackLocation' : [ 0x20, ['pointer', ['_IO_STACK_LOCATION']]],
'PacketType' : [ 0x20, ['unsigned long']],
'OriginalFileObject' : [ 0x24, ['pointer', ['_FILE_OBJECT']]],
} ],
'__unnamed_15ab' : [ 0x30, {
'Overlay' : [ 0x0, ['__unnamed_15a9']],
'Apc' : [ 0x0, ['_KAPC']],
'CompletionKey' : [ 0x0, ['pointer', ['void']]],
} ],
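  # _IRP is the I/O request packet. AssociatedIrp, Overlay, and Tail are
  # unions (note the repeated offsets), and the fixed 0x70-byte header is
  # followed by StackCount _IO_STACK_LOCATION entries, indexed via
  # CurrentLocation.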
'_IRP' : [ 0x70, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'MdlAddress' : [ 0x4, ['pointer', ['_MDL']]],
'Flags' : [ 0x8, ['unsigned long']],
'AssociatedIrp' : [ 0xc, ['__unnamed_159b']],
'ThreadListEntry' : [ 0x10, ['_LIST_ENTRY']],
'IoStatus' : [ 0x18, ['_IO_STATUS_BLOCK']],
'RequestorMode' : [ 0x20, ['unsigned char']],
'PendingReturned' : [ 0x21, ['unsigned char']],
'StackCount' : [ 0x22, ['unsigned char']],
'CurrentLocation' : [ 0x23, ['unsigned char']],
'Cancel' : [ 0x24, ['unsigned char']],
'CancelIrql' : [ 0x25, ['unsigned char']],
'ApcEnvironment' : [ 0x26, ['unsigned char']],
'AllocationFlags' : [ 0x27, ['unsigned char']],
'UserIosb' : [ 0x28, ['pointer', ['_IO_STATUS_BLOCK']]],
'UserEvent' : [ 0x2c, ['pointer', ['_KEVENT']]],
'Overlay' : [ 0x30, ['__unnamed_15a4']],
'CancelRoutine' : [ 0x38, ['pointer', ['void']]],
'UserBuffer' : [ 0x3c, ['pointer', ['void']]],
'Tail' : [ 0x40, ['__unnamed_15ab']],
} ],
'_PCI_LOCK' : [ 0x8, {
'Atom' : [ 0x0, ['unsigned long']],
'OldIrql' : [ 0x4, ['unsigned char']],
} ],
'_CM_KEY_SECURITY_CACHE_ENTRY' : [ 0x8, {
'Cell' : [ 0x0, ['unsigned long']],
'CachedSecurity' : [ 0x4, ['pointer', ['_CM_KEY_SECURITY_CACHE']]],
} ],
'__unnamed_15b4' : [ 0x4, {
'PhysicalAddress' : [ 0x0, ['unsigned long']],
'VirtualSize' : [ 0x0, ['unsigned long']],
} ],
'_IMAGE_SECTION_HEADER' : [ 0x28, {
'Name' : [ 0x0, ['array', 8, ['unsigned char']]],
'Misc' : [ 0x8, ['__unnamed_15b4']],
'VirtualAddress' : [ 0xc, ['unsigned long']],
'SizeOfRawData' : [ 0x10, ['unsigned long']],
'PointerToRawData' : [ 0x14, ['unsigned long']],
'PointerToRelocations' : [ 0x18, ['unsigned long']],
'PointerToLinenumbers' : [ 0x1c, ['unsigned long']],
'NumberOfRelocations' : [ 0x20, ['unsigned short']],
'NumberOfLinenumbers' : [ 0x22, ['unsigned short']],
'Characteristics' : [ 0x24, ['unsigned long']],
} ],
'__unnamed_15ba' : [ 0x4, {
'Level' : [ 0x0, ['unsigned long']],
} ],
'_POP_ACTION_TRIGGER' : [ 0xc, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PolicyDeviceSystemButton', 1: 'PolicyDeviceThermalZone', 2: 'PolicyDeviceBattery', 3: 'PolicyInitiatePowerActionAPI', 4: 'PolicySetPowerStateAPI', 5: 'PolicyImmediateDozeS4', 6: 'PolicySystemIdle'})]],
'Flags' : [ 0x4, ['unsigned char']],
'Spare' : [ 0x5, ['array', 3, ['unsigned char']]],
'Battery' : [ 0x8, ['__unnamed_15ba']],
'Wait' : [ 0x8, ['pointer', ['_POP_TRIGGER_WAIT']]],
} ],
'_FAST_IO_DISPATCH' : [ 0x70, {
'SizeOfFastIoDispatch' : [ 0x0, ['unsigned long']],
'FastIoCheckIfPossible' : [ 0x4, ['pointer', ['void']]],
'FastIoRead' : [ 0x8, ['pointer', ['void']]],
'FastIoWrite' : [ 0xc, ['pointer', ['void']]],
'FastIoQueryBasicInfo' : [ 0x10, ['pointer', ['void']]],
'FastIoQueryStandardInfo' : [ 0x14, ['pointer', ['void']]],
'FastIoLock' : [ 0x18, ['pointer', ['void']]],
'FastIoUnlockSingle' : [ 0x1c, ['pointer', ['void']]],
'FastIoUnlockAll' : [ 0x20, ['pointer', ['void']]],
'FastIoUnlockAllByKey' : [ 0x24, ['pointer', ['void']]],
'FastIoDeviceControl' : [ 0x28, ['pointer', ['void']]],
'AcquireFileForNtCreateSection' : [ 0x2c, ['pointer', ['void']]],
'ReleaseFileForNtCreateSection' : [ 0x30, ['pointer', ['void']]],
'FastIoDetachDevice' : [ 0x34, ['pointer', ['void']]],
'FastIoQueryNetworkOpenInfo' : [ 0x38, ['pointer', ['void']]],
'AcquireForModWrite' : [ 0x3c, ['pointer', ['void']]],
'MdlRead' : [ 0x40, ['pointer', ['void']]],
'MdlReadComplete' : [ 0x44, ['pointer', ['void']]],
'PrepareMdlWrite' : [ 0x48, ['pointer', ['void']]],
'MdlWriteComplete' : [ 0x4c, ['pointer', ['void']]],
'FastIoReadCompressed' : [ 0x50, ['pointer', ['void']]],
'FastIoWriteCompressed' : [ 0x54, ['pointer', ['void']]],
'MdlReadCompleteCompressed' : [ 0x58, ['pointer', ['void']]],
'MdlWriteCompleteCompressed' : [ 0x5c, ['pointer', ['void']]],
'FastIoQueryOpen' : [ 0x60, ['pointer', ['void']]],
'ReleaseForModWrite' : [ 0x64, ['pointer', ['void']]],
'AcquireForCcFlush' : [ 0x68, ['pointer', ['void']]],
'ReleaseForCcFlush' : [ 0x6c, ['pointer', ['void']]],
} ],
'_ETIMER' : [ 0x98, {
'KeTimer' : [ 0x0, ['_KTIMER']],
'TimerApc' : [ 0x28, ['_KAPC']],
'TimerDpc' : [ 0x58, ['_KDPC']],
'ActiveTimerListEntry' : [ 0x78, ['_LIST_ENTRY']],
'Lock' : [ 0x80, ['unsigned long']],
'Period' : [ 0x84, ['long']],
'ApcAssociated' : [ 0x88, ['unsigned char']],
'WakeTimer' : [ 0x89, ['unsigned char']],
'WakeTimerListEntry' : [ 0x8c, ['_LIST_ENTRY']],
} ],
'_DBGKD_BREAKPOINTEX' : [ 0x8, {
'BreakPointCount' : [ 0x0, ['unsigned long']],
'ContinueStatus' : [ 0x4, ['long']],
} ],
'_CM_CELL_REMAP_BLOCK' : [ 0x8, {
'OldCell' : [ 0x0, ['unsigned long']],
'NewCell' : [ 0x4, ['unsigned long']],
} ],
'_PCI_PMC' : [ 0x2, {
'Version' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'PMEClock' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Rsvd1' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'DeviceSpecificInitialization' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'Rsvd2' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 8, native_type='unsigned char')]],
'Support' : [ 0x1, ['_PM_SUPPORT']],
} ],
'_DBGKD_CONTINUE' : [ 0x4, {
'ContinueStatus' : [ 0x0, ['long']],
} ],
'__unnamed_161d' : [ 0x4, {
'VirtualAddress' : [ 0x0, ['pointer', ['void']]],
'Long' : [ 0x0, ['unsigned long']],
'e1' : [ 0x0, ['_MMWSLENTRY']],
} ],
'_MMWSLE' : [ 0x4, {
'u1' : [ 0x0, ['__unnamed_161d']],
} ],
'_EXCEPTION_POINTERS' : [ 0x8, {
'ExceptionRecord' : [ 0x0, ['pointer', ['_EXCEPTION_RECORD']]],
'ContextRecord' : [ 0x4, ['pointer', ['_CONTEXT']]],
} ],
'_KQUEUE' : [ 0x28, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'EntryListHead' : [ 0x10, ['_LIST_ENTRY']],
'CurrentCount' : [ 0x18, ['unsigned long']],
'MaximumCount' : [ 0x1c, ['unsigned long']],
'ThreadListHead' : [ 0x20, ['_LIST_ENTRY']],
} ],
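  # _RTL_USER_PROCESS_PARAMETERS is the user-mode block reached via
  # PEB->ProcessParameters; it carries the image path, command line, standard
  # handles, and a pointer to the environment block.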
'_RTL_USER_PROCESS_PARAMETERS' : [ 0x290, {
'MaximumLength' : [ 0x0, ['unsigned long']],
'Length' : [ 0x4, ['unsigned long']],
'Flags' : [ 0x8, ['unsigned long']],
'DebugFlags' : [ 0xc, ['unsigned long']],
'ConsoleHandle' : [ 0x10, ['pointer', ['void']]],
'ConsoleFlags' : [ 0x14, ['unsigned long']],
'StandardInput' : [ 0x18, ['pointer', ['void']]],
'StandardOutput' : [ 0x1c, ['pointer', ['void']]],
'StandardError' : [ 0x20, ['pointer', ['void']]],
'CurrentDirectory' : [ 0x24, ['_CURDIR']],
'DllPath' : [ 0x30, ['_UNICODE_STRING']],
'ImagePathName' : [ 0x38, ['_UNICODE_STRING']],
'CommandLine' : [ 0x40, ['_UNICODE_STRING']],
'Environment' : [ 0x48, ['pointer', ['void']]],
'StartingX' : [ 0x4c, ['unsigned long']],
'StartingY' : [ 0x50, ['unsigned long']],
'CountX' : [ 0x54, ['unsigned long']],
'CountY' : [ 0x58, ['unsigned long']],
'CountCharsX' : [ 0x5c, ['unsigned long']],
'CountCharsY' : [ 0x60, ['unsigned long']],
'FillAttribute' : [ 0x64, ['unsigned long']],
'WindowFlags' : [ 0x68, ['unsigned long']],
'ShowWindowFlags' : [ 0x6c, ['unsigned long']],
'WindowTitle' : [ 0x70, ['_UNICODE_STRING']],
'DesktopInfo' : [ 0x78, ['_UNICODE_STRING']],
'ShellInfo' : [ 0x80, ['_UNICODE_STRING']],
'RuntimeData' : [ 0x88, ['_UNICODE_STRING']],
'CurrentDirectores' : [ 0x90, ['array', 32, ['_RTL_DRIVE_LETTER_CURDIR']]],
} ],
'_CACHE_MANAGER_CALLBACKS' : [ 0x10, {
'AcquireForLazyWrite' : [ 0x0, ['pointer', ['void']]],
'ReleaseFromLazyWrite' : [ 0x4, ['pointer', ['void']]],
'AcquireForReadAhead' : [ 0x8, ['pointer', ['void']]],
'ReleaseFromReadAhead' : [ 0xc, ['pointer', ['void']]],
} ],
'_FILE_BASIC_INFORMATION' : [ 0x28, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LastAccessTime' : [ 0x8, ['_LARGE_INTEGER']],
'LastWriteTime' : [ 0x10, ['_LARGE_INTEGER']],
'ChangeTime' : [ 0x18, ['_LARGE_INTEGER']],
'FileAttributes' : [ 0x20, ['unsigned long']],
} ],
'_CELL_DATA' : [ 0x50, {
'u' : [ 0x0, ['_u']],
} ],
'_SE_AUDIT_PROCESS_CREATION_INFO' : [ 0x4, {
'ImageFileName' : [ 0x0, ['pointer', ['_OBJECT_NAME_INFORMATION']]],
} ],
'_HEAP_ENTRY_EXTRA' : [ 0x8, {
'AllocatorBackTraceIndex' : [ 0x0, ['unsigned short']],
'TagIndex' : [ 0x2, ['unsigned short']],
'Settable' : [ 0x4, ['unsigned long']],
'ZeroInit' : [ 0x0, ['unsigned long long']],
} ],
'_VI_DEADLOCK_RESOURCE' : [ 0x80, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'VfDeadlockUnknown', 1: 'VfDeadlockMutex', 2: 'VfDeadlockFastMutex', 3: 'VfDeadlockFastMutexUnsafe', 4: 'VfDeadlockSpinLock', 5: 'VfDeadlockQueuedSpinLock', 6: 'VfDeadlockTypeMaximum'})]],
'NodeCount' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'RecursionCount' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
'ResourceAddress' : [ 0x8, ['pointer', ['void']]],
'ThreadOwner' : [ 0xc, ['pointer', ['_VI_DEADLOCK_THREAD']]],
'ResourceList' : [ 0x10, ['_LIST_ENTRY']],
'HashChainList' : [ 0x18, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x18, ['_LIST_ENTRY']],
'StackTrace' : [ 0x20, ['array', 8, ['pointer', ['void']]]],
'LastAcquireTrace' : [ 0x40, ['array', 8, ['pointer', ['void']]]],
'LastReleaseTrace' : [ 0x60, ['array', 8, ['pointer', ['void']]]],
} ],
'_CLIENT_ID' : [ 0x8, {
'UniqueProcess' : [ 0x0, ['pointer', ['void']]],
'UniqueThread' : [ 0x4, ['pointer', ['void']]],
} ],
'_PEB_FREE_BLOCK' : [ 0x8, {
'Next' : [ 0x0, ['pointer', ['_PEB_FREE_BLOCK']]],
'Size' : [ 0x4, ['unsigned long']],
} ],
'_PO_DEVICE_NOTIFY' : [ 0x28, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'TargetDevice' : [ 0x8, ['pointer', ['_DEVICE_OBJECT']]],
'WakeNeeded' : [ 0xc, ['unsigned char']],
'OrderLevel' : [ 0xd, ['unsigned char']],
'DeviceObject' : [ 0x10, ['pointer', ['_DEVICE_OBJECT']]],
'Node' : [ 0x14, ['pointer', ['void']]],
'DeviceName' : [ 0x18, ['pointer', ['unsigned short']]],
'DriverName' : [ 0x1c, ['pointer', ['unsigned short']]],
'ChildCount' : [ 0x20, ['unsigned long']],
'ActiveChild' : [ 0x24, ['unsigned long']],
} ],
'_MMPFNLIST' : [ 0x10, {
'Total' : [ 0x0, ['unsigned long']],
'ListName' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'ZeroedPageList', 1: 'FreePageList', 2: 'StandbyPageList', 3: 'ModifiedPageList', 4: 'ModifiedNoWritePageList', 5: 'BadPageList', 6: 'ActiveAndValid', 7: 'TransitionPage'})]],
'Flink' : [ 0x8, ['unsigned long']],
'Blink' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_1649' : [ 0x4, {
'Spare' : [ 0x0, ['array', 4, ['unsigned char']]],
} ],
'__unnamed_164b' : [ 0x4, {
'PrimaryBus' : [ 0x0, ['unsigned char']],
'SecondaryBus' : [ 0x1, ['unsigned char']],
'SubordinateBus' : [ 0x2, ['unsigned char']],
'SubtractiveDecode' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IsaBitSet' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'VgaBitSet' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'WeChangedBusNumbers' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'IsaBitRequired' : [ 0x3, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
} ],
'PCI_HEADER_TYPE_DEPENDENT' : [ 0x4, {
'type0' : [ 0x0, ['__unnamed_1649']],
'type1' : [ 0x0, ['__unnamed_164b']],
'type2' : [ 0x0, ['__unnamed_164b']],
} ],
'_DBGKD_GET_SET_BUS_DATA' : [ 0x14, {
'BusDataType' : [ 0x0, ['unsigned long']],
'BusNumber' : [ 0x4, ['unsigned long']],
'SlotNumber' : [ 0x8, ['unsigned long']],
'Offset' : [ 0xc, ['unsigned long']],
'Length' : [ 0x10, ['unsigned long']],
} ],
'_OBJECT_HEADER_NAME_INFO' : [ 0x10, {
'Directory' : [ 0x0, ['pointer', ['_OBJECT_DIRECTORY']]],
'Name' : [ 0x4, ['_UNICODE_STRING']],
'QueryReferences' : [ 0xc, ['unsigned long']],
} ],
'_KINTERRUPT' : [ 0x1e4, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'InterruptListEntry' : [ 0x4, ['_LIST_ENTRY']],
'ServiceRoutine' : [ 0xc, ['pointer', ['void']]],
'ServiceContext' : [ 0x10, ['pointer', ['void']]],
'SpinLock' : [ 0x14, ['unsigned long']],
'TickCount' : [ 0x18, ['unsigned long']],
'ActualLock' : [ 0x1c, ['pointer', ['unsigned long']]],
'DispatchAddress' : [ 0x20, ['pointer', ['void']]],
'Vector' : [ 0x24, ['unsigned long']],
'Irql' : [ 0x28, ['unsigned char']],
'SynchronizeIrql' : [ 0x29, ['unsigned char']],
'FloatingSave' : [ 0x2a, ['unsigned char']],
'Connected' : [ 0x2b, ['unsigned char']],
'Number' : [ 0x2c, ['unsigned char']],
'ShareVector' : [ 0x2d, ['unsigned char']],
'Mode' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {0: 'LevelSensitive', 1: 'Latched'})]],
'ServiceCount' : [ 0x34, ['unsigned long']],
'DispatchCount' : [ 0x38, ['unsigned long']],
'DispatchCode' : [ 0x3c, ['array', 106, ['unsigned long']]],
} ],
'_SECURITY_CLIENT_CONTEXT' : [ 0x3c, {
'SecurityQos' : [ 0x0, ['_SECURITY_QUALITY_OF_SERVICE']],
'ClientToken' : [ 0xc, ['pointer', ['void']]],
'DirectlyAccessClientToken' : [ 0x10, ['unsigned char']],
'DirectAccessEffectiveOnly' : [ 0x11, ['unsigned char']],
'ServerIsRemote' : [ 0x12, ['unsigned char']],
'ClientTokenControl' : [ 0x14, ['_TOKEN_CONTROL']],
} ],
'_BITMAP_RANGE' : [ 0x20, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'BasePage' : [ 0x8, ['long long']],
'FirstDirtyPage' : [ 0x10, ['unsigned long']],
'LastDirtyPage' : [ 0x14, ['unsigned long']],
'DirtyPages' : [ 0x18, ['unsigned long']],
'Bitmap' : [ 0x1c, ['pointer', ['unsigned long']]],
} ],
'_PCI_ARBITER_INSTANCE' : [ 0xe0, {
'Header' : [ 0x0, ['PCI_SECONDARY_EXTENSION']],
'Interface' : [ 0xc, ['pointer', ['_PCI_INTERFACE']]],
'BusFdoExtension' : [ 0x10, ['pointer', ['_PCI_FDO_EXTENSION']]],
'InstanceName' : [ 0x14, ['array', 24, ['unsigned short']]],
'CommonInstance' : [ 0x44, ['_ARBITER_INSTANCE']],
} ],
'_HANDLE_TRACE_DB_ENTRY' : [ 0x50, {
'ClientId' : [ 0x0, ['_CLIENT_ID']],
'Handle' : [ 0x8, ['pointer', ['void']]],
'Type' : [ 0xc, ['unsigned long']],
'StackTrace' : [ 0x10, ['array', 16, ['pointer', ['void']]]],
} ],
'_MMPAGING_FILE' : [ 0x44, {
'Size' : [ 0x0, ['unsigned long']],
'MaximumSize' : [ 0x4, ['unsigned long']],
'MinimumSize' : [ 0x8, ['unsigned long']],
'FreeSpace' : [ 0xc, ['unsigned long']],
'CurrentUsage' : [ 0x10, ['unsigned long']],
'PeakUsage' : [ 0x14, ['unsigned long']],
'Hint' : [ 0x18, ['unsigned long']],
'HighestPage' : [ 0x1c, ['unsigned long']],
'Entry' : [ 0x20, ['array', 2, ['pointer', ['_MMMOD_WRITER_MDL_ENTRY']]]],
'Bitmap' : [ 0x28, ['pointer', ['_RTL_BITMAP']]],
'File' : [ 0x2c, ['pointer', ['_FILE_OBJECT']]],
'PageFileName' : [ 0x30, ['_UNICODE_STRING']],
'PageFileNumber' : [ 0x38, ['unsigned long']],
'Extended' : [ 0x3c, ['unsigned char']],
'HintSetToZero' : [ 0x3d, ['unsigned char']],
'BootPartition' : [ 0x3e, ['unsigned char']],
'FileHandle' : [ 0x40, ['pointer', ['void']]],
} ],
'_BUS_EXTENSION_LIST' : [ 0x8, {
'Next' : [ 0x0, ['pointer', ['void']]],
'BusExtension' : [ 0x4, ['pointer', ['_PI_BUS_EXTENSION']]],
} ],
'_PCI_MJ_DISPATCH_TABLE' : [ 0x20, {
'PnpIrpMaximumMinorFunction' : [ 0x0, ['unsigned long']],
'PnpIrpDispatchTable' : [ 0x4, ['pointer', ['_PCI_MN_DISPATCH_TABLE']]],
'PowerIrpMaximumMinorFunction' : [ 0x8, ['unsigned long']],
'PowerIrpDispatchTable' : [ 0xc, ['pointer', ['_PCI_MN_DISPATCH_TABLE']]],
'SystemControlIrpDispatchStyle' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'IRP_COMPLETE', 1: 'IRP_DOWNWARD', 2: 'IRP_UPWARD', 3: 'IRP_DISPATCH'})]],
'SystemControlIrpDispatchFunction' : [ 0x14, ['pointer', ['void']]],
'OtherIrpDispatchStyle' : [ 0x18, ['Enumeration', dict(target = 'long', choices = {0: 'IRP_COMPLETE', 1: 'IRP_DOWNWARD', 2: 'IRP_UPWARD', 3: 'IRP_DISPATCH'})]],
'OtherIrpDispatchFunction' : [ 0x1c, ['pointer', ['void']]],
} ],
'_POP_TRIGGER_WAIT' : [ 0x20, {
'Event' : [ 0x0, ['_KEVENT']],
'Status' : [ 0x10, ['long']],
'Link' : [ 0x14, ['_LIST_ENTRY']],
'Trigger' : [ 0x1c, ['pointer', ['_POP_ACTION_TRIGGER']]],
} ],
'_IO_TIMER' : [ 0x18, {
'Type' : [ 0x0, ['short']],
'TimerFlag' : [ 0x2, ['short']],
'TimerList' : [ 0x4, ['_LIST_ENTRY']],
'TimerRoutine' : [ 0xc, ['pointer', ['void']]],
'Context' : [ 0x10, ['pointer', ['void']]],
'DeviceObject' : [ 0x14, ['pointer', ['_DEVICE_OBJECT']]],
} ],
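  # _FXSAVE_FORMAT is the memory image written by the x86 FXSAVE instruction:
  # RegisterArea holds the eight x87 ST registers (80 bits each, padded to 16
  # bytes), Reserved3 holds XMM0-XMM7, and MXCsr/MXCsrMask carry the SSE
  # control state.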
'_FXSAVE_FORMAT' : [ 0x208, {
'ControlWord' : [ 0x0, ['unsigned short']],
'StatusWord' : [ 0x2, ['unsigned short']],
'TagWord' : [ 0x4, ['unsigned short']],
'ErrorOpcode' : [ 0x6, ['unsigned short']],
'ErrorOffset' : [ 0x8, ['unsigned long']],
'ErrorSelector' : [ 0xc, ['unsigned long']],
'DataOffset' : [ 0x10, ['unsigned long']],
'DataSelector' : [ 0x14, ['unsigned long']],
'MXCsr' : [ 0x18, ['unsigned long']],
'MXCsrMask' : [ 0x1c, ['unsigned long']],
'RegisterArea' : [ 0x20, ['array', 128, ['unsigned char']]],
'Reserved3' : [ 0xa0, ['array', 128, ['unsigned char']]],
'Reserved4' : [ 0x120, ['array', 224, ['unsigned char']]],
'Align16Byte' : [ 0x200, ['array', 8, ['unsigned char']]],
} ],
'_MMWSLENTRY' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'LockedInWs' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'LockedInMemory' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned long')]],
'Hashed' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Direct' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Age' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 12, native_type='unsigned long')]],
'VirtualPageNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
} ],
'_OBJECT_DIRECTORY' : [ 0xa4, {
'HashBuckets' : [ 0x0, ['array', 37, ['pointer', ['_OBJECT_DIRECTORY_ENTRY']]]],
'Lock' : [ 0x94, ['_EX_PUSH_LOCK']],
'DeviceMap' : [ 0x98, ['pointer', ['_DEVICE_MAP']]],
'SessionId' : [ 0x9c, ['unsigned long']],
'Reserved' : [ 0xa0, ['unsigned short']],
'SymbolicLinkUsageCount' : [ 0xa2, ['unsigned short']],
} ],
'_OBJECT_CREATE_INFORMATION' : [ 0x30, {
'Attributes' : [ 0x0, ['unsigned long']],
'RootDirectory' : [ 0x4, ['pointer', ['void']]],
'ParseContext' : [ 0x8, ['pointer', ['void']]],
'ProbeMode' : [ 0xc, ['unsigned char']],
'PagedPoolCharge' : [ 0x10, ['unsigned long']],
'NonPagedPoolCharge' : [ 0x14, ['unsigned long']],
'SecurityDescriptorCharge' : [ 0x18, ['unsigned long']],
'SecurityDescriptor' : [ 0x1c, ['pointer', ['void']]],
'SecurityQos' : [ 0x20, ['pointer', ['_SECURITY_QUALITY_OF_SERVICE']]],
'SecurityQualityOfService' : [ 0x24, ['_SECURITY_QUALITY_OF_SERVICE']],
} ],
'_WMI_CLIENT_CONTEXT' : [ 0x4, {
'ProcessorNumber' : [ 0x0, ['unsigned char']],
'Alignment' : [ 0x1, ['unsigned char']],
'LoggerId' : [ 0x2, ['unsigned short']],
} ],
'_HEAP_LOOKASIDE' : [ 0x30, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'Depth' : [ 0x8, ['unsigned short']],
'MaximumDepth' : [ 0xa, ['unsigned short']],
'TotalAllocates' : [ 0xc, ['unsigned long']],
'AllocateMisses' : [ 0x10, ['unsigned long']],
'TotalFrees' : [ 0x14, ['unsigned long']],
'FreeMisses' : [ 0x18, ['unsigned long']],
'LastTotalAllocates' : [ 0x1c, ['unsigned long']],
'LastAllocateMisses' : [ 0x20, ['unsigned long']],
'Counters' : [ 0x24, ['array', 2, ['unsigned long']]],
} ],
'_ARBITER_INTERFACE' : [ 0x18, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x4, ['pointer', ['void']]],
'InterfaceReference' : [ 0x8, ['pointer', ['void']]],
'InterfaceDereference' : [ 0xc, ['pointer', ['void']]],
'ArbiterHandler' : [ 0x10, ['pointer', ['void']]],
'Flags' : [ 0x14, ['unsigned long']],
} ],
'_ACL' : [ 0x8, {
'AclRevision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'AclSize' : [ 0x2, ['unsigned short']],
'AceCount' : [ 0x4, ['unsigned short']],
'Sbz2' : [ 0x6, ['unsigned short']],
} ],
'_CALL_PERFORMANCE_DATA' : [ 0x204, {
'SpinLock' : [ 0x0, ['unsigned long']],
'HashTable' : [ 0x4, ['array', 64, ['_LIST_ENTRY']]],
} ],
'_MMWSL' : [ 0x69c, {
'Quota' : [ 0x0, ['unsigned long']],
'FirstFree' : [ 0x4, ['unsigned long']],
'FirstDynamic' : [ 0x8, ['unsigned long']],
'LastEntry' : [ 0xc, ['unsigned long']],
'NextSlot' : [ 0x10, ['unsigned long']],
'Wsle' : [ 0x14, ['pointer', ['_MMWSLE']]],
'LastInitializedWsle' : [ 0x18, ['unsigned long']],
'NonDirectCount' : [ 0x1c, ['unsigned long']],
'HashTable' : [ 0x20, ['pointer', ['_MMWSLE_HASH']]],
'HashTableSize' : [ 0x24, ['unsigned long']],
'NumberOfCommittedPageTables' : [ 0x28, ['unsigned long']],
'HashTableStart' : [ 0x2c, ['pointer', ['void']]],
'HighestPermittedHashAddress' : [ 0x30, ['pointer', ['void']]],
'NumberOfImageWaiters' : [ 0x34, ['unsigned long']],
'VadBitMapHint' : [ 0x38, ['unsigned long']],
'UsedPageTableEntries' : [ 0x3c, ['array', 768, ['unsigned short']]],
'CommittedPageTables' : [ 0x63c, ['array', 24, ['unsigned long']]],
} ],
'_RTL_DRIVE_LETTER_CURDIR' : [ 0x10, {
'Flags' : [ 0x0, ['unsigned short']],
'Length' : [ 0x2, ['unsigned short']],
'TimeStamp' : [ 0x4, ['unsigned long']],
'DosPath' : [ 0x8, ['_STRING']],
} ],
'PCI_FUNCTION_RESOURCES' : [ 0x150, {
'Limit' : [ 0x0, ['array', 7, ['_IO_RESOURCE_DESCRIPTOR']]],
'Current' : [ 0xe0, ['array', 7, ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_WNODE_HEADER' : [ 0x30, {
'BufferSize' : [ 0x0, ['unsigned long']],
'ProviderId' : [ 0x4, ['unsigned long']],
'HistoricalContext' : [ 0x8, ['unsigned long long']],
'Version' : [ 0x8, ['unsigned long']],
'Linkage' : [ 0xc, ['unsigned long']],
'CountLost' : [ 0x10, ['unsigned long']],
'KernelHandle' : [ 0x10, ['pointer', ['void']]],
'TimeStamp' : [ 0x10, ['_LARGE_INTEGER']],
'Guid' : [ 0x18, ['_GUID']],
'ClientContext' : [ 0x28, ['unsigned long']],
'Flags' : [ 0x2c, ['unsigned long']],
} ],
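  # _EXCEPTION_RECORD: ExceptionRecord chains to a nested record when one
  # exception is raised while handling another, and NumberParameters gives the
  # count of valid ExceptionInformation entries (up to 15 in this layout).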
'_EXCEPTION_RECORD' : [ 0x50, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['pointer', ['_EXCEPTION_RECORD']]],
'ExceptionAddress' : [ 0xc, ['pointer', ['void']]],
'NumberParameters' : [ 0x10, ['unsigned long']],
'ExceptionInformation' : [ 0x14, ['array', 15, ['unsigned long']]],
} ],
'__unnamed_16c4' : [ 0x4, {
'ImageCommitment' : [ 0x0, ['unsigned long']],
'CreatingProcess' : [ 0x0, ['pointer', ['_EPROCESS']]],
} ],
'__unnamed_16c8' : [ 0x4, {
'ImageInformation' : [ 0x0, ['pointer', ['_SECTION_IMAGE_INFORMATION']]],
'FirstMappedVa' : [ 0x0, ['pointer', ['void']]],
} ],
'_SEGMENT' : [ 0x40, {
'ControlArea' : [ 0x0, ['pointer', ['_CONTROL_AREA']]],
'TotalNumberOfPtes' : [ 0x4, ['unsigned long']],
'NonExtendedPtes' : [ 0x8, ['unsigned long']],
'WritableUserReferences' : [ 0xc, ['unsigned long']],
'SizeOfSegment' : [ 0x10, ['unsigned long long']],
'SegmentPteTemplate' : [ 0x18, ['_MMPTE']],
'NumberOfCommittedPages' : [ 0x1c, ['unsigned long']],
'ExtendInfo' : [ 0x20, ['pointer', ['_MMEXTEND_INFO']]],
'SystemImageBase' : [ 0x24, ['pointer', ['void']]],
'BasedAddress' : [ 0x28, ['pointer', ['void']]],
'u1' : [ 0x2c, ['__unnamed_16c4']],
'u2' : [ 0x30, ['__unnamed_16c8']],
'PrototypePte' : [ 0x34, ['pointer', ['_MMPTE']]],
'ThePtes' : [ 0x38, ['array', 1, ['_MMPTE']]],
} ],
'_PCI_COMMON_EXTENSION' : [ 0x20, {
'Next' : [ 0x0, ['pointer', ['void']]],
'ExtensionType' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {1768116272: 'PciPdoExtensionType', 1768116273: 'PciFdoExtensionType', 1768116274: 'PciArb_Io', 1768116275: 'PciArb_Memory', 1768116276: 'PciArb_Interrupt', 1768116277: 'PciArb_BusNumber', 1768116278: 'PciTrans_Interrupt', 1768116279: 'PciInterface_BusHandler', 1768116280: 'PciInterface_IntRouteHandler', 1768116281: 'PciInterface_PciCb', 1768116282: 'PciInterface_LegacyDeviceDetection', 1768116283: 'PciInterface_PmeHandler', 1768116284: 'PciInterface_DevicePresent', 1768116285: 'PciInterface_NativeIde', 1768116286: 'PciInterface_AgpTarget'})]],
'IrpDispatchTable' : [ 0x8, ['pointer', ['_PCI_MJ_DISPATCH_TABLE']]],
'DeviceState' : [ 0xc, ['unsigned char']],
'TentativeNextState' : [ 0xd, ['unsigned char']],
'SecondaryExtLock' : [ 0x10, ['_KEVENT']],
} ],
'_PRIVATE_CACHE_MAP' : [ 0x58, {
'NodeTypeCode' : [ 0x0, ['short']],
'Flags' : [ 0x0, ['_PRIVATE_CACHE_MAP_FLAGS']],
'UlongFlags' : [ 0x0, ['unsigned long']],
'ReadAheadMask' : [ 0x4, ['unsigned long']],
'FileObject' : [ 0x8, ['pointer', ['_FILE_OBJECT']]],
'FileOffset1' : [ 0x10, ['_LARGE_INTEGER']],
'BeyondLastByte1' : [ 0x18, ['_LARGE_INTEGER']],
'FileOffset2' : [ 0x20, ['_LARGE_INTEGER']],
'BeyondLastByte2' : [ 0x28, ['_LARGE_INTEGER']],
'ReadAheadOffset' : [ 0x30, ['array', 2, ['_LARGE_INTEGER']]],
'ReadAheadLength' : [ 0x40, ['array', 2, ['unsigned long']]],
'ReadAheadSpinLock' : [ 0x48, ['unsigned long']],
'PrivateLinks' : [ 0x4c, ['_LIST_ENTRY']],
} ],
'_RTL_HANDLE_TABLE' : [ 0x20, {
'MaximumNumberOfHandles' : [ 0x0, ['unsigned long']],
'SizeOfHandleTableEntry' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['array', 2, ['unsigned long']]],
'FreeHandles' : [ 0x10, ['pointer', ['_RTL_HANDLE_TABLE_ENTRY']]],
'CommittedHandles' : [ 0x14, ['pointer', ['_RTL_HANDLE_TABLE_ENTRY']]],
'UnCommittedHandles' : [ 0x18, ['pointer', ['_RTL_HANDLE_TABLE_ENTRY']]],
'MaxReservedHandles' : [ 0x1c, ['pointer', ['_RTL_HANDLE_TABLE_ENTRY']]],
} ],
'_POP_IDLE_HANDLER' : [ 0x20, {
'Latency' : [ 0x0, ['unsigned long']],
'TimeCheck' : [ 0x4, ['unsigned long']],
'DemoteLimit' : [ 0x8, ['unsigned long']],
'PromoteLimit' : [ 0xc, ['unsigned long']],
'PromoteCount' : [ 0x10, ['unsigned long']],
'Demote' : [ 0x14, ['unsigned char']],
'Promote' : [ 0x15, ['unsigned char']],
'PromotePercent' : [ 0x16, ['unsigned char']],
'DemotePercent' : [ 0x17, ['unsigned char']],
'State' : [ 0x18, ['unsigned char']],
'Spare' : [ 0x19, ['array', 3, ['unsigned char']]],
'IdleFunction' : [ 0x1c, ['pointer', ['void']]],
} ],
'SYSTEM_POWER_CAPABILITIES' : [ 0x4c, {
'PowerButtonPresent' : [ 0x0, ['unsigned char']],
'SleepButtonPresent' : [ 0x1, ['unsigned char']],
'LidPresent' : [ 0x2, ['unsigned char']],
'SystemS1' : [ 0x3, ['unsigned char']],
'SystemS2' : [ 0x4, ['unsigned char']],
'SystemS3' : [ 0x5, ['unsigned char']],
'SystemS4' : [ 0x6, ['unsigned char']],
'SystemS5' : [ 0x7, ['unsigned char']],
'HiberFilePresent' : [ 0x8, ['unsigned char']],
'FullWake' : [ 0x9, ['unsigned char']],
'VideoDimPresent' : [ 0xa, ['unsigned char']],
'ApmPresent' : [ 0xb, ['unsigned char']],
'UpsPresent' : [ 0xc, ['unsigned char']],
'ThermalControl' : [ 0xd, ['unsigned char']],
'ProcessorThrottle' : [ 0xe, ['unsigned char']],
'ProcessorMinThrottle' : [ 0xf, ['unsigned char']],
'ProcessorMaxThrottle' : [ 0x10, ['unsigned char']],
'spare2' : [ 0x11, ['array', 4, ['unsigned char']]],
'DiskSpinDown' : [ 0x15, ['unsigned char']],
'spare3' : [ 0x16, ['array', 8, ['unsigned char']]],
'SystemBatteriesPresent' : [ 0x1e, ['unsigned char']],
'BatteriesAreShortTerm' : [ 0x1f, ['unsigned char']],
'BatteryScale' : [ 0x20, ['array', 3, ['BATTERY_REPORTING_SCALE']]],
'AcOnLineWake' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'SoftLidWake' : [ 0x3c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'RtcWake' : [ 0x40, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'MinDeviceWakeState' : [ 0x44, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DefaultLowLatencyWake' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'_DEVOBJ_EXTENSION' : [ 0x2c, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'DeviceObject' : [ 0x4, ['pointer', ['_DEVICE_OBJECT']]],
'PowerFlags' : [ 0x8, ['unsigned long']],
'Dope' : [ 0xc, ['pointer', ['_DEVICE_OBJECT_POWER_EXTENSION']]],
'ExtensionFlags' : [ 0x10, ['unsigned long']],
'DeviceNode' : [ 0x14, ['pointer', ['void']]],
'AttachedTo' : [ 0x18, ['pointer', ['_DEVICE_OBJECT']]],
'StartIoCount' : [ 0x1c, ['long']],
'StartIoKey' : [ 0x20, ['long']],
'StartIoFlags' : [ 0x24, ['unsigned long']],
'Vpb' : [ 0x28, ['pointer', ['_VPB']]],
} ],
'_FLOATING_SAVE_AREA' : [ 0x70, {
'ControlWord' : [ 0x0, ['unsigned long']],
'StatusWord' : [ 0x4, ['unsigned long']],
'TagWord' : [ 0x8, ['unsigned long']],
'ErrorOffset' : [ 0xc, ['unsigned long']],
'ErrorSelector' : [ 0x10, ['unsigned long']],
'DataOffset' : [ 0x14, ['unsigned long']],
'DataSelector' : [ 0x18, ['unsigned long']],
'RegisterArea' : [ 0x1c, ['array', 80, ['unsigned char']]],
'Cr0NpxState' : [ 0x6c, ['unsigned long']],
} ],
'_DBGKD_GET_VERSION64' : [ 0x28, {
'MajorVersion' : [ 0x0, ['unsigned short']],
'MinorVersion' : [ 0x2, ['unsigned short']],
'ProtocolVersion' : [ 0x4, ['unsigned short']],
'Flags' : [ 0x6, ['unsigned short']],
'MachineType' : [ 0x8, ['unsigned short']],
'MaxPacketType' : [ 0xa, ['unsigned char']],
'MaxStateChange' : [ 0xb, ['unsigned char']],
'MaxManipulate' : [ 0xc, ['unsigned char']],
'Simulation' : [ 0xd, ['unsigned char']],
'Unused' : [ 0xe, ['array', 1, ['unsigned short']]],
'KernBase' : [ 0x10, ['unsigned long long']],
'PsLoadedModuleList' : [ 0x18, ['unsigned long long']],
'DebuggerDataList' : [ 0x20, ['unsigned long long']],
} ],
'_MMVIEW' : [ 0x8, {
'Entry' : [ 0x0, ['unsigned long']],
'ControlArea' : [ 0x4, ['pointer', ['_CONTROL_AREA']]],
} ],
'_KSYSTEM_TIME' : [ 0xc, {
'LowPart' : [ 0x0, ['unsigned long']],
'High1Time' : [ 0x4, ['long']],
'High2Time' : [ 0x8, ['long']],
} ],
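  # _TOKEN is the executive access token. UserAndGroups, RestrictedSids, and
  # Privileges point into the variable-length data that follows the fixed
  # part of the structure (VariablePart marks where it begins).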
'_TOKEN' : [ 0xa8, {
'TokenSource' : [ 0x0, ['_TOKEN_SOURCE']],
'TokenId' : [ 0x10, ['_LUID']],
'AuthenticationId' : [ 0x18, ['_LUID']],
'ParentTokenId' : [ 0x20, ['_LUID']],
'ExpirationTime' : [ 0x28, ['_LARGE_INTEGER']],
'TokenLock' : [ 0x30, ['pointer', ['_ERESOURCE']]],
'AuditPolicy' : [ 0x38, ['_SEP_AUDIT_POLICY']],
'ModifiedId' : [ 0x40, ['_LUID']],
'SessionId' : [ 0x48, ['unsigned long']],
'UserAndGroupCount' : [ 0x4c, ['unsigned long']],
'RestrictedSidCount' : [ 0x50, ['unsigned long']],
'PrivilegeCount' : [ 0x54, ['unsigned long']],
'VariableLength' : [ 0x58, ['unsigned long']],
'DynamicCharged' : [ 0x5c, ['unsigned long']],
'DynamicAvailable' : [ 0x60, ['unsigned long']],
'DefaultOwnerIndex' : [ 0x64, ['unsigned long']],
'UserAndGroups' : [ 0x68, ['pointer', ['_SID_AND_ATTRIBUTES']]],
'RestrictedSids' : [ 0x6c, ['pointer', ['_SID_AND_ATTRIBUTES']]],
'PrimaryGroup' : [ 0x70, ['pointer', ['void']]],
'Privileges' : [ 0x74, ['pointer', ['_LUID_AND_ATTRIBUTES']]],
'DynamicPart' : [ 0x78, ['pointer', ['unsigned long']]],
'DefaultDacl' : [ 0x7c, ['pointer', ['_ACL']]],
'TokenType' : [ 0x80, ['Enumeration', dict(target = 'long', choices = {1: 'TokenPrimary', 2: 'TokenImpersonation'})]],
'ImpersonationLevel' : [ 0x84, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'TokenFlags' : [ 0x88, ['unsigned long']],
'TokenInUse' : [ 0x8c, ['unsigned char']],
'ProxyData' : [ 0x90, ['pointer', ['_SECURITY_TOKEN_PROXY_DATA']]],
'AuditData' : [ 0x94, ['pointer', ['_SECURITY_TOKEN_AUDIT_DATA']]],
'OriginatingLogonSession' : [ 0x98, ['_LUID']],
'VariablePart' : [ 0xa0, ['unsigned long']],
} ],
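  # _TEB is the per-thread user-mode Thread Environment Block; on x86 the FS
  # segment points at it, and NtTib.Self (at FS:[0x18]) points back to the
  # TEB itself. TlsSlots holds the 64 primary thread-local-storage slots.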
'_TEB' : [ 0xfb8, {
'NtTib' : [ 0x0, ['_NT_TIB']],
'EnvironmentPointer' : [ 0x1c, ['pointer', ['void']]],
'ClientId' : [ 0x20, ['_CLIENT_ID']],
'ActiveRpcHandle' : [ 0x28, ['pointer', ['void']]],
'ThreadLocalStoragePointer' : [ 0x2c, ['pointer', ['void']]],
'ProcessEnvironmentBlock' : [ 0x30, ['pointer', ['_PEB']]],
'LastErrorValue' : [ 0x34, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x38, ['unsigned long']],
'CsrClientThread' : [ 0x3c, ['pointer', ['void']]],
'Win32ThreadInfo' : [ 0x40, ['pointer', ['void']]],
'User32Reserved' : [ 0x44, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xac, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0xc0, ['pointer', ['void']]],
'CurrentLocale' : [ 0xc4, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0xc8, ['unsigned long']],
'SystemReserved1' : [ 0xcc, ['array', 54, ['pointer', ['void']]]],
'ExceptionCode' : [ 0x1a4, ['long']],
'ActivationContextStack' : [ 0x1a8, ['_ACTIVATION_CONTEXT_STACK']],
'SpareBytes1' : [ 0x1bc, ['array', 24, ['unsigned char']]],
'GdiTebBatch' : [ 0x1d4, ['_GDI_TEB_BATCH']],
'RealClientId' : [ 0x6b4, ['_CLIENT_ID']],
'GdiCachedProcessHandle' : [ 0x6bc, ['pointer', ['void']]],
'GdiClientPID' : [ 0x6c0, ['unsigned long']],
'GdiClientTID' : [ 0x6c4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x6c8, ['pointer', ['void']]],
'Win32ClientInfo' : [ 0x6cc, ['array', 62, ['unsigned long']]],
'glDispatchTable' : [ 0x7c4, ['array', 233, ['pointer', ['void']]]],
'glReserved1' : [ 0xb68, ['array', 29, ['unsigned long']]],
'glReserved2' : [ 0xbdc, ['pointer', ['void']]],
'glSectionInfo' : [ 0xbe0, ['pointer', ['void']]],
'glSection' : [ 0xbe4, ['pointer', ['void']]],
'glTable' : [ 0xbe8, ['pointer', ['void']]],
'glCurrentRC' : [ 0xbec, ['pointer', ['void']]],
'glContext' : [ 0xbf0, ['pointer', ['void']]],
'LastStatusValue' : [ 0xbf4, ['unsigned long']],
'StaticUnicodeString' : [ 0xbf8, ['_UNICODE_STRING']],
'StaticUnicodeBuffer' : [ 0xc00, ['array', 261, ['unsigned short']]],
'DeallocationStack' : [ 0xe0c, ['pointer', ['void']]],
'TlsSlots' : [ 0xe10, ['array', 64, ['pointer', ['void']]]],
'TlsLinks' : [ 0xf10, ['_LIST_ENTRY']],
'Vdm' : [ 0xf18, ['pointer', ['void']]],
'ReservedForNtRpc' : [ 0xf1c, ['pointer', ['void']]],
'DbgSsReserved' : [ 0xf20, ['array', 2, ['pointer', ['void']]]],
'HardErrorsAreDisabled' : [ 0xf28, ['unsigned long']],
'Instrumentation' : [ 0xf2c, ['array', 16, ['pointer', ['void']]]],
'WinSockData' : [ 0xf6c, ['pointer', ['void']]],
'GdiBatchCount' : [ 0xf70, ['unsigned long']],
'InDbgPrint' : [ 0xf74, ['unsigned char']],
'FreeStackOnTermination' : [ 0xf75, ['unsigned char']],
'HasFiberData' : [ 0xf76, ['unsigned char']],
'IdealProcessor' : [ 0xf77, ['unsigned char']],
'Spare3' : [ 0xf78, ['unsigned long']],
'ReservedForPerf' : [ 0xf7c, ['pointer', ['void']]],
'ReservedForOle' : [ 0xf80, ['pointer', ['void']]],
'WaitingOnLoaderLock' : [ 0xf84, ['unsigned long']],
'Wx86Thread' : [ 0xf88, ['_Wx86ThreadState']],
'TlsExpansionSlots' : [ 0xf94, ['pointer', ['pointer', ['void']]]],
'ImpersonationLocale' : [ 0xf98, ['unsigned long']],
'IsImpersonating' : [ 0xf9c, ['unsigned long']],
'NlsCache' : [ 0xfa0, ['pointer', ['void']]],
'pShimData' : [ 0xfa4, ['pointer', ['void']]],
'HeapVirtualAffinity' : [ 0xfa8, ['unsigned long']],
'CurrentTransactionHandle' : [ 0xfac, ['pointer', ['void']]],
'ActiveFrame' : [ 0xfb0, ['pointer', ['_TEB_ACTIVE_FRAME']]],
'SafeThunkCall' : [ 0xfb4, ['unsigned char']],
'BooleanSpare' : [ 0xfb5, ['array', 3, ['unsigned char']]],
} ],
'PCI_SECONDARY_EXTENSION' : [ 0xc, {
'List' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'ExtensionType' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {1768116272: 'PciPdoExtensionType', 1768116273: 'PciFdoExtensionType', 1768116274: 'PciArb_Io', 1768116275: 'PciArb_Memory', 1768116276: 'PciArb_Interrupt', 1768116277: 'PciArb_BusNumber', 1768116278: 'PciTrans_Interrupt', 1768116279: 'PciInterface_BusHandler', 1768116280: 'PciInterface_IntRouteHandler', 1768116281: 'PciInterface_PciCb', 1768116282: 'PciInterface_LegacyDeviceDetection', 1768116283: 'PciInterface_PmeHandler', 1768116284: 'PciInterface_DevicePresent', 1768116285: 'PciInterface_NativeIde', 1768116286: 'PciInterface_AgpTarget'})]],
'Destructor' : [ 0x8, ['pointer', ['void']]],
} ],
'__unnamed_170f' : [ 0x30, {
'type0' : [ 0x0, ['_PCI_HEADER_TYPE_0']],
'type1' : [ 0x0, ['_PCI_HEADER_TYPE_1']],
'type2' : [ 0x0, ['_PCI_HEADER_TYPE_2']],
} ],
'_PCI_COMMON_CONFIG' : [ 0x100, {
'VendorID' : [ 0x0, ['unsigned short']],
'DeviceID' : [ 0x2, ['unsigned short']],
'Command' : [ 0x4, ['unsigned short']],
'Status' : [ 0x6, ['unsigned short']],
'RevisionID' : [ 0x8, ['unsigned char']],
'ProgIf' : [ 0x9, ['unsigned char']],
'SubClass' : [ 0xa, ['unsigned char']],
'BaseClass' : [ 0xb, ['unsigned char']],
'CacheLineSize' : [ 0xc, ['unsigned char']],
'LatencyTimer' : [ 0xd, ['unsigned char']],
'HeaderType' : [ 0xe, ['unsigned char']],
'BIST' : [ 0xf, ['unsigned char']],
'u' : [ 0x10, ['__unnamed_170f']],
'DeviceSpecific' : [ 0x40, ['array', 192, ['unsigned char']]],
} ],
'_HEAP_FREE_ENTRY_EXTRA' : [ 0x4, {
'TagIndex' : [ 0x0, ['unsigned short']],
'FreeBackTraceIndex' : [ 0x2, ['unsigned short']],
} ],
'_X86_DBGKD_CONTROL_SET' : [ 0x10, {
'TraceFlag' : [ 0x0, ['unsigned long']],
'Dr7' : [ 0x4, ['unsigned long']],
'CurrentSymbolStart' : [ 0x8, ['unsigned long']],
'CurrentSymbolEnd' : [ 0xc, ['unsigned long']],
} ],
'_SECTION_IMAGE_INFORMATION' : [ 0x30, {
'TransferAddress' : [ 0x0, ['pointer', ['void']]],
'ZeroBits' : [ 0x4, ['unsigned long']],
'MaximumStackSize' : [ 0x8, ['unsigned long']],
'CommittedStackSize' : [ 0xc, ['unsigned long']],
'SubSystemType' : [ 0x10, ['unsigned long']],
'SubSystemMinorVersion' : [ 0x14, ['unsigned short']],
'SubSystemMajorVersion' : [ 0x16, ['unsigned short']],
'SubSystemVersion' : [ 0x14, ['unsigned long']],
'GpValue' : [ 0x18, ['unsigned long']],
'ImageCharacteristics' : [ 0x1c, ['unsigned short']],
'DllCharacteristics' : [ 0x1e, ['unsigned short']],
'Machine' : [ 0x20, ['unsigned short']],
'ImageContainsCode' : [ 0x22, ['unsigned char']],
'Spare1' : [ 0x23, ['unsigned char']],
'LoaderFlags' : [ 0x24, ['unsigned long']],
'ImageFileSize' : [ 0x28, ['unsigned long']],
'Reserved' : [ 0x2c, ['array', 1, ['unsigned long']]],
} ],
'_POOL_TRACKER_TABLE' : [ 0x1c, {
'Key' : [ 0x0, ['unsigned long']],
'NonPagedAllocs' : [ 0x4, ['unsigned long']],
'NonPagedFrees' : [ 0x8, ['unsigned long']],
'NonPagedBytes' : [ 0xc, ['unsigned long']],
'PagedAllocs' : [ 0x10, ['unsigned long']],
'PagedFrees' : [ 0x14, ['unsigned long']],
'PagedBytes' : [ 0x18, ['unsigned long']],
} ],
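  # _MDL (memory descriptor list) describes the physical pages backing a
  # buffer: ByteCount bytes starting at StartVa + ByteOffset, with the PFN
  # array stored immediately after this fixed header.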
'_MDL' : [ 0x1c, {
'Next' : [ 0x0, ['pointer', ['_MDL']]],
'Size' : [ 0x4, ['short']],
'MdlFlags' : [ 0x6, ['short']],
'Process' : [ 0x8, ['pointer', ['_EPROCESS']]],
'MappedSystemVa' : [ 0xc, ['pointer', ['void']]],
'StartVa' : [ 0x10, ['pointer', ['void']]],
'ByteCount' : [ 0x14, ['unsigned long']],
'ByteOffset' : [ 0x18, ['unsigned long']],
} ],
'_KNODE' : [ 0x30, {
'ProcessorMask' : [ 0x0, ['unsigned long']],
'Color' : [ 0x4, ['unsigned long']],
'MmShiftedColor' : [ 0x8, ['unsigned long']],
'FreeCount' : [ 0xc, ['array', 2, ['unsigned long']]],
'DeadStackList' : [ 0x18, ['_SLIST_HEADER']],
'PfnDereferenceSListHead' : [ 0x20, ['_SLIST_HEADER']],
'PfnDeferredList' : [ 0x28, ['pointer', ['_SINGLE_LIST_ENTRY']]],
'Seed' : [ 0x2c, ['unsigned char']],
'Flags' : [ 0x2d, ['_flags']],
} ],
'_PHYSICAL_MEMORY_DESCRIPTOR' : [ 0x10, {
'NumberOfRuns' : [ 0x0, ['unsigned long']],
'NumberOfPages' : [ 0x4, ['unsigned long']],
'Run' : [ 0x8, ['array', 1, ['_PHYSICAL_MEMORY_RUN']]],
} ],
'_PI_BUS_EXTENSION' : [ 0x44, {
'Flags' : [ 0x0, ['unsigned long']],
'NumberCSNs' : [ 0x4, ['unsigned long']],
'ReadDataPort' : [ 0x8, ['pointer', ['unsigned char']]],
'DataPortMapped' : [ 0xc, ['unsigned char']],
'AddressPort' : [ 0x10, ['pointer', ['unsigned char']]],
'AddrPortMapped' : [ 0x14, ['unsigned char']],
'CommandPort' : [ 0x18, ['pointer', ['unsigned char']]],
'CmdPortMapped' : [ 0x1c, ['unsigned char']],
'NextSlotNumber' : [ 0x20, ['unsigned long']],
'DeviceList' : [ 0x24, ['_SINGLE_LIST_ENTRY']],
'CardList' : [ 0x28, ['_SINGLE_LIST_ENTRY']],
'PhysicalBusDevice' : [ 0x2c, ['pointer', ['_DEVICE_OBJECT']]],
'FunctionalBusDevice' : [ 0x30, ['pointer', ['_DEVICE_OBJECT']]],
'AttachedDevice' : [ 0x34, ['pointer', ['_DEVICE_OBJECT']]],
'BusNumber' : [ 0x38, ['unsigned long']],
'SystemPowerState' : [ 0x3c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DevicePowerState' : [ 0x40, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
} ],
'_VI_DEADLOCK_THREAD' : [ 0x1c, {
'Thread' : [ 0x0, ['pointer', ['_KTHREAD']]],
'CurrentSpinNode' : [ 0x4, ['pointer', ['_VI_DEADLOCK_NODE']]],
'CurrentOtherNode' : [ 0x8, ['pointer', ['_VI_DEADLOCK_NODE']]],
'ListEntry' : [ 0xc, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0xc, ['_LIST_ENTRY']],
'NodeCount' : [ 0x14, ['unsigned long']],
'PagingCount' : [ 0x18, ['unsigned long']],
} ],
'_MMEXTEND_INFO' : [ 0x10, {
'CommittedSize' : [ 0x0, ['unsigned long long']],
'ReferenceCount' : [ 0x8, ['unsigned long']],
} ],
'_IMAGE_DEBUG_DIRECTORY' : [ 0x1c, {
'Characteristics' : [ 0x0, ['unsigned long']],
'TimeDateStamp' : [ 0x4, ['unsigned long']],
'MajorVersion' : [ 0x8, ['unsigned short']],
'MinorVersion' : [ 0xa, ['unsigned short']],
'Type' : [ 0xc, ['unsigned long']],
'SizeOfData' : [ 0x10, ['unsigned long']],
'AddressOfRawData' : [ 0x14, ['unsigned long']],
'PointerToRawData' : [ 0x18, ['unsigned long']],
} ],
'_PCI_INTERFACE' : [ 0x1c, {
'InterfaceType' : [ 0x0, ['pointer', ['_GUID']]],
'MinSize' : [ 0x4, ['unsigned short']],
'MinVersion' : [ 0x6, ['unsigned short']],
'MaxVersion' : [ 0x8, ['unsigned short']],
'Flags' : [ 0xa, ['unsigned short']],
'ReferenceCount' : [ 0xc, ['long']],
'Signature' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {1768116272: 'PciPdoExtensionType', 1768116273: 'PciFdoExtensionType', 1768116274: 'PciArb_Io', 1768116275: 'PciArb_Memory', 1768116276: 'PciArb_Interrupt', 1768116277: 'PciArb_BusNumber', 1768116278: 'PciTrans_Interrupt', 1768116279: 'PciInterface_BusHandler', 1768116280: 'PciInterface_IntRouteHandler', 1768116281: 'PciInterface_PciCb', 1768116282: 'PciInterface_LegacyDeviceDetection', 1768116283: 'PciInterface_PmeHandler', 1768116284: 'PciInterface_DevicePresent', 1768116285: 'PciInterface_NativeIde', 1768116286: 'PciInterface_AgpTarget'})]],
'Constructor' : [ 0x14, ['pointer', ['void']]],
'Initializer' : [ 0x18, ['pointer', ['void']]],
} ],
'_FILE_NETWORK_OPEN_INFORMATION' : [ 0x38, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LastAccessTime' : [ 0x8, ['_LARGE_INTEGER']],
'LastWriteTime' : [ 0x10, ['_LARGE_INTEGER']],
'ChangeTime' : [ 0x18, ['_LARGE_INTEGER']],
'AllocationSize' : [ 0x20, ['_LARGE_INTEGER']],
'EndOfFile' : [ 0x28, ['_LARGE_INTEGER']],
'FileAttributes' : [ 0x30, ['unsigned long']],
} ],
'_MMVAD' : [ 0x28, {
'StartingVpn' : [ 0x0, ['unsigned long']],
'EndingVpn' : [ 0x4, ['unsigned long']],
'Parent' : [ 0x8, ['pointer', ['_MMVAD']]],
'LeftChild' : [ 0xc, ['pointer', ['_MMVAD']]],
'RightChild' : [ 0x10, ['pointer', ['_MMVAD']]],
'u' : [ 0x14, ['__unnamed_1492']],
'ControlArea' : [ 0x18, ['pointer', ['_CONTROL_AREA']]],
'FirstPrototypePte' : [ 0x1c, ['pointer', ['_MMPTE']]],
'LastContiguousPte' : [ 0x20, ['pointer', ['_MMPTE']]],
'u2' : [ 0x24, ['__unnamed_1495']],
} ],
'__unnamed_1743' : [ 0x8, {
'IoStatus' : [ 0x0, ['_IO_STATUS_BLOCK']],
'LastByte' : [ 0x0, ['_LARGE_INTEGER']],
} ],
'_MMMOD_WRITER_MDL_ENTRY' : [ 0x58, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'WriteOffset' : [ 0x8, ['_LARGE_INTEGER']],
'u' : [ 0x10, ['__unnamed_1743']],
'Irp' : [ 0x18, ['pointer', ['_IRP']]],
'LastPageToWrite' : [ 0x1c, ['unsigned long']],
'PagingListHead' : [ 0x20, ['pointer', ['_MMMOD_WRITER_LISTHEAD']]],
'CurrentList' : [ 0x24, ['pointer', ['_LIST_ENTRY']]],
'PagingFile' : [ 0x28, ['pointer', ['_MMPAGING_FILE']]],
'File' : [ 0x2c, ['pointer', ['_FILE_OBJECT']]],
'ControlArea' : [ 0x30, ['pointer', ['_CONTROL_AREA']]],
'FileResource' : [ 0x34, ['pointer', ['_ERESOURCE']]],
'Mdl' : [ 0x38, ['_MDL']],
'Page' : [ 0x54, ['array', 1, ['unsigned long']]],
} ],
'_POP_POWER_ACTION' : [ 0x40, {
'Updates' : [ 0x0, ['unsigned char']],
'State' : [ 0x1, ['unsigned char']],
'Shutdown' : [ 0x2, ['unsigned char']],
'Action' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
'LightestState' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'Flags' : [ 0xc, ['unsigned long']],
'Status' : [ 0x10, ['long']],
'IrpMinor' : [ 0x14, ['unsigned char']],
'SystemState' : [ 0x18, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'NextSystemState' : [ 0x1c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'ShutdownBugCode' : [ 0x20, ['pointer', ['_POP_SHUTDOWN_BUG_CHECK']]],
'DevState' : [ 0x24, ['pointer', ['_POP_DEVICE_SYS_STATE']]],
'HiberContext' : [ 0x28, ['pointer', ['_POP_HIBER_CONTEXT']]],
'LastWakeState' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'WakeTime' : [ 0x30, ['unsigned long long']],
'SleepTime' : [ 0x38, ['unsigned long long']],
} ],
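# _IO_STATUS_BLOCK: Status and Pointer both sit at offset 0x0 and overlay each
# other (a union in the C definition); Information carries the request-specific
# result, typically the number of bytes transferred.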
'_IO_STATUS_BLOCK' : [ 0x8, {
'Status' : [ 0x0, ['long']],
'Pointer' : [ 0x0, ['pointer', ['void']]],
'Information' : [ 0x4, ['unsigned long']],
} ],
'_LPCP_MESSAGE' : [ 0x30, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'FreeEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Reserved0' : [ 0x4, ['unsigned long']],
'SenderPort' : [ 0x8, ['pointer', ['void']]],
'RepliedToThread' : [ 0xc, ['pointer', ['_ETHREAD']]],
'PortContext' : [ 0x10, ['pointer', ['void']]],
'Request' : [ 0x18, ['_PORT_MESSAGE']],
} ],
'_MMVAD_SHORT' : [ 0x18, {
'StartingVpn' : [ 0x0, ['unsigned long']],
'EndingVpn' : [ 0x4, ['unsigned long']],
'Parent' : [ 0x8, ['pointer', ['_MMVAD']]],
'LeftChild' : [ 0xc, ['pointer', ['_MMVAD']]],
'RightChild' : [ 0x10, ['pointer', ['_MMVAD']]],
'u' : [ 0x14, ['__unnamed_1492']],
} ],
'__unnamed_175f' : [ 0x2c, {
'InitialPrivilegeSet' : [ 0x0, ['_INITIAL_PRIVILEGE_SET']],
'PrivilegeSet' : [ 0x0, ['_PRIVILEGE_SET']],
} ],
'_ACCESS_STATE' : [ 0x74, {
'OperationID' : [ 0x0, ['_LUID']],
'SecurityEvaluated' : [ 0x8, ['unsigned char']],
'GenerateAudit' : [ 0x9, ['unsigned char']],
'GenerateOnClose' : [ 0xa, ['unsigned char']],
'PrivilegesAllocated' : [ 0xb, ['unsigned char']],
'Flags' : [ 0xc, ['unsigned long']],
'RemainingDesiredAccess' : [ 0x10, ['unsigned long']],
'PreviouslyGrantedAccess' : [ 0x14, ['unsigned long']],
'OriginalDesiredAccess' : [ 0x18, ['unsigned long']],
'SubjectSecurityContext' : [ 0x1c, ['_SECURITY_SUBJECT_CONTEXT']],
'SecurityDescriptor' : [ 0x2c, ['pointer', ['void']]],
'AuxData' : [ 0x30, ['pointer', ['void']]],
'Privileges' : [ 0x34, ['__unnamed_175f']],
'AuditPrivileges' : [ 0x60, ['unsigned char']],
'ObjectName' : [ 0x64, ['_UNICODE_STRING']],
'ObjectTypeName' : [ 0x6c, ['_UNICODE_STRING']],
} ],
'_PNP_DEVICE_EVENT_ENTRY' : [ 0x58, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Argument' : [ 0x8, ['unsigned long']],
'CallerEvent' : [ 0xc, ['pointer', ['_KEVENT']]],
'Callback' : [ 0x10, ['pointer', ['void']]],
'Context' : [ 0x14, ['pointer', ['void']]],
'VetoType' : [ 0x18, ['pointer', ['Enumeration', dict(target = 'long', choices = {0: 'PNP_VetoTypeUnknown', 1: 'PNP_VetoLegacyDevice', 2: 'PNP_VetoPendingClose', 3: 'PNP_VetoWindowsApp', 4: 'PNP_VetoWindowsService', 5: 'PNP_VetoOutstandingOpen', 6: 'PNP_VetoDevice', 7: 'PNP_VetoDriver', 8: 'PNP_VetoIllegalDeviceRequest', 9: 'PNP_VetoInsufficientPower', 10: 'PNP_VetoNonDisableable', 11: 'PNP_VetoLegacyDriver', 12: 'PNP_VetoInsufficientRights'})]]],
'VetoName' : [ 0x1c, ['pointer', ['_UNICODE_STRING']]],
'Data' : [ 0x20, ['_PLUGPLAY_EVENT_BLOCK']],
} ],
'_PRIVATE_CACHE_MAP_FLAGS' : [ 0x4, {
'DontUse' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'ReadAheadActive' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'ReadAheadEnabled' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'Available' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 32, native_type='unsigned long')]],
} ],
'_PNP_DEVICE_EVENT_LIST' : [ 0x4c, {
'Status' : [ 0x0, ['long']],
'EventQueueMutex' : [ 0x4, ['_KMUTANT']],
'Lock' : [ 0x24, ['_FAST_MUTEX']],
'List' : [ 0x44, ['_LIST_ENTRY']],
} ],
'_KPROCESSOR_STATE' : [ 0x320, {
'ContextFrame' : [ 0x0, ['_CONTEXT']],
'SpecialRegisters' : [ 0x2cc, ['_KSPECIAL_REGISTERS']],
} ],
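# _MMPTE_TRANSITION: bit layout of an x86 non-PAE page table entry in the
# transition state. BitField definitions here use start_bit inclusive and
# end_bit exclusive, so PageFrameNumber occupies bits 12..31 of the 32-bit PTE.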
'_MMPTE_TRANSITION' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
} ],
'_TOKEN_SOURCE' : [ 0x10, {
'SourceName' : [ 0x0, ['array', 8, ['unsigned char']]],
'SourceIdentifier' : [ 0x8, ['_LUID']],
} ],
'_STRING' : [ 0x8, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x4, ['pointer', ['unsigned char']]],
} ],
'_MMVAD_FLAGS2' : [ 0x4, {
'FileOffset' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long')]],
'SecNoChange' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'OneSecured' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'MultipleSecured' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 27, end_bit = 28, native_type='unsigned long')]],
'LongVad' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 29, native_type='unsigned long')]],
'ExtendableFile' : [ 0x0, ['BitField', dict(start_bit = 29, end_bit = 30, native_type='unsigned long')]],
'Inherit' : [ 0x0, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
} ],
'_flags' : [ 0x1, {
'Removable' : [ 0x0, ['unsigned char']],
} ],
'_CM_KEY_SECURITY_CACHE' : [ 0x28, {
'Cell' : [ 0x0, ['unsigned long']],
'ConvKey' : [ 0x4, ['unsigned long']],
'List' : [ 0x8, ['_LIST_ENTRY']],
'DescriptorLength' : [ 0x10, ['unsigned long']],
'Descriptor' : [ 0x14, ['_SECURITY_DESCRIPTOR_RELATIVE']],
} ],
'_PROCESSOR_POWER_POLICY_INFO' : [ 0x14, {
'TimeCheck' : [ 0x0, ['unsigned long']],
'DemoteLimit' : [ 0x4, ['unsigned long']],
'PromoteLimit' : [ 0x8, ['unsigned long']],
'DemotePercent' : [ 0xc, ['unsigned char']],
'PromotePercent' : [ 0xd, ['unsigned char']],
'Spare' : [ 0xe, ['array', 2, ['unsigned char']]],
'AllowDemotion' : [ 0x10, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'AllowPromotion' : [ 0x10, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Reserved' : [ 0x10, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
} ],
'_ARBITER_INSTANCE' : [ 0x9c, {
'Signature' : [ 0x0, ['unsigned long']],
'MutexEvent' : [ 0x4, ['pointer', ['_KEVENT']]],
'Name' : [ 0x8, ['pointer', ['unsigned short']]],
'ResourceType' : [ 0xc, ['long']],
'Allocation' : [ 0x10, ['pointer', ['_RTL_RANGE_LIST']]],
'PossibleAllocation' : [ 0x14, ['pointer', ['_RTL_RANGE_LIST']]],
'OrderingList' : [ 0x18, ['_ARBITER_ORDERING_LIST']],
'ReservedList' : [ 0x20, ['_ARBITER_ORDERING_LIST']],
'ReferenceCount' : [ 0x28, ['long']],
'Interface' : [ 0x2c, ['pointer', ['_ARBITER_INTERFACE']]],
'AllocationStackMaxSize' : [ 0x30, ['unsigned long']],
'AllocationStack' : [ 0x34, ['pointer', ['_ARBITER_ALLOCATION_STATE']]],
'UnpackRequirement' : [ 0x38, ['pointer', ['void']]],
'PackResource' : [ 0x3c, ['pointer', ['void']]],
'UnpackResource' : [ 0x40, ['pointer', ['void']]],
'ScoreRequirement' : [ 0x44, ['pointer', ['void']]],
'TestAllocation' : [ 0x48, ['pointer', ['void']]],
'RetestAllocation' : [ 0x4c, ['pointer', ['void']]],
'CommitAllocation' : [ 0x50, ['pointer', ['void']]],
'RollbackAllocation' : [ 0x54, ['pointer', ['void']]],
'BootAllocation' : [ 0x58, ['pointer', ['void']]],
'QueryArbitrate' : [ 0x5c, ['pointer', ['void']]],
'QueryConflict' : [ 0x60, ['pointer', ['void']]],
'AddReserved' : [ 0x64, ['pointer', ['void']]],
'StartArbiter' : [ 0x68, ['pointer', ['void']]],
'PreprocessEntry' : [ 0x6c, ['pointer', ['void']]],
'AllocateEntry' : [ 0x70, ['pointer', ['void']]],
'GetNextAllocationRange' : [ 0x74, ['pointer', ['void']]],
'FindSuitableRange' : [ 0x78, ['pointer', ['void']]],
'AddAllocation' : [ 0x7c, ['pointer', ['void']]],
'BacktrackAllocation' : [ 0x80, ['pointer', ['void']]],
'OverrideConflict' : [ 0x84, ['pointer', ['void']]],
'TransactionInProgress' : [ 0x88, ['unsigned char']],
'Extension' : [ 0x8c, ['pointer', ['void']]],
'BusDeviceObject' : [ 0x90, ['pointer', ['_DEVICE_OBJECT']]],
'ConflictCallbackContext' : [ 0x94, ['pointer', ['void']]],
'ConflictCallback' : [ 0x98, ['pointer', ['void']]],
} ],
'_BUS_HANDLER' : [ 0x6c, {
'Version' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'ConfigurationType' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'Cmos', 1: 'EisaConfiguration', 2: 'Pos', 3: 'CbusConfiguration', 4: 'PCIConfiguration', 5: 'VMEConfiguration', 6: 'NuBusConfiguration', 7: 'PCMCIAConfiguration', 8: 'MPIConfiguration', 9: 'MPSAConfiguration', 10: 'PNPISAConfiguration', 11: 'SgiInternalConfiguration', 12: 'MaximumBusDataType', -1: 'ConfigurationSpaceUndefined'})]],
'BusNumber' : [ 0xc, ['unsigned long']],
'DeviceObject' : [ 0x10, ['pointer', ['_DEVICE_OBJECT']]],
'ParentHandler' : [ 0x14, ['pointer', ['_BUS_HANDLER']]],
'BusData' : [ 0x18, ['pointer', ['void']]],
'DeviceControlExtensionSize' : [ 0x1c, ['unsigned long']],
'BusAddresses' : [ 0x20, ['pointer', ['_SUPPORTED_RANGES']]],
'Reserved' : [ 0x24, ['array', 4, ['unsigned long']]],
'GetBusData' : [ 0x34, ['pointer', ['void']]],
'SetBusData' : [ 0x38, ['pointer', ['void']]],
'AdjustResourceList' : [ 0x3c, ['pointer', ['void']]],
'AssignSlotResources' : [ 0x40, ['pointer', ['void']]],
'GetInterruptVector' : [ 0x44, ['pointer', ['void']]],
'TranslateBusAddress' : [ 0x48, ['pointer', ['void']]],
'Spare1' : [ 0x4c, ['pointer', ['void']]],
'Spare2' : [ 0x50, ['pointer', ['void']]],
'Spare3' : [ 0x54, ['pointer', ['void']]],
'Spare4' : [ 0x58, ['pointer', ['void']]],
'Spare5' : [ 0x5c, ['pointer', ['void']]],
'Spare6' : [ 0x60, ['pointer', ['void']]],
'Spare7' : [ 0x64, ['pointer', ['void']]],
'Spare8' : [ 0x68, ['pointer', ['void']]],
} ],
'_PCI_MN_DISPATCH_TABLE' : [ 0x8, {
'DispatchStyle' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'IRP_COMPLETE', 1: 'IRP_DOWNWARD', 2: 'IRP_UPWARD', 3: 'IRP_DISPATCH'})]],
'DispatchFunction' : [ 0x4, ['pointer', ['void']]],
} ],
'_POP_DEVICE_SYS_STATE' : [ 0x620, {
'IrpMinor' : [ 0x0, ['unsigned char']],
'SystemState' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'Event' : [ 0x8, ['_KEVENT']],
'SpinLock' : [ 0x18, ['unsigned long']],
'Thread' : [ 0x1c, ['pointer', ['_KTHREAD']]],
'GetNewDeviceList' : [ 0x20, ['unsigned char']],
'Order' : [ 0x24, ['_PO_DEVICE_NOTIFY_ORDER']],
'Status' : [ 0x26c, ['long']],
'FailedDevice' : [ 0x270, ['pointer', ['_DEVICE_OBJECT']]],
'Waking' : [ 0x274, ['unsigned char']],
'Cancelled' : [ 0x275, ['unsigned char']],
'IgnoreErrors' : [ 0x276, ['unsigned char']],
'IgnoreNotImplemented' : [ 0x277, ['unsigned char']],
'WaitAny' : [ 0x278, ['unsigned char']],
'WaitAll' : [ 0x279, ['unsigned char']],
'PresentIrpQueue' : [ 0x27c, ['_LIST_ENTRY']],
'Head' : [ 0x284, ['_POP_DEVICE_POWER_IRP']],
'PowerIrpState' : [ 0x2b0, ['array', 20, ['_POP_DEVICE_POWER_IRP']]],
} ],
'_OBJECT_DUMP_CONTROL' : [ 0x8, {
'Stream' : [ 0x0, ['pointer', ['void']]],
'Detail' : [ 0x4, ['unsigned long']],
} ],
'_SECURITY_SUBJECT_CONTEXT' : [ 0x10, {
'ClientToken' : [ 0x0, ['pointer', ['void']]],
'ImpersonationLevel' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'PrimaryToken' : [ 0x8, ['pointer', ['void']]],
'ProcessAuditId' : [ 0xc, ['pointer', ['void']]],
} ],
'_HEAP_STOP_ON_TAG' : [ 0x4, {
'HeapAndTagIndex' : [ 0x0, ['unsigned long']],
'TagIndex' : [ 0x0, ['unsigned short']],
'HeapIndex' : [ 0x2, ['unsigned short']],
} ],
'_ACTIVATION_CONTEXT_STACK' : [ 0x14, {
'Flags' : [ 0x0, ['unsigned long']],
'NextCookieSequenceNumber' : [ 0x4, ['unsigned long']],
'ActiveFrame' : [ 0x8, ['pointer', ['void']]],
'FrameListCache' : [ 0xc, ['_LIST_ENTRY']],
} ],
'_MMWSLE_HASH' : [ 0x8, {
'Key' : [ 0x0, ['pointer', ['void']]],
'Index' : [ 0x4, ['unsigned long']],
} ],
'_CM_NAME_CONTROL_BLOCK' : [ 0x10, {
'Compressed' : [ 0x0, ['unsigned char']],
'RefCount' : [ 0x2, ['unsigned short']],
'NameHash' : [ 0x4, ['_CM_NAME_HASH']],
'ConvKey' : [ 0x4, ['unsigned long']],
'NextHash' : [ 0x8, ['pointer', ['_CM_KEY_HASH']]],
'NameLength' : [ 0xc, ['unsigned short']],
'Name' : [ 0xe, ['array', 1, ['unsigned short']]],
} ],
'_SECURITY_TOKEN_PROXY_DATA' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'ProxyClass' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'ProxyFull', 1: 'ProxyService', 2: 'ProxyTree', 3: 'ProxyDirectory'})]],
'PathInfo' : [ 0x8, ['_UNICODE_STRING']],
'ContainerMask' : [ 0x10, ['unsigned long']],
'ObjectMask' : [ 0x14, ['unsigned long']],
} ],
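# _HANDLE_TABLE_ENTRY: Object, ObAttributes, InfoTable and Value all overlay
# offset 0x0; the low bits of the object pointer double as attribute flags
# (e.g. the lock bit), which is why the same dword is exposed four ways.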
'_HANDLE_TABLE_ENTRY' : [ 0x8, {
'Object' : [ 0x0, ['pointer', ['void']]],
'ObAttributes' : [ 0x0, ['unsigned long']],
'InfoTable' : [ 0x0, ['pointer', ['_HANDLE_TABLE_ENTRY_INFO']]],
'Value' : [ 0x0, ['unsigned long']],
'GrantedAccess' : [ 0x4, ['unsigned long']],
'GrantedAccessIndex' : [ 0x4, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0x6, ['unsigned short']],
'NextFreeTableEntry' : [ 0x4, ['long']],
} ],
'_HEAP_USERDATA_HEADER' : [ 0x10, {
'SFreeListEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'SubSegment' : [ 0x0, ['pointer', ['_HEAP_SUBSEGMENT']]],
'HeapHandle' : [ 0x4, ['pointer', ['void']]],
'SizeIndex' : [ 0x8, ['unsigned long']],
'Signature' : [ 0xc, ['unsigned long']],
} ],
'_LPCP_PORT_OBJECT' : [ 0xa4, {
'ConnectionPort' : [ 0x0, ['pointer', ['_LPCP_PORT_OBJECT']]],
'ConnectedPort' : [ 0x4, ['pointer', ['_LPCP_PORT_OBJECT']]],
'MsgQueue' : [ 0x8, ['_LPCP_PORT_QUEUE']],
'Creator' : [ 0x18, ['_CLIENT_ID']],
'ClientSectionBase' : [ 0x20, ['pointer', ['void']]],
'ServerSectionBase' : [ 0x24, ['pointer', ['void']]],
'PortContext' : [ 0x28, ['pointer', ['void']]],
'ClientThread' : [ 0x2c, ['pointer', ['_ETHREAD']]],
'SecurityQos' : [ 0x30, ['_SECURITY_QUALITY_OF_SERVICE']],
'StaticSecurity' : [ 0x3c, ['_SECURITY_CLIENT_CONTEXT']],
'LpcReplyChainHead' : [ 0x78, ['_LIST_ENTRY']],
'LpcDataInfoChainHead' : [ 0x80, ['_LIST_ENTRY']],
'ServerProcess' : [ 0x88, ['pointer', ['_EPROCESS']]],
'MappingProcess' : [ 0x88, ['pointer', ['_EPROCESS']]],
'MaxMessageLength' : [ 0x8c, ['unsigned short']],
'MaxConnectionInfoLength' : [ 0x8e, ['unsigned short']],
'Flags' : [ 0x90, ['unsigned long']],
'WaitEvent' : [ 0x94, ['_KEVENT']],
} ],
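# PCI_POWER_STATE (note: no leading underscore in the generated name):
# SystemStateMapping is emitted with the negative array length -28, which
# appears to mirror the 28-byte span it covers (0x10..0x2c, seven 4-byte
# entries); the same quirk shows up in _DEVICE_CAPABILITIES.DeviceState below.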
'PCI_POWER_STATE' : [ 0x40, {
'CurrentSystemState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'CurrentDeviceState' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'SystemWakeLevel' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DeviceWakeLevel' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'SystemStateMapping' : [ 0x10, ['array', -28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]]],
'WaitWakeIrp' : [ 0x2c, ['pointer', ['_IRP']]],
'SavedCancelRoutine' : [ 0x30, ['pointer', ['void']]],
'Paging' : [ 0x34, ['long']],
'Hibernate' : [ 0x38, ['long']],
'CrashDump' : [ 0x3c, ['long']],
} ],
'_POOL_HACKER' : [ 0x28, {
'Header' : [ 0x0, ['_POOL_HEADER']],
'Contents' : [ 0x8, ['array', 8, ['unsigned long']]],
} ],
'_CM_INDEX_HINT_BLOCK' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'HashKey' : [ 0x4, ['array', 1, ['unsigned long']]],
} ],
'_TOKEN_CONTROL' : [ 0x28, {
'TokenId' : [ 0x0, ['_LUID']],
'AuthenticationId' : [ 0x8, ['_LUID']],
'ModifiedId' : [ 0x10, ['_LUID']],
'TokenSource' : [ 0x18, ['_TOKEN_SOURCE']],
} ],
'__unnamed_1803' : [ 0x10, {
'SecurityContext' : [ 0x0, ['pointer', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x4, ['unsigned long']],
'FileAttributes' : [ 0x8, ['unsigned short']],
'ShareAccess' : [ 0xa, ['unsigned short']],
'EaLength' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_1807' : [ 0x10, {
'SecurityContext' : [ 0x0, ['pointer', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['unsigned short']],
'ShareAccess' : [ 0xa, ['unsigned short']],
'Parameters' : [ 0xc, ['pointer', ['_NAMED_PIPE_CREATE_PARAMETERS']]],
} ],
'__unnamed_180b' : [ 0x10, {
'SecurityContext' : [ 0x0, ['pointer', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['unsigned short']],
'ShareAccess' : [ 0xa, ['unsigned short']],
'Parameters' : [ 0xc, ['pointer', ['_MAILSLOT_CREATE_PARAMETERS']]],
} ],
'__unnamed_180d' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'Key' : [ 0x4, ['unsigned long']],
'ByteOffset' : [ 0x8, ['_LARGE_INTEGER']],
} ],
'__unnamed_1812' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'FileName' : [ 0x4, ['pointer', ['_STRING']]],
'FileInformationClass' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileMaximumInformation'})]],
'FileIndex' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_1814' : [ 0x8, {
'Length' : [ 0x0, ['unsigned long']],
'CompletionFilter' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1816' : [ 0x8, {
'Length' : [ 0x0, ['unsigned long']],
'FileInformationClass' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileMaximumInformation'})]],
} ],
'__unnamed_1818' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'FileInformationClass' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileMaximumInformation'})]],
'FileObject' : [ 0x8, ['pointer', ['_FILE_OBJECT']]],
'ReplaceIfExists' : [ 0xc, ['unsigned char']],
'AdvanceOnly' : [ 0xd, ['unsigned char']],
'ClusterCount' : [ 0xc, ['unsigned long']],
'DeleteHandle' : [ 0xc, ['pointer', ['void']]],
} ],
'__unnamed_181a' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'EaList' : [ 0x4, ['pointer', ['void']]],
'EaListLength' : [ 0x8, ['unsigned long']],
'EaIndex' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_181c' : [ 0x4, {
'Length' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_1820' : [ 0x8, {
'Length' : [ 0x0, ['unsigned long']],
'FsInformationClass' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {1: 'FileFsVolumeInformation', 2: 'FileFsLabelInformation', 3: 'FileFsSizeInformation', 4: 'FileFsDeviceInformation', 5: 'FileFsAttributeInformation', 6: 'FileFsControlInformation', 7: 'FileFsFullSizeInformation', 8: 'FileFsObjectIdInformation', 9: 'FileFsDriverPathInformation', 10: 'FileFsMaximumInformation'})]],
} ],
'__unnamed_1822' : [ 0x10, {
'OutputBufferLength' : [ 0x0, ['unsigned long']],
'InputBufferLength' : [ 0x4, ['unsigned long']],
'FsControlCode' : [ 0x8, ['unsigned long']],
'Type3InputBuffer' : [ 0xc, ['pointer', ['void']]],
} ],
'__unnamed_1824' : [ 0x10, {
'Length' : [ 0x0, ['pointer', ['_LARGE_INTEGER']]],
'Key' : [ 0x4, ['unsigned long']],
'ByteOffset' : [ 0x8, ['_LARGE_INTEGER']],
} ],
'__unnamed_1826' : [ 0x10, {
'OutputBufferLength' : [ 0x0, ['unsigned long']],
'InputBufferLength' : [ 0x4, ['unsigned long']],
'IoControlCode' : [ 0x8, ['unsigned long']],
'Type3InputBuffer' : [ 0xc, ['pointer', ['void']]],
} ],
'__unnamed_1828' : [ 0x8, {
'SecurityInformation' : [ 0x0, ['unsigned long']],
'Length' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_182a' : [ 0x8, {
'SecurityInformation' : [ 0x0, ['unsigned long']],
'SecurityDescriptor' : [ 0x4, ['pointer', ['void']]],
} ],
'__unnamed_182c' : [ 0x8, {
'Vpb' : [ 0x0, ['pointer', ['_VPB']]],
'DeviceObject' : [ 0x4, ['pointer', ['_DEVICE_OBJECT']]],
} ],
'__unnamed_1830' : [ 0x4, {
'Srb' : [ 0x0, ['pointer', ['_SCSI_REQUEST_BLOCK']]],
} ],
'__unnamed_1834' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'StartSid' : [ 0x4, ['pointer', ['void']]],
'SidList' : [ 0x8, ['pointer', ['_FILE_GET_QUOTA_INFORMATION']]],
'SidListLength' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_1838' : [ 0x4, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BusRelations', 1: 'EjectionRelations', 2: 'PowerRelations', 3: 'RemovalRelations', 4: 'TargetDeviceRelation', 5: 'SingleBusRelations'})]],
} ],
'__unnamed_183a' : [ 0x10, {
'InterfaceType' : [ 0x0, ['pointer', ['_GUID']]],
'Size' : [ 0x4, ['unsigned short']],
'Version' : [ 0x6, ['unsigned short']],
'Interface' : [ 0x8, ['pointer', ['_INTERFACE']]],
'InterfaceSpecificData' : [ 0xc, ['pointer', ['void']]],
} ],
'__unnamed_183e' : [ 0x4, {
'Capabilities' : [ 0x0, ['pointer', ['_DEVICE_CAPABILITIES']]],
} ],
'__unnamed_1840' : [ 0x4, {
'IoResourceRequirementList' : [ 0x0, ['pointer', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
} ],
'__unnamed_1842' : [ 0x10, {
'WhichSpace' : [ 0x0, ['unsigned long']],
'Buffer' : [ 0x4, ['pointer', ['void']]],
'Offset' : [ 0x8, ['unsigned long']],
'Length' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_1844' : [ 0x1, {
'Lock' : [ 0x0, ['unsigned char']],
} ],
'__unnamed_1848' : [ 0x4, {
'IdType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BusQueryDeviceID', 1: 'BusQueryHardwareIDs', 2: 'BusQueryCompatibleIDs', 3: 'BusQueryInstanceID', 4: 'BusQueryDeviceSerialNumber'})]],
} ],
'__unnamed_184c' : [ 0x8, {
'DeviceTextType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceTextDescription', 1: 'DeviceTextLocationInformation'})]],
'LocaleId' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1850' : [ 0x8, {
'InPath' : [ 0x0, ['unsigned char']],
'Reserved' : [ 0x1, ['array', 3, ['unsigned char']]],
'Type' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceUsageTypeUndefined', 1: 'DeviceUsageTypePaging', 2: 'DeviceUsageTypeHibernation', 3: 'DeviceUsageTypeDumpFile'})]],
} ],
'__unnamed_1852' : [ 0x4, {
'PowerState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'__unnamed_1856' : [ 0x4, {
'PowerSequence' : [ 0x0, ['pointer', ['_POWER_SEQUENCE']]],
} ],
'__unnamed_185a' : [ 0x10, {
'SystemContext' : [ 0x0, ['unsigned long']],
'Type' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'SystemPowerState', 1: 'DevicePowerState'})]],
'State' : [ 0x8, ['_POWER_STATE']],
'ShutdownType' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
} ],
'__unnamed_185c' : [ 0x8, {
'AllocatedResources' : [ 0x0, ['pointer', ['_CM_RESOURCE_LIST']]],
'AllocatedResourcesTranslated' : [ 0x4, ['pointer', ['_CM_RESOURCE_LIST']]],
} ],
'__unnamed_185e' : [ 0x10, {
'ProviderId' : [ 0x0, ['unsigned long']],
'DataPath' : [ 0x4, ['pointer', ['void']]],
'BufferSize' : [ 0x8, ['unsigned long']],
'Buffer' : [ 0xc, ['pointer', ['void']]],
} ],
'__unnamed_1860' : [ 0x10, {
'Argument1' : [ 0x0, ['pointer', ['void']]],
'Argument2' : [ 0x4, ['pointer', ['void']]],
'Argument3' : [ 0x8, ['pointer', ['void']]],
'Argument4' : [ 0xc, ['pointer', ['void']]],
} ],
'__unnamed_1862' : [ 0x10, {
'Create' : [ 0x0, ['__unnamed_1803']],
'CreatePipe' : [ 0x0, ['__unnamed_1807']],
'CreateMailslot' : [ 0x0, ['__unnamed_180b']],
'Read' : [ 0x0, ['__unnamed_180d']],
'Write' : [ 0x0, ['__unnamed_180d']],
'QueryDirectory' : [ 0x0, ['__unnamed_1812']],
'NotifyDirectory' : [ 0x0, ['__unnamed_1814']],
'QueryFile' : [ 0x0, ['__unnamed_1816']],
'SetFile' : [ 0x0, ['__unnamed_1818']],
'QueryEa' : [ 0x0, ['__unnamed_181a']],
'SetEa' : [ 0x0, ['__unnamed_181c']],
'QueryVolume' : [ 0x0, ['__unnamed_1820']],
'SetVolume' : [ 0x0, ['__unnamed_1820']],
'FileSystemControl' : [ 0x0, ['__unnamed_1822']],
'LockControl' : [ 0x0, ['__unnamed_1824']],
'DeviceIoControl' : [ 0x0, ['__unnamed_1826']],
'QuerySecurity' : [ 0x0, ['__unnamed_1828']],
'SetSecurity' : [ 0x0, ['__unnamed_182a']],
'MountVolume' : [ 0x0, ['__unnamed_182c']],
'VerifyVolume' : [ 0x0, ['__unnamed_182c']],
'Scsi' : [ 0x0, ['__unnamed_1830']],
'QueryQuota' : [ 0x0, ['__unnamed_1834']],
'SetQuota' : [ 0x0, ['__unnamed_181c']],
'QueryDeviceRelations' : [ 0x0, ['__unnamed_1838']],
'QueryInterface' : [ 0x0, ['__unnamed_183a']],
'DeviceCapabilities' : [ 0x0, ['__unnamed_183e']],
'FilterResourceRequirements' : [ 0x0, ['__unnamed_1840']],
'ReadWriteConfig' : [ 0x0, ['__unnamed_1842']],
'SetLock' : [ 0x0, ['__unnamed_1844']],
'QueryId' : [ 0x0, ['__unnamed_1848']],
'QueryDeviceText' : [ 0x0, ['__unnamed_184c']],
'UsageNotification' : [ 0x0, ['__unnamed_1850']],
'WaitWake' : [ 0x0, ['__unnamed_1852']],
'PowerSequence' : [ 0x0, ['__unnamed_1856']],
'Power' : [ 0x0, ['__unnamed_185a']],
'StartDevice' : [ 0x0, ['__unnamed_185c']],
'WMI' : [ 0x0, ['__unnamed_185e']],
'Others' : [ 0x0, ['__unnamed_1860']],
} ],
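# __unnamed_1862 above is the Parameters union of _IO_STACK_LOCATION: every
# member sits at offset 0x0, and the active interpretation is selected by the
# stack location's MajorFunction/MinorFunction pair.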
'_IO_STACK_LOCATION' : [ 0x24, {
'MajorFunction' : [ 0x0, ['unsigned char']],
'MinorFunction' : [ 0x1, ['unsigned char']],
'Flags' : [ 0x2, ['unsigned char']],
'Control' : [ 0x3, ['unsigned char']],
'Parameters' : [ 0x4, ['__unnamed_1862']],
'DeviceObject' : [ 0x14, ['pointer', ['_DEVICE_OBJECT']]],
'FileObject' : [ 0x18, ['pointer', ['_FILE_OBJECT']]],
'CompletionRoutine' : [ 0x1c, ['pointer', ['void']]],
'Context' : [ 0x20, ['pointer', ['void']]],
} ],
'__unnamed_1869' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'Alignment' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_186b' : [ 0x8, {
'MinimumVector' : [ 0x0, ['unsigned long']],
'MaximumVector' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_186d' : [ 0x8, {
'MinimumChannel' : [ 0x0, ['unsigned long']],
'MaximumChannel' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_186f' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'MinBusNumber' : [ 0x4, ['unsigned long']],
'MaxBusNumber' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_1871' : [ 0xc, {
'Priority' : [ 0x0, ['unsigned long']],
'Reserved1' : [ 0x4, ['unsigned long']],
'Reserved2' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1873' : [ 0x18, {
'Port' : [ 0x0, ['__unnamed_1869']],
'Memory' : [ 0x0, ['__unnamed_1869']],
'Interrupt' : [ 0x0, ['__unnamed_186b']],
'Dma' : [ 0x0, ['__unnamed_186d']],
'Generic' : [ 0x0, ['__unnamed_1869']],
'DevicePrivate' : [ 0x0, ['__unnamed_1557']],
'BusNumber' : [ 0x0, ['__unnamed_186f']],
'ConfigData' : [ 0x0, ['__unnamed_1871']],
} ],
'_IO_RESOURCE_DESCRIPTOR' : [ 0x20, {
'Option' : [ 0x0, ['unsigned char']],
'Type' : [ 0x1, ['unsigned char']],
'ShareDisposition' : [ 0x2, ['unsigned char']],
'Spare1' : [ 0x3, ['unsigned char']],
'Flags' : [ 0x4, ['unsigned short']],
'Spare2' : [ 0x6, ['unsigned short']],
'u' : [ 0x8, ['__unnamed_1873']],
} ],
'_LUID_AND_ATTRIBUTES' : [ 0xc, {
'Luid' : [ 0x0, ['_LUID']],
'Attributes' : [ 0x8, ['unsigned long']],
} ],
'_MI_VERIFIER_POOL_HEADER' : [ 0x8, {
'ListIndex' : [ 0x0, ['unsigned long']],
'Verifier' : [ 0x4, ['pointer', ['_MI_VERIFIER_DRIVER_ENTRY']]],
} ],
'_CM_KEY_BODY' : [ 0x44, {
'Type' : [ 0x0, ['unsigned long']],
'KeyControlBlock' : [ 0x4, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
'NotifyBlock' : [ 0x8, ['pointer', ['_CM_NOTIFY_BLOCK']]],
'ProcessID' : [ 0xc, ['pointer', ['void']]],
'Callers' : [ 0x10, ['unsigned long']],
'CallerAddress' : [ 0x14, ['array', 10, ['pointer', ['void']]]],
'KeyBodyList' : [ 0x3c, ['_LIST_ENTRY']],
} ],
'__unnamed_1884' : [ 0x4, {
'DataLength' : [ 0x0, ['short']],
'TotalLength' : [ 0x2, ['short']],
} ],
'__unnamed_1886' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_1884']],
'Length' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_1888' : [ 0x4, {
'Type' : [ 0x0, ['short']],
'DataInfoOffset' : [ 0x2, ['short']],
} ],
'__unnamed_188a' : [ 0x4, {
's2' : [ 0x0, ['__unnamed_1888']],
'ZeroInit' : [ 0x0, ['unsigned long']],
} ],
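# _PORT_MESSAGE is the LPC message header: u1.s1 carries DataLength (payload
# bytes) and TotalLength (header plus payload); _LPCP_MESSAGE above embeds it
# as 'Request' at offset 0x18.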
'_PORT_MESSAGE' : [ 0x18, {
'u1' : [ 0x0, ['__unnamed_1886']],
'u2' : [ 0x4, ['__unnamed_188a']],
'ClientId' : [ 0x8, ['_CLIENT_ID']],
'DoNotUseThisField' : [ 0x8, ['double']],
'MessageId' : [ 0x10, ['unsigned long']],
'ClientViewSize' : [ 0x14, ['unsigned long']],
'CallbackId' : [ 0x14, ['unsigned long']],
} ],
'_DBGKD_ANY_CONTROL_SET' : [ 0x1c, {
'X86ControlSet' : [ 0x0, ['_X86_DBGKD_CONTROL_SET']],
'AlphaControlSet' : [ 0x0, ['unsigned long']],
'IA64ControlSet' : [ 0x0, ['_IA64_DBGKD_CONTROL_SET']],
'Amd64ControlSet' : [ 0x0, ['_AMD64_DBGKD_CONTROL_SET']],
} ],
'_ARBITER_ORDERING_LIST' : [ 0x8, {
'Count' : [ 0x0, ['unsigned short']],
'Maximum' : [ 0x2, ['unsigned short']],
'Orderings' : [ 0x4, ['pointer', ['_ARBITER_ORDERING']]],
} ],
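# _HBASE_BLOCK is the 4KB registry hive base block; Signature holds the
# 'regf' magic (0x66676572 read little-endian), and Sequence1/Sequence2
# should match in a cleanly flushed hive.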
'_HBASE_BLOCK' : [ 0x1000, {
'Signature' : [ 0x0, ['unsigned long']],
'Sequence1' : [ 0x4, ['unsigned long']],
'Sequence2' : [ 0x8, ['unsigned long']],
'TimeStamp' : [ 0xc, ['_LARGE_INTEGER']],
'Major' : [ 0x14, ['unsigned long']],
'Minor' : [ 0x18, ['unsigned long']],
'Type' : [ 0x1c, ['unsigned long']],
'Format' : [ 0x20, ['unsigned long']],
'RootCell' : [ 0x24, ['unsigned long']],
'Length' : [ 0x28, ['unsigned long']],
'Cluster' : [ 0x2c, ['unsigned long']],
'FileName' : [ 0x30, ['array', 64, ['unsigned char']]],
'Reserved1' : [ 0x70, ['array', 99, ['unsigned long']]],
'CheckSum' : [ 0x1fc, ['unsigned long']],
'Reserved2' : [ 0x200, ['array', 894, ['unsigned long']]],
'BootType' : [ 0xff8, ['unsigned long']],
'BootRecover' : [ 0xffc, ['unsigned long']],
} ],
'_DUAL' : [ 0xdc, {
'Length' : [ 0x0, ['unsigned long']],
'Map' : [ 0x4, ['pointer', ['_HMAP_DIRECTORY']]],
'SmallDir' : [ 0x8, ['pointer', ['_HMAP_TABLE']]],
'Guard' : [ 0xc, ['unsigned long']],
'FreeDisplay' : [ 0x10, ['array', 24, ['_RTL_BITMAP']]],
'FreeSummary' : [ 0xd0, ['unsigned long']],
'FreeBins' : [ 0xd4, ['_LIST_ENTRY']],
} ],
'_COMPRESSED_DATA_INFO' : [ 0xc, {
'CompressionFormatAndEngine' : [ 0x0, ['unsigned short']],
'CompressionUnitShift' : [ 0x2, ['unsigned char']],
'ChunkShift' : [ 0x3, ['unsigned char']],
'ClusterShift' : [ 0x4, ['unsigned char']],
'Reserved' : [ 0x5, ['unsigned char']],
'NumberOfChunks' : [ 0x6, ['unsigned short']],
'CompressedChunkSizes' : [ 0x8, ['array', 1, ['unsigned long']]],
} ],
'_LPCP_PORT_QUEUE' : [ 0x10, {
'NonPagedPortQueue' : [ 0x0, ['pointer', ['_LPCP_NONPAGED_PORT_QUEUE']]],
'Semaphore' : [ 0x4, ['pointer', ['_KSEMAPHORE']]],
'ReceiveHead' : [ 0x8, ['_LIST_ENTRY']],
} ],
'_INITIAL_PRIVILEGE_SET' : [ 0x2c, {
'PrivilegeCount' : [ 0x0, ['unsigned long']],
'Control' : [ 0x4, ['unsigned long']],
'Privilege' : [ 0x8, ['array', 3, ['_LUID_AND_ATTRIBUTES']]],
} ],
'_POP_HIBER_CONTEXT' : [ 0xe0, {
'WriteToFile' : [ 0x0, ['unsigned char']],
'ReserveLoaderMemory' : [ 0x1, ['unsigned char']],
'ReserveFreeMemory' : [ 0x2, ['unsigned char']],
'VerifyOnWake' : [ 0x3, ['unsigned char']],
'Reset' : [ 0x4, ['unsigned char']],
'HiberFlags' : [ 0x5, ['unsigned char']],
'LinkFile' : [ 0x6, ['unsigned char']],
'LinkFileHandle' : [ 0x8, ['pointer', ['void']]],
'Lock' : [ 0xc, ['unsigned long']],
'MapFrozen' : [ 0x10, ['unsigned char']],
'MemoryMap' : [ 0x14, ['_RTL_BITMAP']],
'ClonedRanges' : [ 0x1c, ['_LIST_ENTRY']],
'ClonedRangeCount' : [ 0x24, ['unsigned long']],
'NextCloneRange' : [ 0x28, ['pointer', ['_LIST_ENTRY']]],
'NextPreserve' : [ 0x2c, ['unsigned long']],
'LoaderMdl' : [ 0x30, ['pointer', ['_MDL']]],
'Clones' : [ 0x34, ['pointer', ['_MDL']]],
'NextClone' : [ 0x38, ['pointer', ['unsigned char']]],
'NoClones' : [ 0x3c, ['unsigned long']],
'Spares' : [ 0x40, ['pointer', ['_MDL']]],
'PagesOut' : [ 0x48, ['unsigned long long']],
'IoPage' : [ 0x50, ['pointer', ['void']]],
'CurrentMcb' : [ 0x54, ['pointer', ['void']]],
'DumpStack' : [ 0x58, ['pointer', ['_DUMP_STACK_CONTEXT']]],
'WakeState' : [ 0x5c, ['pointer', ['_KPROCESSOR_STATE']]],
'NoRanges' : [ 0x60, ['unsigned long']],
'HiberVa' : [ 0x64, ['unsigned long']],
'HiberPte' : [ 0x68, ['_LARGE_INTEGER']],
'Status' : [ 0x70, ['long']],
'MemoryImage' : [ 0x74, ['pointer', ['PO_MEMORY_IMAGE']]],
'TableHead' : [ 0x78, ['pointer', ['_PO_MEMORY_RANGE_ARRAY']]],
'CompressionWorkspace' : [ 0x7c, ['pointer', ['unsigned char']]],
'CompressedWriteBuffer' : [ 0x80, ['pointer', ['unsigned char']]],
'PerformanceStats' : [ 0x84, ['pointer', ['unsigned long']]],
'CompressionBlock' : [ 0x88, ['pointer', ['void']]],
'DmaIO' : [ 0x8c, ['pointer', ['void']]],
'TemporaryHeap' : [ 0x90, ['pointer', ['void']]],
'PerfInfo' : [ 0x98, ['_PO_HIBER_PERF']],
} ],
'_TEB_ACTIVE_FRAME' : [ 0xc, {
'Flags' : [ 0x0, ['unsigned long']],
'Previous' : [ 0x4, ['pointer', ['_TEB_ACTIVE_FRAME']]],
'Context' : [ 0x8, ['pointer', ['_TEB_ACTIVE_FRAME_CONTEXT']]],
} ],
'_FILE_GET_QUOTA_INFORMATION' : [ 0x14, {
'NextEntryOffset' : [ 0x0, ['unsigned long']],
'SidLength' : [ 0x4, ['unsigned long']],
'Sid' : [ 0x8, ['_SID']],
} ],
'_MMADDRESS_LIST' : [ 0x8, {
'StartVpn' : [ 0x0, ['unsigned long']],
'EndVpn' : [ 0x4, ['unsigned long']],
} ],
'_OBJECT_NAME_INFORMATION' : [ 0x8, {
'Name' : [ 0x0, ['_UNICODE_STRING']],
} ],
'_SECURITY_QUALITY_OF_SERVICE' : [ 0xc, {
'Length' : [ 0x0, ['unsigned long']],
'ImpersonationLevel' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'ContextTrackingMode' : [ 0x8, ['unsigned char']],
'EffectiveOnly' : [ 0x9, ['unsigned char']],
} ],
'_DUMP_STACK_CONTEXT' : [ 0xb0, {
'Init' : [ 0x0, ['_DUMP_INITIALIZATION_CONTEXT']],
'PartitionOffset' : [ 0x70, ['_LARGE_INTEGER']],
'DumpPointers' : [ 0x78, ['pointer', ['void']]],
'PointersLength' : [ 0x7c, ['unsigned long']],
'ModulePrefix' : [ 0x80, ['pointer', ['unsigned short']]],
'DriverList' : [ 0x84, ['_LIST_ENTRY']],
'InitMsg' : [ 0x8c, ['_STRING']],
'ProgMsg' : [ 0x94, ['_STRING']],
'DoneMsg' : [ 0x9c, ['_STRING']],
'FileObject' : [ 0xa4, ['pointer', ['void']]],
'UsageType' : [ 0xa8, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceUsageTypeUndefined', 1: 'DeviceUsageTypePaging', 2: 'DeviceUsageTypeHibernation', 3: 'DeviceUsageTypeDumpFile'})]],
} ],
'_FILE_STANDARD_INFORMATION' : [ 0x18, {
'AllocationSize' : [ 0x0, ['_LARGE_INTEGER']],
'EndOfFile' : [ 0x8, ['_LARGE_INTEGER']],
'NumberOfLinks' : [ 0x10, ['unsigned long']],
'DeletePending' : [ 0x14, ['unsigned char']],
'Directory' : [ 0x15, ['unsigned char']],
} ],
'_POP_SHUTDOWN_BUG_CHECK' : [ 0x14, {
'Code' : [ 0x0, ['unsigned long']],
'Parameter1' : [ 0x4, ['unsigned long']],
'Parameter2' : [ 0x8, ['unsigned long']],
'Parameter3' : [ 0xc, ['unsigned long']],
'Parameter4' : [ 0x10, ['unsigned long']],
} ],
'__unnamed_18c9' : [ 0x4, {
'DeviceNumber' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 5, native_type='unsigned long')]],
'FunctionNumber' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_18cb' : [ 0x4, {
'bits' : [ 0x0, ['__unnamed_18c9']],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_PCI_SLOT_NUMBER' : [ 0x4, {
'u' : [ 0x0, ['__unnamed_18cb']],
} ],
'_Wx86ThreadState' : [ 0xc, {
'CallBx86Eip' : [ 0x0, ['pointer', ['unsigned long']]],
'DeallocationCpu' : [ 0x4, ['pointer', ['void']]],
'UseKnownWx86Dll' : [ 0x8, ['unsigned char']],
'OleStubInvoked' : [ 0x9, ['unsigned char']],
} ],
'_DRIVER_EXTENSION' : [ 0x1c, {
'DriverObject' : [ 0x0, ['pointer', ['_DRIVER_OBJECT']]],
'AddDevice' : [ 0x4, ['pointer', ['void']]],
'Count' : [ 0x8, ['unsigned long']],
'ServiceKeyName' : [ 0xc, ['_UNICODE_STRING']],
'ClientDriverExtension' : [ 0x14, ['pointer', ['_IO_CLIENT_EXTENSION']]],
'FsFilterCallbacks' : [ 0x18, ['pointer', ['_FS_FILTER_CALLBACKS']]],
} ],
'_CM_NOTIFY_BLOCK' : [ 0x2c, {
'HiveList' : [ 0x0, ['_LIST_ENTRY']],
'PostList' : [ 0x8, ['_LIST_ENTRY']],
'KeyControlBlock' : [ 0x10, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
'KeyBody' : [ 0x14, ['pointer', ['_CM_KEY_BODY']]],
'Filter' : [ 0x18, ['BitField', dict(start_bit = 0, end_bit = 30, native_type='unsigned long')]],
'WatchTree' : [ 0x18, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'NotifyPending' : [ 0x18, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'SubjectContext' : [ 0x1c, ['_SECURITY_SUBJECT_CONTEXT']],
} ],
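# _SID is variable-length: the declared single-element SubAuthority array is
# the usual trailing-array idiom, and the real size is
# 8 + 4 * SubAuthorityCount bytes.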
'_SID' : [ 0xc, {
'Revision' : [ 0x0, ['unsigned char']],
'SubAuthorityCount' : [ 0x1, ['unsigned char']],
'IdentifierAuthority' : [ 0x2, ['_SID_IDENTIFIER_AUTHORITY']],
'SubAuthority' : [ 0x8, ['array', 1, ['unsigned long']]],
} ],
'_RTL_HANDLE_TABLE_ENTRY' : [ 0x4, {
'Flags' : [ 0x0, ['unsigned long']],
'NextFree' : [ 0x0, ['pointer', ['_RTL_HANDLE_TABLE_ENTRY']]],
} ],
'_INTERFACE' : [ 0x10, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x4, ['pointer', ['void']]],
'InterfaceReference' : [ 0x8, ['pointer', ['void']]],
'InterfaceDereference' : [ 0xc, ['pointer', ['void']]],
} ],
'_SUPPORTED_RANGES' : [ 0xa0, {
'Version' : [ 0x0, ['unsigned short']],
'Sorted' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['unsigned char']],
'NoIO' : [ 0x4, ['unsigned long']],
'IO' : [ 0x8, ['_SUPPORTED_RANGE']],
'NoMemory' : [ 0x28, ['unsigned long']],
'Memory' : [ 0x30, ['_SUPPORTED_RANGE']],
'NoPrefetchMemory' : [ 0x50, ['unsigned long']],
'PrefetchMemory' : [ 0x58, ['_SUPPORTED_RANGE']],
'NoDma' : [ 0x78, ['unsigned long']],
'Dma' : [ 0x80, ['_SUPPORTED_RANGE']],
} ],
'_SID_IDENTIFIER_AUTHORITY' : [ 0x6, {
'Value' : [ 0x0, ['array', 6, ['unsigned char']]],
} ],
'_SECURITY_DESCRIPTOR_RELATIVE' : [ 0x14, {
'Revision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'Control' : [ 0x2, ['unsigned short']],
'Owner' : [ 0x4, ['unsigned long']],
'Group' : [ 0x8, ['unsigned long']],
'Sacl' : [ 0xc, ['unsigned long']],
'Dacl' : [ 0x10, ['unsigned long']],
} ],
'_PM_SUPPORT' : [ 0x1, {
'Rsvd2' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'D1' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'D2' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'PMED0' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'PMED1' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'PMED2' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'PMED3Hot' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'PMED3Cold' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
} ],
'__unnamed_18f1' : [ 0xc, {
'ArbitrationList' : [ 0x0, ['pointer', ['_LIST_ENTRY']]],
'AllocateFromCount' : [ 0x4, ['unsigned long']],
'AllocateFrom' : [ 0x8, ['pointer', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'__unnamed_18f3' : [ 0x4, {
'ArbitrationList' : [ 0x0, ['pointer', ['_LIST_ENTRY']]],
} ],
'__unnamed_18f7' : [ 0x4, {
'AllocatedResources' : [ 0x0, ['pointer', ['pointer', ['_CM_PARTIAL_RESOURCE_LIST']]]],
} ],
'__unnamed_18f9' : [ 0x10, {
'PhysicalDeviceObject' : [ 0x0, ['pointer', ['_DEVICE_OBJECT']]],
'ConflictingResource' : [ 0x4, ['pointer', ['_IO_RESOURCE_DESCRIPTOR']]],
'ConflictCount' : [ 0x8, ['pointer', ['unsigned long']]],
'Conflicts' : [ 0xc, ['pointer', ['pointer', ['_ARBITER_CONFLICT_INFO']]]],
} ],
'__unnamed_18fb' : [ 0x4, {
'ReserveDevice' : [ 0x0, ['pointer', ['_DEVICE_OBJECT']]],
} ],
'__unnamed_18fd' : [ 0x10, {
'TestAllocation' : [ 0x0, ['__unnamed_18f1']],
'RetestAllocation' : [ 0x0, ['__unnamed_18f1']],
'BootAllocation' : [ 0x0, ['__unnamed_18f3']],
'QueryAllocatedResources' : [ 0x0, ['__unnamed_18f7']],
'QueryConflict' : [ 0x0, ['__unnamed_18f9']],
'QueryArbitrate' : [ 0x0, ['__unnamed_18f3']],
'AddReserved' : [ 0x0, ['__unnamed_18fb']],
} ],
'_ARBITER_PARAMETERS' : [ 0x10, {
'Parameters' : [ 0x0, ['__unnamed_18fd']],
} ],
'_SECURITY_TOKEN_AUDIT_DATA' : [ 0xc, {
'Length' : [ 0x0, ['unsigned long']],
'GrantMask' : [ 0x4, ['unsigned long']],
'DenyMask' : [ 0x8, ['unsigned long']],
} ],
'_HANDLE_TABLE_ENTRY_INFO' : [ 0x4, {
'AuditMask' : [ 0x0, ['unsigned long']],
} ],
'_POWER_SEQUENCE' : [ 0xc, {
'SequenceD1' : [ 0x0, ['unsigned long']],
'SequenceD2' : [ 0x4, ['unsigned long']],
'SequenceD3' : [ 0x8, ['unsigned long']],
} ],
'_IMAGE_DATA_DIRECTORY' : [ 0x8, {
'VirtualAddress' : [ 0x0, ['unsigned long']],
'Size' : [ 0x4, ['unsigned long']],
} ],
'_MI_VERIFIER_DRIVER_ENTRY' : [ 0x60, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'Loads' : [ 0x8, ['unsigned long']],
'Unloads' : [ 0xc, ['unsigned long']],
'BaseName' : [ 0x10, ['_UNICODE_STRING']],
'StartAddress' : [ 0x18, ['pointer', ['void']]],
'EndAddress' : [ 0x1c, ['pointer', ['void']]],
'Flags' : [ 0x20, ['unsigned long']],
'Signature' : [ 0x24, ['unsigned long']],
'Reserved' : [ 0x28, ['unsigned long']],
'VerifierPoolLock' : [ 0x2c, ['unsigned long']],
'PoolHash' : [ 0x30, ['pointer', ['_VI_POOL_ENTRY']]],
'PoolHashSize' : [ 0x34, ['unsigned long']],
'PoolHashFree' : [ 0x38, ['unsigned long']],
'PoolHashReserved' : [ 0x3c, ['unsigned long']],
'CurrentPagedPoolAllocations' : [ 0x40, ['unsigned long']],
'CurrentNonPagedPoolAllocations' : [ 0x44, ['unsigned long']],
'PeakPagedPoolAllocations' : [ 0x48, ['unsigned long']],
'PeakNonPagedPoolAllocations' : [ 0x4c, ['unsigned long']],
'PagedBytes' : [ 0x50, ['unsigned long']],
'NonPagedBytes' : [ 0x54, ['unsigned long']],
'PeakPagedBytes' : [ 0x58, ['unsigned long']],
'PeakNonPagedBytes' : [ 0x5c, ['unsigned long']],
} ],
'_CURDIR' : [ 0xc, {
'DosPath' : [ 0x0, ['_UNICODE_STRING']],
'Handle' : [ 0x8, ['pointer', ['void']]],
} ],
'_MMMOD_WRITER_LISTHEAD' : [ 0x18, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'Event' : [ 0x8, ['_KEVENT']],
} ],
'_PO_HIBER_PERF' : [ 0x48, {
'IoTicks' : [ 0x0, ['unsigned long long']],
'InitTicks' : [ 0x8, ['unsigned long long']],
'CopyTicks' : [ 0x10, ['unsigned long long']],
'StartCount' : [ 0x18, ['unsigned long long']],
'ElapsedTime' : [ 0x20, ['unsigned long']],
'IoTime' : [ 0x24, ['unsigned long']],
'CopyTime' : [ 0x28, ['unsigned long']],
'InitTime' : [ 0x2c, ['unsigned long']],
'PagesWritten' : [ 0x30, ['unsigned long']],
'PagesProcessed' : [ 0x34, ['unsigned long']],
'BytesCopied' : [ 0x38, ['unsigned long']],
'DumpCount' : [ 0x3c, ['unsigned long']],
'FileRuns' : [ 0x40, ['unsigned long']],
} ],
'_GDI_TEB_BATCH' : [ 0x4e0, {
'Offset' : [ 0x0, ['unsigned long']],
'HDC' : [ 0x4, ['unsigned long']],
'Buffer' : [ 0x8, ['array', 310, ['unsigned long']]],
} ],
'PO_MEMORY_IMAGE' : [ 0xa8, {
'Signature' : [ 0x0, ['unsigned long']],
'Version' : [ 0x4, ['unsigned long']],
'CheckSum' : [ 0x8, ['unsigned long']],
'LengthSelf' : [ 0xc, ['unsigned long']],
'PageSelf' : [ 0x10, ['unsigned long']],
'PageSize' : [ 0x14, ['unsigned long']],
'ImageType' : [ 0x18, ['unsigned long']],
'SystemTime' : [ 0x20, ['_LARGE_INTEGER']],
'InterruptTime' : [ 0x28, ['unsigned long long']],
'FeatureFlags' : [ 0x30, ['unsigned long']],
'HiberFlags' : [ 0x34, ['unsigned char']],
'spare' : [ 0x35, ['array', 3, ['unsigned char']]],
'NoHiberPtes' : [ 0x38, ['unsigned long']],
'HiberVa' : [ 0x3c, ['unsigned long']],
'HiberPte' : [ 0x40, ['_LARGE_INTEGER']],
'NoFreePages' : [ 0x48, ['unsigned long']],
'FreeMapCheck' : [ 0x4c, ['unsigned long']],
'WakeCheck' : [ 0x50, ['unsigned long']],
'TotalPages' : [ 0x54, ['unsigned long']],
'FirstTablePage' : [ 0x58, ['unsigned long']],
'LastFilePage' : [ 0x5c, ['unsigned long']],
'PerfInfo' : [ 0x60, ['_PO_HIBER_PERF']],
} ],
'BATTERY_REPORTING_SCALE' : [ 0x8, {
'Granularity' : [ 0x0, ['unsigned long']],
'Capacity' : [ 0x4, ['unsigned long']],
} ],
'_KDEVICE_QUEUE_ENTRY' : [ 0x10, {
'DeviceListEntry' : [ 0x0, ['_LIST_ENTRY']],
'SortKey' : [ 0x8, ['unsigned long']],
'Inserted' : [ 0xc, ['unsigned char']],
} ],
'_DEVICE_CAPABILITIES' : [ 0x40, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'DeviceD1' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DeviceD2' : [ 0x4, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'LockSupported' : [ 0x4, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'EjectSupported' : [ 0x4, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Removable' : [ 0x4, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'DockDevice' : [ 0x4, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'UniqueID' : [ 0x4, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SilentInstall' : [ 0x4, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'RawDeviceOK' : [ 0x4, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'SurpriseRemovalOK' : [ 0x4, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'WakeFromD0' : [ 0x4, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'WakeFromD1' : [ 0x4, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'WakeFromD2' : [ 0x4, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'WakeFromD3' : [ 0x4, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'HardwareDisabled' : [ 0x4, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'NonDynamic' : [ 0x4, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'WarmEjectSupported' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'NoDisplayInUI' : [ 0x4, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'Reserved' : [ 0x4, ['BitField', dict(start_bit = 18, end_bit = 32, native_type='unsigned long')]],
'Address' : [ 0x8, ['unsigned long']],
'UINumber' : [ 0xc, ['unsigned long']],
'DeviceState' : [ 0x10, ['array', -28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]]],
'SystemWake' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DeviceWake' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'D1Latency' : [ 0x34, ['unsigned long']],
'D2Latency' : [ 0x38, ['unsigned long']],
'D3Latency' : [ 0x3c, ['unsigned long']],
} ],
'_TEB_ACTIVE_FRAME_CONTEXT' : [ 0x8, {
'Flags' : [ 0x0, ['unsigned long']],
'FrameName' : [ 0x4, ['pointer', ['unsigned char']]],
} ],
'_RTL_RANGE_LIST' : [ 0x14, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'Flags' : [ 0x8, ['unsigned long']],
'Count' : [ 0xc, ['unsigned long']],
'Stamp' : [ 0x10, ['unsigned long']],
} ],
'_RTL_CRITICAL_SECTION_DEBUG' : [ 0x20, {
'Type' : [ 0x0, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0x2, ['unsigned short']],
'CriticalSection' : [ 0x4, ['pointer', ['_RTL_CRITICAL_SECTION']]],
'ProcessLocksList' : [ 0x8, ['_LIST_ENTRY']],
'EntryCount' : [ 0x10, ['unsigned long']],
'ContentionCount' : [ 0x14, ['unsigned long']],
'Spare' : [ 0x18, ['array', 2, ['unsigned long']]],
} ],
'_SEP_AUDIT_POLICY' : [ 0x8, {
'PolicyElements' : [ 0x0, ['_SEP_AUDIT_POLICY_CATEGORIES']],
'PolicyOverlay' : [ 0x0, ['_SEP_AUDIT_POLICY_OVERLAY']],
'Overlay' : [ 0x0, ['unsigned long long']],
} ],
'__unnamed_192c' : [ 0x14, {
'ClassGuid' : [ 0x0, ['_GUID']],
'SymbolicLinkName' : [ 0x10, ['array', 1, ['unsigned short']]],
} ],
'__unnamed_192e' : [ 0x2, {
'DeviceIds' : [ 0x0, ['array', 1, ['unsigned short']]],
} ],
'__unnamed_1930' : [ 0x2, {
'DeviceId' : [ 0x0, ['array', 1, ['unsigned short']]],
} ],
'__unnamed_1932' : [ 0x8, {
'NotificationStructure' : [ 0x0, ['pointer', ['void']]],
'DeviceIds' : [ 0x4, ['array', 1, ['unsigned short']]],
} ],
'__unnamed_1934' : [ 0x4, {
'Notification' : [ 0x0, ['pointer', ['void']]],
} ],
'__unnamed_1936' : [ 0x8, {
'NotificationCode' : [ 0x0, ['unsigned long']],
'NotificationData' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1938' : [ 0x8, {
'VetoType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PNP_VetoTypeUnknown', 1: 'PNP_VetoLegacyDevice', 2: 'PNP_VetoPendingClose', 3: 'PNP_VetoWindowsApp', 4: 'PNP_VetoWindowsService', 5: 'PNP_VetoOutstandingOpen', 6: 'PNP_VetoDevice', 7: 'PNP_VetoDriver', 8: 'PNP_VetoIllegalDeviceRequest', 9: 'PNP_VetoInsufficientPower', 10: 'PNP_VetoNonDisableable', 11: 'PNP_VetoLegacyDriver', 12: 'PNP_VetoInsufficientRights'})]],
'DeviceIdVetoNameBuffer' : [ 0x4, ['array', 1, ['unsigned short']]],
} ],
'__unnamed_193a' : [ 0x10, {
'BlockedDriverGuid' : [ 0x0, ['_GUID']],
} ],
'__unnamed_193c' : [ 0x14, {
'DeviceClass' : [ 0x0, ['__unnamed_192c']],
'TargetDevice' : [ 0x0, ['__unnamed_192e']],
'InstallDevice' : [ 0x0, ['__unnamed_1930']],
'CustomNotification' : [ 0x0, ['__unnamed_1932']],
'ProfileNotification' : [ 0x0, ['__unnamed_1934']],
'PowerNotification' : [ 0x0, ['__unnamed_1936']],
'VetoNotification' : [ 0x0, ['__unnamed_1938']],
'BlockedDriverNotification' : [ 0x0, ['__unnamed_193a']],
} ],
'_PLUGPLAY_EVENT_BLOCK' : [ 0x38, {
'EventGuid' : [ 0x0, ['_GUID']],
'EventCategory' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'HardwareProfileChangeEvent', 1: 'TargetDeviceChangeEvent', 2: 'DeviceClassChangeEvent', 3: 'CustomDeviceEvent', 4: 'DeviceInstallEvent', 5: 'DeviceArrivalEvent', 6: 'PowerEvent', 7: 'VetoEvent', 8: 'BlockedDriverEvent', 9: 'MaxPlugEventCategory'})]],
'Result' : [ 0x14, ['pointer', ['unsigned long']]],
'Flags' : [ 0x18, ['unsigned long']],
'TotalSize' : [ 0x1c, ['unsigned long']],
'DeviceObject' : [ 0x20, ['pointer', ['void']]],
'u' : [ 0x24, ['__unnamed_193c']],
} ],
'_CACHED_CHILD_LIST' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'ValueList' : [ 0x4, ['unsigned long']],
'RealKcb' : [ 0x4, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
} ],
'__unnamed_1942' : [ 0x10, {
'PageNo' : [ 0x0, ['unsigned long']],
'StartPage' : [ 0x4, ['unsigned long']],
'EndPage' : [ 0x8, ['unsigned long']],
'CheckSum' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_1944' : [ 0x10, {
'Next' : [ 0x0, ['pointer', ['_PO_MEMORY_RANGE_ARRAY']]],
'NextTable' : [ 0x4, ['unsigned long']],
'CheckSum' : [ 0x8, ['unsigned long']],
'EntryCount' : [ 0xc, ['unsigned long']],
} ],
'_PO_MEMORY_RANGE_ARRAY' : [ 0x10, {
'Range' : [ 0x0, ['__unnamed_1942']],
'Link' : [ 0x0, ['__unnamed_1944']],
} ],
'__unnamed_1956' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned long']],
'CheckSum' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1958' : [ 0x10, {
'DiskId' : [ 0x0, ['_GUID']],
} ],
'__unnamed_195a' : [ 0x10, {
'Mbr' : [ 0x0, ['__unnamed_1956']],
'Gpt' : [ 0x0, ['__unnamed_1958']],
} ],
'_DUMP_INITIALIZATION_CONTEXT' : [ 0x70, {
'Length' : [ 0x0, ['unsigned long']],
'Reserved' : [ 0x4, ['unsigned long']],
'MemoryBlock' : [ 0x8, ['pointer', ['void']]],
'CommonBuffer' : [ 0xc, ['array', 2, ['pointer', ['void']]]],
'PhysicalAddress' : [ 0x18, ['array', 2, ['_LARGE_INTEGER']]],
'StallRoutine' : [ 0x28, ['pointer', ['void']]],
'OpenRoutine' : [ 0x2c, ['pointer', ['void']]],
'WriteRoutine' : [ 0x30, ['pointer', ['void']]],
'FinishRoutine' : [ 0x34, ['pointer', ['void']]],
'AdapterObject' : [ 0x38, ['pointer', ['_ADAPTER_OBJECT']]],
'MappedRegisterBase' : [ 0x3c, ['pointer', ['void']]],
'PortConfiguration' : [ 0x40, ['pointer', ['void']]],
'CrashDump' : [ 0x44, ['unsigned char']],
'MaximumTransferSize' : [ 0x48, ['unsigned long']],
'CommonBufferSize' : [ 0x4c, ['unsigned long']],
'TargetAddress' : [ 0x50, ['pointer', ['void']]],
'WritePendingRoutine' : [ 0x54, ['pointer', ['void']]],
'PartitionStyle' : [ 0x58, ['unsigned long']],
'DiskInfo' : [ 0x5c, ['__unnamed_195a']],
} ],
'_IO_CLIENT_EXTENSION' : [ 0x8, {
'NextExtension' : [ 0x0, ['pointer', ['_IO_CLIENT_EXTENSION']]],
'ClientIdentificationAddress' : [ 0x4, ['pointer', ['void']]],
} ],
'_KEXECUTE_OPTIONS' : [ 0x1, {
'ExecuteDisable' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'ExecuteEnable' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'DisableThunkEmulation' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Permanent' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'ExecuteDispatchEnable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'ImageDispatchEnable' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 8, native_type='unsigned char')]],
} ],
'_CM_NAME_HASH' : [ 0xc, {
'ConvKey' : [ 0x0, ['unsigned long']],
'NextHash' : [ 0x4, ['pointer', ['_CM_NAME_HASH']]],
'NameLength' : [ 0x8, ['unsigned short']],
'Name' : [ 0xa, ['array', 1, ['unsigned short']]],
} ],
'_ARBITER_ALLOCATION_STATE' : [ 0x38, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'CurrentMinimum' : [ 0x10, ['unsigned long long']],
'CurrentMaximum' : [ 0x18, ['unsigned long long']],
'Entry' : [ 0x20, ['pointer', ['_ARBITER_LIST_ENTRY']]],
'CurrentAlternative' : [ 0x24, ['pointer', ['_ARBITER_ALTERNATIVE']]],
'AlternativeCount' : [ 0x28, ['unsigned long']],
'Alternatives' : [ 0x2c, ['pointer', ['_ARBITER_ALTERNATIVE']]],
'Flags' : [ 0x30, ['unsigned short']],
'RangeAttributes' : [ 0x32, ['unsigned char']],
'RangeAvailableAttributes' : [ 0x33, ['unsigned char']],
'WorkSpace' : [ 0x34, ['unsigned long']],
} ],
'_SEP_AUDIT_POLICY_OVERLAY' : [ 0x8, {
'PolicyBits' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 36, native_type='unsigned long long')]],
'SetBit' : [ 0x0, ['BitField', dict(start_bit = 36, end_bit = 37, native_type='unsigned long long')]],
} ],
'_PCI_HEADER_TYPE_0' : [ 0x30, {
'BaseAddresses' : [ 0x0, ['array', 6, ['unsigned long']]],
'CIS' : [ 0x18, ['unsigned long']],
'SubVendorID' : [ 0x1c, ['unsigned short']],
'SubSystemID' : [ 0x1e, ['unsigned short']],
'ROMBaseAddress' : [ 0x20, ['unsigned long']],
'CapabilitiesPtr' : [ 0x24, ['unsigned char']],
'Reserved1' : [ 0x25, ['array', 3, ['unsigned char']]],
'Reserved2' : [ 0x28, ['unsigned long']],
'InterruptLine' : [ 0x2c, ['unsigned char']],
'InterruptPin' : [ 0x2d, ['unsigned char']],
'MinimumGrant' : [ 0x2e, ['unsigned char']],
'MaximumLatency' : [ 0x2f, ['unsigned char']],
} ],
'_PO_DEVICE_NOTIFY_ORDER' : [ 0x248, {
'DevNodeSequence' : [ 0x0, ['unsigned long']],
'WarmEjectPdoPointer' : [ 0x4, ['pointer', ['pointer', ['_DEVICE_OBJECT']]]],
'OrderLevel' : [ 0x8, ['array', 8, ['_PO_NOTIFY_ORDER_LEVEL']]],
} ],
'_FS_FILTER_CALLBACKS' : [ 0x38, {
'SizeOfFsFilterCallbacks' : [ 0x0, ['unsigned long']],
'Reserved' : [ 0x4, ['unsigned long']],
'PreAcquireForSectionSynchronization' : [ 0x8, ['pointer', ['void']]],
'PostAcquireForSectionSynchronization' : [ 0xc, ['pointer', ['void']]],
'PreReleaseForSectionSynchronization' : [ 0x10, ['pointer', ['void']]],
'PostReleaseForSectionSynchronization' : [ 0x14, ['pointer', ['void']]],
'PreAcquireForCcFlush' : [ 0x18, ['pointer', ['void']]],
'PostAcquireForCcFlush' : [ 0x1c, ['pointer', ['void']]],
'PreReleaseForCcFlush' : [ 0x20, ['pointer', ['void']]],
'PostReleaseForCcFlush' : [ 0x24, ['pointer', ['void']]],
'PreAcquireForModifiedPageWriter' : [ 0x28, ['pointer', ['void']]],
'PostAcquireForModifiedPageWriter' : [ 0x2c, ['pointer', ['void']]],
'PreReleaseForModifiedPageWriter' : [ 0x30, ['pointer', ['void']]],
'PostReleaseForModifiedPageWriter' : [ 0x34, ['pointer', ['void']]],
} ],
'_IA64_DBGKD_CONTROL_SET' : [ 0x14, {
'Continue' : [ 0x0, ['unsigned long']],
'CurrentSymbolStart' : [ 0x4, ['unsigned long long']],
'CurrentSymbolEnd' : [ 0xc, ['unsigned long long']],
} ],
'_DEVICE_MAP' : [ 0x30, {
'DosDevicesDirectory' : [ 0x0, ['pointer', ['_OBJECT_DIRECTORY']]],
'GlobalDosDevicesDirectory' : [ 0x4, ['pointer', ['_OBJECT_DIRECTORY']]],
'ReferenceCount' : [ 0x8, ['unsigned long']],
'DriveMap' : [ 0xc, ['unsigned long']],
'DriveType' : [ 0x10, ['array', 32, ['unsigned char']]],
} ],
'_u' : [ 0x50, {
'KeyNode' : [ 0x0, ['_CM_KEY_NODE']],
'KeyValue' : [ 0x0, ['_CM_KEY_VALUE']],
'KeySecurity' : [ 0x0, ['_CM_KEY_SECURITY']],
'KeyIndex' : [ 0x0, ['_CM_KEY_INDEX']],
'ValueData' : [ 0x0, ['_CM_BIG_DATA']],
'KeyList' : [ 0x0, ['array', 1, ['unsigned long']]],
'KeyString' : [ 0x0, ['array', 1, ['unsigned short']]],
} ],
'_ARBITER_CONFLICT_INFO' : [ 0x18, {
'OwningObject' : [ 0x0, ['pointer', ['_DEVICE_OBJECT']]],
'Start' : [ 0x8, ['unsigned long long']],
'End' : [ 0x10, ['unsigned long long']],
} ],
'_PO_NOTIFY_ORDER_LEVEL' : [ 0x48, {
'LevelReady' : [ 0x0, ['_KEVENT']],
'DeviceCount' : [ 0x10, ['unsigned long']],
'ActiveCount' : [ 0x14, ['unsigned long']],
'WaitSleep' : [ 0x18, ['_LIST_ENTRY']],
'ReadySleep' : [ 0x20, ['_LIST_ENTRY']],
'Pending' : [ 0x28, ['_LIST_ENTRY']],
'Complete' : [ 0x30, ['_LIST_ENTRY']],
'ReadyS0' : [ 0x38, ['_LIST_ENTRY']],
'WaitS0' : [ 0x40, ['_LIST_ENTRY']],
} ],
'__unnamed_198f' : [ 0x8, {
'Base' : [ 0x0, ['unsigned long']],
'Limit' : [ 0x4, ['unsigned long']],
} ],
'_PCI_HEADER_TYPE_2' : [ 0x30, {
'SocketRegistersBaseAddress' : [ 0x0, ['unsigned long']],
'CapabilitiesPtr' : [ 0x4, ['unsigned char']],
'Reserved' : [ 0x5, ['unsigned char']],
'SecondaryStatus' : [ 0x6, ['unsigned short']],
'PrimaryBus' : [ 0x8, ['unsigned char']],
'SecondaryBus' : [ 0x9, ['unsigned char']],
'SubordinateBus' : [ 0xa, ['unsigned char']],
'SecondaryLatency' : [ 0xb, ['unsigned char']],
'Range' : [ 0xc, ['array', 4, ['__unnamed_198f']]],
'InterruptLine' : [ 0x2c, ['unsigned char']],
'InterruptPin' : [ 0x2d, ['unsigned char']],
'BridgeControl' : [ 0x2e, ['unsigned short']],
} ],
'_SEP_AUDIT_POLICY_CATEGORIES' : [ 0x8, {
'System' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long')]],
'Logon' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned long')]],
'ObjectAccess' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 12, native_type='unsigned long')]],
'PrivilegeUse' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 16, native_type='unsigned long')]],
'DetailedTracking' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'PolicyChange' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 24, native_type='unsigned long')]],
'AccountManagement' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 28, native_type='unsigned long')]],
'DirectoryServiceAccess' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 32, native_type='unsigned long')]],
'AccountLogon' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long')]],
} ],
'_CM_KEY_VALUE' : [ 0x18, {
'Signature' : [ 0x0, ['unsigned short']],
'NameLength' : [ 0x2, ['unsigned short']],
'DataLength' : [ 0x4, ['unsigned long']],
'Data' : [ 0x8, ['unsigned long']],
'Type' : [ 0xc, ['unsigned long']],
'Flags' : [ 0x10, ['unsigned short']],
'Spare' : [ 0x12, ['unsigned short']],
'Name' : [ 0x14, ['array', 1, ['unsigned short']]],
} ],
'_AMD64_DBGKD_CONTROL_SET' : [ 0x1c, {
'TraceFlag' : [ 0x0, ['unsigned long']],
'Dr7' : [ 0x4, ['unsigned long long']],
'CurrentSymbolStart' : [ 0xc, ['unsigned long long']],
'CurrentSymbolEnd' : [ 0x14, ['unsigned long long']],
} ],
'_FS_FILTER_CALLBACK_DATA' : [ 0x24, {
'SizeOfFsFilterCallbackData' : [ 0x0, ['unsigned long']],
'Operation' : [ 0x4, ['unsigned char']],
'Reserved' : [ 0x5, ['unsigned char']],
'DeviceObject' : [ 0x8, ['pointer', ['_DEVICE_OBJECT']]],
'FileObject' : [ 0xc, ['pointer', ['_FILE_OBJECT']]],
'Parameters' : [ 0x10, ['_FS_FILTER_PARAMETERS']],
} ],
'_OBJECT_DIRECTORY_ENTRY' : [ 0x8, {
'ChainLink' : [ 0x0, ['pointer', ['_OBJECT_DIRECTORY_ENTRY']]],
'Object' : [ 0x4, ['pointer', ['void']]],
} ],
'_VI_POOL_ENTRY' : [ 0x10, {
'InUse' : [ 0x0, ['_VI_POOL_ENTRY_INUSE']],
'FreeListNext' : [ 0x0, ['unsigned long']],
} ],
'_POP_DEVICE_POWER_IRP' : [ 0x2c, {
'Free' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Irp' : [ 0x4, ['pointer', ['_IRP']]],
'Notify' : [ 0x8, ['pointer', ['_PO_DEVICE_NOTIFY']]],
'Pending' : [ 0xc, ['_LIST_ENTRY']],
'Complete' : [ 0x14, ['_LIST_ENTRY']],
'Abort' : [ 0x1c, ['_LIST_ENTRY']],
'Failed' : [ 0x24, ['_LIST_ENTRY']],
} ],
'_RTL_RANGE' : [ 0x20, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'UserData' : [ 0x10, ['pointer', ['void']]],
'Owner' : [ 0x14, ['pointer', ['void']]],
'Attributes' : [ 0x18, ['unsigned char']],
'Flags' : [ 0x19, ['unsigned char']],
} ],
'_PCI_HEADER_TYPE_1' : [ 0x30, {
'BaseAddresses' : [ 0x0, ['array', 2, ['unsigned long']]],
'PrimaryBus' : [ 0x8, ['unsigned char']],
'SecondaryBus' : [ 0x9, ['unsigned char']],
'SubordinateBus' : [ 0xa, ['unsigned char']],
'SecondaryLatency' : [ 0xb, ['unsigned char']],
'IOBase' : [ 0xc, ['unsigned char']],
'IOLimit' : [ 0xd, ['unsigned char']],
'SecondaryStatus' : [ 0xe, ['unsigned short']],
'MemoryBase' : [ 0x10, ['unsigned short']],
'MemoryLimit' : [ 0x12, ['unsigned short']],
'PrefetchBase' : [ 0x14, ['unsigned short']],
'PrefetchLimit' : [ 0x16, ['unsigned short']],
'PrefetchBaseUpper32' : [ 0x18, ['unsigned long']],
'PrefetchLimitUpper32' : [ 0x1c, ['unsigned long']],
'IOBaseUpper16' : [ 0x20, ['unsigned short']],
'IOLimitUpper16' : [ 0x22, ['unsigned short']],
'CapabilitiesPtr' : [ 0x24, ['unsigned char']],
'Reserved1' : [ 0x25, ['array', 3, ['unsigned char']]],
'ROMBaseAddress' : [ 0x28, ['unsigned long']],
'InterruptLine' : [ 0x2c, ['unsigned char']],
'InterruptPin' : [ 0x2d, ['unsigned char']],
'BridgeControl' : [ 0x2e, ['unsigned short']],
} ],
'_PRIVILEGE_SET' : [ 0x14, {
'PrivilegeCount' : [ 0x0, ['unsigned long']],
'Control' : [ 0x4, ['unsigned long']],
'Privilege' : [ 0x8, ['array', 1, ['_LUID_AND_ATTRIBUTES']]],
} ],
'_IO_SECURITY_CONTEXT' : [ 0x10, {
'SecurityQos' : [ 0x0, ['pointer', ['_SECURITY_QUALITY_OF_SERVICE']]],
'AccessState' : [ 0x4, ['pointer', ['_ACCESS_STATE']]],
'DesiredAccess' : [ 0x8, ['unsigned long']],
'FullCreateOptions' : [ 0xc, ['unsigned long']],
} ],
'_KSPECIAL_REGISTERS' : [ 0x54, {
'Cr0' : [ 0x0, ['unsigned long']],
'Cr2' : [ 0x4, ['unsigned long']],
'Cr3' : [ 0x8, ['unsigned long']],
'Cr4' : [ 0xc, ['unsigned long']],
'KernelDr0' : [ 0x10, ['unsigned long']],
'KernelDr1' : [ 0x14, ['unsigned long']],
'KernelDr2' : [ 0x18, ['unsigned long']],
'KernelDr3' : [ 0x1c, ['unsigned long']],
'KernelDr6' : [ 0x20, ['unsigned long']],
'KernelDr7' : [ 0x24, ['unsigned long']],
'Gdtr' : [ 0x28, ['_DESCRIPTOR']],
'Idtr' : [ 0x30, ['_DESCRIPTOR']],
'Tr' : [ 0x38, ['unsigned short']],
'Ldtr' : [ 0x3a, ['unsigned short']],
'Reserved' : [ 0x3c, ['array', 6, ['unsigned long']]],
} ],
'_MAILSLOT_CREATE_PARAMETERS' : [ 0x18, {
'MailslotQuota' : [ 0x0, ['unsigned long']],
'MaximumMessageSize' : [ 0x4, ['unsigned long']],
'ReadTimeout' : [ 0x8, ['_LARGE_INTEGER']],
'TimeoutSpecified' : [ 0x10, ['unsigned char']],
} ],
'_NAMED_PIPE_CREATE_PARAMETERS' : [ 0x28, {
'NamedPipeType' : [ 0x0, ['unsigned long']],
'ReadMode' : [ 0x4, ['unsigned long']],
'CompletionMode' : [ 0x8, ['unsigned long']],
'MaximumInstances' : [ 0xc, ['unsigned long']],
'InboundQuota' : [ 0x10, ['unsigned long']],
'OutboundQuota' : [ 0x14, ['unsigned long']],
'DefaultTimeout' : [ 0x18, ['_LARGE_INTEGER']],
'TimeoutSpecified' : [ 0x20, ['unsigned char']],
} ],
'_CM_BIG_DATA' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned short']],
'Count' : [ 0x2, ['unsigned short']],
'List' : [ 0x4, ['unsigned long']],
} ],
'_SUPPORTED_RANGE' : [ 0x20, {
'Next' : [ 0x0, ['pointer', ['_SUPPORTED_RANGE']]],
'SystemAddressSpace' : [ 0x4, ['unsigned long']],
'SystemBase' : [ 0x8, ['long long']],
'Base' : [ 0x10, ['long long']],
'Limit' : [ 0x18, ['long long']],
} ],
'_CM_KEY_NODE' : [ 0x50, {
'Signature' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['unsigned short']],
'LastWriteTime' : [ 0x4, ['_LARGE_INTEGER']],
'Spare' : [ 0xc, ['unsigned long']],
'Parent' : [ 0x10, ['unsigned long']],
'SubKeyCounts' : [ 0x14, ['array', 2, ['unsigned long']]],
'SubKeyLists' : [ 0x1c, ['array', 2, ['unsigned long']]],
'ValueList' : [ 0x24, ['_CHILD_LIST']],
'ChildHiveReference' : [ 0x1c, ['_CM_KEY_REFERENCE']],
'Security' : [ 0x2c, ['unsigned long']],
'Class' : [ 0x30, ['unsigned long']],
'MaxNameLen' : [ 0x34, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'UserFlags' : [ 0x34, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'VirtControlFlags' : [ 0x34, ['BitField', dict(start_bit = 20, end_bit = 24, native_type='unsigned long')]],
'Debug' : [ 0x34, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
'MaxClassLen' : [ 0x38, ['unsigned long']],
'MaxValueNameLen' : [ 0x3c, ['unsigned long']],
'MaxValueDataLen' : [ 0x40, ['unsigned long']],
'WorkVar' : [ 0x44, ['unsigned long']],
'NameLength' : [ 0x48, ['unsigned short']],
'ClassLength' : [ 0x4a, ['unsigned short']],
'Name' : [ 0x4c, ['array', 1, ['unsigned short']]],
} ],
'_ARBITER_ORDERING' : [ 0x10, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
} ],
'_ARBITER_LIST_ENTRY' : [ 0x38, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'AlternativeCount' : [ 0x8, ['unsigned long']],
'Alternatives' : [ 0xc, ['pointer', ['_IO_RESOURCE_DESCRIPTOR']]],
'PhysicalDeviceObject' : [ 0x10, ['pointer', ['_DEVICE_OBJECT']]],
'RequestSource' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterRequestLegacyReported', 1: 'ArbiterRequestHalReported', 2: 'ArbiterRequestLegacyAssigned', 3: 'ArbiterRequestPnpDetected', 4: 'ArbiterRequestPnpEnumerated', -1: 'ArbiterRequestUndefined'})]],
'Flags' : [ 0x18, ['unsigned long']],
'WorkSpace' : [ 0x1c, ['long']],
'InterfaceType' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'SlotNumber' : [ 0x24, ['unsigned long']],
'BusNumber' : [ 0x28, ['unsigned long']],
'Assignment' : [ 0x2c, ['pointer', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
'SelectedAlternative' : [ 0x30, ['pointer', ['_IO_RESOURCE_DESCRIPTOR']]],
'Result' : [ 0x34, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterResultSuccess', 1: 'ArbiterResultExternalConflict', 2: 'ArbiterResultNullRequest', -1: 'ArbiterResultUndefined'})]],
} ],
'_LPCP_NONPAGED_PORT_QUEUE' : [ 0x18, {
'Semaphore' : [ 0x0, ['_KSEMAPHORE']],
'BackPointer' : [ 0x14, ['pointer', ['_LPCP_PORT_OBJECT']]],
} ],
'_CM_KEY_INDEX' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned short']],
'Count' : [ 0x2, ['unsigned short']],
'List' : [ 0x4, ['array', 1, ['unsigned long']]],
} ],
'_CM_KEY_REFERENCE' : [ 0x8, {
'KeyCell' : [ 0x0, ['unsigned long']],
'KeyHive' : [ 0x4, ['pointer', ['_HHIVE']]],
} ],
'_ARBITER_ALTERNATIVE' : [ 0x30, {
'Minimum' : [ 0x0, ['unsigned long long']],
'Maximum' : [ 0x8, ['unsigned long long']],
'Length' : [ 0x10, ['unsigned long']],
'Alignment' : [ 0x14, ['unsigned long']],
'Priority' : [ 0x18, ['long']],
'Flags' : [ 0x1c, ['unsigned long']],
'Descriptor' : [ 0x20, ['pointer', ['_IO_RESOURCE_DESCRIPTOR']]],
'Reserved' : [ 0x24, ['array', 3, ['unsigned long']]],
} ],
'__unnamed_19d2' : [ 0x8, {
'EndingOffset' : [ 0x0, ['pointer', ['_LARGE_INTEGER']]],
'ResourceToRelease' : [ 0x4, ['pointer', ['pointer', ['_ERESOURCE']]]],
} ],
'__unnamed_19d4' : [ 0x4, {
'ResourceToRelease' : [ 0x0, ['pointer', ['_ERESOURCE']]],
} ],
'__unnamed_19d8' : [ 0x8, {
'SyncType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'SyncTypeOther', 1: 'SyncTypeCreateSection'})]],
'PageProtection' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_19da' : [ 0x14, {
'Argument1' : [ 0x0, ['pointer', ['void']]],
'Argument2' : [ 0x4, ['pointer', ['void']]],
'Argument3' : [ 0x8, ['pointer', ['void']]],
'Argument4' : [ 0xc, ['pointer', ['void']]],
'Argument5' : [ 0x10, ['pointer', ['void']]],
} ],
'_FS_FILTER_PARAMETERS' : [ 0x14, {
'AcquireForModifiedPageWriter' : [ 0x0, ['__unnamed_19d2']],
'ReleaseForModifiedPageWriter' : [ 0x0, ['__unnamed_19d4']],
'AcquireForSectionSynchronization' : [ 0x0, ['__unnamed_19d8']],
'Others' : [ 0x0, ['__unnamed_19da']],
} ],
'_DESCRIPTOR' : [ 0x8, {
'Pad' : [ 0x0, ['unsigned short']],
'Limit' : [ 0x2, ['unsigned short']],
'Base' : [ 0x4, ['unsigned long']],
} ],
'_VI_POOL_ENTRY_INUSE' : [ 0x10, {
'VirtualAddress' : [ 0x0, ['pointer', ['void']]],
'CallingAddress' : [ 0x4, ['pointer', ['void']]],
'NumberOfBytes' : [ 0x8, ['unsigned long']],
'Tag' : [ 0xc, ['unsigned long']],
} ],
'_CHILD_LIST' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'List' : [ 0x4, ['unsigned long']],
} ],
'_CM_KEY_SECURITY' : [ 0x28, {
'Signature' : [ 0x0, ['unsigned short']],
'Reserved' : [ 0x2, ['unsigned short']],
'Flink' : [ 0x4, ['unsigned long']],
'Blink' : [ 0x8, ['unsigned long']],
'ReferenceCount' : [ 0xc, ['unsigned long']],
'DescriptorLength' : [ 0x10, ['unsigned long']],
'Descriptor' : [ 0x14, ['_SECURITY_DESCRIPTOR_RELATIVE']],
} ],
}
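# Hedged reading note (not part of the generated profile): each vtypes entry
# above maps a structure name to [size, {member: [offset, type_spec]}]; e.g.
# '_CHILD_LIST' is 0x8 bytes with 'Count' at offset 0x0 and 'List' at 0x4.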
|
tpsatish95/Python-Workshop
|
refs/heads/master
|
Python Environment Setup/Alternate/1. Python/1. Installer/Python-3.4.0(Linux)/Lib/turtledemo/colormixer.py
|
65
|
# colormixer
from turtle import Screen, Turtle, mainloop
import sys
sys.setrecursionlimit(20000) # overcomes, for now, an instability of Python 3.0
class ColorTurtle(Turtle):
def __init__(self, x, y):
Turtle.__init__(self)
self.shape("turtle")
self.resizemode("user")
self.shapesize(3,3,5)
self.pensize(10)
self._color = [0,0,0]
self.x = x
self._color[x] = y
self.color(self._color)
self.speed(0)
self.left(90)
self.pu()
self.goto(x,0)
self.pd()
self.sety(1)
self.pu()
self.sety(y)
self.pencolor("gray25")
self.ondrag(self.shift)
def shift(self, x, y):
self.sety(max(0,min(y,1)))
self._color[self.x] = self.ycor()
self.fillcolor(self._color)
setbgcolor()
def setbgcolor():
screen.bgcolor(red.ycor(), green.ycor(), blue.ycor())
def main():
global screen, red, green, blue
screen = Screen()
screen.delay(0)
screen.setworldcoordinates(-1, -0.3, 3, 1.3)
red = ColorTurtle(0, .5)
green = ColorTurtle(1, .5)
blue = ColorTurtle(2, .5)
setbgcolor()
writer = Turtle()
writer.ht()
writer.pu()
writer.goto(1,1.15)
writer.write("DRAG!",align="center",font=("Arial",30,("bold","italic")))
return "EVENTLOOP"
if __name__ == "__main__":
msg = main()
print(msg)
mainloop()
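# Hedged reading note (not part of the original demo): each of the three
# turtles controls one RGB channel (x = 0 red, 1 green, 2 blue); dragging a
# turtle vertically clamps its y to [0, 1], stores it as that channel's
# value, and setbgcolor() repaints the background from the turtles' ycor()s.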
|
haowu4682/gem5
|
refs/heads/master
|
src/arch/x86/isa/insts/simd128/floating_point/data_conversion/convert_floating_point_to_floating_point.py
|
90
|
# Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
microcode = '''
def macroop CVTSS2SD_XMM_XMM {
cvtf2f xmml, xmmlm, destSize=8, srcSize=4, ext=Scalar
};
def macroop CVTSS2SD_XMM_M {
ldfp ufp1, seg, sib, disp, dataSize=8
cvtf2f xmml, ufp1, destSize=8, srcSize=4, ext=Scalar
};
def macroop CVTSS2SD_XMM_P {
rdip t7
ldfp ufp1, seg, riprel, disp, dataSize=8
cvtf2f xmml, ufp1, destSize=8, srcSize=4, ext=Scalar
};
def macroop CVTSD2SS_XMM_XMM {
cvtf2f xmml, xmmlm, destSize=4, srcSize=8, ext=Scalar
};
def macroop CVTSD2SS_XMM_M {
ldfp ufp1, seg, sib, disp, dataSize=8
cvtf2f xmml, ufp1, destSize=4, srcSize=8, ext=Scalar
};
def macroop CVTSD2SS_XMM_P {
rdip t7
ldfp ufp1, seg, riprel, disp, dataSize=8
cvtf2f xmml, ufp1, destSize=4, srcSize=8, ext=Scalar
};
def macroop CVTPS2PD_XMM_XMM {
cvtf2f xmmh, xmmlm, destSize=8, srcSize=4, ext=2
cvtf2f xmml, xmmlm, destSize=8, srcSize=4, ext=0
};
def macroop CVTPS2PD_XMM_M {
ldfp ufp1, seg, sib, disp, dataSize=8
cvtf2f xmmh, ufp1, destSize=8, srcSize=4, ext=2
cvtf2f xmml, ufp1, destSize=8, srcSize=4, ext=0
};
def macroop CVTPS2PD_XMM_P {
rdip t7
ldfp ufp1, seg, riprel, disp, dataSize=8
cvtf2f xmmh, ufp1, destSize=8, srcSize=4, ext=2
cvtf2f xmml, ufp1, destSize=8, srcSize=4, ext=0
};
def macroop CVTPD2PS_XMM_XMM {
cvtf2f xmml, xmmlm, destSize=4, srcSize=8, ext=0
cvtf2f xmml, xmmhm, destSize=4, srcSize=8, ext=2
lfpimm xmmh, 0
};
def macroop CVTPD2PS_XMM_M {
ldfp ufp1, seg, sib, "DISPLACEMENT", dataSize=8
ldfp ufp2, seg, sib, "DISPLACEMENT + 8", dataSize=8
cvtf2f xmml, ufp1, destSize=4, srcSize=8, ext=0
cvtf2f xmml, ufp2, destSize=4, srcSize=8, ext=2
lfpimm xmmh, 0
};
def macroop CVTPD2PS_XMM_P {
rdip t7
ldfp ufp1, seg, riprel, "DISPLACEMENT", dataSize=8
ldfp ufp2, seg, riprel, "DISPLACEMENT + 8", dataSize=8
cvtf2f xmml, ufp1, destSize=4, srcSize=8, ext=0
cvtf2f xmml, ufp2, destSize=4, srcSize=8, ext=2
lfpimm xmmh, 0
};
'''
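# Hedged reading note (not part of the original file): each macroop above
# expands a CVT* instruction into cvtf2f microops, where destSize/srcSize
# give the element widths in bytes (4 = single precision, 8 = double), and
# the packed forms convert the low and high elements in separate microops.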
|
Onirik79/aaritmud
|
refs/heads/master
|
data/proto_rooms/krablath/krablath_room_bocca-leone-02-w.py
|
10
|
# -*- coding: utf-8 -*-
import_path = "data.proto_rooms.krablath.krablath_room_bocca-leone-02-e"
krablath_room_module = __import__(import_path, globals(), locals(), [""])
after_move = krablath_room_module.after_move
after_touch = krablath_room_module.after_touch
|
lunyang/pylearn2
|
refs/heads/master
|
pylearn2/utils/python26.py
|
49
|
"""
.. todo::
WRITEME
"""
import functools
import sys
if sys.version_info[:2] < (2, 7):
def cmp_to_key(mycmp):
"""Convert a cmp= function into a key= function"""
class K(object):
__slots__ = ['obj']
def __init__(self, obj, *args):
self.obj = obj
def __lt__(self, other):
return mycmp(self.obj, other.obj) < 0
def __gt__(self, other):
return mycmp(self.obj, other.obj) > 0
def __eq__(self, other):
return mycmp(self.obj, other.obj) == 0
def __le__(self, other):
return mycmp(self.obj, other.obj) <= 0
def __ge__(self, other):
return mycmp(self.obj, other.obj) >= 0
def __ne__(self, other):
return mycmp(self.obj, other.obj) != 0
def __hash__(self):
raise TypeError('hash not implemented')
return K
else:
from functools import cmp_to_key
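# Hedged usage sketch (not part of the original module): whichever branch
# above supplied cmp_to_key, it turns an old-style three-way cmp function
# into a key callable usable with sorted().
if __name__ == '__main__':
    def _descending(a, b):
        return (b > a) - (b < a)
    assert sorted([3, 1, 2], key=cmp_to_key(_descending)) == [3, 2, 1]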
|
kenshay/ImageScripter
|
refs/heads/master
|
ProgramData/SystemFiles/Python/Lib/site-packages/pygments/lexers/oberon.py
|
31
|
# -*- coding: utf-8 -*-
"""
pygments.lexers.oberon
~~~~~~~~~~~~~~~~~~~~~~
Lexers for Oberon family languages.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation
__all__ = ['ComponentPascalLexer']
class ComponentPascalLexer(RegexLexer):
"""
For `Component Pascal <http://www.oberon.ch/pdf/CP-Lang.pdf>`_ source code.
.. versionadded:: 2.1
"""
name = 'Component Pascal'
aliases = ['componentpascal', 'cp']
filenames = ['*.cp', '*.cps']
mimetypes = ['text/x-component-pascal']
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
include('whitespace'),
include('comments'),
include('punctuation'),
include('numliterals'),
include('strings'),
include('operators'),
include('builtins'),
include('identifiers'),
],
'whitespace': [
(r'\n+', Text), # blank lines
(r'\s+', Text), # whitespace
],
'comments': [
(r'\(\*([^$].*?)\*\)', Comment.Multiline),
# TODO: nested comments (* (* ... *) ... (* ... *) *) not supported!
],
'punctuation': [
(r'[()\[\]{},.:;|]', Punctuation),
],
'numliterals': [
(r'[0-9A-F]+X\b', Number.Hex), # char code
(r'[0-9A-F]+[HL]\b', Number.Hex), # hexadecimal number
(r'[0-9]+\.[0-9]+E[+-][0-9]+', Number.Float), # real number
(r'[0-9]+\.[0-9]+', Number.Float), # real number
(r'[0-9]+', Number.Integer), # decimal whole number
],
'strings': [
(r"'[^\n']*'", String), # single quoted string
(r'"[^\n"]*"', String), # double quoted string
],
'operators': [
# Arithmetic Operators
(r'[+-]', Operator),
(r'[*/]', Operator),
# Relational Operators
(r'[=#<>]', Operator),
# Dereferencing Operator
(r'\^', Operator),
# Logical AND Operator
(r'&', Operator),
# Logical NOT Operator
(r'~', Operator),
# Assignment Symbol
(r':=', Operator),
# Range Constructor
(r'\.\.', Operator),
(r'\$', Operator),
],
'identifiers': [
(r'([a-zA-Z_$][\w$]*)', Name),
],
'builtins': [
(words((
'ANYPTR', 'ANYREC', 'BOOLEAN', 'BYTE', 'CHAR', 'INTEGER', 'LONGINT',
'REAL', 'SET', 'SHORTCHAR', 'SHORTINT', 'SHORTREAL'
), suffix=r'\b'), Keyword.Type),
(words((
'ABS', 'ABSTRACT', 'ARRAY', 'ASH', 'ASSERT', 'BEGIN', 'BITS', 'BY',
'CAP', 'CASE', 'CHR', 'CLOSE', 'CONST', 'DEC', 'DIV', 'DO', 'ELSE',
'ELSIF', 'EMPTY', 'END', 'ENTIER', 'EXCL', 'EXIT', 'EXTENSIBLE', 'FOR',
'HALT', 'IF', 'IMPORT', 'IN', 'INC', 'INCL', 'IS', 'LEN', 'LIMITED',
'LONG', 'LOOP', 'MAX', 'MIN', 'MOD', 'MODULE', 'NEW', 'ODD', 'OF',
'OR', 'ORD', 'OUT', 'POINTER', 'PROCEDURE', 'RECORD', 'REPEAT', 'RETURN',
'SHORT', 'SHORTCHAR', 'SHORTINT', 'SIZE', 'THEN', 'TYPE', 'TO', 'UNTIL',
'VAR', 'WHILE', 'WITH'
), suffix=r'\b'), Keyword.Reserved),
(r'(TRUE|FALSE|NIL|INF)\b', Keyword.Constant),
]
}
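# Hedged usage sketch (not part of the original lexer module): tokenize a
# one-line Component Pascal snippet and print the (token, value) pairs,
# e.g. via ``python -m pygments.lexers.oberon``.
if __name__ == '__main__':
    _source = 'MODULE Hello; BEGIN END Hello.'
    for _token, _value in ComponentPascalLexer().get_tokens(_source):
        print((_token, repr(_value)))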
|
koparasy/gemfi
|
refs/heads/master
|
src/unittest/stattestmain.py
|
76
|
def main():
from m5.internal.stattest import stattest_init, stattest_run
import m5.stats
stattest_init()
# Initialize the global statistics
m5.stats.initSimStats()
m5.stats.initText("cout")
# We're done registering statistics. Enable the stats package now.
m5.stats.enable()
# Reset to put the stats in a consistent state.
m5.stats.reset()
stattest_run()
m5.stats.dump()
|
Gchorba/Ask
|
refs/heads/master
|
lib/python2.7/site-packages/flask/json.py
|
428
|
# -*- coding: utf-8 -*-
"""
    flask.json
    ~~~~~~~~~~
Implementation helpers for the JSON support in Flask.
:copyright: (c) 2012 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import io
import uuid
from datetime import datetime
from .globals import current_app, request
from ._compat import text_type, PY2
from werkzeug.http import http_date
from jinja2 import Markup
# Use the same json implementation as itsdangerous on which we
# depend anyways.
try:
from itsdangerous import simplejson as _json
except ImportError:
from itsdangerous import json as _json
# figure out if simplejson escapes slashes. This behavior was changed
# from one version to another without reason.
_slash_escape = '\\/' not in _json.dumps('/')
__all__ = ['dump', 'dumps', 'load', 'loads', 'htmlsafe_dump',
'htmlsafe_dumps', 'JSONDecoder', 'JSONEncoder',
'jsonify']
def _wrap_reader_for_text(fp, encoding):
if isinstance(fp.read(0), bytes):
fp = io.TextIOWrapper(io.BufferedReader(fp), encoding)
return fp
def _wrap_writer_for_text(fp, encoding):
try:
fp.write('')
except TypeError:
fp = io.TextIOWrapper(fp, encoding)
return fp
class JSONEncoder(_json.JSONEncoder):
"""The default Flask JSON encoder. This one extends the default simplejson
encoder by also supporting ``datetime`` objects, ``UUID`` as well as
``Markup`` objects which are serialized as RFC 822 datetime strings (same
as the HTTP date format). In order to support more data types override the
:meth:`default` method.
"""
def default(self, o):
"""Implement this method in a subclass such that it returns a
serializable object for ``o``, or calls the base implementation (to
raise a ``TypeError``).
For example, to support arbitrary iterators, you could implement
default like this::
def default(self, o):
try:
iterable = iter(o)
except TypeError:
pass
else:
return list(iterable)
return JSONEncoder.default(self, o)
"""
if isinstance(o, datetime):
return http_date(o)
if isinstance(o, uuid.UUID):
return str(o)
if hasattr(o, '__html__'):
return text_type(o.__html__())
return _json.JSONEncoder.default(self, o)
class JSONDecoder(_json.JSONDecoder):
"""The default JSON decoder. This one does not change the behavior from
the default simplejson encoder. Consult the :mod:`json` documentation
for more information. This decoder is not only used for the load
functions of this module but also :attr:`~flask.Request`.
"""
def _dump_arg_defaults(kwargs):
"""Inject default arguments for dump functions."""
if current_app:
kwargs.setdefault('cls', current_app.json_encoder)
if not current_app.config['JSON_AS_ASCII']:
kwargs.setdefault('ensure_ascii', False)
kwargs.setdefault('sort_keys', current_app.config['JSON_SORT_KEYS'])
else:
kwargs.setdefault('sort_keys', True)
kwargs.setdefault('cls', JSONEncoder)
def _load_arg_defaults(kwargs):
"""Inject default arguments for load functions."""
if current_app:
kwargs.setdefault('cls', current_app.json_decoder)
else:
kwargs.setdefault('cls', JSONDecoder)
def dumps(obj, **kwargs):
"""Serialize ``obj`` to a JSON formatted ``str`` by using the application's
configured encoder (:attr:`~flask.Flask.json_encoder`) if there is an
application on the stack.
    This function can return ``unicode`` strings or ascii-only bytestrings by
    default, which coerce into unicode strings automatically. That behavior is
    controlled by the ``JSON_AS_ASCII`` configuration variable and can be
    overridden by the simplejson ``ensure_ascii`` parameter.
"""
_dump_arg_defaults(kwargs)
encoding = kwargs.pop('encoding', None)
rv = _json.dumps(obj, **kwargs)
if encoding is not None and isinstance(rv, text_type):
rv = rv.encode(encoding)
return rv
def dump(obj, fp, **kwargs):
"""Like :func:`dumps` but writes into a file object."""
_dump_arg_defaults(kwargs)
encoding = kwargs.pop('encoding', None)
if encoding is not None:
fp = _wrap_writer_for_text(fp, encoding)
_json.dump(obj, fp, **kwargs)
def loads(s, **kwargs):
"""Unserialize a JSON object from a string ``s`` by using the application's
configured decoder (:attr:`~flask.Flask.json_decoder`) if there is an
application on the stack.
"""
_load_arg_defaults(kwargs)
if isinstance(s, bytes):
s = s.decode(kwargs.pop('encoding', None) or 'utf-8')
return _json.loads(s, **kwargs)
def load(fp, **kwargs):
"""Like :func:`loads` but reads from a file object.
"""
_load_arg_defaults(kwargs)
if not PY2:
fp = _wrap_reader_for_text(fp, kwargs.pop('encoding', None) or 'utf-8')
return _json.load(fp, **kwargs)
def htmlsafe_dumps(obj, **kwargs):
"""Works exactly like :func:`dumps` but is safe for use in ``<script>``
tags. It accepts the same arguments and returns a JSON string. Note that
this is available in templates through the ``|tojson`` filter which will
also mark the result as safe. Due to how this function escapes certain
characters this is safe even if used outside of ``<script>`` tags.
The following characters are escaped in strings:
- ``<``
- ``>``
- ``&``
- ``'``
This makes it safe to embed such strings in any place in HTML with the
    notable exception of double quoted attributes. In that case either single
    quote your attributes or additionally HTML escape the value.
.. versionchanged:: 0.10
This function's return value is now always safe for HTML usage, even
if outside of script tags or if used in XHTML. This rule does not
hold true when using this function in HTML attributes that are double
quoted. Always single quote attributes if you use the ``|tojson``
filter. Alternatively use ``|tojson|forceescape``.
"""
rv = dumps(obj, **kwargs) \
.replace(u'<', u'\\u003c') \
.replace(u'>', u'\\u003e') \
.replace(u'&', u'\\u0026') \
.replace(u"'", u'\\u0027')
if not _slash_escape:
rv = rv.replace('\\/', '/')
return rv
def htmlsafe_dump(obj, fp, **kwargs):
"""Like :func:`htmlsafe_dumps` but writes into a file object."""
fp.write(unicode(htmlsafe_dumps(obj, **kwargs)))
def jsonify(*args, **kwargs):
"""Creates a :class:`~flask.Response` with the JSON representation of
the given arguments with an `application/json` mimetype. The arguments
to this function are the same as to the :class:`dict` constructor.
Example usage::
from flask import jsonify
@app.route('/_get_current_user')
def get_current_user():
return jsonify(username=g.user.username,
email=g.user.email,
id=g.user.id)
This will send a JSON response like this to the browser::
{
"username": "admin",
"email": "admin@localhost",
"id": 42
}
For security reasons only objects are supported toplevel. For more
information about this, have a look at :ref:`json-security`.
    To simplify debugging, this function's response will be pretty printed
    unless it was requested with ``X-Requested-With: XMLHttpRequest`` or the
    ``JSONIFY_PRETTYPRINT_REGULAR`` config parameter is set to false.
.. versionadded:: 0.2
"""
indent = None
if current_app.config['JSONIFY_PRETTYPRINT_REGULAR'] \
and not request.is_xhr:
indent = 2
return current_app.response_class(dumps(dict(*args, **kwargs),
indent=indent),
mimetype='application/json')
def tojson_filter(obj, **kwargs):
return Markup(htmlsafe_dumps(obj, **kwargs))
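# Hedged usage sketch (not part of the original module): the escaping in
# htmlsafe_dumps guarantees no raw angle brackets survive, which can be
# checked by running the module (e.g. ``python -m flask.json``).
if __name__ == '__main__':
    _rv = htmlsafe_dumps({'msg': "</script>"})
    assert '<' not in _rv and '>' not in _rv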
|
arunchaganty/presidential-debates
|
refs/heads/master
|
django/javanlp/migrations/0002_auto_20151105_2216.py
|
3
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('javanlp', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='sentence',
name='constituencies',
field=models.TextField(null=True),
),
migrations.AlterField(
model_name='sentence',
name='dependencies',
field=models.TextField(null=True),
),
]
|
satishgoda/bokeh
|
refs/heads/master
|
sphinx/source/docs/user_guide/source_examples/plotting_arcs.py
|
50
|
from bokeh.plotting import figure, show
p = figure(width=400, height=400)
p.arc(x=[1, 2, 3], y=[1, 2, 3], radius=0.1, start_angle=0.4, end_angle=4.8, color="navy")
show(p)
|
google-code/abc2esac
|
refs/heads/master
|
abcesac/__init__.py
|
1
|
"""
================================================================================
abc2esac - ABC to EsAC conversion
Copyright (C) 2010, Marcus Weseloh
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
================================================================================
"""
import re
from abcesac.parser import ABCParser
from abcesac.music import Song
from abcesac.formatter import ESACFormatter
from abcesac.grouper import TemperleyGrouper
__version__ = '0.1'
def abc_to_esac(abc, collection=None, prefix='', group=False, meter_cmd=None,
grouper_cmd=None, grouper_params=None):
"""Convert notes in ABC format to roughly equivalent ESAC"""
blocks = []
block = []
for line in abc.splitlines():
if line.startswith('X:'):
if block:
blocks.append('\n'.join(block))
block = []
block.append(line)
if block:
blocks.append('\n'.join(block))
output = []
for i, block in enumerate(blocks):
if group:
grouper = TemperleyGrouper(meter_cmd=meter_cmd,
grouper_cmd=grouper_cmd,
grouper_params=grouper_params)
else:
grouper = None
song = Song()
ABCParser(song).parse(block)
song.resolve_repeats()
output.append(ESACFormatter(collection=collection, prefix=prefix,
index=i, grouper=grouper).to_string(song))
return '\n\n'.join(output)
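# Hedged usage sketch (not part of the original package); the one-tune ABC
# input below is illustrative only and assumes ABCParser accepts standard
# header fields:
#
#     esac = abc_to_esac("X:1\nT:Demo\nM:4/4\nK:C\nCDEF GABc|",
#                        collection='DEMO', prefix='D')
#     print(esac)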
|
proxysh/Safejumper-for-Desktop
|
refs/heads/master
|
buildmac/Resources/env/lib/python2.7/site-packages/pip/_vendor/html5lib/__init__.py
|
336
|
"""
HTML parsing library based on the WHATWG "HTML5"
specification. The parser is designed to be compatible with existing
HTML found in the wild and implements well-defined error recovery that
is largely compatible with modern desktop web browsers.
Example usage:
import html5lib
f = open("my_document.html")
tree = html5lib.parse(f)
"""
from __future__ import absolute_import, division, unicode_literals
from .html5parser import HTMLParser, parse, parseFragment
from .treebuilders import getTreeBuilder
from .treewalkers import getTreeWalker
from .serializer import serialize
__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
"getTreeWalker", "serialize"]
# this has to be at the top level, see how setup.py parses this
__version__ = "1.0b10"
|
kstrauser/ansible
|
refs/heads/devel
|
plugins/inventory/vbox.py
|
111
|
#!/usr/bin/env python
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import sys
from subprocess import Popen,PIPE
try:
import json
except ImportError:
import simplejson as json
VBOX="VBoxManage"
def get_hosts(host=None):
returned = {}
try:
if host:
p = Popen([VBOX, 'showvminfo', host], stdout=PIPE)
else:
returned = { 'all': set(), '_metadata': {} }
p = Popen([VBOX, 'list', '-l', 'vms'], stdout=PIPE)
except:
sys.exit(1)
hostvars = {}
prevkey = pref_k = ''
for line in p.stdout.readlines():
try:
k,v = line.split(':',1)
except:
continue
if k == '':
continue
v = v.strip()
if k.startswith('Name'):
if v not in hostvars:
curname = v
hostvars[curname] = {}
try: # try to get network info
x = Popen([VBOX, 'guestproperty', 'get', curname,"/VirtualBox/GuestInfo/Net/0/V4/IP"],stdout=PIPE)
ipinfo = x.stdout.read()
if 'Value' in ipinfo:
a,ip = ipinfo.split(':',1)
hostvars[curname]['ansible_ssh_host'] = ip.strip()
except:
pass
continue
if not host:
if k == 'Groups':
for group in v.split('/'):
if group:
if group not in returned:
returned[group] = set()
returned[group].add(curname)
returned['all'].add(curname)
continue
pref_k = 'vbox_' + k.strip().replace(' ','_')
if k.startswith(' '):
if prevkey not in hostvars[curname]:
hostvars[curname][prevkey] = {}
hostvars[curname][prevkey][pref_k]= v
else:
if v != '':
hostvars[curname][pref_k] = v
prevkey = pref_k
if not host:
returned['_metadata']['hostvars'] = hostvars
else:
returned = hostvars[host]
return returned
if __name__ == '__main__':
inventory = {}
hostname = None
if len(sys.argv) > 1:
if sys.argv[1] == "--host":
hostname = sys.argv[2]
if hostname:
inventory = get_hosts(hostname)
else:
inventory = get_hosts()
import pprint
    pprint.pprint(inventory)
|
BorgERP/borg-erp-6of3
|
refs/heads/master
|
addons/event/res_partner.py
|
9
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv
class res_partner(osv.osv):
_inherit = 'res.partner'
_columns = {
'speaker': fields.boolean('Speaker'),
'event_ids': fields.one2many('event.event','main_speaker_id', readonly=True),
'event_registration_ids': fields.one2many('event.registration','partner_id', readonly=True),
}
res_partner()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
mglukhikh/intellij-community
|
refs/heads/master
|
python/helpers/py3only/docutils/parsers/rst/roles.py
|
46
|
# $Id: roles.py 7514 2012-09-14 14:27:12Z milde $
# Author: Edward Loper <edloper@gradient.cis.upenn.edu>
# Copyright: This module has been placed in the public domain.
"""
This module defines standard interpreted text role functions, a registry for
interpreted text roles, and an API for adding to and retrieving from the
registry.
The interface for interpreted role functions is as follows::
def role_fn(name, rawtext, text, lineno, inliner,
options={}, content=[]):
code...
# Set function attributes for customization:
role_fn.options = ...
role_fn.content = ...
Parameters:
- ``name`` is the local name of the interpreted text role, the role name
actually used in the document.
- ``rawtext`` is a string containing the entire interpreted text construct.
Return it as a ``problematic`` node linked to a system message if there is a
problem.
- ``text`` is the interpreted text content, with backslash escapes converted
to nulls (``\x00``).
- ``lineno`` is the line number where the interpreted text begins.
- ``inliner`` is the Inliner object that called the role function.
It defines the following useful attributes: ``reporter``,
``problematic``, ``memo``, ``parent``, ``document``.
- ``options``: A dictionary of directive options for customization, to be
interpreted by the role function. Used for additional attributes for the
generated elements and other functionality.
- ``content``: A list of strings, the directive content for customization
("role" directive). To be interpreted by the role function.
Function attributes for customization, interpreted by the "role" directive:
- ``options``: A dictionary, mapping known option names to conversion
functions such as `int` or `float`. ``None`` or an empty dict implies no
options to parse. Several directive option conversion functions are defined
in the `directives` module.
All role functions implicitly support the "class" option, unless disabled
with an explicit ``{'class': None}``.
- ``content``: A boolean; true if content is allowed. Client code must handle
the case where content is required but not supplied (an empty content list
will be supplied).
Note that unlike directives, the "arguments" function attribute is not
supported for role customization. Directive arguments are handled by the
"role" directive itself.
Interpreted role functions return a tuple of two values:
- A list of nodes which will be inserted into the document tree at the
point where the interpreted role was encountered (can be an empty
list).
- A list of system messages, which will be inserted into the document tree
immediately after the end of the current inline block (can also be empty).
"""
__docformat__ = 'reStructuredText'
from docutils import nodes, utils
from docutils.parsers.rst import directives
from docutils.parsers.rst.languages import en as _fallback_language_module
from docutils.utils.code_analyzer import Lexer, LexerError
DEFAULT_INTERPRETED_ROLE = 'title-reference'
"""
The canonical name of the default interpreted role. This role is used
when no role is specified for a piece of interpreted text.
"""
_role_registry = {}
"""Mapping of canonical role names to role functions. Language-dependent role
names are defined in the ``language`` subpackage."""
_roles = {}
"""Mapping of local or language-dependent interpreted text role names to role
functions."""
def role(role_name, language_module, lineno, reporter):
"""
Locate and return a role function from its language-dependent name, along
with a list of system messages. If the role is not found in the current
language, check English. Return a 2-tuple: role function (``None`` if the
named role cannot be found) and a list of system messages.
"""
normname = role_name.lower()
messages = []
msg_text = []
if normname in _roles:
return _roles[normname], messages
if role_name:
canonicalname = None
try:
canonicalname = language_module.roles[normname]
except AttributeError as error:
msg_text.append('Problem retrieving role entry from language '
'module %r: %s.' % (language_module, error))
except KeyError:
msg_text.append('No role entry for "%s" in module "%s".'
% (role_name, language_module.__name__))
else:
canonicalname = DEFAULT_INTERPRETED_ROLE
# If we didn't find it, try English as a fallback.
if not canonicalname:
try:
canonicalname = _fallback_language_module.roles[normname]
msg_text.append('Using English fallback for role "%s".'
% role_name)
except KeyError:
msg_text.append('Trying "%s" as canonical role name.'
% role_name)
# The canonical name should be an English name, but just in case:
canonicalname = normname
# Collect any messages that we generated.
if msg_text:
message = reporter.info('\n'.join(msg_text), line=lineno)
messages.append(message)
# Look the role up in the registry, and return it.
if canonicalname in _role_registry:
role_fn = _role_registry[canonicalname]
register_local_role(normname, role_fn)
return role_fn, messages
else:
return None, messages # Error message will be generated by caller.
def register_canonical_role(name, role_fn):
"""
Register an interpreted text role by its canonical name.
:Parameters:
- `name`: The canonical name of the interpreted role.
- `role_fn`: The role function. See the module docstring.
"""
set_implicit_options(role_fn)
_role_registry[name] = role_fn
def register_local_role(name, role_fn):
"""
Register an interpreted text role by its local or language-dependent name.
:Parameters:
- `name`: The local or language-dependent name of the interpreted role.
- `role_fn`: The role function. See the module docstring.
"""
set_implicit_options(role_fn)
_roles[name] = role_fn
def set_implicit_options(role_fn):
"""
Add customization options to role functions, unless explicitly set or
disabled.
"""
if not hasattr(role_fn, 'options') or role_fn.options is None:
role_fn.options = {'class': directives.class_option}
elif 'class' not in role_fn.options:
role_fn.options['class'] = directives.class_option
def register_generic_role(canonical_name, node_class):
"""For roles which simply wrap a given `node_class` around the text."""
role = GenericRole(canonical_name, node_class)
register_canonical_role(canonical_name, role)
class GenericRole:
"""
Generic interpreted text role, where the interpreted text is simply
wrapped with the provided node class.
"""
def __init__(self, role_name, node_class):
self.name = role_name
self.node_class = node_class
def __call__(self, role, rawtext, text, lineno, inliner,
options={}, content=[]):
set_classes(options)
return [self.node_class(rawtext, utils.unescape(text), **options)], []
class CustomRole:
"""
Wrapper for custom interpreted text roles.
"""
def __init__(self, role_name, base_role, options={}, content=[]):
self.name = role_name
self.base_role = base_role
self.options = None
if hasattr(base_role, 'options'):
self.options = base_role.options
self.content = None
if hasattr(base_role, 'content'):
self.content = base_role.content
self.supplied_options = options
self.supplied_content = content
def __call__(self, role, rawtext, text, lineno, inliner,
options={}, content=[]):
opts = self.supplied_options.copy()
opts.update(options)
cont = list(self.supplied_content)
if cont and content:
cont += '\n'
cont.extend(content)
return self.base_role(role, rawtext, text, lineno, inliner,
options=opts, content=cont)
def generic_custom_role(role, rawtext, text, lineno, inliner,
options={}, content=[]):
""""""
# Once nested inline markup is implemented, this and other methods should
# recursively call inliner.nested_parse().
set_classes(options)
return [nodes.inline(rawtext, utils.unescape(text), **options)], []
generic_custom_role.options = {'class': directives.class_option}
######################################################################
# Define and register the standard roles:
######################################################################
register_generic_role('abbreviation', nodes.abbreviation)
register_generic_role('acronym', nodes.acronym)
register_generic_role('emphasis', nodes.emphasis)
register_generic_role('literal', nodes.literal)
register_generic_role('strong', nodes.strong)
register_generic_role('subscript', nodes.subscript)
register_generic_role('superscript', nodes.superscript)
register_generic_role('title-reference', nodes.title_reference)
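# Hedged usage note (not part of the original module): client code can
# register additional generic roles the same way, e.g.
#
#     register_generic_role('keyboard', nodes.literal)
#
# after which :keyboard:`Ctrl-C` is wrapped in a ``literal`` node.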
def pep_reference_role(role, rawtext, text, lineno, inliner,
options={}, content=[]):
try:
pepnum = int(text)
if pepnum < 0 or pepnum > 9999:
raise ValueError
except ValueError:
msg = inliner.reporter.error(
'PEP number must be a number from 0 to 9999; "%s" is invalid.'
% text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
# Base URL mainly used by inliner.pep_reference; so this is correct:
ref = (inliner.document.settings.pep_base_url
+ inliner.document.settings.pep_file_url_template % pepnum)
set_classes(options)
return [nodes.reference(rawtext, 'PEP ' + utils.unescape(text), refuri=ref,
**options)], []
register_canonical_role('pep-reference', pep_reference_role)
def rfc_reference_role(role, rawtext, text, lineno, inliner,
options={}, content=[]):
try:
rfcnum = int(text)
if rfcnum <= 0:
raise ValueError
except ValueError:
msg = inliner.reporter.error(
'RFC number must be a number greater than or equal to 1; '
'"%s" is invalid.' % text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
# Base URL mainly used by inliner.rfc_reference, so this is correct:
ref = inliner.document.settings.rfc_base_url + inliner.rfc_url % rfcnum
set_classes(options)
node = nodes.reference(rawtext, 'RFC ' + utils.unescape(text), refuri=ref,
**options)
return [node], []
register_canonical_role('rfc-reference', rfc_reference_role)
def raw_role(role, rawtext, text, lineno, inliner, options={}, content=[]):
if not inliner.document.settings.raw_enabled:
msg = inliner.reporter.warning('raw (and derived) roles disabled')
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
if 'format' not in options:
msg = inliner.reporter.error(
'No format (Writer name) is associated with this role: "%s".\n'
'The "raw" role cannot be used directly.\n'
'Instead, use the "role" directive to create a new role with '
'an associated format.' % role, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
set_classes(options)
node = nodes.raw(rawtext, utils.unescape(text, 1), **options)
node.source, node.line = inliner.reporter.get_source_and_line(lineno)
return [node], []
raw_role.options = {'format': directives.unchanged}
register_canonical_role('raw', raw_role)
def code_role(role, rawtext, text, lineno, inliner, options={}, content=[]):
set_classes(options)
language = options.get('language', '')
classes = ['code']
if 'classes' in options:
classes.extend(options['classes'])
if language and language not in classes:
classes.append(language)
try:
tokens = Lexer(utils.unescape(text, 1), language,
inliner.document.settings.syntax_highlight)
except LexerError as error:
msg = inliner.reporter.warning(error)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
node = nodes.literal(rawtext, '', classes=classes)
# analyze content and add nodes for every token
for classes, value in tokens:
# print (classes, value)
if classes:
node += nodes.inline(value, value, classes=classes)
else:
# insert as Text to decrease the verbosity of the output
node += nodes.Text(value, value)
return [node], []
code_role.options = {'class': directives.class_option,
'language': directives.unchanged}
register_canonical_role('code', code_role)
def math_role(role, rawtext, text, lineno, inliner, options={}, content=[]):
    text = rawtext.split('`')[1]
node = nodes.math(rawtext, text)
return [node], []
register_canonical_role('math', math_role)
######################################################################
# Register roles that are currently unimplemented.
######################################################################
def unimplemented_role(role, rawtext, text, lineno, inliner, attributes={}):
msg = inliner.reporter.error(
'Interpreted text role "%s" not implemented.' % role, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
register_canonical_role('index', unimplemented_role)
register_canonical_role('named-reference', unimplemented_role)
register_canonical_role('anonymous-reference', unimplemented_role)
register_canonical_role('uri-reference', unimplemented_role)
register_canonical_role('footnote-reference', unimplemented_role)
register_canonical_role('citation-reference', unimplemented_role)
register_canonical_role('substitution-reference', unimplemented_role)
register_canonical_role('target', unimplemented_role)
# This should remain unimplemented, for testing purposes:
register_canonical_role('restructuredtext-unimplemented-role',
unimplemented_role)
def set_classes(options):
"""
Auxiliary function to set options['classes'] and delete
options['class'].
"""
if 'class' in options:
assert 'classes' not in options
options['classes'] = options['class']
del options['class']
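# Hedged usage sketch (editor's addition, not part of docutils): exercises
# set_classes() and the generic-role registry defined above. The role name
# 'demo-emphasis' is a hypothetical example.
if __name__ == '__main__':
    opts = {'class': ['highlight']}
    set_classes(opts)
    assert opts == {'classes': ['highlight']}  # 'class' rewritten to 'classes'
    register_generic_role('demo-emphasis', nodes.emphasis)
    print('demo-emphasis role registered')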
|
JB1tz/kernel-msm
|
refs/heads/master
|
tools/perf/scripts/python/failed-syscalls-by-pid.py
|
11180
|
# failed system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide failed system call totals, broken down by pid.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s syscall-counts-by-pid.py [comm|pid]\n";
for_comm = None
for_pid = None
if len(sys.argv) > 2:
sys.exit(usage)
if len(sys.argv) > 1:
try:
for_pid = int(sys.argv[1])
except:
for_comm = sys.argv[1]
syscalls = autodict()
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
print_error_totals()
def raw_syscalls__sys_exit(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, ret):
if (for_comm and common_comm != for_comm) or \
(for_pid and common_pid != for_pid ):
return
if ret < 0:
try:
syscalls[common_comm][common_pid][id][ret] += 1
except TypeError:
syscalls[common_comm][common_pid][id][ret] = 1
def print_error_totals():
if for_comm is not None:
print "\nsyscall errors for %s:\n\n" % (for_comm),
else:
print "\nsyscall errors:\n\n",
print "%-30s %10s\n" % ("comm [pid]", "count"),
print "%-30s %10s\n" % ("------------------------------", \
"----------"),
comm_keys = syscalls.keys()
for comm in comm_keys:
pid_keys = syscalls[comm].keys()
for pid in pid_keys:
print "\n%s [%d]\n" % (comm, pid),
id_keys = syscalls[comm][pid].keys()
for id in id_keys:
print " syscall: %-16s\n" % syscall_name(id),
				for ret, val in sorted(syscalls[comm][pid][id].iteritems(), key = lambda(k, v): (v, k), reverse = True):
print " err = %-20s %10d\n" % (strerror(ret), val),
|
bmannix/selenium
|
refs/heads/master
|
py/selenium/webdriver/phantomjs/service.py
|
55
|
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import platform
import subprocess
import time
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.common import utils
class Service(object):
"""
Object that manages the starting and stopping of PhantomJS / Ghostdriver
"""
def __init__(self, executable_path, port=0, service_args=None, log_path=None):
"""
Creates a new instance of the Service
:Args:
- executable_path : Path to PhantomJS binary
- port : Port the service is running on
- service_args : A List of other command line options to pass to PhantomJS
- log_path: Path for PhantomJS service to log to
"""
self.port = port
self.path = executable_path
        self.service_args = service_args
if self.port == 0:
self.port = utils.free_port()
if self.service_args is None:
self.service_args = []
else:
            self.service_args = service_args[:]
self.service_args.insert(0, self.path)
self.service_args.append("--webdriver=%d" % self.port)
self.process = None
if not log_path:
log_path = "ghostdriver.log"
self._log = open(log_path, 'w')
def __del__(self):
# subprocess.Popen doesn't send signal on __del__;
# we have to try to stop the launched process.
self.stop()
def start(self):
"""
Starts PhantomJS with GhostDriver.
:Exceptions:
- WebDriverException : Raised either when it can't start the service
or when it can't connect to the service.
"""
try:
self.process = subprocess.Popen(self.service_args, stdin=subprocess.PIPE,
close_fds=platform.system() != 'Windows',
stdout=self._log, stderr=self._log)
except Exception as e:
raise WebDriverException("Unable to start phantomjs with ghostdriver: %s" % e)
count = 0
while not utils.is_connectable(self.port):
count += 1
time.sleep(1)
if count == 30:
raise WebDriverException(
"Can not connect to GhostDriver on port {}".format(self.port))
@property
def service_url(self):
"""
Gets the url of the GhostDriver Service
"""
return "http://localhost:%d/wd/hub" % self.port
def stop(self):
"""
Cleans up the process
"""
if self._log:
self._log.close()
self._log = None
        # If it's already dead, don't worry.
if self.process is None:
return
        # Tell the server to die properly, just in case.
try:
if self.process:
self.process.stdin.close()
self.process.kill()
self.process.wait()
self.process = None
except OSError:
# kill may not be available under windows environment
pass
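# Hedged usage sketch (editor's addition): the standalone lifecycle of this
# Service class. The executable path is a placeholder assumption; a real
# PhantomJS binary must exist there for start() to succeed.
if __name__ == '__main__':
    service = Service('/usr/local/bin/phantomjs')  # hypothetical path
    try:
        service.start()
        print(service.service_url)  # e.g. http://localhost:<port>/wd/hub
    finally:
        service.stop()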
|
digidudeofdw/enigma2
|
refs/heads/master
|
lib/python/Components/ResourceManager.py
|
33
|
class ResourceManager:
def __init__(self):
self.resourceList = {}
def addResource(self, name, resource):
print "adding Resource", name
self.resourceList[name] = resource
print "resources:", self.resourceList
def getResource(self, name):
if not self.hasResource(name):
return None
return self.resourceList[name]
def hasResource(self, name):
return self.resourceList.has_key(name)
def removeResource(self, name):
if self.hasResource(name):
del self.resourceList[name]
resourcemanager = ResourceManager()
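# Hedged usage sketch (editor's addition): the module-level singleton above is
# the intended access point; "hdd" is a hypothetical resource name.
if __name__ == '__main__':
    resourcemanager.addResource("hdd", object())
    print resourcemanager.hasResource("hdd")   # True
    resourcemanager.removeResource("hdd")
    print resourcemanager.hasResource("hdd")   # False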
|
iamjakob/lumiCalc
|
refs/heads/master
|
LumiDB/scripts/pixelLumiCalc.py
|
1
|
#!/usr/bin/env python
########################################################################
# Command to calculate luminosity from HF measurement stored in lumiDB #
# #
# Author: Zhen Xie #
########################################################################
import os,sys,time
from RecoLuminosity.LumiDB import sessionManager,lumiTime,inputFilesetParser,csvSelectionParser,selectionParser,csvReporter,argparse,CommonUtil,lumiCalcAPI,revisionDML,normDML,lumiReport,lumiCorrections,RegexValidator
def parseInputFiles(inputfilename):
'''
output ({run:[cmsls,cmsls,...]},[[resultlines]])
'''
selectedrunlsInDB={}
resultlines=[]
p=inputFilesetParser.inputFilesetParser(inputfilename)
runlsbyfile=p.runsandls()
selectedProcessedRuns=p.selectedRunsWithresult()
selectedNonProcessedRuns=p.selectedRunsWithoutresult()
resultlines=p.resultlines()
for runinfile in selectedNonProcessedRuns:
selectedrunlsInDB[runinfile]=runlsbyfile[runinfile]
return (selectedrunlsInDB,resultlines)
##############################
## ######################## ##
## ## ################## ## ##
## ## ## Main Program ## ## ##
## ## ################## ## ##
## ######################## ##
##############################
if __name__ == '__main__':
parser = argparse.ArgumentParser(prog=os.path.basename(sys.argv[0]),description = "Lumi Calculation Based on Pixel",formatter_class=argparse.ArgumentDefaultsHelpFormatter)
allowedActions = ['overview', 'recorded', 'lumibyls']
#
# parse arguments
#
# basic arguments
#
parser.add_argument('action',choices=allowedActions,
help='command actions')
parser.add_argument('-c',dest='connect',action='store',
required=False,
help='connect string to lumiDB,optional',
default='frontier://LumiCalc/CMS_LUMI_PROD')
parser.add_argument('-P',dest='authpath',action='store',
required=False,
help='path to authentication file (optional)')
parser.add_argument('-r',dest='runnumber',action='store',
type=int,
required=False,
help='run number (optional)')
parser.add_argument('-o',dest='outputfile',action='store',
required=False,
help='output to csv file (optional)')
#################################################
#arg to select exact run and ls
#################################################
parser.add_argument('-i',dest='inputfile',action='store',
required=False,
help='lumi range selection file (optional)')
#################################################
#arg to select exact hltpath or pattern
#################################################
parser.add_argument('--hltpath',dest='hltpath',action='store',
default=None,required=False,
help='specific hltpath or hltpath pattern to calculate the effectived luminosity (optional)')
#################################################
#versions control
#################################################
parser.add_argument('--normtag',dest='normtag',action='store',
required=False,
help='version of lumi norm/correction')
parser.add_argument('--datatag',dest='datatag',action='store',
required=False,
help='version of lumi/trg/hlt data')
###############################################
# run filters
###############################################
parser.add_argument('-f','--fill',dest='fillnum',action='store',
default=None,required=False,
help='fill number (optional) ')
parser.add_argument('--begin',dest='begin',action='store',
default=None,
required=False,
type=RegexValidator.RegexValidator("^\d\d/\d\d/\d\d \d\d:\d\d:\d\d$|^\d{6}$|^\d{4}$","wrong format"),
help='min run start time (mm/dd/yy hh:mm:ss),min fill or min run'
)
parser.add_argument('--end',dest='end',action='store',
required=False,
type=RegexValidator.RegexValidator("^\d\d/\d\d/\d\d \d\d:\d\d:\d\d$|^\d{6}$|^\d{4}$","wrong format"),
help='max run start time (mm/dd/yy hh:mm:ss),max fill or max run'
)
parser.add_argument('--minBiasXsec',dest='minbiasxsec',action='store',
default=69300.0,
type=float,
required=False,
help='minbias cross-section in ub'
)
#############################################
#global scale factor
#############################################
parser.add_argument('-n',dest='scalefactor',action='store',
type=float,
default=1.0,
required=False,
help='user defined global scaling factor on displayed lumi values,optional')
#################################################
#command configuration
#################################################
parser.add_argument('--siteconfpath',dest='siteconfpath',action='store',
default=None,
required=False,
help='specific path to site-local-config.xml file, optional. If path undefined, fallback to cern proxy&server')
parser.add_argument('--headerfile',dest='headerfile',action='store',
default=None,
required=False,
help='write command header output to specified file'
)
#################################################
#switches
#################################################
parser.add_argument('--without-correction',
dest='withoutNorm',
action='store_true',
help='without afterglow correction'
)
parser.add_argument('--without-checkforupdate',
dest='withoutCheckforupdate',
action='store_true',
help='without check for update'
)
#parser.add_argument('--verbose',dest='verbose',
# action='store_true',
# help='verbose mode for printing' )
parser.add_argument('--nowarning',
dest='nowarning',
action='store_true',
help='suppress bad for lumi warnings' )
parser.add_argument('--debug',dest='debug',
action='store_true',
help='debug')
options=parser.parse_args()
if not options.runnumber and not options.inputfile and not options.fillnum and not options.begin:
raise RuntimeError('at least one run selection argument in [-r,-f,-i,--begin] is required')
#
# check working environment
#
reqrunmin=None
reqfillmin=None
reqtimemin=None
reqrunmax=None
reqfillmax=None
reqtimemax=None
timeFilter=[None,None]
noWarning=options.nowarning
iresults=[]
reqTrg=False
reqHlt=False
if options.action=='overview' or options.action=='lumibyls':
reqTrg=True
if options.action=='recorded':
reqTrg=True
reqHlt=True
if options.runnumber:
reqrunmax=options.runnumber
reqrunmin=options.runnumber
if options.fillnum:
reqfillmin=options.fillnum
reqfillmax=options.fillnum
if options.begin:
(runbeg,fillbeg,timebeg)=CommonUtil.parseTime(options.begin)
if runbeg: #there's --begin runnum #priority run,fill,time
if not reqrunmin:# there's no -r, then take this
reqrunmin=runbeg
elif fillbeg:
if not reqfillmin:
reqfillmin=fillbeg
elif timebeg:
reqtimemin=timebeg
if reqtimemin:
lute=lumiTime.lumiTime()
reqtimeminT=lute.StrToDatetime(reqtimemin,customfm='%m/%d/%y %H:%M:%S')
timeFilter[0]=reqtimeminT
if options.end:
(runend,fillend,timeend)=CommonUtil.parseTime(options.end)
if runend:
if not reqrunmax:#priority run,fill,time
reqrunmax=runend
elif fillend:
if not reqfillmax:
reqfillmax=fillend
elif timeend:
reqtimemax=timeend
if reqtimemax:
lute=lumiTime.lumiTime()
reqtimemaxT=lute.StrToDatetime(reqtimemax,customfm='%m/%d/%y %H:%M:%S')
timeFilter[1]=reqtimemaxT
if options.inputfile and (reqtimemax or reqtimemin):
        #if time and file filters are used together, there's no point warning about missing LS; switch it off
noWarning=True
##############################################################
# check working environment
##############################################################
workingversion='UNKNOWN'
updateversion='NONE'
thiscmmd=sys.argv[0]
if not options.withoutCheckforupdate:
from RecoLuminosity.LumiDB import checkforupdate
cmsswWorkingBase=os.environ['CMSSW_BASE']
if not cmsswWorkingBase:
            print 'Please check out RecoLuminosity/LumiDB from CVS, then run scram b and cmsenv'
sys.exit(11)
c=checkforupdate.checkforupdate('pixeltagstatus.txt')
workingversion=c.runningVersion(cmsswWorkingBase,'pixelLumiCalc.py',isverbose=False)
if workingversion:
updateversionList=c.checkforupdate(workingversion,isverbose=False)
if updateversionList:
updateversion=updateversionList[-1][0]
#
# check DB environment
#
if options.authpath:
os.environ['CORAL_AUTH_PATH'] = options.authpath
#############################################################
#pre-check option compatibility
#############################################################
if options.action=='recorded':
if not options.hltpath:
raise RuntimeError('argument --hltpath pathname is required for recorded action')
svc=sessionManager.sessionManager(options.connect,
authpath=options.authpath,
siteconfpath=options.siteconfpath,
debugON=options.debug)
session=svc.openSession(isReadOnly=True,cpp2sqltype=[('unsigned int','NUMBER(10)'),('unsigned long long','NUMBER(20)')])
##############################################################
# check run/ls list
##############################################################
irunlsdict={}
rruns=[]
session.transaction().start(True)
filerunlist=None
if options.inputfile:
(irunlsdict,iresults)=parseInputFiles(options.inputfile)
filerunlist=irunlsdict.keys()
##############################################################
# check datatag
# #############################################################
datatagname=options.datatag
if not datatagname:
(datatagid,datatagname)=revisionDML.currentDataTag(session.nominalSchema())
else:
datatagid=revisionDML.getDataTagId(session.nominalSchema(),datatagname)
dataidmap=lumiCalcAPI.runList(session.nominalSchema(),datatagid,runmin=reqrunmin,runmax=reqrunmax,fillmin=reqfillmin,fillmax=reqfillmax,startT=reqtimemin,stopT=reqtimemax,l1keyPattern=None,hltkeyPattern=None,amodetag=None,nominalEnergy=None,energyFlut=None,requiretrg=reqTrg,requirehlt=reqHlt,preselectedruns=filerunlist,lumitype='PIXEL')
if not dataidmap:
print '[INFO] No qualified run found, do nothing'
sys.exit(14)
rruns=[]
for irun,(lid,tid,hid) in dataidmap.items():
        if not lid:
            print '[INFO] No qualified lumi data found for run ',irun
            continue
if reqTrg and not tid:
print '[INFO] No qualified trg data found for run ',irun
continue
if reqHlt and not hid:
print '[INFO] No qualified hlt data found for run ',irun
continue
rruns.append(irun)
if not irunlsdict: #no file
irunlsdict=dict(zip(rruns,[None]*len(rruns)))
else:
for selectedrun in irunlsdict.keys():#if there's further filter on the runlist,clean input dict
if selectedrun not in rruns:
del irunlsdict[selectedrun]
if not irunlsdict:
print '[INFO] No qualified run found, do nothing'
sys.exit(13)
###############################################################
# check normtag and get norm values if required
###############################################################
normname='NONE'
normid=0
normvalueDict={}
if not options.withoutNorm:
normname=options.normtag
if not normname:
normmap=normDML.normIdByType(session.nominalSchema(),lumitype='PIXEL',defaultonly=True)
if len(normmap):
normname=normmap.keys()[0]
normid=normmap[normname]
else:
normid=normDML.normIdByName(session.nominalSchema(),normname)
if not normid:
            raise RuntimeError('[ERROR] cannot resolve norm/correction')
normvalueDict=normDML.normValueById(session.nominalSchema(),normid) #{since:[corrector(0),{paramname:paramvalue}(1),amodetag(2),egev(3),comment(4)]}
session.transaction().commit()
lumiReport.toScreenHeader(thiscmmd,datatagname,normname,workingversion,updateversion,'PIXEL',toFile=options.headerfile)
##################
# ls level #
##################
session.transaction().start(True)
GrunsummaryData=lumiCalcAPI.runsummaryMap(session.nominalSchema(),irunlsdict,dataidmap,lumitype='PIXEL')
if options.action == 'overview':
result=lumiCalcAPI.lumiForIds(session.nominalSchema(),irunlsdict,dataidmap,runsummaryMap=GrunsummaryData,beamstatusfilter=None,timeFilter=timeFilter,normmap=normvalueDict,lumitype='PIXEL')
lumiReport.toScreenOverview(result,iresults,options.scalefactor,irunlsdict=irunlsdict,noWarning=noWarning,toFile=options.outputfile)
if options.action == 'lumibyls':
if not options.hltpath:
result=lumiCalcAPI.lumiForIds(session.nominalSchema(),irunlsdict,dataidmap,runsummaryMap=GrunsummaryData,beamstatusfilter=None,timeFilter=timeFilter,normmap=normvalueDict,lumitype='PIXEL',minbiasXsec=options.minbiasxsec)
lumiReport.toScreenLumiByLS(result,iresults,options.scalefactor,irunlsdict=irunlsdict,noWarning=noWarning,toFile=options.outputfile)
else:
hltname=options.hltpath
hltpat=None
if hltname=='*' or hltname=='all':
hltname=None
elif 1 in [c in hltname for c in '*?[]']: #is a fnmatch pattern
hltpat=hltname
hltname=None
result=lumiCalcAPI.effectiveLumiForIds(session.nominalSchema(),irunlsdict,dataidmap,runsummaryMap=GrunsummaryData,beamstatusfilter=None,timeFilter=timeFilter,normmap=normvalueDict,hltpathname=hltname,hltpathpattern=hltpat,withBXInfo=False,bxAlgo=None,withBeamIntensity=False,lumitype='PIXEL')
lumiReport.toScreenLSEffective(result,iresults,options.scalefactor,irunlsdict=irunlsdict,noWarning=noWarning,toFile=options.outputfile,)
if options.action == 'recorded':#recorded actually means effective because it needs to show all the hltpaths...
hltname=options.hltpath
hltpat=None
if hltname is not None:
if hltname=='*' or hltname=='all':
hltname=None
elif 1 in [c in hltname for c in '*?[]']: #is a fnmatch pattern
hltpat=hltname
hltname=None
result=lumiCalcAPI.effectiveLumiForIds(session.nominalSchema(),irunlsdict,dataidmap,runsummaryMap=GrunsummaryData,beamstatusfilter=None,normmap=normvalueDict,hltpathname=hltname,hltpathpattern=hltpat,withBXInfo=False,bxAlgo=None,withBeamIntensity=False,lumitype='PIXEL')
lumiReport.toScreenTotEffective(result,iresults,options.scalefactor,irunlsdict=irunlsdict,noWarning=noWarning,toFile=options.outputfile)
session.transaction().commit()
del session
del svc
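# Hedged usage sketch (editor's addition): typical invocations grounded in the
# argument parser above, assuming a CMSSW environment with
# RecoLuminosity/LumiDB available; the run number and file names are
# hypothetical.
#   pixelLumiCalc.py overview -r 193091
#   pixelLumiCalc.py lumibyls -r 193091 -o lumi_by_ls.csv
#   pixelLumiCalc.py recorded -r 193091 --hltpath "HLT_IsoMu24_*"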
|
bcornwellmott/erpnext
|
refs/heads/develop
|
erpnext/patches/v5_0/update_frozen_accounts_permission_role.py
|
97
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
import frappe
def execute():
account_settings = frappe.get_doc("Accounts Settings")
if not account_settings.frozen_accounts_modifier and account_settings.bde_auth_role:
frappe.db.set_value("Accounts Settings", None,
"frozen_accounts_modifier", account_settings.bde_auth_role)
|
mal/chef
|
refs/heads/master
|
lib/chef/provider/package/yum/yum-dump.py
|
29
|
#
# Author:: Matthew Kent (<mkent@magoazul.com>)
# Copyright:: Copyright 2009-2016, Matthew Kent
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# yum-dump.py
# Inspired by yumhelper.py by David Lutterkort
#
# Produce a list of installed, available and re-installable packages using yum
# and dump the results to stdout.
#
# yum-dump invokes yum similarly to the command line interface which makes it
# subject to most of the configuration parameters in yum.conf. yum-dump will
# also load yum plugins in the same manner as yum - these can affect the output.
#
# Can be run as non root, but that won't update the cache.
#
# Intended to support yum 2.x and 3.x
import os
import sys
import time
import yum
import re
import errno
from yum import Errors
from optparse import OptionParser
from distutils import version
YUM_PID_FILE='/var/run/yum.pid'
YUM_VER = version.StrictVersion(yum.__version__)
YUM_MAJOR = YUM_VER.version[0]
if YUM_MAJOR > 3 or YUM_MAJOR < 2:
print >> sys.stderr, "yum-dump Error: Can't match supported yum version" \
" (%s)" % yum.__version__
sys.exit(1)
# Required for Provides output
if YUM_MAJOR == 2:
import rpm
import rpmUtils.miscutils
def setup(yb, options):
# Only want our output
#
if YUM_MAJOR == 3:
try:
if YUM_VER >= version.StrictVersion("3.2.22"):
yb.preconf.errorlevel=0
yb.preconf.debuglevel=0
# initialize the config
yb.conf
else:
yb.doConfigSetup(errorlevel=0, debuglevel=0)
except yum.Errors.ConfigError, e:
# suppresses an ignored exception at exit
yb.preconf = None
print >> sys.stderr, "yum-dump Config Error: %s" % e
return 1
except ValueError, e:
yb.preconf = None
print >> sys.stderr, "yum-dump Options Error: %s" % e
return 1
elif YUM_MAJOR == 2:
yb.doConfigSetup()
def __log(a,b): pass
yb.log = __log
yb.errorlog = __log
# Give Chef every possible package version, it can decide what to do with them
if YUM_MAJOR == 3:
yb.conf.showdupesfromrepos = True
elif YUM_MAJOR == 2:
yb.conf.setConfigOption('showdupesfromrepos', True)
# Optionally run only on cached repositories, but non root must use the cache
if os.geteuid() != 0:
if YUM_MAJOR == 3:
yb.conf.cache = True
elif YUM_MAJOR == 2:
yb.conf.setConfigOption('cache', True)
else:
if YUM_MAJOR == 3:
yb.conf.cache = options.cache
elif YUM_MAJOR == 2:
yb.conf.setConfigOption('cache', options.cache)
# Handle repo toggle via id or glob exactly like yum
for opt, repos in options.repo_control:
for repo in repos:
if opt == '--enablerepo':
yb.repos.enableRepo(repo)
elif opt == '--disablerepo':
yb.repos.disableRepo(repo)
return 0
def dump_packages(yb, list, output_provides):
packages = {}
if YUM_MAJOR == 2:
yb.doTsSetup()
yb.doRepoSetup()
yb.doSackSetup()
db = yb.doPackageLists(list)
for pkg in db.installed:
pkg.type = 'i'
packages[str(pkg)] = pkg
if YUM_VER >= version.StrictVersion("3.2.21"):
for pkg in db.available:
pkg.type = 'a'
packages[str(pkg)] = pkg
# These are both installed and available
for pkg in db.reinstall_available:
pkg.type = 'r'
packages[str(pkg)] = pkg
else:
# Old style method - no reinstall list
for pkg in yb.pkgSack.returnPackages():
if str(pkg) in packages:
if packages[str(pkg)].type == "i":
packages[str(pkg)].type = 'r'
continue
pkg.type = 'a'
packages[str(pkg)] = pkg
unique_packages = packages.values()
unique_packages.sort(lambda x, y: cmp(x.name, y.name))
for pkg in unique_packages:
if output_provides == "all" or \
(output_provides == "installed" and (pkg.type == "i" or pkg.type == "r")):
# yum 2 doesn't have provides_print, implement it ourselves using methods
# based on requires gathering in packages.py
if YUM_MAJOR == 2:
provlist = []
# Installed and available are gathered in different ways
if pkg.type == 'i' or pkg.type == 'r':
names = pkg.hdr[rpm.RPMTAG_PROVIDENAME]
flags = pkg.hdr[rpm.RPMTAG_PROVIDEFLAGS]
ver = pkg.hdr[rpm.RPMTAG_PROVIDEVERSION]
if names is not None:
tmplst = zip(names, flags, ver)
for (n, f, v) in tmplst:
prov = rpmUtils.miscutils.formatRequire(n, v, f)
provlist.append(prov)
# This is slow :(
elif pkg.type == 'a':
for prcoTuple in pkg.returnPrco('provides'):
prcostr = pkg.prcoPrintable(prcoTuple)
provlist.append(prcostr)
provides = provlist
else:
provides = pkg.provides_print
else:
provides = "[]"
print '%s %s %s %s %s %s %s %s' % (
pkg.name,
pkg.epoch,
pkg.version,
pkg.release,
pkg.arch,
provides,
pkg.type,
pkg.repoid )
return 0
def yum_dump(options):
lock_obtained = False
yb = yum.YumBase()
status = setup(yb, options)
if status != 0:
return status
if options.output_options:
print "[option installonlypkgs] %s" % " ".join(yb.conf.installonlypkgs)
# Non root can't handle locking on rhel/centos 4
if os.geteuid() != 0:
return dump_packages(yb, options.package_list, options.output_provides)
# Wrap the collection and output of packages in yum's global lock to prevent
# any inconsistencies.
try:
    # Spin for up to --yum-lock-timeout seconds waiting for yum's lock
countdown = options.yum_lock_timeout
while True:
try:
yb.doLock(YUM_PID_FILE)
lock_obtained = True
except Errors.LockError, e:
time.sleep(1)
countdown -= 1
if countdown == 0:
print >> sys.stderr, "yum-dump Locking Error! Couldn't obtain an " \
"exclusive yum lock in %d seconds. Giving up." % options.yum_lock_timeout
return 200
else:
break
return dump_packages(yb, options.package_list, options.output_provides)
# Ensure we clear the lock and cleanup any resources
finally:
try:
yb.closeRpmDB()
      if lock_obtained:
yb.doUnlock(YUM_PID_FILE)
except Errors.LockError, e:
print >> sys.stderr, "yum-dump Unlock Error: %s" % e
return 200
# Preserve order of enable/disable repo args like yum does
def gather_repo_opts(option, opt, value, parser):
if getattr(parser.values, option.dest, None) is None:
setattr(parser.values, option.dest, [])
getattr(parser.values, option.dest).append((opt, value.split(',')))
def main():
usage = "Usage: %prog [options]\n" + \
"Output a list of installed, available and re-installable packages via yum"
parser = OptionParser(usage=usage)
parser.add_option("-C", "--cache",
action="store_true", dest="cache", default=False,
help="run entirely from cache, don't update cache")
parser.add_option("-o", "--options",
action="store_true", dest="output_options", default=False,
help="output select yum options useful to Chef")
parser.add_option("-p", "--installed-provides",
action="store_const", const="installed", dest="output_provides", default="none",
help="output Provides for installed packages, big/wide output")
parser.add_option("-P", "--all-provides",
action="store_const", const="all", dest="output_provides", default="none",
help="output Provides for all package, slow, big/wide output")
parser.add_option("-i", "--installed",
action="store_const", const="installed", dest="package_list", default="all",
help="output only installed packages")
parser.add_option("-a", "--available",
action="store_const", const="available", dest="package_list", default="all",
help="output only available and re-installable packages")
parser.add_option("--enablerepo",
action="callback", callback=gather_repo_opts, type="string", dest="repo_control", default=[],
help="enable disabled repositories by id or glob")
parser.add_option("--disablerepo",
action="callback", callback=gather_repo_opts, type="string", dest="repo_control", default=[],
help="disable repositories by id or glob")
parser.add_option("--yum-lock-timeout",
action="store", type="int", dest="yum_lock_timeout", default=30,
help="Time in seconds to wait for yum process lock")
(options, args) = parser.parse_args()
try:
return yum_dump(options)
except yum.Errors.RepoError, e:
print >> sys.stderr, "yum-dump Repository Error: %s" % e
return 1
except yum.Errors.YumBaseError, e:
print >> sys.stderr, "yum-dump General Error: %s" % e
return 1
try:
status = main()
# Suppress a nasty broken pipe error when output is piped to utilities like 'head'
except IOError, e:
if e.errno == errno.EPIPE:
sys.exit(1)
else:
raise
sys.exit(status)
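# Hedged usage sketch (editor's addition): invocations grounded in the option
# parser above; the repository id 'epel' is hypothetical.
#   yum-dump.py -C -i                      # installed packages, cache only
#   yum-dump.py -a --enablerepo=epel       # available packages, enable a repo
#   yum-dump.py -p --yum-lock-timeout 60   # installed Provides, longer lock wait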
|
rackerlabs/horizon
|
refs/heads/master
|
horizon/tabs/views.py
|
3
|
from django import http
from django.views import generic
from horizon import exceptions
from horizon import tables
from horizon.tabs.base import TableTab
class TabView(generic.TemplateView):
"""
A generic class-based view for displaying a :class:`horizon.tabs.TabGroup`.
This view handles selecting specific tabs and deals with AJAX requests
gracefully.
.. attribute:: tab_group_class
The only required attribute for ``TabView``. It should be a class which
inherits from :class:`horizon.tabs.TabGroup`.
"""
tab_group_class = None
_tab_group = None
def __init__(self):
if not self.tab_group_class:
raise AttributeError("You must set the tab_group_class attribute "
"on %s." % self.__class__.__name__)
def get_tabs(self, request, **kwargs):
""" Returns the initialized tab group for this view. """
if self._tab_group is None:
self._tab_group = self.tab_group_class(request, **kwargs)
return self._tab_group
def get_context_data(self, **kwargs):
""" Adds the ``tab_group`` variable to the context data. """
context = super(TabView, self).get_context_data(**kwargs)
try:
tab_group = self.get_tabs(self.request, **kwargs)
context["tab_group"] = tab_group
# Make sure our data is pre-loaded to capture errors.
context["tab_group"].load_tab_data()
except:
exceptions.handle(self.request)
return context
def handle_tabbed_response(self, tab_group, context):
"""
Sends back an AJAX-appropriate response for the tab group if
required, otherwise renders the response as normal.
"""
if self.request.is_ajax():
if tab_group.selected:
return http.HttpResponse(tab_group.selected.render())
else:
return http.HttpResponse(tab_group.render())
return self.render_to_response(context)
def get(self, request, *args, **kwargs):
context = self.get_context_data(**kwargs)
return self.handle_tabbed_response(context["tab_group"], context)
def render_to_response(self, *args, **kwargs):
response = super(TabView, self).render_to_response(*args, **kwargs)
# Because Django's TemplateView uses the TemplateResponse class
# to provide deferred rendering (which is usually helpful), if
# a tab group raises an Http302 redirect (from exceptions.handle for
# example) the exception is actually raised *after* the final pass
# of the exception-handling middleware.
response.render()
return response
class TabbedTableView(tables.MultiTableMixin, TabView):
def __init__(self, *args, **kwargs):
super(TabbedTableView, self).__init__(*args, **kwargs)
self.table_classes = []
self._table_dict = {}
def load_tabs(self):
"""
Loads the tab group, and compiles the table instances for each
table attached to any :class:`horizon.tabs.TableTab` instances on
the tab group. This step is necessary before processing any
tab or table actions.
"""
tab_group = self.get_tabs(self.request, **self.kwargs)
tabs = tab_group.get_tabs()
for tab in [t for t in tabs if issubclass(t.__class__, TableTab)]:
self.table_classes.extend(tab.table_classes)
for table in tab._tables.values():
self._table_dict[table._meta.name] = {'table': table,
'tab': tab}
def get_tables(self):
""" A no-op on this class. Tables are handled at the tab level. """
# Override the base class implementation so that the MultiTableMixin
# doesn't freak out. We do the processing at the TableTab level.
return {}
def handle_table(self, table_dict):
"""
For the given dict containing a ``DataTable`` and a ``TableTab``
instance, it loads the table data for that tab and calls the
table's :meth:`~horizon.tables.DataTable.maybe_handle` method. The
return value will be the result of ``maybe_handle``.
"""
table = table_dict['table']
tab = table_dict['tab']
tab.load_table_data()
table_name = table._meta.name
tab._tables[table_name]._meta.has_more_data = self.has_more_data(table)
handled = tab._tables[table_name].maybe_handle()
return handled
def get(self, request, *args, **kwargs):
self.load_tabs()
# Gather our table instances. It's important that they're the
# actual instances and not the classes!
table_instances = [t['table'] for t in self._table_dict.values()]
# Early out before any tab or table data is loaded
for table in table_instances:
preempted = table.maybe_preempt()
if preempted:
return preempted
# If we have an action, determine if it belongs to one of our tables.
# We don't iterate through all of the tables' maybes_handle
# methods; just jump to the one that's got the matching name.
table_name, action, obj_id = tables.DataTable.check_handler(request)
if table_name in self._table_dict:
handled = self.handle_table(self._table_dict[table_name])
if handled:
return handled
context = self.get_context_data(**kwargs)
return self.handle_tabbed_response(context["tab_group"], context)
def post(self, request, *args, **kwargs):
# GET and POST handling are the same
return self.get(request, *args, **kwargs)
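# Hedged usage sketch (editor's addition): the minimal subclass contract for
# TabView; MyTabGroup is a hypothetical horizon.tabs.TabGroup subclass.
#
#     class MyDetailView(TabView):
#         tab_group_class = MyTabGroup
#         template_name = "myapp/detail.html"
#
# AJAX requests then receive only the selected tab's rendering via
# handle_tabbed_response(), while normal requests get the full template.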
|
tiberiucorbu/av-website
|
refs/heads/master
|
main/auth/__init__.py
|
1
|
# coding: utf-8
from .auth import *
from .bitbucket import *
from .dropbox import *
from .facebook import *
from .gae import *
from .github import *
from .google import *
from .instagram import *
from .linkedin import *
from .microsoft import *
from .reddit import *
from .twitter import *
from .vk import *
from .yahoo import *
|
rdo-management/ironic
|
refs/heads/mgt-master
|
ironic/drivers/modules/fake.py
|
3
|
# -*- encoding: utf-8 -*-
#
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fake driver interfaces used in testing.
This is also an example of some kinds of things which can be done within
drivers. For instance, the MultipleVendorInterface class demonstrates how to
load more than one interface and wrap them in some logic to route incoming
vendor_passthru requests appropriately. This can be useful eg. when mixing
functionality between a power interface and a deploy interface, when both rely
on separate vendor_passthru methods.
"""
from ironic.common import boot_devices
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import states
from ironic.drivers import base
class FakePower(base.PowerInterface):
"""Example implementation of a simple power interface."""
def get_properties(self):
return {}
def validate(self, task):
pass
def get_power_state(self, task):
return task.node.power_state
def set_power_state(self, task, power_state):
if power_state not in [states.POWER_ON, states.POWER_OFF]:
raise exception.InvalidParameterValue(_("set_power_state called "
"with an invalid power state: %s.") % power_state)
task.node.power_state = power_state
def reboot(self, task):
pass
class FakeDeploy(base.DeployInterface):
"""Class for a fake deployment driver.
    Example implementation of a deploy interface that uses a
separate power interface.
"""
def get_properties(self):
return {}
def validate(self, task):
pass
def deploy(self, task):
return states.DEPLOYDONE
def tear_down(self, task):
return states.DELETED
def prepare(self, task):
pass
def clean_up(self, task):
pass
def take_over(self, task):
pass
class FakeVendorA(base.VendorInterface):
"""Example implementation of a vendor passthru interface."""
def get_properties(self):
return {'A1': 'A1 description. Required.',
'A2': 'A2 description. Optional.'}
def validate(self, task, method, **kwargs):
if method == 'first_method':
bar = kwargs.get('bar')
if not bar:
raise exception.MissingParameterValue(_(
"Parameter 'bar' not passed to method 'first_method'."))
@base.passthru(['POST'],
description=_("Test if the value of bar is baz"))
def first_method(self, task, http_method, bar):
return True if bar == 'baz' else False
class FakeVendorB(base.VendorInterface):
"""Example implementation of a secondary vendor passthru."""
def get_properties(self):
return {'B1': 'B1 description. Required.',
'B2': 'B2 description. Required.'}
def validate(self, task, method, **kwargs):
if method in ('second_method', 'third_method_sync'):
bar = kwargs.get('bar')
if not bar:
raise exception.MissingParameterValue(_(
"Parameter 'bar' not passed to method '%s'.") % method)
@base.passthru(['POST'],
description=_("Test if the value of bar is kazoo"))
def second_method(self, task, http_method, bar):
return True if bar == 'kazoo' else False
@base.passthru(['POST'], async=False,
description=_("Test if the value of bar is meow"))
def third_method_sync(self, task, http_method, bar):
return True if bar == 'meow' else False
class FakeConsole(base.ConsoleInterface):
"""Example implementation of a simple console interface."""
def get_properties(self):
return {}
def validate(self, task):
pass
def start_console(self, task):
pass
def stop_console(self, task):
pass
def get_console(self, task):
return {}
class FakeManagement(base.ManagementInterface):
"""Example implementation of a simple management interface."""
def get_properties(self):
return {}
def validate(self, task):
pass
def get_supported_boot_devices(self):
return [boot_devices.PXE]
def set_boot_device(self, task, device, persistent=False):
if device not in self.get_supported_boot_devices():
raise exception.InvalidParameterValue(_(
"Invalid boot device %s specified.") % device)
def get_boot_device(self, task):
return {'boot_device': boot_devices.PXE, 'persistent': False}
def get_sensors_data(self, task):
return {}
class FakeInspect(base.InspectInterface):
"""Example implementation of a simple inspect interface."""
def get_properties(self):
return {}
def validate(self, task):
pass
def inspect_hardware(self, task):
return states.MANAGEABLE
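# Hedged usage sketch (editor's addition): exercising the fake interfaces the
# way tests typically do; requires the ironic package to be importable, and
# None stands in for a real TaskManager task.
if __name__ == '__main__':
    vendor = FakeVendorA()
    print(vendor.get_properties())                    # the A1/A2 description dict
    vendor.validate(None, 'first_method', bar='baz')  # passes: 'bar' supplied
    print(FakePower().get_properties())               # {}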
|
aghozlane/masque
|
refs/heads/master
|
rename_otu/rename_otu.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# A copy of the GNU General Public License is available at
# http://www.gnu.org/licenses/gpl-3.0.html
from __future__ import print_function
import os
import sys
import argparse
__author__ = "Amine Ghozlane"
__copyright__ = "Copyright 2015, Institut Pasteur"
__credits__ = ["Amine Ghozlane"]
__license__ = "GPL"
__version__ = "1.0.0"
__maintainer__ = "Amine Ghozlane"
__email__ = "amine.ghozlane@pasteur.fr"
__status__ = "Developpement"
def isfile(path):
"""Check if path is an existing file.
Arguments:
path: Path to the file
"""
if not os.path.isfile(path):
if os.path.isdir(path):
msg = "{0} is a directory".format(path)
else:
msg = "{0} does not exist.".format(path)
raise argparse.ArgumentTypeError(msg)
return path
def getArguments():
"""Retrieves the arguments of the program.
Returns: An object that contains the arguments
"""
# Parsing arguments
parser = argparse.ArgumentParser(description=__doc__, usage=
"{0} -h".format(sys.argv[0]))
parser.add_argument('-i', dest='fasta_file', type=isfile, required=True,
help='Path to the fasta file.')
parser.add_argument('-n', dest='name', type=str, default="OTU_",
help='Relabel name (default= OTU_).')
parser.add_argument('-o', dest='output_file', type=str, default=None,
help='Output file.')
args = parser.parse_args()
return args
def fill(text, width=80):
"""Split text"""
return os.linesep.join(text[i:i+width] for i in xrange(0, len(text), width))
def rename_otu(fasta_file, name, output_file):
"""Add new label and rewrite text
"""
count = 1
if not output_file:
output = sys.stdout
else:
output = open(output_file, "wt")
header = ""
sequence = ""
try:
with open(fasta_file, "rt") as fast:
for line in fast:
if line.startswith(">"):
if len(header) > 0:
print(">{0}{1}{2}{3}".format(name, count, os.linesep,
fill(sequence)),
file=output)
sequence = ""
count +=1
header = line
else:
sequence += line.replace("\n", "").replace("\r", "")
print(">{0}{1}{2}{3}".format(name, count, os.linesep,
fill(sequence)),
file=output)
except IOError:
sys.exit("Error cannot open {0}".format(fasta_file))
if output_file:
output.close()
def main():
"""Main program
"""
args = getArguments()
rename_otu(args.fasta_file, args.name, args.output_file)
if __name__ == '__main__':
main()
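# Hedged usage sketch (editor's addition): typical invocation; the file names
# are hypothetical.
#   rename_otu.py -i otus.fasta -n OTU_ -o otus_renamed.fasta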
|
g-weatherill/catalogue_toolkit
|
refs/heads/master
|
eqcat/__init__.py
|
6
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# LICENSE
#
# Copyright (c) 2015 GEM Foundation
#
# The Catalogue Toolkit is free software: you can redistribute
# it and/or modify it under the terms of the GNU Affero General Public
# License as published by the Free Software Foundation, either version
# 3 of the License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# with this download. If not, see <http://www.gnu.org/licenses/>
#!/usr/bin/env/python
|
tequa/ammisoft
|
refs/heads/master
|
ammimain/WinPython-64bit-2.7.13.1Zero/python-2.7.13.amd64/Lib/site-packages/numpy/core/setup.py
|
13
|
from __future__ import division, print_function
import os
import sys
import pickle
import copy
import sysconfig
import warnings
from os.path import join
from numpy.distutils import log
from distutils.dep_util import newer
from distutils.sysconfig import get_config_var
from numpy._build_utils.apple_accelerate import (
uses_accelerate_framework, get_sgemv_fix
)
from numpy.compat import npy_load_module
from setup_common import *
# Set to True to enable relaxed strides checking. This (mostly) means
# that `strides[dim]` is ignored if `shape[dim] == 1` when setting flags.
NPY_RELAXED_STRIDES_CHECKING = (os.environ.get('NPY_RELAXED_STRIDES_CHECKING', "1") != "0")
# Put NPY_RELAXED_STRIDES_DEBUG=1 in the environment if you want numpy to use a
# bogus value for affected strides in order to help smoke out bad stride usage
# when relaxed stride checking is enabled.
NPY_RELAXED_STRIDES_DEBUG = (os.environ.get('NPY_RELAXED_STRIDES_DEBUG', "0") != "0")
NPY_RELAXED_STRIDES_DEBUG = NPY_RELAXED_STRIDES_DEBUG and NPY_RELAXED_STRIDES_CHECKING
# XXX: ugly, we use a class to avoid calling twice some expensive functions in
# config.h/numpyconfig.h. I don't see a better way because distutils forces
# config.h generation inside an Extension class, and as such sharing
# configuration information between extensions is not easy.
# Using a pickled-based memoize does not work because config_cmd is an instance
# method, which cPickle does not like.
#
# Use pickle in all cases, as cPickle is gone in python3 and the difference
# in time is only in build. -- Charles Harris, 2013-03-30
class CallOnceOnly(object):
def __init__(self):
self._check_types = None
self._check_ieee_macros = None
self._check_complex = None
def check_types(self, *a, **kw):
if self._check_types is None:
out = check_types(*a, **kw)
self._check_types = pickle.dumps(out)
else:
out = copy.deepcopy(pickle.loads(self._check_types))
return out
def check_ieee_macros(self, *a, **kw):
if self._check_ieee_macros is None:
out = check_ieee_macros(*a, **kw)
self._check_ieee_macros = pickle.dumps(out)
else:
out = copy.deepcopy(pickle.loads(self._check_ieee_macros))
return out
def check_complex(self, *a, **kw):
if self._check_complex is None:
out = check_complex(*a, **kw)
self._check_complex = pickle.dumps(out)
else:
out = copy.deepcopy(pickle.loads(self._check_complex))
return out
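# Hedged illustration (editor's addition): the pickle-based memoization used
# above, in miniature -- cache the pickled result of the first call and hand
# back deep copies afterwards so callers can mutate their copy freely.
# `expensive` is a hypothetical stand-in for check_types and friends.
#
#     class _Once(object):
#         def __init__(self):
#             self._cached = None
#         def get(self, *a, **kw):
#             if self._cached is None:
#                 self._cached = pickle.dumps(expensive(*a, **kw))
#             return copy.deepcopy(pickle.loads(self._cached))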
def pythonlib_dir():
"""return path where libpython* is."""
if sys.platform == 'win32':
return os.path.join(sys.prefix, "libs")
else:
return get_config_var('LIBDIR')
def is_npy_no_signal():
"""Return True if the NPY_NO_SIGNAL symbol must be defined in configuration
header."""
return sys.platform == 'win32'
def is_npy_no_smp():
"""Return True if the NPY_NO_SMP symbol must be defined in public
header (when SMP support cannot be reliably enabled)."""
# Perhaps a fancier check is in order here.
# so that threads are only enabled if there
# are actually multiple CPUS? -- but
# threaded code can be nice even on a single
# CPU so that long-calculating code doesn't
# block.
return 'NPY_NOSMP' in os.environ
def win32_checks(deflist):
from numpy.distutils.misc_util import get_build_architecture
a = get_build_architecture()
# Distutils hack on AMD64 on windows
print('BUILD_ARCHITECTURE: %r, os.name=%r, sys.platform=%r' %
(a, os.name, sys.platform))
if a == 'AMD64':
deflist.append('DISTUTILS_USE_SDK')
# On win32, force long double format string to be 'g', not
# 'Lg', since the MS runtime does not support long double whose
# size is > sizeof(double)
if a == "Intel" or a == "AMD64":
deflist.append('FORCE_NO_LONG_DOUBLE_FORMATTING')
def check_math_capabilities(config, moredefs, mathlibs):
def check_func(func_name):
return config.check_func(func_name, libraries=mathlibs,
decl=True, call=True)
def check_funcs_once(funcs_name):
decl = dict([(f, True) for f in funcs_name])
st = config.check_funcs_once(funcs_name, libraries=mathlibs,
decl=decl, call=decl)
if st:
moredefs.extend([(fname2def(f), 1) for f in funcs_name])
return st
def check_funcs(funcs_name):
# Use check_funcs_once first, and if it does not work, test func per
# func. Return success only if all the functions are available
if not check_funcs_once(funcs_name):
# Global check failed, check func per func
for f in funcs_name:
if check_func(f):
moredefs.append((fname2def(f), 1))
return 0
else:
return 1
#use_msvc = config.check_decl("_MSC_VER")
if not check_funcs_once(MANDATORY_FUNCS):
raise SystemError("One of the required function to build numpy is not"
" available (the list is %s)." % str(MANDATORY_FUNCS))
# Standard functions which may not be available and for which we have a
# replacement implementation. Note that some of these are C99 functions.
# XXX: hack to circumvent cpp pollution from python: python put its
# config.h in the public namespace, so we have a clash for the common
# functions we test. We remove every function tested by python's
    # autoconf, hoping their own tests are correct
for f in OPTIONAL_STDFUNCS_MAYBE:
if config.check_decl(fname2def(f),
headers=["Python.h", "math.h"]):
OPTIONAL_STDFUNCS.remove(f)
check_funcs(OPTIONAL_STDFUNCS)
for h in OPTIONAL_HEADERS:
if config.check_func("", decl=False, call=False, headers=[h]):
moredefs.append((fname2def(h).replace(".", "_"), 1))
for tup in OPTIONAL_INTRINSICS:
headers = None
if len(tup) == 2:
f, args, m = tup[0], tup[1], fname2def(tup[0])
elif len(tup) == 3:
f, args, headers, m = tup[0], tup[1], [tup[2]], fname2def(tup[0])
else:
f, args, headers, m = tup[0], tup[1], [tup[2]], fname2def(tup[3])
if config.check_func(f, decl=False, call=True, call_args=args,
headers=headers):
moredefs.append((m, 1))
for dec, fn in OPTIONAL_FUNCTION_ATTRIBUTES:
if config.check_gcc_function_attribute(dec, fn):
moredefs.append((fname2def(fn), 1))
for fn in OPTIONAL_VARIABLE_ATTRIBUTES:
if config.check_gcc_variable_attribute(fn):
m = fn.replace("(", "_").replace(")", "_")
moredefs.append((fname2def(m), 1))
# C99 functions: float and long double versions
check_funcs(C99_FUNCS_SINGLE)
check_funcs(C99_FUNCS_EXTENDED)
def check_complex(config, mathlibs):
priv = []
pub = []
try:
if os.uname()[0] == "Interix":
warnings.warn("Disabling broken complex support. See #1365", stacklevel=2)
return priv, pub
except:
# os.uname not available on all platforms. blanket except ugly but safe
pass
# Check for complex support
st = config.check_header('complex.h')
if st:
priv.append(('HAVE_COMPLEX_H', 1))
pub.append(('NPY_USE_C99_COMPLEX', 1))
for t in C99_COMPLEX_TYPES:
st = config.check_type(t, headers=["complex.h"])
if st:
pub.append(('NPY_HAVE_%s' % type2def(t), 1))
def check_prec(prec):
flist = [f + prec for f in C99_COMPLEX_FUNCS]
decl = dict([(f, True) for f in flist])
if not config.check_funcs_once(flist, call=decl, decl=decl,
libraries=mathlibs):
for f in flist:
if config.check_func(f, call=True, decl=True,
libraries=mathlibs):
priv.append((fname2def(f), 1))
else:
priv.extend([(fname2def(f), 1) for f in flist])
check_prec('')
check_prec('f')
check_prec('l')
return priv, pub
def check_ieee_macros(config):
priv = []
pub = []
macros = []
def _add_decl(f):
priv.append(fname2def("decl_%s" % f))
pub.append('NPY_%s' % fname2def("decl_%s" % f))
# XXX: hack to circumvent cpp pollution from python: python put its
# config.h in the public namespace, so we have a clash for the common
# functions we test. We remove every function tested by python's
    # autoconf, hoping their own tests are correct
_macros = ["isnan", "isinf", "signbit", "isfinite"]
for f in _macros:
py_symbol = fname2def("decl_%s" % f)
already_declared = config.check_decl(py_symbol,
headers=["Python.h", "math.h"])
if already_declared:
if config.check_macro_true(py_symbol,
headers=["Python.h", "math.h"]):
pub.append('NPY_%s' % fname2def("decl_%s" % f))
else:
macros.append(f)
# Normally, isnan and isinf are macro (C99), but some platforms only have
# func, or both func and macro version. Check for macro only, and define
# replacement ones if not found.
# Note: including Python.h is necessary because it modifies some math.h
# definitions
for f in macros:
st = config.check_decl(f, headers=["Python.h", "math.h"])
if st:
_add_decl(f)
return priv, pub
def check_types(config_cmd, ext, build_dir):
private_defines = []
public_defines = []
# Expected size (in number of bytes) for each type. This is an
# optimization: those are only hints, and an exhaustive search for the size
# is done if the hints are wrong.
expected = {'short': [2], 'int': [4], 'long': [8, 4],
'float': [4], 'double': [8], 'long double': [16, 12, 8],
'Py_intptr_t': [8, 4], 'PY_LONG_LONG': [8], 'long long': [8],
'off_t': [8, 4]}
# Check we have the python header (-dev* packages on Linux)
result = config_cmd.check_header('Python.h')
if not result:
python = 'python'
if '__pypy__' in sys.builtin_module_names:
python = 'pypy'
raise SystemError(
"Cannot compile 'Python.h'. Perhaps you need to "
"install {0}-dev|{0}-devel.".format(python))
res = config_cmd.check_header("endian.h")
if res:
private_defines.append(('HAVE_ENDIAN_H', 1))
public_defines.append(('NPY_HAVE_ENDIAN_H', 1))
res = config_cmd.check_header("sys/endian.h")
if res:
private_defines.append(('HAVE_SYS_ENDIAN_H', 1))
public_defines.append(('NPY_HAVE_SYS_ENDIAN_H', 1))
# Check basic types sizes
for type in ('short', 'int', 'long'):
res = config_cmd.check_decl("SIZEOF_%s" % sym2def(type), headers=["Python.h"])
if res:
public_defines.append(('NPY_SIZEOF_%s' % sym2def(type), "SIZEOF_%s" % sym2def(type)))
else:
res = config_cmd.check_type_size(type, expected=expected[type])
if res >= 0:
public_defines.append(('NPY_SIZEOF_%s' % sym2def(type), '%d' % res))
else:
raise SystemError("Checking sizeof (%s) failed !" % type)
for type in ('float', 'double', 'long double'):
already_declared = config_cmd.check_decl("SIZEOF_%s" % sym2def(type),
headers=["Python.h"])
res = config_cmd.check_type_size(type, expected=expected[type])
if res >= 0:
public_defines.append(('NPY_SIZEOF_%s' % sym2def(type), '%d' % res))
if not already_declared and not type == 'long double':
private_defines.append(('SIZEOF_%s' % sym2def(type), '%d' % res))
else:
raise SystemError("Checking sizeof (%s) failed !" % type)
# Compute size of corresponding complex type: used to check that our
# definition is binary compatible with C99 complex type (check done at
# build time in npy_common.h)
complex_def = "struct {%s __x; %s __y;}" % (type, type)
res = config_cmd.check_type_size(complex_def,
expected=[2 * x for x in expected[type]])
if res >= 0:
public_defines.append(('NPY_SIZEOF_COMPLEX_%s' % sym2def(type), '%d' % res))
else:
raise SystemError("Checking sizeof (%s) failed !" % complex_def)
for type in ('Py_intptr_t', 'off_t'):
res = config_cmd.check_type_size(type, headers=["Python.h"],
library_dirs=[pythonlib_dir()],
expected=expected[type])
if res >= 0:
private_defines.append(('SIZEOF_%s' % sym2def(type), '%d' % res))
public_defines.append(('NPY_SIZEOF_%s' % sym2def(type), '%d' % res))
else:
raise SystemError("Checking sizeof (%s) failed !" % type)
# We check declaration AND type because that's how distutils does it.
if config_cmd.check_decl('PY_LONG_LONG', headers=['Python.h']):
res = config_cmd.check_type_size('PY_LONG_LONG', headers=['Python.h'],
library_dirs=[pythonlib_dir()],
expected=expected['PY_LONG_LONG'])
if res >= 0:
private_defines.append(('SIZEOF_%s' % sym2def('PY_LONG_LONG'), '%d' % res))
public_defines.append(('NPY_SIZEOF_%s' % sym2def('PY_LONG_LONG'), '%d' % res))
else:
raise SystemError("Checking sizeof (%s) failed !" % 'PY_LONG_LONG')
res = config_cmd.check_type_size('long long',
expected=expected['long long'])
if res >= 0:
#private_defines.append(('SIZEOF_%s' % sym2def('long long'), '%d' % res))
public_defines.append(('NPY_SIZEOF_%s' % sym2def('long long'), '%d' % res))
else:
raise SystemError("Checking sizeof (%s) failed !" % 'long long')
if not config_cmd.check_decl('CHAR_BIT', headers=['Python.h']):
raise RuntimeError(
"Config wo CHAR_BIT is not supported"
", please contact the maintainers")
return private_defines, public_defines
def check_mathlib(config_cmd):
# Testing the C math library
mathlibs = []
mathlibs_choices = [[], ['m'], ['cpml']]
mathlib = os.environ.get('MATHLIB')
if mathlib:
mathlibs_choices.insert(0, mathlib.split(','))
for libs in mathlibs_choices:
if config_cmd.check_func("exp", libraries=libs, decl=True, call=True):
mathlibs = libs
break
else:
raise EnvironmentError("math library missing; rerun "
"setup.py after setting the "
"MATHLIB env variable")
return mathlibs
def visibility_define(config):
"""Return the define value to use for NPY_VISIBILITY_HIDDEN (may be empty
string)."""
if config.check_compiler_gcc4():
return '__attribute__((visibility("hidden")))'
else:
return ''
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration, dot_join
from numpy.distutils.system_info import get_info
config = Configuration('core', parent_package, top_path)
local_dir = config.local_path
codegen_dir = join(local_dir, 'code_generators')
if is_released(config):
warnings.simplefilter('error', MismatchCAPIWarning)
# Check whether we have a mismatch between the set C API VERSION and the
# actual C API VERSION
check_api_version(C_API_VERSION, codegen_dir)
generate_umath_py = join(codegen_dir, 'generate_umath.py')
n = dot_join(config.name, 'generate_umath')
generate_umath = npy_load_module('_'.join(n.split('.')),
generate_umath_py, ('.py', 'U', 1))
header_dir = 'include/numpy' # this is relative to config.path_in_package
cocache = CallOnceOnly()
def generate_config_h(ext, build_dir):
target = join(build_dir, header_dir, 'config.h')
d = os.path.dirname(target)
if not os.path.exists(d):
os.makedirs(d)
if newer(__file__, target):
config_cmd = config.get_config_cmd()
log.info('Generating %s', target)
# Check sizeof
moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir)
# Check math library and C99 math funcs availability
mathlibs = check_mathlib(config_cmd)
moredefs.append(('MATHLIB', ','.join(mathlibs)))
check_math_capabilities(config_cmd, moredefs, mathlibs)
moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])
moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0])
# Signal check
if is_npy_no_signal():
moredefs.append('__NPY_PRIVATE_NO_SIGNAL')
# Windows checks
if sys.platform == 'win32' or os.name == 'nt':
win32_checks(moredefs)
# C99 restrict keyword
moredefs.append(('NPY_RESTRICT', config_cmd.check_restrict()))
# Inline check
inline = config_cmd.check_inline()
# Use relaxed stride checking
if NPY_RELAXED_STRIDES_CHECKING:
moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))
# Use bogus stride debug aid when relaxed strides are enabled
if NPY_RELAXED_STRIDES_DEBUG:
moredefs.append(('NPY_RELAXED_STRIDES_DEBUG', 1))
# Get long double representation
if sys.platform != 'darwin':
rep = check_long_double_representation(config_cmd)
if rep in ['INTEL_EXTENDED_12_BYTES_LE',
'INTEL_EXTENDED_16_BYTES_LE',
'MOTOROLA_EXTENDED_12_BYTES_BE',
'IEEE_QUAD_LE', 'IEEE_QUAD_BE',
'IEEE_DOUBLE_LE', 'IEEE_DOUBLE_BE',
'DOUBLE_DOUBLE_BE', 'DOUBLE_DOUBLE_LE']:
moredefs.append(('HAVE_LDOUBLE_%s' % rep, 1))
else:
raise ValueError("Unrecognized long double format: %s" % rep)
# Py3K check
if sys.version_info[0] == 3:
moredefs.append(('NPY_PY3K', 1))
# Generate the config.h file from moredefs
target_f = open(target, 'w')
for d in moredefs:
if isinstance(d, str):
target_f.write('#define %s\n' % (d))
else:
target_f.write('#define %s %s\n' % (d[0], d[1]))
# define inline to our keyword, or nothing
target_f.write('#ifndef __cplusplus\n')
if inline == 'inline':
target_f.write('/* #undef inline */\n')
else:
target_f.write('#define inline %s\n' % inline)
target_f.write('#endif\n')
# add the guard to make sure config.h is never included directly,
# but always through npy_config.h
target_f.write("""
#ifndef _NPY_NPY_CONFIG_H_
#error config.h should never be included directly, include npy_config.h instead
#endif
""")
target_f.close()
print('File:', target)
target_f = open(target)
print(target_f.read())
target_f.close()
print('EOF')
else:
mathlibs = []
target_f = open(target)
for line in target_f:
s = '#define MATHLIB'
if line.startswith(s):
value = line[len(s):].strip()
if value:
mathlibs.extend(value.split(','))
target_f.close()
# Ugly: this can be called within a library and not an extension,
# in which case there is no libraries attributes (and none is
# needed).
if hasattr(ext, 'libraries'):
ext.libraries.extend(mathlibs)
incl_dir = os.path.dirname(target)
if incl_dir not in config.numpy_include_dirs:
config.numpy_include_dirs.append(incl_dir)
return target
def generate_numpyconfig_h(ext, build_dir):
"""Depends on config.h: generate_config_h has to be called before !"""
# put private include directory in build_dir on search path
# allows using code generation in headers
config.add_include_dirs(join(build_dir, "src", "private"))
config.add_include_dirs(join(build_dir, "src", "npymath"))
target = join(build_dir, header_dir, '_numpyconfig.h')
d = os.path.dirname(target)
if not os.path.exists(d):
os.makedirs(d)
if newer(__file__, target):
config_cmd = config.get_config_cmd()
log.info('Generating %s', target)
# Check sizeof
ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir)
if is_npy_no_signal():
moredefs.append(('NPY_NO_SIGNAL', 1))
if is_npy_no_smp():
moredefs.append(('NPY_NO_SMP', 1))
else:
moredefs.append(('NPY_NO_SMP', 0))
mathlibs = check_mathlib(config_cmd)
moredefs.extend(cocache.check_ieee_macros(config_cmd)[1])
moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[1])
if NPY_RELAXED_STRIDES_CHECKING:
moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))
if NPY_RELAXED_STRIDES_DEBUG:
moredefs.append(('NPY_RELAXED_STRIDES_DEBUG', 1))
# Check whether we can use inttypes (C99) formats
if config_cmd.check_decl('PRIdPTR', headers=['inttypes.h']):
moredefs.append(('NPY_USE_C99_FORMATS', 1))
# visibility check
hidden_visibility = visibility_define(config_cmd)
moredefs.append(('NPY_VISIBILITY_HIDDEN', hidden_visibility))
# Add the C API/ABI versions
moredefs.append(('NPY_ABI_VERSION', '0x%.8X' % C_ABI_VERSION))
moredefs.append(('NPY_API_VERSION', '0x%.8X' % C_API_VERSION))
# Add moredefs to header
target_f = open(target, 'w')
for d in moredefs:
if isinstance(d, str):
target_f.write('#define %s\n' % (d))
else:
target_f.write('#define %s %s\n' % (d[0], d[1]))
# Define __STDC_FORMAT_MACROS
target_f.write("""
#ifndef __STDC_FORMAT_MACROS
#define __STDC_FORMAT_MACROS 1
#endif
""")
target_f.close()
# Dump the numpyconfig.h header to stdout
print('File: %s' % target)
target_f = open(target)
print(target_f.read())
target_f.close()
print('EOF')
config.add_data_files((header_dir, target))
return target
def generate_api_func(module_name):
def generate_api(ext, build_dir):
script = join(codegen_dir, module_name + '.py')
sys.path.insert(0, codegen_dir)
try:
m = __import__(module_name)
log.info('executing %s', script)
h_file, c_file, doc_file = m.generate_api(os.path.join(build_dir, header_dir))
finally:
del sys.path[0]
config.add_data_files((header_dir, h_file),
(header_dir, doc_file))
return (h_file,)
return generate_api
generate_numpy_api = generate_api_func('generate_numpy_api')
generate_ufunc_api = generate_api_func('generate_ufunc_api')
config.add_include_dirs(join(local_dir, "src", "private"))
config.add_include_dirs(join(local_dir, "src"))
config.add_include_dirs(join(local_dir))
config.add_data_files('include/numpy/*.h')
config.add_include_dirs(join('src', 'npymath'))
config.add_include_dirs(join('src', 'multiarray'))
config.add_include_dirs(join('src', 'umath'))
config.add_include_dirs(join('src', 'npysort'))
config.add_define_macros([("NPY_INTERNAL_BUILD", "1")]) # this macro indicates that Numpy build is in process
config.add_define_macros([("HAVE_NPY_CONFIG_H", "1")])
if sys.platform[:3] == "aix":
config.add_define_macros([("_LARGE_FILES", None)])
else:
config.add_define_macros([("_FILE_OFFSET_BITS", "64")])
config.add_define_macros([('_LARGEFILE_SOURCE', '1')])
config.add_define_macros([('_LARGEFILE64_SOURCE', '1')])
config.numpy_include_dirs.extend(config.paths('include'))
deps = [join('src', 'npymath', '_signbit.c'),
join('include', 'numpy', '*object.h'),
join(codegen_dir, 'genapi.py'),
]
#######################################################################
# dummy module #
#######################################################################
# npymath needs the config.h and numpyconfig.h files to be generated, but
# build_clib cannot handle generate_config_h and generate_numpyconfig_h
# (don't ask). Because clib are generated before extensions, we have to
# explicitly add an extension which has generate_config_h and
# generate_numpyconfig_h as sources *before* adding npymath.
config.add_extension('_dummy',
sources=[join('src', 'dummymodule.c'),
generate_config_h,
generate_numpyconfig_h,
generate_numpy_api]
)
#######################################################################
# npymath library #
#######################################################################
subst_dict = dict([("sep", os.path.sep), ("pkgname", "numpy.core")])
def get_mathlib_info(*args):
# Another ugly hack: the mathlib info is known once build_src is run,
# but we cannot use add_installed_pkg_config here either, so we only
# update the substitution dictionary during npymath build
config_cmd = config.get_config_cmd()
# Check that the toolchain works, to fail early if it doesn't
# (avoid late errors with MATHLIB which are confusing if the
# compiler does not work).
st = config_cmd.try_link('int main(void) { return 0;}')
if not st:
raise RuntimeError("Broken toolchain: cannot link a simple C program")
mlibs = check_mathlib(config_cmd)
posix_mlib = ' '.join(['-l%s' % l for l in mlibs])
msvc_mlib = ' '.join(['%s.lib' % l for l in mlibs])
subst_dict["posix_mathlib"] = posix_mlib
subst_dict["msvc_mathlib"] = msvc_mlib
npymath_sources = [join('src', 'npymath', 'npy_math_internal.h.src'),
join('src', 'npymath', 'npy_math.c'),
join('src', 'npymath', 'ieee754.c.src'),
join('src', 'npymath', 'npy_math_complex.c.src'),
join('src', 'npymath', 'halffloat.c')
]
config.add_installed_library('npymath',
sources=npymath_sources + [get_mathlib_info],
install_dir='lib',
build_info={'include_dirs' : []}) # empty list required for creating npy_math_internal.h
config.add_npy_pkg_config("npymath.ini.in", "lib/npy-pkg-config",
subst_dict)
config.add_npy_pkg_config("mlib.ini.in", "lib/npy-pkg-config",
subst_dict)
#######################################################################
# npysort library #
#######################################################################
# This library is created for the build but it is not installed
npysort_sources = [join('src', 'npysort', 'quicksort.c.src'),
join('src', 'npysort', 'mergesort.c.src'),
join('src', 'npysort', 'heapsort.c.src'),
join('src', 'private', 'npy_partition.h.src'),
join('src', 'npysort', 'selection.c.src'),
join('src', 'private', 'npy_binsearch.h.src'),
join('src', 'npysort', 'binsearch.c.src'),
]
config.add_library('npysort',
sources=npysort_sources,
include_dirs=[])
#######################################################################
# multiarray module #
#######################################################################
multiarray_deps = [
join('src', 'multiarray', 'arrayobject.h'),
join('src', 'multiarray', 'arraytypes.h'),
join('src', 'multiarray', 'array_assign.h'),
join('src', 'multiarray', 'buffer.h'),
join('src', 'multiarray', 'calculation.h'),
join('src', 'multiarray', 'cblasfuncs.h'),
join('src', 'multiarray', 'common.h'),
join('src', 'multiarray', 'convert_datatype.h'),
join('src', 'multiarray', 'convert.h'),
join('src', 'multiarray', 'conversion_utils.h'),
join('src', 'multiarray', 'ctors.h'),
join('src', 'multiarray', 'descriptor.h'),
join('src', 'multiarray', 'getset.h'),
join('src', 'multiarray', 'hashdescr.h'),
join('src', 'multiarray', 'iterators.h'),
join('src', 'multiarray', 'mapping.h'),
join('src', 'multiarray', 'methods.h'),
join('src', 'multiarray', 'multiarraymodule.h'),
join('src', 'multiarray', 'nditer_impl.h'),
join('src', 'multiarray', 'number.h'),
join('src', 'multiarray', 'numpyos.h'),
join('src', 'multiarray', 'refcount.h'),
join('src', 'multiarray', 'scalartypes.h'),
join('src', 'multiarray', 'sequence.h'),
join('src', 'multiarray', 'shape.h'),
join('src', 'multiarray', 'ucsnarrow.h'),
join('src', 'multiarray', 'usertypes.h'),
join('src', 'multiarray', 'vdot.h'),
join('src', 'private', 'npy_config.h'),
join('src', 'private', 'templ_common.h.src'),
join('src', 'private', 'lowlevel_strided_loops.h'),
join('src', 'private', 'mem_overlap.h'),
join('src', 'private', 'ufunc_override.h'),
join('src', 'private', 'binop_override.h'),
join('src', 'private', 'npy_extint128.h'),
join('include', 'numpy', 'arrayobject.h'),
join('include', 'numpy', '_neighborhood_iterator_imp.h'),
join('include', 'numpy', 'npy_endian.h'),
join('include', 'numpy', 'arrayscalars.h'),
join('include', 'numpy', 'noprefix.h'),
join('include', 'numpy', 'npy_interrupt.h'),
join('include', 'numpy', 'npy_3kcompat.h'),
join('include', 'numpy', 'npy_math.h'),
join('include', 'numpy', 'halffloat.h'),
join('include', 'numpy', 'npy_common.h'),
join('include', 'numpy', 'npy_os.h'),
join('include', 'numpy', 'utils.h'),
join('include', 'numpy', 'ndarrayobject.h'),
join('include', 'numpy', 'npy_cpu.h'),
join('include', 'numpy', 'numpyconfig.h'),
join('include', 'numpy', 'ndarraytypes.h'),
join('include', 'numpy', 'npy_1_7_deprecated_api.h'),
# add library sources as distuils does not consider libraries
# dependencies
] + npysort_sources + npymath_sources
multiarray_src = [
join('src', 'multiarray', 'alloc.c'),
join('src', 'multiarray', 'arrayobject.c'),
join('src', 'multiarray', 'arraytypes.c.src'),
join('src', 'multiarray', 'array_assign.c'),
join('src', 'multiarray', 'array_assign_scalar.c'),
join('src', 'multiarray', 'array_assign_array.c'),
join('src', 'multiarray', 'buffer.c'),
join('src', 'multiarray', 'calculation.c'),
join('src', 'multiarray', 'compiled_base.c'),
join('src', 'multiarray', 'common.c'),
join('src', 'multiarray', 'convert.c'),
join('src', 'multiarray', 'convert_datatype.c'),
join('src', 'multiarray', 'conversion_utils.c'),
join('src', 'multiarray', 'ctors.c'),
join('src', 'multiarray', 'datetime.c'),
join('src', 'multiarray', 'datetime_strings.c'),
join('src', 'multiarray', 'datetime_busday.c'),
join('src', 'multiarray', 'datetime_busdaycal.c'),
join('src', 'multiarray', 'descriptor.c'),
join('src', 'multiarray', 'dtype_transfer.c'),
join('src', 'multiarray', 'einsum.c.src'),
join('src', 'multiarray', 'flagsobject.c'),
join('src', 'multiarray', 'getset.c'),
join('src', 'multiarray', 'hashdescr.c'),
join('src', 'multiarray', 'item_selection.c'),
join('src', 'multiarray', 'iterators.c'),
join('src', 'multiarray', 'lowlevel_strided_loops.c.src'),
join('src', 'multiarray', 'mapping.c'),
join('src', 'multiarray', 'methods.c'),
join('src', 'multiarray', 'multiarraymodule.c'),
join('src', 'multiarray', 'nditer_templ.c.src'),
join('src', 'multiarray', 'nditer_api.c'),
join('src', 'multiarray', 'nditer_constr.c'),
join('src', 'multiarray', 'nditer_pywrap.c'),
join('src', 'multiarray', 'number.c'),
join('src', 'multiarray', 'numpyos.c'),
join('src', 'multiarray', 'refcount.c'),
join('src', 'multiarray', 'sequence.c'),
join('src', 'multiarray', 'shape.c'),
join('src', 'multiarray', 'scalarapi.c'),
join('src', 'multiarray', 'scalartypes.c.src'),
join('src', 'multiarray', 'temp_elide.c'),
join('src', 'multiarray', 'usertypes.c'),
join('src', 'multiarray', 'ucsnarrow.c'),
join('src', 'multiarray', 'vdot.c'),
join('src', 'private', 'templ_common.h.src'),
join('src', 'private', 'mem_overlap.c'),
join('src', 'private', 'ufunc_override.c'),
]
blas_info = get_info('blas_opt', 0)
if blas_info and ('HAVE_CBLAS', None) in blas_info.get('define_macros', []):
extra_info = blas_info
# These files are also in MANIFEST.in so that they are always in
# the source distribution independently of HAVE_CBLAS.
multiarray_src.extend([join('src', 'multiarray', 'cblasfuncs.c'),
join('src', 'multiarray', 'python_xerbla.c'),
])
if uses_accelerate_framework(blas_info):
multiarray_src.extend(get_sgemv_fix())
else:
extra_info = {}
config.add_extension('multiarray',
sources=multiarray_src +
[generate_config_h,
generate_numpyconfig_h,
generate_numpy_api,
join(codegen_dir, 'generate_numpy_api.py'),
join('*.py')],
depends=deps + multiarray_deps,
libraries=['npymath', 'npysort'],
extra_info=extra_info)
#######################################################################
# umath module #
#######################################################################
def generate_umath_c(ext, build_dir):
target = join(build_dir, header_dir, '__umath_generated.c')
dir = os.path.dirname(target)
if not os.path.exists(dir):
os.makedirs(dir)
script = generate_umath_py
if newer(script, target):
f = open(target, 'w')
f.write(generate_umath.make_code(generate_umath.defdict,
generate_umath.__file__))
f.close()
return []
umath_src = [
join('src', 'umath', 'umathmodule.c'),
join('src', 'umath', 'reduction.c'),
join('src', 'umath', 'funcs.inc.src'),
join('src', 'umath', 'simd.inc.src'),
join('src', 'umath', 'loops.h.src'),
join('src', 'umath', 'loops.c.src'),
join('src', 'umath', 'ufunc_object.c'),
join('src', 'umath', 'scalarmath.c.src'),
join('src', 'umath', 'ufunc_type_resolution.c'),
join('src', 'umath', 'override.c'),
join('src', 'private', 'mem_overlap.c'),
join('src', 'private', 'ufunc_override.c')]
umath_deps = [
generate_umath_py,
join('include', 'numpy', 'npy_math.h'),
join('include', 'numpy', 'halffloat.h'),
join('src', 'multiarray', 'common.h'),
join('src', 'private', 'templ_common.h.src'),
join('src', 'umath', 'simd.inc.src'),
join('src', 'umath', 'override.h'),
join(codegen_dir, 'generate_ufunc_api.py'),
join('src', 'private', 'lowlevel_strided_loops.h'),
join('src', 'private', 'mem_overlap.h'),
join('src', 'private', 'ufunc_override.h'),
join('src', 'private', 'binop_override.h')] + npymath_sources
config.add_extension('umath',
sources=umath_src +
[generate_config_h,
generate_numpyconfig_h,
generate_umath_c,
generate_ufunc_api],
depends=deps + umath_deps,
libraries=['npymath'],
)
#######################################################################
# umath_tests module #
#######################################################################
config.add_extension('umath_tests',
sources=[join('src', 'umath', 'umath_tests.c.src')])
#######################################################################
# custom rational dtype module #
#######################################################################
config.add_extension('test_rational',
sources=[join('src', 'umath', 'test_rational.c.src')])
#######################################################################
# struct_ufunc_test module #
#######################################################################
config.add_extension('struct_ufunc_test',
sources=[join('src', 'umath', 'struct_ufunc_test.c.src')])
#######################################################################
# multiarray_tests module #
#######################################################################
config.add_extension('multiarray_tests',
sources=[join('src', 'multiarray', 'multiarray_tests.c.src'),
join('src', 'private', 'mem_overlap.c')],
depends=[join('src', 'private', 'mem_overlap.h'),
join('src', 'private', 'npy_extint128.h')])
#######################################################################
# operand_flag_tests module #
#######################################################################
config.add_extension('operand_flag_tests',
sources=[join('src', 'umath', 'operand_flag_tests.c.src')])
config.add_data_dir('tests')
config.add_data_dir('tests/data')
config.make_svn_version_py()
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)
|
Yubico/python-u2flib-server
|
refs/heads/master
|
u2flib_server/attestation/data.py
|
1
|
YUBICO = {
"identifier": "2fb54029-7613-4f1d-94f1-fb876c14a6fe",
"version": 4,
"vendorInfo": {
"url": "https://yubico.com",
"imageUrl": "https://developers.yubico.com/U2F/Images/yubico.png",
"name": "Yubico"
},
"trustedCertificates": [
"-----BEGIN CERTIFICATE-----\nMIIDHjCCAgagAwIBAgIEG1BT9zANBgkqhkiG9w0BAQsFADAuMSwwKgYDVQQDEyNZ\ndWJpY28gVTJGIFJvb3QgQ0EgU2VyaWFsIDQ1NzIwMDYzMTAgFw0xNDA4MDEwMDAw\nMDBaGA8yMDUwMDkwNDAwMDAwMFowLjEsMCoGA1UEAxMjWXViaWNvIFUyRiBSb290\nIENBIFNlcmlhbCA0NTcyMDA2MzEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK\nAoIBAQC/jwYuhBVlqaiYWEMsrWFisgJ+PtM91eSrpI4TK7U53mwCIawSDHy8vUmk\n5N2KAj9abvT9NP5SMS1hQi3usxoYGonXQgfO6ZXyUA9a+KAkqdFnBnlyugSeCOep\n8EdZFfsaRFtMjkwz5Gcz2Py4vIYvCdMHPtwaz0bVuzneueIEz6TnQjE63Rdt2zbw\nnebwTG5ZybeWSwbzy+BJ34ZHcUhPAY89yJQXuE0IzMZFcEBbPNRbWECRKgjq//qT\n9nmDOFVlSRCt2wiqPSzluwn+v+suQEBsUjTGMEd25tKXXTkNW21wIWbxeSyUoTXw\nLvGS6xlwQSgNpk2qXYwf8iXg7VWZAgMBAAGjQjBAMB0GA1UdDgQWBBQgIvz0bNGJ\nhjgpToksyKpP9xv9oDAPBgNVHRMECDAGAQH/AgEAMA4GA1UdDwEB/wQEAwIBBjAN\nBgkqhkiG9w0BAQsFAAOCAQEAjvjuOMDSa+JXFCLyBKsycXtBVZsJ4Ue3LbaEsPY4\nMYN/hIQ5ZM5p7EjfcnMG4CtYkNsfNHc0AhBLdq45rnT87q/6O3vUEtNMafbhU6kt\nhX7Y+9XFN9NpmYxr+ekVY5xOxi8h9JDIgoMP4VB1uS0aunL1IGqrNooL9mmFnL2k\nLVVee6/VR6C5+KSTCMCWppMuJIZII2v9o4dkoZ8Y7QRjQlLfYzd3qGtKbw7xaF1U\nsG/5xUb/Btwb2X2g4InpiB/yt/3CpQXpiWX/K4mBvUKiGn05ZsqeY1gx4g0xLBqc\nU9psmyPzK+Vsgw2jeRQ5JlKDyqE0hebfC1tvFu0CCrJFcw==\n-----END CERTIFICATE-----"
],
"devices": [
{
"deviceId": "1.3.6.1.4.1.41482.1.1",
"displayName": "Security Key by Yubico",
"transports": 4,
"deviceUrl": "https://www.yubico.com/products/yubikey-hardware/fido-u2f-security-key/",
"imageUrl": "https://developers.yubico.com/U2F/Images/SKY.png",
"selectors": [
{
"type": "x509Extension",
"parameters": {
"key": "1.3.6.1.4.1.41482.1.1"
}
},
{
"type": "x509Extension",
"parameters": {
"value": "1.3.6.1.4.1.41482.1.1",
"key": "1.3.6.1.4.1.41482.2"
}
}
]
},
{
"deviceId": "1.3.6.1.4.1.41482.1.2",
"displayName": "YubiKey NEO/NEO-n",
"transports": 4,
"deviceUrl": "https://www.yubico.com/products/yubikey-hardware/yubikey-neo/",
"imageUrl": "https://developers.yubico.com/U2F/Images/NEO.png",
"selectors": [
{
"type": "x509Extension",
"parameters": {
"key": "1.3.6.1.4.1.41482.1.2"
}
},
{
"type": "x509Extension",
"parameters": {
"value": "1.3.6.1.4.1.41482.1.2",
"key": "1.3.6.1.4.1.41482.2"
}
}
]
},
{
"deviceId": "1.3.6.1.4.1.41482.1.3",
"displayName": "YubiKey Plus",
"transports": 4,
"deviceUrl": "https://www.yubico.com/products/yubikey-hardware/",
"imageUrl": "https://developers.yubico.com/U2F/Images/PLS.png",
"selectors": [
{
"type": "x509Extension",
"parameters": {
"key": "1.3.6.1.4.1.41482.1.3"
}
},
{
"type": "x509Extension",
"parameters": {
"value": "1.3.6.1.4.1.41482.1.3",
"key": "1.3.6.1.4.1.41482.2"
}
}
]
},
{
"deviceId": "1.3.6.1.4.1.41482.1.4",
"displayName": "YubiKey Edge",
"transports": 4,
"deviceUrl": "https://www.yubico.com/products/yubikey-hardware/",
"imageUrl": "https://developers.yubico.com/U2F/Images/YKE.png",
"selectors": [
{
"type": "x509Extension",
"parameters": {
"value": "1.3.6.1.4.1.41482.1.4",
"key": "1.3.6.1.4.1.41482.2"
}
}
]
},
{
"deviceId": "1.3.6.1.4.1.41482.1.5",
"displayName": "YubiKey 4/YubiKey 4 Nano",
"transports": 4,
"deviceUrl": "https://www.yubico.com/products/yubikey-hardware/yubikey4/",
"imageUrl": "https://developers.yubico.com/U2F/Images/YK4.png",
"selectors": [
{
"type": "x509Extension",
"parameters": {
"value": "1.3.6.1.4.1.41482.1.5",
"key": "1.3.6.1.4.1.41482.2"
}
}
]
}
]
}
|
mlperf/inference_results_v0.7
|
refs/heads/master
|
closed/QCT/code/dlrm/tensorrt/infer.py
|
18
|
#! /usr/bin/env python3
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os, sys
import ctypes
sys.path.insert(0, os.getcwd())
# The plugin .so file has to be loaded at global scope and before `import torch` to avoid cuda version mismatch.
DLRM_INTERACTIONS_PLUGIN_LIBRARY="build/plugins/DLRMInteractionsPlugin/libdlrminteractionsplugin.so"
if not os.path.isfile(DLRM_INTERACTIONS_PLUGIN_LIBRARY):
raise IOError("{}\n{}\n".format(
"Failed to load library ({}).".format(DLRM_INTERACTIONS_PLUGIN_LIBRARY),
"Please build the DLRM Interactions plugin."
))
ctypes.CDLL(DLRM_INTERACTIONS_PLUGIN_LIBRARY)
DLRM_BOTTOM_MLP_PLUGIN_LIBRARY="build/plugins/DLRMBottomMLPPlugin/libdlrmbottommlpplugin.so"
if not os.path.isfile(DLRM_BOTTOM_MLP_PLUGIN_LIBRARY):
raise IOError("{}\n{}\n".format(
"Failed to load library ({}).".format(DLRM_BOTTOM_MLP_PLUGIN_LIBRARY),
"Please build the DLRM Bottom MLP plugin."
))
ctypes.CDLL(DLRM_BOTTOM_MLP_PLUGIN_LIBRARY)
from code.common.runner import EngineRunner, get_input_format
from code.common import logging
import code.common.arguments as common_args
import json
import numpy as np
from sklearn.metrics import roc_auc_score
import tensorrt as trt
import torch
import time
def evaluate(ground_truths, predictions):
assert len(ground_truths) == len(predictions), "Number of ground truths differs from number of predictions"
return roc_auc_score(ground_truths, predictions)
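# Illustrative check (values hypothetical): roc_auc_score ranks predictions
# against binary labels, e.g.
#
#     evaluate([0, 0, 1, 1], [0.1, 0.4, 0.35, 0.8])  # -> 0.75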
def run_dlrm_accuracy(engine_file, batch_size, num_pairs=10000000, verbose=False):
if verbose:
logging.info("Running DLRM accuracy test with:")
logging.info(" engine_file: {:}".format(engine_file))
logging.info(" batch_size: {:}".format(batch_size))
logging.info(" num_pairs: {:}".format(num_pairs))
runner = EngineRunner(engine_file, verbose=verbose)
pair_dir = os.path.join(os.getenv("PREPROCESSED_DATA_DIR", "build/preprocessed_data"), "criteo", "full_recalib")
input_dtype, input_format = get_input_format(runner.engine)
if input_dtype == trt.DataType.FLOAT:
format_string = "fp32"
elif input_dtype == trt.DataType.HALF:
format_string = "fp16"
elif input_dtype == trt.DataType.INT8:
format_string = "int8"
if input_format == trt.TensorFormat.CHW4:
format_string += "_chw4"
else:
raise NotImplementedError("Unsupported DataType {:}".format(input_dtype))
numerical_inputs = np.load(os.path.join(pair_dir, "numeric_{:}.npy".format(format_string)))
categ_inputs = np.load(os.path.join(pair_dir, "categorical_int32.npy"))
predictions = []
refs = []
batch_idx = 0
for pair_idx in range(0, int(num_pairs), batch_size):
actual_batch_size = batch_size if pair_idx + batch_size <= num_pairs else num_pairs - pair_idx
numerical_input = np.ascontiguousarray(numerical_inputs[pair_idx:pair_idx + actual_batch_size])
categ_input = np.ascontiguousarray(categ_inputs[pair_idx:pair_idx + actual_batch_size])
start_time = time.time()
outputs = runner([numerical_input, categ_input], actual_batch_size)
if verbose:
logging.info("Batch {:d} (Size {:}) >> Inference time: {:f}".format(batch_idx, actual_batch_size, time.time() - start_time))
predictions.extend(outputs[0][:actual_batch_size])
batch_idx += 1
ground_truths = np.load(os.path.join(pair_dir, "ground_truth.npy"))[:num_pairs].tolist()
return evaluate(ground_truths, predictions)
def main():
args = common_args.parse_args(common_args.ACCURACY_ARGS)
logging.info("Running accuracy test...")
acc = run_dlrm_accuracy(args["engine_file"], args["batch_size"], args["num_samples"],
verbose=args["verbose"])
logging.info("Accuracy: {:}".format(acc))
if __name__ == "__main__":
main()
|
gangadharkadam/johnerp
|
refs/heads/develop
|
erpnext/accounts/page/accounts_browser/accounts_browser.py
|
34
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
import frappe.defaults
from frappe.utils import flt
from erpnext.accounts.utils import get_balance_on
@frappe.whitelist()
def get_companies():
"""get a list of companies based on permission"""
return [d.name for d in frappe.get_list("Company", fields=["name"],
order_by="name")]
@frappe.whitelist()
def get_children():
args = frappe.local.form_dict
ctype, company = args['ctype'], args['comp']
# root
if args['parent'] in ("Accounts", "Cost Centers"):
acc = frappe.db.sql(""" select
name as value, if(group_or_ledger='Group', 1, 0) as expandable
from `tab%s`
where ifnull(parent_%s,'') = ''
and `company` = %s and docstatus<2
order by name""" % (ctype, ctype.lower().replace(' ','_'), '%s'),
company, as_dict=1)
else:
# other
acc = frappe.db.sql("""select
name as value, if(group_or_ledger='Group', 1, 0) as expandable
from `tab%s`
where ifnull(parent_%s,'') = %s
and docstatus<2
order by name""" % (ctype, ctype.lower().replace(' ','_'), '%s'),
args['parent'], as_dict=1)
if ctype == 'Account':
currency = frappe.db.sql("select default_currency from `tabCompany` where name = %s", company)[0][0]
for each in acc:
bal = get_balance_on(each.get("value"))
each["currency"] = currency
each["balance"] = flt(bal)
return acc
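# Illustrative shape of the result (the names are hypothetical): each node is
# a dict the tree browser can expand, e.g.
#
#     [{'value': 'Debtors - WP', 'expandable': 0,
#       'currency': 'INR', 'balance': 120.0}, ...]
#
# where 'currency' and 'balance' are only attached for the Account tree.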
|
qedsoftware/commcare-hq
|
refs/heads/master
|
corehq/apps/smsbillables/migrations/0013_auto_20160826_1531.py
|
1
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('smsbillables', '0012_remove_max_length'),
]
operations = [
migrations.AlterField(
model_name='smsbillable',
name='domain',
field=models.CharField(max_length=100, db_index=True),
preserve_default=True,
),
]
|
xueyumusic/pynacl
|
refs/heads/master
|
src/nacl/bindings/crypto_sign.py
|
17
|
# Copyright 2013 Donald Stufft and individual contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from nacl._sodium import ffi, lib
from nacl.exceptions import BadSignatureError
crypto_sign_BYTES = lib.crypto_sign_bytes()
# crypto_sign_SEEDBYTES = lib.crypto_sign_seedbytes()
crypto_sign_SEEDBYTES = lib.crypto_sign_secretkeybytes() // 2
crypto_sign_PUBLICKEYBYTES = lib.crypto_sign_publickeybytes()
crypto_sign_SECRETKEYBYTES = lib.crypto_sign_secretkeybytes()
crypto_sign_curve25519_BYTES = lib.crypto_box_secretkeybytes()
def crypto_sign_keypair():
"""
Returns a randomly generated public key and secret key.
:rtype: (bytes(public_key), bytes(secret_key))
"""
pk = ffi.new("unsigned char[]", crypto_sign_PUBLICKEYBYTES)
sk = ffi.new("unsigned char[]", crypto_sign_SECRETKEYBYTES)
rc = lib.crypto_sign_keypair(pk, sk)
assert rc == 0
return (
ffi.buffer(pk, crypto_sign_PUBLICKEYBYTES)[:],
ffi.buffer(sk, crypto_sign_SECRETKEYBYTES)[:],
)
def crypto_sign_seed_keypair(seed):
"""
Computes and returns the public key and secret key using the seed ``seed``.
:param seed: bytes
:rtype: (bytes(public_key), bytes(secret_key))
"""
if len(seed) != crypto_sign_SEEDBYTES:
raise ValueError("Invalid seed")
pk = ffi.new("unsigned char[]", crypto_sign_PUBLICKEYBYTES)
sk = ffi.new("unsigned char[]", crypto_sign_SECRETKEYBYTES)
rc = lib.crypto_sign_seed_keypair(pk, sk, seed)
assert rc == 0
return (
ffi.buffer(pk, crypto_sign_PUBLICKEYBYTES)[:],
ffi.buffer(sk, crypto_sign_SECRETKEYBYTES)[:],
)
def crypto_sign(message, sk):
"""
Signs the message ``message`` using the secret key ``sk`` and returns the
signed message.
:param message: bytes
:param sk: bytes
:rtype: bytes
"""
signed = ffi.new("unsigned char[]", len(message) + crypto_sign_BYTES)
signed_len = ffi.new("unsigned long long *")
rc = lib.crypto_sign(signed, signed_len, message, len(message), sk)
assert rc == 0
return ffi.buffer(signed, signed_len[0])[:]
def crypto_sign_open(signed, pk):
"""
Verifies the signature of the signed message ``signed`` using the public
key ``pk`` and returns the unsigned message.
:param signed: bytes
:param pk: bytes
:rtype: bytes
"""
message = ffi.new("unsigned char[]", len(signed))
message_len = ffi.new("unsigned long long *")
if lib.crypto_sign_open(
message, message_len, signed, len(signed), pk) != 0:
raise BadSignatureError("Signature was forged or corrupt")
return ffi.buffer(message, message_len[0])[:]
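# Usage sketch (illustrative; the message value is hypothetical): a full
# sign/verify roundtrip with the functions above.
#
#     pk, sk = crypto_sign_keypair()
#     signed = crypto_sign(b"attack at dawn", sk)
#     assert crypto_sign_open(signed, pk) == b"attack at dawn"
#
# A forged or corrupted signature makes crypto_sign_open raise
# BadSignatureError instead of returning the message.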
def crypto_sign_ed25519_pk_to_curve25519(public_key_bytes):
"""
Converts a public Ed25519 key (encoded as bytes ``public_key_bytes``) to
a public Curve25519 key as bytes.
Raises a ValueError if ``public_key_bytes`` is not of length
``crypto_sign_PUBLICKEYBYTES``
:param public_key_bytes: bytes
:rtype: bytes
"""
if len(public_key_bytes) != crypto_sign_PUBLICKEYBYTES:
raise ValueError("Invalid curve public key")
curve_public_key_len = crypto_sign_curve25519_BYTES
curve_public_key = ffi.new("unsigned char[]", curve_public_key_len)
rc = lib.crypto_sign_ed25519_pk_to_curve25519(curve_public_key,
public_key_bytes)
assert rc == 0
return ffi.buffer(curve_public_key, curve_public_key_len)[:]
def crypto_sign_ed25519_sk_to_curve25519(secret_key_bytes):
"""
Converts a secret Ed25519 key (encoded as bytes ``secret_key_bytes``) to
a secret Curve25519 key as bytes.
Raises a ValueError if ``secret_key_bytes`` is not of length
``crypto_sign_SECRETKEYBYTES``
:param secret_key_bytes: bytes
:rtype: bytes
"""
if len(secret_key_bytes) != crypto_sign_SECRETKEYBYTES:
raise ValueError("Invalid curve public key")
curve_secret_key_len = crypto_sign_curve25519_BYTES
curve_secret_key = ffi.new("unsigned char[]", curve_secret_key_len)
rc = lib.crypto_sign_ed25519_sk_to_curve25519(curve_secret_key,
secret_key_bytes)
assert rc == 0
return ffi.buffer(curve_secret_key, curve_secret_key_len)[:]
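# Usage sketch (illustrative): converting an Ed25519 signing keypair into a
# Curve25519 keypair for use with the crypto_box primitives.
#
#     pk, sk = crypto_sign_keypair()
#     curve_pk = crypto_sign_ed25519_pk_to_curve25519(pk)
#     curve_sk = crypto_sign_ed25519_sk_to_curve25519(sk)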
|
sadatay/beets
|
refs/heads/master
|
beets/mediafile.py
|
9
|
# This file is part of beets.
# Copyright 2015, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Handles low-level interfacing for files' tags. Wraps Mutagen to
automatically detect file types and provide a unified interface for a
useful subset of music files' tags.
Usage:
>>> f = MediaFile('Lucy.mp3')
>>> f.title
u'Lucy in the Sky with Diamonds'
>>> f.artist = 'The Beatles'
>>> f.save()
A field will always return a reasonable value of the correct type, even
if no tag is present. If no value is available, the value will be false
(e.g., zero or the empty string).
Internally ``MediaFile`` uses ``MediaField`` descriptors to access the
data from the tags. In turn ``MediaField`` uses a number of
``StorageStyle`` strategies to handle format specific logic.
"""
from __future__ import (division, absolute_import, print_function,
unicode_literals)
import mutagen
import mutagen.mp3
import mutagen.oggopus
import mutagen.oggvorbis
import mutagen.mp4
import mutagen.flac
import mutagen.monkeysaudio
import mutagen.asf
import mutagen.aiff
import datetime
import re
import base64
import math
import struct
import imghdr
import os
import traceback
import enum
from beets import logging
from beets.util import displayable_path, syspath
__all__ = ['UnreadableFileError', 'FileTypeError', 'MediaFile']
log = logging.getLogger('beets')
# Human-readable type names.
TYPES = {
'mp3': 'MP3',
'aac': 'AAC',
'alac': 'ALAC',
'ogg': 'OGG',
'opus': 'Opus',
'flac': 'FLAC',
'ape': 'APE',
'wv': 'WavPack',
'mpc': 'Musepack',
'asf': 'Windows Media',
'aiff': 'AIFF',
}
# Exceptions.
class UnreadableFileError(Exception):
"""Mutagen is not able to extract information from the file.
"""
def __init__(self, path):
Exception.__init__(self, displayable_path(path))
class FileTypeError(UnreadableFileError):
"""Reading this type of file is not supported.
If passed the `mutagen_type` argument this indicates that the
mutagen type is not supported by `MediaFile`.
"""
def __init__(self, path, mutagen_type=None):
path = displayable_path(path)
if mutagen_type is None:
msg = path
else:
msg = u'{0}: of mutagen type {1}'.format(path, mutagen_type)
Exception.__init__(self, msg)
class MutagenError(UnreadableFileError):
"""Raised when Mutagen fails unexpectedly---probably due to a bug.
"""
def __init__(self, path, mutagen_exc):
msg = u'{0}: {1}'.format(displayable_path(path), mutagen_exc)
Exception.__init__(self, msg)
# Utility.
def _safe_cast(out_type, val):
"""Try to covert val to out_type but never raise an exception. If
the value can't be converted, then a sensible default value is
returned. out_type should be bool, int, or unicode; otherwise, the
value is just passed through.
"""
if val is None:
return None
if out_type == int:
if isinstance(val, int) or isinstance(val, float):
# Just a number.
return int(val)
else:
# Process any other type as a string.
if not isinstance(val, basestring):
val = unicode(val)
# Get a number from the front of the string.
val = re.match(r'[0-9]*', val.strip()).group(0)
if not val:
return 0
else:
return int(val)
elif out_type == bool:
try:
# Should work for strings, bools, ints:
return bool(int(val))
except ValueError:
return False
elif out_type == unicode:
if isinstance(val, bytes):
return val.decode('utf8', 'ignore')
elif isinstance(val, unicode):
return val
else:
return unicode(val)
elif out_type == float:
if isinstance(val, int) or isinstance(val, float):
return float(val)
else:
if not isinstance(val, basestring):
val = unicode(val)
match = re.match(r'[\+-]?[0-9\.]+', val.strip())
if match:
val = match.group(0)
if val:
return float(val)
return 0.0
else:
return val
# Image coding for ASF/WMA.
def _unpack_asf_image(data):
"""Unpack image data from a WM/Picture tag. Return a tuple
containing the MIME type, the raw image data, a type indicator, and
the image's description.
This function is treated as "untrusted" and could throw all manner
of exceptions (out-of-bounds, etc.). We should clean this up
sometime so that the failure modes are well-defined.
"""
type, size = struct.unpack_from(b'<bi', data)
pos = 5
mime = ""
while data[pos:pos + 2] != b'\x00\x00':
mime += data[pos:pos + 2]
pos += 2
pos += 2
description = ""
while data[pos:pos + 2] != b'\x00\x00':
description += data[pos:pos + 2]
pos += 2
pos += 2
image_data = data[pos:pos + size]
return (mime.decode("utf-16-le"), image_data, type,
description.decode("utf-16-le"))
def _pack_asf_image(mime, data, type=3, description=""):
"""Pack image data for a WM/Picture tag.
"""
tag_data = struct.pack(b'<bi', type, len(data))
tag_data += mime.encode("utf-16-le") + b'\x00\x00'
tag_data += description.encode("utf-16-le") + b'\x00\x00'
tag_data += data
return tag_data
# iTunes Sound Check encoding.
def _sc_decode(soundcheck):
"""Convert a Sound Check string value to a (gain, peak) tuple as
used by ReplayGain.
"""
# SoundCheck tags consist of 10 numbers, each represented by 8
# characters of ASCII hex preceded by a space.
try:
soundcheck = soundcheck.replace(' ', '').decode('hex')
soundcheck = struct.unpack(b'!iiiiiiiiii', soundcheck)
except (struct.error, TypeError, UnicodeEncodeError):
# SoundCheck isn't in the format we expect, so return default
# values.
return 0.0, 0.0
# SoundCheck stores absolute calculated/measured RMS value in an
# unknown unit. We need to find the ratio of this measurement
# compared to a reference value of 1000 to get our gain in dB. We
# play it safe by using the larger of the two values (i.e., the most
# attenuation).
maxgain = max(soundcheck[:2])
if maxgain > 0:
gain = math.log10(maxgain / 1000.0) * -10
else:
# Invalid gain value found.
gain = 0.0
# SoundCheck stores peak values as the actual value of the sample,
# and again separately for the left and right channels. We need to
# convert this to a percentage of full scale, which is 32768 for a
# 16 bit sample. Once again, we play it safe by using the larger of
# the two values.
peak = max(soundcheck[6:8]) / 32768.0
return round(gain, 2), round(peak, 6)
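# Worked example (derived from the formulas above): a stored RMS value of
# 10000 gives gain = log10(10000 / 1000.0) * -10 = -10.0 dB, and a stored
# peak sample of 16384 gives 16384 / 32768.0 = 0.5.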
def _sc_encode(gain, peak):
"""Encode ReplayGain gain/peak values as a Sound Check string.
"""
# SoundCheck stores the peak value as the actual value of the
# sample, rather than the percentage of full scale that RG uses, so
# we do a simple conversion assuming 16 bit samples.
peak *= 32768.0
# SoundCheck stores absolute RMS values in some unknown units rather
# than the dB values RG uses. We can calculate these absolute values
# from the gain ratio using a reference value of 1000 units. We also
# enforce the maximum value here, which is equivalent to about
# -18.2dB.
g1 = min(round((10 ** (gain / -10)) * 1000), 65534)
# Same as above, except our reference level is 2500 units.
g2 = min(round((10 ** (gain / -10)) * 2500), 65534)
# The purpose of these values is unknown, but they also seem to be
# unused so we just use zero.
uk = 0
values = (g1, g1, g2, g2, uk, uk, peak, peak, uk, uk)
return (u' %08X' * 10) % values
# Cover art and other images.
def _wider_test_jpeg(data):
"""Test for a jpeg file following the UNIX file implementation which
uses the magic bytes rather than just looking for the bytes b'JFIF'
or b'EXIF' at a fixed position.
"""
if data[:2] == b'\xff\xd8':
return 'jpeg'
def _image_mime_type(data):
"""Return the MIME type of the image data (a bytestring).
"""
# This checks for a jpeg file with only the magic bytes (unrecognized by
# imghdr.what). imghdr.what returns None for that type of file, so
# _wider_test_jpeg is run in that case. It still returns None if it didn't
# match such a jpeg file.
kind = imghdr.what(None, h=data) or _wider_test_jpeg(data)
if kind in ['gif', 'jpeg', 'png', 'tiff', 'bmp']:
return 'image/{0}'.format(kind)
elif kind == 'pgm':
return 'image/x-portable-graymap'
elif kind == 'pbm':
return 'image/x-portable-bitmap'
elif kind == 'ppm':
return 'image/x-portable-pixmap'
elif kind == 'xbm':
return 'image/x-xbitmap'
else:
return 'image/x-{0}'.format(kind)
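# Illustrative: a JPEG carrying only the magic bytes (no JFIF/EXIF marker at
# a fixed offset) is still detected here, e.g.
#
#     _image_mime_type(b'\xff\xd8' + more_bytes)  # -> 'image/jpeg'
#
# where more_bytes stands for the rest of a hypothetical image payload.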
class ImageType(enum.Enum):
"""Indicates the kind of an `Image` stored in a file's tag.
"""
other = 0
icon = 1
other_icon = 2
front = 3
back = 4
leaflet = 5
media = 6
lead_artist = 7
artist = 8
conductor = 9
group = 10
composer = 11
lyricist = 12
recording_location = 13
recording_session = 14
performance = 15
screen_capture = 16
fish = 17
illustration = 18
artist_logo = 19
publisher_logo = 20
class Image(object):
"""Structure representing image data and metadata that can be
stored and retrieved from tags.
The structure has four properties.
* ``data`` The binary data of the image
* ``desc`` An optional description of the image
* ``type`` An instance of `ImageType` indicating the kind of image
* ``mime_type`` Read-only property that contains the mime type of
the binary data
"""
def __init__(self, data, desc=None, type=None):
self.data = data
self.desc = desc
if isinstance(type, int):
try:
type = list(ImageType)[type]
except IndexError:
log.debug(u"ignoring unknown image type index {0}", type)
type = ImageType.other
self.type = type
@property
def mime_type(self):
if self.data:
return _image_mime_type(self.data)
@property
def type_index(self):
if self.type is None:
# This method is used when a tag format requires the type
# index to be set, so we return "other" as the default value.
return 0
return self.type.value
# StorageStyle classes describe strategies for accessing values in
# Mutagen file objects.
class StorageStyle(object):
"""A strategy for storing a value for a certain tag format (or set
of tag formats). This basic StorageStyle describes simple 1:1
mapping from raw values to keys in a Mutagen file object; subclasses
describe more sophisticated translations or format-specific access
strategies.
MediaFile uses a StorageStyle via three methods: ``get()``,
``set()``, and ``delete()``. It passes a Mutagen file object to
each.
Internally, the StorageStyle implements ``get()`` and ``set()``
using two steps that may be overridden by subtypes. To get a value,
the StorageStyle first calls ``fetch()`` to retrieve the value
corresponding to a key and then ``deserialize()`` to convert the raw
Mutagen value to a consumable Python value. Similarly, to set a
field, we call ``serialize()`` to encode the value and then
``store()`` to assign the result into the Mutagen object.
Each StorageStyle type has a class-level `formats` attribute that is
a list of strings indicating the formats that the style applies to.
MediaFile only uses StorageStyles that apply to the correct type for
a given audio file.
"""
formats = ['FLAC', 'OggOpus', 'OggTheora', 'OggSpeex', 'OggVorbis',
'OggFlac', 'APEv2File', 'WavPack', 'Musepack', 'MonkeysAudio']
"""List of mutagen classes the StorageStyle can handle.
"""
def __init__(self, key, as_type=unicode, suffix=None, float_places=2):
"""Create a basic storage strategy. Parameters:
- `key`: The key on the Mutagen file object used to access the
field's data.
- `as_type`: The Python type that the value is stored as
internally (`unicode`, `int`, `bool`, or `bytes`).
- `suffix`: When `as_type` is a string type, append this before
storing the value.
- `float_places`: When the value is a floating-point number and
encoded as a string, the number of digits to store after the
decimal point.
"""
self.key = key
self.as_type = as_type
self.suffix = suffix
self.float_places = float_places
# Convert suffix to correct string type.
if self.suffix and self.as_type is unicode \
and not isinstance(self.suffix, unicode):
self.suffix = self.suffix.decode('utf8')
# Getter.
def get(self, mutagen_file):
"""Get the value for the field using this style.
"""
return self.deserialize(self.fetch(mutagen_file))
def fetch(self, mutagen_file):
"""Retrieve the raw value of for this tag from the Mutagen file
object.
"""
try:
return mutagen_file[self.key][0]
except (KeyError, IndexError):
return None
def deserialize(self, mutagen_value):
"""Given a raw value stored on a Mutagen object, decode and
return the represented value.
"""
if self.suffix and isinstance(mutagen_value, unicode) \
and mutagen_value.endswith(self.suffix):
return mutagen_value[:-len(self.suffix)]
else:
return mutagen_value
# Setter.
def set(self, mutagen_file, value):
"""Assign the value for the field using this style.
"""
self.store(mutagen_file, self.serialize(value))
def store(self, mutagen_file, value):
"""Store a serialized value in the Mutagen file object.
"""
mutagen_file[self.key] = [value]
def serialize(self, value):
"""Convert the external Python value to a type that is suitable for
storing in a Mutagen file object.
"""
if isinstance(value, float) and self.as_type is unicode:
value = u'{0:.{1}f}'.format(value, self.float_places)
value = self.as_type(value)
elif self.as_type is unicode:
if isinstance(value, bool):
# Store bools as 1/0 instead of True/False.
value = unicode(int(bool(value)))
elif isinstance(value, bytes):
value = value.decode('utf8', 'ignore')
else:
value = unicode(value)
else:
value = self.as_type(value)
if self.suffix:
value += self.suffix
return value
def delete(self, mutagen_file):
"""Remove the tag from the file.
"""
if self.key in mutagen_file:
del mutagen_file[self.key]
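# Sketch of the extension points described above (the subclass name is
# hypothetical): a custom style usually overrides only the serialize /
# deserialize or fetch / store steps and inherits get()/set(), e.g.
#
#     class UpperCaseStorageStyle(StorageStyle):
#         def serialize(self, value):
#             value = super(UpperCaseStorageStyle, self).serialize(value)
#             return value.upper()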
class ListStorageStyle(StorageStyle):
"""Abstract storage style that provides access to lists.
The ListMediaField descriptor uses a ListStorageStyle via two
methods: ``get_list()`` and ``set_list()``. It passes a Mutagen file
object to each.
Subclasses may overwrite ``fetch`` and ``store``. ``fetch`` must
return a (possibly empty) list and ``store`` receives a serialized
list of values as the second argument.
The `serialize` and `deserialize` methods (from the base
`StorageStyle`) are still called with individual values. This class
handles packing and unpacking the values into lists.
"""
def get(self, mutagen_file):
"""Get the first value in the field's value list.
"""
try:
return self.get_list(mutagen_file)[0]
except IndexError:
return None
def get_list(self, mutagen_file):
"""Get a list of all values for the field using this style.
"""
return [self.deserialize(item) for item in self.fetch(mutagen_file)]
def fetch(self, mutagen_file):
"""Get the list of raw (serialized) values.
"""
try:
return mutagen_file[self.key]
except KeyError:
return []
def set(self, mutagen_file, value):
"""Set an individual value as the only value for the field using
this style.
"""
self.set_list(mutagen_file, [value])
def set_list(self, mutagen_file, values):
"""Set all values for the field using this style. `values`
should be an iterable.
"""
self.store(mutagen_file, [self.serialize(value) for value in values])
def store(self, mutagen_file, values):
"""Set the list of all raw (serialized) values for this field.
"""
mutagen_file[self.key] = values
class SoundCheckStorageStyleMixin(object):
"""A mixin for storage styles that read and write iTunes SoundCheck
analysis values. The object must have an `index` field that
indicates which half of the gain/peak pair---0 or 1---the field
represents.
"""
def get(self, mutagen_file):
data = self.fetch(mutagen_file)
if data is not None:
return _sc_decode(data)[self.index]
def set(self, mutagen_file, value):
data = self.fetch(mutagen_file)
if data is None:
gain_peak = [0, 0]
else:
gain_peak = list(_sc_decode(data))
gain_peak[self.index] = value or 0
data = self.serialize(_sc_encode(*gain_peak))
self.store(mutagen_file, data)
class ASFStorageStyle(ListStorageStyle):
"""A general storage style for Windows Media/ASF files.
"""
formats = ['ASF']
def deserialize(self, data):
if isinstance(data, mutagen.asf.ASFBaseAttribute):
data = data.value
return data
class MP4StorageStyle(StorageStyle):
"""A general storage style for MPEG-4 tags.
"""
formats = ['MP4']
def serialize(self, value):
value = super(MP4StorageStyle, self).serialize(value)
if self.key.startswith(b'----:') and isinstance(value, unicode):
value = value.encode('utf8')
return value
class MP4TupleStorageStyle(MP4StorageStyle):
"""A style for storing values as part of a pair of numbers in an
MPEG-4 file.
"""
def __init__(self, key, index=0, **kwargs):
super(MP4TupleStorageStyle, self).__init__(key, **kwargs)
self.index = index
def deserialize(self, mutagen_value):
items = mutagen_value or []
packing_length = 2
return list(items) + [0] * (packing_length - len(items))
def get(self, mutagen_file):
value = super(MP4TupleStorageStyle, self).get(mutagen_file)[self.index]
if value == 0:
# The values are always present and saved as integers. So we
# assume that "0" indicates it is not set.
return None
else:
return value
def set(self, mutagen_file, value):
if value is None:
value = 0
items = self.deserialize(self.fetch(mutagen_file))
items[self.index] = int(value)
self.store(mutagen_file, items)
def delete(self, mutagen_file):
if self.index == 0:
super(MP4TupleStorageStyle, self).delete(mutagen_file)
else:
self.set(mutagen_file, None)
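# Illustrative: MPEG-4 stores track and disc numbers as (index, total) pairs
# in a single atom, so a style like MP4TupleStorageStyle('trkn', index=0)
# would read the track number and index=1 the track total.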
class MP4ListStorageStyle(ListStorageStyle, MP4StorageStyle):
pass
class MP4SoundCheckStorageStyle(SoundCheckStorageStyleMixin, MP4StorageStyle):
def __init__(self, key, index=0, **kwargs):
super(MP4SoundCheckStorageStyle, self).__init__(key, **kwargs)
self.index = index
class MP4BoolStorageStyle(MP4StorageStyle):
"""A style for booleans in MPEG-4 files. (MPEG-4 has an atom type
specifically for representing booleans.)
"""
def get(self, mutagen_file):
try:
return mutagen_file[self.key]
except KeyError:
return None
def get_list(self, mutagen_file):
raise NotImplementedError('MP4 bool storage does not support lists')
def set(self, mutagen_file, value):
mutagen_file[self.key] = value
def set_list(self, mutagen_file, values):
raise NotImplementedError('MP4 bool storage does not support lists')
class MP4ImageStorageStyle(MP4ListStorageStyle):
"""Store images as MPEG-4 image atoms. Values are `Image` objects.
"""
def __init__(self, **kwargs):
super(MP4ImageStorageStyle, self).__init__(key=b'covr', **kwargs)
def deserialize(self, data):
return Image(data)
def serialize(self, image):
if image.mime_type == 'image/png':
kind = mutagen.mp4.MP4Cover.FORMAT_PNG
elif image.mime_type == 'image/jpeg':
kind = mutagen.mp4.MP4Cover.FORMAT_JPEG
else:
raise ValueError('MP4 files only support PNG and JPEG images')
return mutagen.mp4.MP4Cover(image.data, kind)
class MP3StorageStyle(StorageStyle):
"""Store data in ID3 frames.
"""
formats = ['MP3', 'AIFF']
def __init__(self, key, id3_lang=None, **kwargs):
"""Create a new ID3 storage style. `id3_lang` is the value for
the language field of newly created frames.
"""
self.id3_lang = id3_lang
super(MP3StorageStyle, self).__init__(key, **kwargs)
def fetch(self, mutagen_file):
try:
return mutagen_file[self.key].text[0]
except (KeyError, IndexError):
return None
def store(self, mutagen_file, value):
frame = mutagen.id3.Frames[self.key](encoding=3, text=[value])
mutagen_file.tags.setall(self.key, [frame])
class MP3ListStorageStyle(ListStorageStyle, MP3StorageStyle):
"""Store lists of data in multiple ID3 frames.
"""
def fetch(self, mutagen_file):
try:
return mutagen_file[self.key].text
except KeyError:
return []
def store(self, mutagen_file, values):
frame = mutagen.id3.Frames[self.key](encoding=3, text=values)
mutagen_file.tags.setall(self.key, [frame])
class MP3UFIDStorageStyle(MP3StorageStyle):
"""Store data in a UFID ID3 frame with a particular owner.
"""
def __init__(self, owner, **kwargs):
self.owner = owner
super(MP3UFIDStorageStyle, self).__init__('UFID:' + owner, **kwargs)
def fetch(self, mutagen_file):
try:
return mutagen_file[self.key].data
except KeyError:
return None
def store(self, mutagen_file, value):
frames = mutagen_file.tags.getall(self.key)
for frame in frames:
# Replace existing frame data.
if frame.owner == self.owner:
frame.data = value
else:
# New frame.
frame = mutagen.id3.UFID(owner=self.owner, data=value)
mutagen_file.tags.setall(self.key, [frame])
class MP3DescStorageStyle(MP3StorageStyle):
"""Store data in a TXXX (or similar) ID3 frame. The frame is
selected based on its ``desc`` field.
"""
def __init__(self, desc=u'', key='TXXX', **kwargs):
self.description = desc
super(MP3DescStorageStyle, self).__init__(key=key, **kwargs)
def store(self, mutagen_file, value):
frames = mutagen_file.tags.getall(self.key)
if self.key != 'USLT':
value = [value]
# try modifying in place
found = False
for frame in frames:
if frame.desc.lower() == self.description.lower():
frame.text = value
found = True
# need to make a new frame?
if not found:
frame = mutagen.id3.Frames[self.key](
desc=bytes(self.description),
text=value,
encoding=3
)
if self.id3_lang:
frame.lang = self.id3_lang
mutagen_file.tags.add(frame)
def fetch(self, mutagen_file):
for frame in mutagen_file.tags.getall(self.key):
if frame.desc.lower() == self.description.lower():
if self.key == 'USLT':
return frame.text
try:
return frame.text[0]
except IndexError:
return None
def delete(self, mutagen_file):
found_frame = None
for frame in mutagen_file.tags.getall(self.key):
if frame.desc.lower() == self.description.lower():
found_frame = frame
break
if found_frame is not None:
del mutagen_file[found_frame.HashKey]
class MP3SlashPackStorageStyle(MP3StorageStyle):
"""Store value as part of pair that is serialized as a slash-
separated string.
"""
def __init__(self, key, pack_pos=0, **kwargs):
super(MP3SlashPackStorageStyle, self).__init__(key, **kwargs)
self.pack_pos = pack_pos
def _fetch_unpacked(self, mutagen_file):
data = self.fetch(mutagen_file)
if data:
items = unicode(data).split('/')
else:
items = []
packing_length = 2
return list(items) + [None] * (packing_length - len(items))
def get(self, mutagen_file):
return self._fetch_unpacked(mutagen_file)[self.pack_pos]
def set(self, mutagen_file, value):
items = self._fetch_unpacked(mutagen_file)
items[self.pack_pos] = value
if items[0] is None:
items[0] = ''
if items[1] is None:
items.pop() # Do not store last value
self.store(mutagen_file, '/'.join(map(unicode, items)))
def delete(self, mutagen_file):
if self.pack_pos == 0:
super(MP3SlashPackStorageStyle, self).delete(mutagen_file)
else:
self.set(mutagen_file, None)
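# Illustrative: an ID3 TRCK frame holding u'3/12' unpacks to ['3', '12'], so
# pack_pos=0 yields the track number and pack_pos=1 the track total; setting
# one position preserves the other value already stored in the frame.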
class MP3ImageStorageStyle(ListStorageStyle, MP3StorageStyle):
"""Converts between APIC frames and ``Image`` instances.
The `get_list` method inherited from ``ListStorageStyle`` returns a
list of ``Image``s. Similarly, the `set_list` method accepts a
list of ``Image``s as its ``values`` argument.
"""
def __init__(self):
super(MP3ImageStorageStyle, self).__init__(key='APIC')
self.as_type = bytes
def deserialize(self, apic_frame):
"""Convert APIC frame into Image."""
return Image(data=apic_frame.data, desc=apic_frame.desc,
type=apic_frame.type)
def fetch(self, mutagen_file):
return mutagen_file.tags.getall(self.key)
def store(self, mutagen_file, frames):
mutagen_file.tags.setall(self.key, frames)
def delete(self, mutagen_file):
mutagen_file.tags.delall(self.key)
def serialize(self, image):
"""Return an APIC frame populated with data from ``image``.
"""
assert isinstance(image, Image)
frame = mutagen.id3.Frames[self.key]()
frame.data = image.data
frame.mime = image.mime_type
frame.desc = (image.desc or u'').encode('utf8')
frame.encoding = 3 # UTF-8 encoding of desc
frame.type = image.type_index
return frame
class MP3SoundCheckStorageStyle(SoundCheckStorageStyleMixin,
MP3DescStorageStyle):
def __init__(self, index=0, **kwargs):
super(MP3SoundCheckStorageStyle, self).__init__(**kwargs)
self.index = index
class ASFImageStorageStyle(ListStorageStyle):
"""Store images packed into Windows Media/ASF byte array attributes.
Values are `Image` objects.
"""
formats = ['ASF']
def __init__(self):
super(ASFImageStorageStyle, self).__init__(key='WM/Picture')
def deserialize(self, asf_picture):
mime, data, type, desc = _unpack_asf_image(asf_picture.value)
return Image(data, desc=desc, type=type)
def serialize(self, image):
pic = mutagen.asf.ASFByteArrayAttribute()
pic.value = _pack_asf_image(image.mime_type, image.data,
type=image.type_index,
description=image.desc or u'')
return pic
class VorbisImageStorageStyle(ListStorageStyle):
"""Store images in Vorbis comments. Both legacy COVERART fields and
modern METADATA_BLOCK_PICTURE tags are supported. Data is
base64-encoded. Values are `Image` objects.
"""
formats = ['OggOpus', 'OggTheora', 'OggSpeex', 'OggVorbis',
'OggFlac']
def __init__(self):
super(VorbisImageStorageStyle, self).__init__(
key='metadata_block_picture'
)
self.as_type = bytes
def fetch(self, mutagen_file):
images = []
if 'metadata_block_picture' not in mutagen_file:
# Try legacy COVERART tags.
if 'coverart' in mutagen_file:
for data in mutagen_file['coverart']:
images.append(Image(base64.b64decode(data)))
return images
for data in mutagen_file["metadata_block_picture"]:
try:
pic = mutagen.flac.Picture(base64.b64decode(data))
except (TypeError, AttributeError):
continue
images.append(Image(data=pic.data, desc=pic.desc,
type=pic.type))
return images
def store(self, mutagen_file, image_data):
# Strip all art, including legacy COVERART.
if 'coverart' in mutagen_file:
del mutagen_file['coverart']
if 'coverartmime' in mutagen_file:
del mutagen_file['coverartmime']
super(VorbisImageStorageStyle, self).store(mutagen_file, image_data)
def serialize(self, image):
"""Turn a Image into a base64 encoded FLAC picture block.
"""
pic = mutagen.flac.Picture()
pic.data = image.data
pic.type = image.type_index
pic.mime = image.mime_type
pic.desc = image.desc or u''
return base64.b64encode(pic.write())
class FlacImageStorageStyle(ListStorageStyle):
"""Converts between ``mutagen.flac.Picture`` and ``Image`` instances.
"""
formats = ['FLAC']
def __init__(self):
super(FlacImageStorageStyle, self).__init__(key='')
def fetch(self, mutagen_file):
return mutagen_file.pictures
def deserialize(self, flac_picture):
return Image(data=flac_picture.data, desc=flac_picture.desc,
type=flac_picture.type)
def store(self, mutagen_file, pictures):
"""``pictures`` is a list of mutagen.flac.Picture instances.
"""
mutagen_file.clear_pictures()
for pic in pictures:
mutagen_file.add_picture(pic)
def serialize(self, image):
"""Turn a Image into a mutagen.flac.Picture.
"""
pic = mutagen.flac.Picture()
pic.data = image.data
pic.type = image.type_index
pic.mime = image.mime_type
pic.desc = image.desc or u''
return pic
def delete(self, mutagen_file):
"""Remove all images from the file.
"""
mutagen_file.clear_pictures()
class APEv2ImageStorageStyle(ListStorageStyle):
"""Store images in APEv2 tags. Values are `Image` objects.
"""
formats = ['APEv2File', 'WavPack', 'Musepack', 'MonkeysAudio', 'OptimFROG']
TAG_NAMES = {
ImageType.other: 'Cover Art (other)',
ImageType.icon: 'Cover Art (icon)',
ImageType.other_icon: 'Cover Art (other icon)',
ImageType.front: 'Cover Art (front)',
ImageType.back: 'Cover Art (back)',
ImageType.leaflet: 'Cover Art (leaflet)',
ImageType.media: 'Cover Art (media)',
ImageType.lead_artist: 'Cover Art (lead)',
ImageType.artist: 'Cover Art (artist)',
ImageType.conductor: 'Cover Art (conductor)',
ImageType.group: 'Cover Art (band)',
ImageType.composer: 'Cover Art (composer)',
ImageType.lyricist: 'Cover Art (lyricist)',
ImageType.recording_location: 'Cover Art (studio)',
ImageType.recording_session: 'Cover Art (recording)',
ImageType.performance: 'Cover Art (performance)',
ImageType.screen_capture: 'Cover Art (movie scene)',
ImageType.fish: 'Cover Art (colored fish)',
ImageType.illustration: 'Cover Art (illustration)',
ImageType.artist_logo: 'Cover Art (band logo)',
ImageType.publisher_logo: 'Cover Art (publisher logo)',
}
def __init__(self):
super(APEv2ImageStorageStyle, self).__init__(key='')
def fetch(self, mutagen_file):
images = []
for cover_type, cover_tag in self.TAG_NAMES.items():
try:
frame = mutagen_file[cover_tag]
text_delimiter_index = frame.value.find(b'\x00')
comment = frame.value[0:text_delimiter_index] \
if text_delimiter_index > 0 else None
image_data = frame.value[text_delimiter_index + 1:]
images.append(Image(data=image_data, type=cover_type,
desc=comment))
except KeyError:
pass
return images
def set_list(self, mutagen_file, values):
self.delete(mutagen_file)
for image in values:
image_type = image.type or ImageType.other
comment = image.desc or ''
image_data = comment.encode('utf8') + b'\x00' + image.data
cover_tag = self.TAG_NAMES[image_type]
mutagen_file[cover_tag] = image_data
def delete(self, mutagen_file):
"""Remove all images from the file.
"""
for cover_tag in self.TAG_NAMES.values():
try:
del mutagen_file[cover_tag]
except KeyError:
pass
# MediaField is a descriptor that represents a single logical field. It
# aggregates several StorageStyles describing how to access the data for
# each file type.
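# For instance, the ``title`` field defined further below bundles one
# style per container format, and the descriptor protocol dispatches
# attribute access to whichever style matches the open file:
#
#     title = MediaField(
#         MP3StorageStyle('TIT2'),      # ID3 frame for MP3
#         MP4StorageStyle(b"\xa9nam"),  # MP4 atom
#         StorageStyle('TITLE'),        # Vorbis comment and friends
#         ASFStorageStyle('Title'),     # Windows Media
#     )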
class MediaField(object):
"""A descriptor providing access to a particular (abstract) metadata
field.
"""
def __init__(self, *styles, **kwargs):
"""Creates a new MediaField.
:param styles: `StorageStyle` instances that describe the strategy
for reading and writing the field in particular
formats. There must be at least one style for
each possible file format.
:param out_type: the type of the value that should be returned when
getting this property.
"""
self.out_type = kwargs.get(b'out_type', unicode)
self._styles = styles
def styles(self, mutagen_file):
"""Yields the list of storage styles of this field that can
handle the MediaFile's format.
"""
for style in self._styles:
if mutagen_file.__class__.__name__ in style.formats:
yield style
def __get__(self, mediafile, owner=None):
out = None
for style in self.styles(mediafile.mgfile):
out = style.get(mediafile.mgfile)
if out:
break
return _safe_cast(self.out_type, out)
def __set__(self, mediafile, value):
if value is None:
value = self._none_value()
for style in self.styles(mediafile.mgfile):
style.set(mediafile.mgfile, value)
def __delete__(self, mediafile):
for style in self.styles(mediafile.mgfile):
style.delete(mediafile.mgfile)
def _none_value(self):
"""Get an appropriate "null" value for this field's type. This
is used internally when setting the field to None.
"""
if self.out_type == int:
return 0
elif self.out_type == float:
return 0.0
elif self.out_type == bool:
return False
elif self.out_type == unicode:
return u''
class ListMediaField(MediaField):
"""Property descriptor that retrieves a list of multiple values from
a tag.
Uses the ``get_list`` and ``set_list`` methods of its ``StorageStyle``
strategies to do the actual work.
"""
def __get__(self, mediafile, _):
values = []
for style in self.styles(mediafile.mgfile):
values.extend(style.get_list(mediafile.mgfile))
return [_safe_cast(self.out_type, value) for value in values]
def __set__(self, mediafile, values):
for style in self.styles(mediafile.mgfile):
style.set_list(mediafile.mgfile, values)
def single_field(self):
"""Returns a ``MediaField`` descriptor that gets and sets the
first item.
"""
options = {b'out_type': self.out_type}
return MediaField(*self._styles, **options)
class DateField(MediaField):
"""Descriptor that handles serializing and deserializing dates
The getter parses value from tags into a ``datetime.date`` instance
and setter serializes such an instance into a string.
For granular access to year, month, and day, use the ``*_field``
methods to create corresponding `DateItemField`s.
"""
def __init__(self, *date_styles, **kwargs):
"""``date_styles`` is a list of ``StorageStyle``s to store and
retrieve the whole date from. The ``year`` option is an
additional list of fallback styles for the year. The year is
always set on this style, but is only retrieved if the main
storage styles do not return a value.
"""
super(DateField, self).__init__(*date_styles)
year_style = kwargs.get(b'year', None)
if year_style:
self._year_field = MediaField(*year_style)
def __get__(self, mediafile, owner=None):
year, month, day = self._get_date_tuple(mediafile)
if not year:
return None
try:
return datetime.date(
year,
month or 1,
day or 1
)
except ValueError: # Out of range values.
return None
def __set__(self, mediafile, date):
if date is None:
self._set_date_tuple(mediafile, None, None, None)
else:
self._set_date_tuple(mediafile, date.year, date.month, date.day)
def __delete__(self, mediafile):
super(DateField, self).__delete__(mediafile)
if hasattr(self, '_year_field'):
self._year_field.__delete__(mediafile)
def _get_date_tuple(self, mediafile):
"""Get a 3-item sequence representing the date consisting of a
year, month, and day number. Each number is either an integer or
None.
"""
# Get the underlying data and split on hyphens and slashes.
datestring = super(DateField, self).__get__(mediafile, None)
if isinstance(datestring, basestring):
datestring = re.sub(r'[Tt ].*$', '', unicode(datestring))
items = re.split('[-/]', unicode(datestring))
else:
items = []
# Ensure that we have exactly 3 components, possibly by
# truncating or padding.
items = items[:3]
if len(items) < 3:
items += [None] * (3 - len(items))
# Use year field if year is missing.
if not items[0] and hasattr(self, '_year_field'):
items[0] = self._year_field.__get__(mediafile)
# Convert each component to an integer if possible.
items_ = []
for item in items:
try:
items_.append(int(item))
except (TypeError, ValueError):
items_.append(None)
return items_
def _set_date_tuple(self, mediafile, year, month=None, day=None):
"""Set the value of the field given a year, month, and day
number. Each number can be an integer or None to indicate an
unset component.
"""
if year is None:
self.__delete__(mediafile)
return
date = [u'{0:04d}'.format(int(year))]
if month:
date.append(u'{0:02d}'.format(int(month)))
if month and day:
date.append(u'{0:02d}'.format(int(day)))
date = map(unicode, date)
super(DateField, self).__set__(mediafile, u'-'.join(date))
if hasattr(self, '_year_field'):
self._year_field.__set__(mediafile, year)
def year_field(self):
return DateItemField(self, 0)
def month_field(self):
return DateItemField(self, 1)
def day_field(self):
return DateItemField(self, 2)
class DateItemField(MediaField):
"""Descriptor that gets and sets constituent parts of a `DateField`:
the month, day, or year.
"""
def __init__(self, date_field, item_pos):
self.date_field = date_field
self.item_pos = item_pos
def __get__(self, mediafile, _):
return self.date_field._get_date_tuple(mediafile)[self.item_pos]
def __set__(self, mediafile, value):
items = self.date_field._get_date_tuple(mediafile)
items[self.item_pos] = value
self.date_field._set_date_tuple(mediafile, *items)
def __delete__(self, mediafile):
self.__set__(mediafile, None)
class CoverArtField(MediaField):
"""A descriptor that provides access to the *raw image data* for the
cover image on a file. This is used for backwards compatibility: the
full `ImageListField` provides richer `Image` objects.
When there are multiple images, we try to pick the one most likely to
be a front cover.
"""
def __init__(self):
pass
def __get__(self, mediafile, _):
candidates = mediafile.images
if candidates:
return self.guess_cover_image(candidates).data
else:
return None
@staticmethod
def guess_cover_image(candidates):
if len(candidates) == 1:
return candidates[0]
try:
return next(c for c in candidates if c.type == ImageType.front)
except StopIteration:
return candidates[0]
def __set__(self, mediafile, data):
if data:
mediafile.images = [Image(data=data)]
else:
mediafile.images = []
def __delete__(self, mediafile):
delattr(mediafile, 'images')
class ImageListField(ListMediaField):
"""Descriptor to access the list of images embedded in tags.
The getter returns a list of `Image` instances obtained from
the tags. The setter accepts a list of `Image` instances to be
written to the tags.
"""
def __init__(self):
# The storage styles used here must implement the
# `ListStorageStyle` interface and get and set lists of
# `Image`s.
super(ImageListField, self).__init__(
MP3ImageStorageStyle(),
MP4ImageStorageStyle(),
ASFImageStorageStyle(),
VorbisImageStorageStyle(),
FlacImageStorageStyle(),
APEv2ImageStorageStyle(),
out_type=Image,
)
# MediaFile is a collection of fields.
class MediaFile(object):
"""Represents a multimedia file on disk and provides access to its
metadata.
"""
def __init__(self, path, id3v23=False):
"""Constructs a new `MediaFile` reflecting the file at path. May
throw `UnreadableFileError`.
By default, MP3 files are saved with ID3v2.4 tags. You can use
the older ID3v2.3 standard by specifying the `id3v23` option.
"""
path = syspath(path)
self.path = path
unreadable_exc = (
mutagen.mp3.error,
mutagen.id3.error,
mutagen.flac.error,
mutagen.monkeysaudio.MonkeysAudioHeaderError,
mutagen.mp4.error,
mutagen.oggopus.error,
mutagen.oggvorbis.error,
mutagen.ogg.error,
mutagen.asf.error,
mutagen.apev2.error,
mutagen.aiff.error,
)
try:
self.mgfile = mutagen.File(path)
except unreadable_exc as exc:
log.debug(u'header parsing failed: {0}', unicode(exc))
raise UnreadableFileError(path)
except IOError as exc:
if type(exc) == IOError:
# This is a base IOError, not a subclass from Mutagen or
# anywhere else.
raise
else:
log.debug('{}', traceback.format_exc())
raise MutagenError(path, exc)
except Exception as exc:
# Isolate bugs in Mutagen.
log.debug('{}', traceback.format_exc())
log.error(u'uncaught Mutagen exception in open: {0}', exc)
raise MutagenError(path, exc)
if self.mgfile is None:
# Mutagen couldn't guess the type
raise FileTypeError(path)
elif (type(self.mgfile).__name__ == 'M4A' or
type(self.mgfile).__name__ == 'MP4'):
info = self.mgfile.info
if hasattr(info, 'codec'):
if info.codec and info.codec.startswith('alac'):
self.type = 'alac'
else:
self.type = 'aac'
else:
# This hack differentiates AAC and ALAC on versions of
# Mutagen < 1.26. Once Mutagen > 1.26 is out and
# required by beets, we can remove this.
if hasattr(self.mgfile.info, 'bitrate') and \
self.mgfile.info.bitrate > 0:
self.type = 'aac'
else:
self.type = 'alac'
elif (type(self.mgfile).__name__ == 'ID3' or
type(self.mgfile).__name__ == 'MP3'):
self.type = 'mp3'
elif type(self.mgfile).__name__ == 'FLAC':
self.type = 'flac'
elif type(self.mgfile).__name__ == 'OggOpus':
self.type = 'opus'
elif type(self.mgfile).__name__ == 'OggVorbis':
self.type = 'ogg'
elif type(self.mgfile).__name__ == 'MonkeysAudio':
self.type = 'ape'
elif type(self.mgfile).__name__ == 'WavPack':
self.type = 'wv'
elif type(self.mgfile).__name__ == 'Musepack':
self.type = 'mpc'
elif type(self.mgfile).__name__ == 'ASF':
self.type = 'asf'
elif type(self.mgfile).__name__ == 'AIFF':
self.type = 'aiff'
else:
raise FileTypeError(path, type(self.mgfile).__name__)
# Add a set of tags if it's missing.
if self.mgfile.tags is None:
self.mgfile.add_tags()
# Set the ID3v2.3 flag only for MP3s.
self.id3v23 = id3v23 and self.type == 'mp3'
def save(self):
"""Write the object's tags back to the file.
"""
# Possibly save the tags to ID3v2.3.
kwargs = {}
if self.id3v23:
id3 = self.mgfile
if hasattr(id3, 'tags'):
# In case this is an MP3 object, not an ID3 object.
id3 = id3.tags
id3.update_to_v23()
kwargs['v2_version'] = 3
# Isolate bugs in Mutagen.
try:
self.mgfile.save(**kwargs)
except (IOError, OSError):
# Propagate these through: they don't represent Mutagen bugs.
raise
except Exception as exc:
log.debug('{}', traceback.format_exc())
log.error(u'uncaught Mutagen exception in save: {0}', exc)
raise MutagenError(self.path, exc)
def delete(self):
"""Remove the current metadata tag from the file.
"""
try:
self.mgfile.delete()
except NotImplementedError:
# For Mutagen types that don't support deletion (notably,
# ASF), just delete each tag individually.
for tag in self.mgfile.keys():
del self.mgfile[tag]
# Convenient access to the set of available fields.
@classmethod
def fields(cls):
"""Get the names of all writable properties that reflect
metadata tags (i.e., those that are instances of
:class:`MediaField`).
"""
for property, descriptor in cls.__dict__.items():
if isinstance(descriptor, MediaField):
yield property.decode('utf8')
@classmethod
def _field_sort_name(cls, name):
"""Get a sort key for a field name that determines the order
fields should be written in.
Fields names are kept unchanged, unless they are instances of
:class:`DateItemField`, in which case `year`, `month`, and `day`
are replaced by `date0`, `date1`, and `date2`, respectively, to
make them appear in that order.
"""
if isinstance(cls.__dict__[name], DateItemField):
name = re.sub('year', 'date0', name)
name = re.sub('month', 'date1', name)
name = re.sub('day', 'date2', name)
return name
@classmethod
def sorted_fields(cls):
"""Get the names of all writable metadata fields, sorted in the
order that they should be written.
This is a lexicographic order, except for instances of
:class:`DateItemField`, which are sorted in year-month-day
order.
"""
for property in sorted(cls.fields(), key=cls._field_sort_name):
yield property
@classmethod
def readable_fields(cls):
"""Get all metadata fields: the writable ones from
:meth:`fields` and also other audio properties.
"""
for property in cls.fields():
yield property
for property in ('length', 'samplerate', 'bitdepth', 'bitrate',
'channels', 'format'):
yield property
@classmethod
def add_field(cls, name, descriptor):
"""Add a field to store custom tags.
:param name: the name of the property the field is accessed
through. It must not already exist on this class.
:param descriptor: an instance of :class:`MediaField`.
"""
if not isinstance(descriptor, MediaField):
raise ValueError(
u'{0} must be an instance of MediaField'.format(descriptor))
if name in cls.__dict__:
raise ValueError(
u'property "{0}" already exists on MediaField'.format(name))
setattr(cls, name, descriptor)
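# Example usage (a sketch; the 'mood' field and its tag mappings are
# hypothetical, not part of the built-in field set):
#
#     MediaFile.add_field('mood', MediaField(
#         MP3DescStorageStyle(u'Mood'),
#         MP4StorageStyle(b'----:com.apple.iTunes:Mood'),
#         StorageStyle('MOOD'),
#         ASFStorageStyle('beets/Mood'),
#     ))
#
# After this call, ``mediafile.mood`` reads and writes the custom tag.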
def update(self, dict):
"""Set all field values from a dictionary.
For any key in `dict` that is also a field to store tags the
method retrieves the corresponding value from `dict` and updates
the `MediaFile`. If a key has the value `None`, the
corresponding property is deleted from the `MediaFile`.
"""
for field in self.sorted_fields():
if field in dict:
if dict[field] is None:
delattr(self, field)
else:
setattr(self, field, dict[field])
# Field definitions.
title = MediaField(
MP3StorageStyle('TIT2'),
MP4StorageStyle(b"\xa9nam"),
StorageStyle('TITLE'),
ASFStorageStyle('Title'),
)
artist = MediaField(
MP3StorageStyle('TPE1'),
MP4StorageStyle(b"\xa9ART"),
StorageStyle('ARTIST'),
ASFStorageStyle('Author'),
)
album = MediaField(
MP3StorageStyle('TALB'),
MP4StorageStyle(b"\xa9alb"),
StorageStyle('ALBUM'),
ASFStorageStyle('WM/AlbumTitle'),
)
genres = ListMediaField(
MP3ListStorageStyle('TCON'),
MP4ListStorageStyle(b"\xa9gen"),
ListStorageStyle('GENRE'),
ASFStorageStyle('WM/Genre'),
)
genre = genres.single_field()
composer = MediaField(
MP3StorageStyle('TCOM'),
MP4StorageStyle(b"\xa9wrt"),
StorageStyle('COMPOSER'),
ASFStorageStyle('WM/Composer'),
)
grouping = MediaField(
MP3StorageStyle('TIT1'),
MP4StorageStyle(b"\xa9grp"),
StorageStyle('GROUPING'),
ASFStorageStyle('WM/ContentGroupDescription'),
)
track = MediaField(
MP3SlashPackStorageStyle('TRCK', pack_pos=0),
MP4TupleStorageStyle(b'trkn', index=0),
StorageStyle('TRACK'),
StorageStyle('TRACKNUMBER'),
ASFStorageStyle('WM/TrackNumber'),
out_type=int,
)
tracktotal = MediaField(
MP3SlashPackStorageStyle('TRCK', pack_pos=1),
MP4TupleStorageStyle(b'trkn', index=1),
StorageStyle('TRACKTOTAL'),
StorageStyle('TRACKC'),
StorageStyle('TOTALTRACKS'),
ASFStorageStyle('TotalTracks'),
out_type=int,
)
disc = MediaField(
MP3SlashPackStorageStyle('TPOS', pack_pos=0),
MP4TupleStorageStyle(b'disk', index=0),
StorageStyle('DISC'),
StorageStyle('DISCNUMBER'),
ASFStorageStyle('WM/PartOfSet'),
out_type=int,
)
disctotal = MediaField(
MP3SlashPackStorageStyle('TPOS', pack_pos=1),
MP4TupleStorageStyle(b'disk', index=1),
StorageStyle('DISCTOTAL'),
StorageStyle('DISCC'),
StorageStyle('TOTALDISCS'),
ASFStorageStyle('TotalDiscs'),
out_type=int,
)
lyrics = MediaField(
MP3DescStorageStyle(key='USLT'),
MP4StorageStyle(b"\xa9lyr"),
StorageStyle('LYRICS'),
ASFStorageStyle('WM/Lyrics'),
)
comments = MediaField(
MP3DescStorageStyle(key='COMM'),
MP4StorageStyle(b"\xa9cmt"),
StorageStyle('DESCRIPTION'),
StorageStyle('COMMENT'),
ASFStorageStyle('WM/Comments'),
ASFStorageStyle('Description')
)
bpm = MediaField(
MP3StorageStyle('TBPM'),
MP4StorageStyle(b'tmpo', as_type=int),
StorageStyle('BPM'),
ASFStorageStyle('WM/BeatsPerMinute'),
out_type=int,
)
comp = MediaField(
MP3StorageStyle('TCMP'),
MP4BoolStorageStyle(b'cpil'),
StorageStyle('COMPILATION'),
ASFStorageStyle('WM/IsCompilation', as_type=bool),
out_type=bool,
)
albumartist = MediaField(
MP3StorageStyle('TPE2'),
MP4StorageStyle(b'aART'),
StorageStyle('ALBUM ARTIST'),
StorageStyle('ALBUMARTIST'),
ASFStorageStyle('WM/AlbumArtist'),
)
albumtype = MediaField(
MP3DescStorageStyle(u'MusicBrainz Album Type'),
MP4StorageStyle(b'----:com.apple.iTunes:MusicBrainz Album Type'),
StorageStyle('MUSICBRAINZ_ALBUMTYPE'),
ASFStorageStyle('MusicBrainz/Album Type'),
)
label = MediaField(
MP3StorageStyle('TPUB'),
MP4StorageStyle(b'----:com.apple.iTunes:Label'),
MP4StorageStyle(b'----:com.apple.iTunes:publisher'),
StorageStyle('LABEL'),
StorageStyle('PUBLISHER'), # Traktor
ASFStorageStyle('WM/Publisher'),
)
artist_sort = MediaField(
MP3StorageStyle('TSOP'),
MP4StorageStyle(b"soar"),
StorageStyle('ARTISTSORT'),
ASFStorageStyle('WM/ArtistSortOrder'),
)
albumartist_sort = MediaField(
MP3DescStorageStyle(u'ALBUMARTISTSORT'),
MP4StorageStyle(b"soaa"),
StorageStyle('ALBUMARTISTSORT'),
ASFStorageStyle('WM/AlbumArtistSortOrder'),
)
asin = MediaField(
MP3DescStorageStyle(u'ASIN'),
MP4StorageStyle(b"----:com.apple.iTunes:ASIN"),
StorageStyle('ASIN'),
ASFStorageStyle('MusicBrainz/ASIN'),
)
catalognum = MediaField(
MP3DescStorageStyle(u'CATALOGNUMBER'),
MP4StorageStyle(b"----:com.apple.iTunes:CATALOGNUMBER"),
StorageStyle('CATALOGNUMBER'),
ASFStorageStyle('WM/CatalogNo'),
)
disctitle = MediaField(
MP3StorageStyle('TSST'),
MP4StorageStyle(b"----:com.apple.iTunes:DISCSUBTITLE"),
StorageStyle('DISCSUBTITLE'),
ASFStorageStyle('WM/SetSubTitle'),
)
encoder = MediaField(
MP3StorageStyle('TENC'),
MP4StorageStyle(b"\xa9too"),
StorageStyle('ENCODEDBY'),
StorageStyle('ENCODER'),
ASFStorageStyle('WM/EncodedBy'),
)
script = MediaField(
MP3DescStorageStyle(u'Script'),
MP4StorageStyle(b"----:com.apple.iTunes:SCRIPT"),
StorageStyle('SCRIPT'),
ASFStorageStyle('WM/Script'),
)
language = MediaField(
MP3StorageStyle('TLAN'),
MP4StorageStyle(b"----:com.apple.iTunes:LANGUAGE"),
StorageStyle('LANGUAGE'),
ASFStorageStyle('WM/Language'),
)
country = MediaField(
MP3DescStorageStyle(u'MusicBrainz Album Release Country'),
MP4StorageStyle(b"----:com.apple.iTunes:MusicBrainz "
b"Album Release Country"),
StorageStyle('RELEASECOUNTRY'),
ASFStorageStyle('MusicBrainz/Album Release Country'),
)
albumstatus = MediaField(
MP3DescStorageStyle(u'MusicBrainz Album Status'),
MP4StorageStyle(b"----:com.apple.iTunes:MusicBrainz Album Status"),
StorageStyle('MUSICBRAINZ_ALBUMSTATUS'),
ASFStorageStyle('MusicBrainz/Album Status'),
)
media = MediaField(
MP3StorageStyle('TMED'),
MP4StorageStyle(b"----:com.apple.iTunes:MEDIA"),
StorageStyle('MEDIA'),
ASFStorageStyle('WM/Media'),
)
albumdisambig = MediaField(
# This tag mapping was invented for beets (not used by Picard, etc).
MP3DescStorageStyle(u'MusicBrainz Album Comment'),
MP4StorageStyle(b"----:com.apple.iTunes:MusicBrainz Album Comment"),
StorageStyle('MUSICBRAINZ_ALBUMCOMMENT'),
ASFStorageStyle('MusicBrainz/Album Comment'),
)
# Release date.
date = DateField(
MP3StorageStyle('TDRC'),
MP4StorageStyle(b"\xa9day"),
StorageStyle('DATE'),
ASFStorageStyle('WM/Year'),
year=(StorageStyle('YEAR'),))
year = date.year_field()
month = date.month_field()
day = date.day_field()
# *Original* release date.
original_date = DateField(
MP3StorageStyle('TDOR'),
MP4StorageStyle(b'----:com.apple.iTunes:ORIGINAL YEAR'),
StorageStyle('ORIGINALDATE'),
ASFStorageStyle('WM/OriginalReleaseYear'))
original_year = original_date.year_field()
original_month = original_date.month_field()
original_day = original_date.day_field()
# Nonstandard metadata.
artist_credit = MediaField(
MP3DescStorageStyle(u'Artist Credit'),
MP4StorageStyle(b"----:com.apple.iTunes:Artist Credit"),
StorageStyle('ARTIST_CREDIT'),
ASFStorageStyle('beets/Artist Credit'),
)
albumartist_credit = MediaField(
MP3DescStorageStyle(u'Album Artist Credit'),
MP4StorageStyle(b"----:com.apple.iTunes:Album Artist Credit"),
StorageStyle('ALBUMARTIST_CREDIT'),
ASFStorageStyle('beets/Album Artist Credit'),
)
# Legacy album art field
art = CoverArtField()
# Image list
images = ImageListField()
# MusicBrainz IDs.
mb_trackid = MediaField(
MP3UFIDStorageStyle(owner='http://musicbrainz.org'),
MP4StorageStyle(b'----:com.apple.iTunes:MusicBrainz Track Id'),
StorageStyle('MUSICBRAINZ_TRACKID'),
ASFStorageStyle('MusicBrainz/Track Id'),
)
mb_albumid = MediaField(
MP3DescStorageStyle(u'MusicBrainz Album Id'),
MP4StorageStyle(b'----:com.apple.iTunes:MusicBrainz Album Id'),
StorageStyle('MUSICBRAINZ_ALBUMID'),
ASFStorageStyle('MusicBrainz/Album Id'),
)
mb_artistid = MediaField(
MP3DescStorageStyle(u'MusicBrainz Artist Id'),
MP4StorageStyle(b'----:com.apple.iTunes:MusicBrainz Artist Id'),
StorageStyle('MUSICBRAINZ_ARTISTID'),
ASFStorageStyle('MusicBrainz/Artist Id'),
)
mb_albumartistid = MediaField(
MP3DescStorageStyle(u'MusicBrainz Album Artist Id'),
MP4StorageStyle(b'----:com.apple.iTunes:MusicBrainz Album Artist Id'),
StorageStyle('MUSICBRAINZ_ALBUMARTISTID'),
ASFStorageStyle('MusicBrainz/Album Artist Id'),
)
mb_releasegroupid = MediaField(
MP3DescStorageStyle(u'MusicBrainz Release Group Id'),
MP4StorageStyle(b'----:com.apple.iTunes:MusicBrainz Release Group Id'),
StorageStyle('MUSICBRAINZ_RELEASEGROUPID'),
ASFStorageStyle('MusicBrainz/Release Group Id'),
)
# Acoustid fields.
acoustid_fingerprint = MediaField(
MP3DescStorageStyle(u'Acoustid Fingerprint'),
MP4StorageStyle(b'----:com.apple.iTunes:Acoustid Fingerprint'),
StorageStyle('ACOUSTID_FINGERPRINT'),
ASFStorageStyle('Acoustid/Fingerprint'),
)
acoustid_id = MediaField(
MP3DescStorageStyle(u'Acoustid Id'),
MP4StorageStyle(b'----:com.apple.iTunes:Acoustid Id'),
StorageStyle('ACOUSTID_ID'),
ASFStorageStyle('Acoustid/Id'),
)
# ReplayGain fields.
rg_track_gain = MediaField(
MP3DescStorageStyle(
u'REPLAYGAIN_TRACK_GAIN',
float_places=2, suffix=u' dB'
),
MP3DescStorageStyle(
u'replaygain_track_gain',
float_places=2, suffix=u' dB'
),
MP3SoundCheckStorageStyle(
key='COMM',
index=0, desc=u'iTunNORM',
id3_lang='eng'
),
MP4StorageStyle(
b'----:com.apple.iTunes:replaygain_track_gain',
float_places=2, suffix=b' dB'
),
MP4SoundCheckStorageStyle(
b'----:com.apple.iTunes:iTunNORM',
index=0
),
StorageStyle(
u'REPLAYGAIN_TRACK_GAIN',
float_places=2, suffix=u' dB'
),
ASFStorageStyle(
u'replaygain_track_gain',
float_places=2, suffix=u' dB'
),
out_type=float
)
rg_album_gain = MediaField(
MP3DescStorageStyle(
u'REPLAYGAIN_ALBUM_GAIN',
float_places=2, suffix=u' dB'
),
MP3DescStorageStyle(
u'replaygain_album_gain',
float_places=2, suffix=u' dB'
),
MP4SoundCheckStorageStyle(
b'----:com.apple.iTunes:iTunNORM',
index=1
),
StorageStyle(
u'REPLAYGAIN_ALBUM_GAIN',
float_places=2, suffix=u' dB'
),
ASFStorageStyle(
u'replaygain_album_gain',
float_places=2, suffix=u' dB'
),
out_type=float
)
rg_track_peak = MediaField(
MP3DescStorageStyle(
u'REPLAYGAIN_TRACK_PEAK',
float_places=6
),
MP3DescStorageStyle(
u'replaygain_track_peak',
float_places=6
),
MP3SoundCheckStorageStyle(
key=u'COMM',
index=1, desc=u'iTunNORM',
id3_lang='eng'
),
MP4StorageStyle(
b'----:com.apple.iTunes:replaygain_track_peak',
float_places=6
),
MP4SoundCheckStorageStyle(
b'----:com.apple.iTunes:iTunNORM',
index=1
),
StorageStyle(u'REPLAYGAIN_TRACK_PEAK', float_places=6),
ASFStorageStyle(u'replaygain_track_peak', float_places=6),
out_type=float,
)
rg_album_peak = MediaField(
MP3DescStorageStyle(
u'REPLAYGAIN_ALBUM_PEAK',
float_places=6
),
MP3DescStorageStyle(
u'replaygain_album_peak',
float_places=6
),
MP4StorageStyle(
b'----:com.apple.iTunes:replaygain_album_peak',
float_places=6
),
StorageStyle(u'REPLAYGAIN_ALBUM_PEAK', float_places=6),
ASFStorageStyle(u'replaygain_album_peak', float_places=6),
out_type=float,
)
initial_key = MediaField(
MP3StorageStyle('TKEY'),
MP4StorageStyle(b'----:com.apple.iTunes:initialkey'),
StorageStyle('INITIALKEY'),
ASFStorageStyle('INITIALKEY'),
)
@property
def length(self):
"""The duration of the audio in seconds (a float)."""
return self.mgfile.info.length
@property
def samplerate(self):
"""The audio's sample rate (an int)."""
if hasattr(self.mgfile.info, 'sample_rate'):
return self.mgfile.info.sample_rate
elif self.type == 'opus':
# Opus is always 48kHz internally.
return 48000
return 0
@property
def bitdepth(self):
"""The number of bits per sample in the audio encoding (an int).
Only available for certain file formats (zero where
unavailable).
"""
if hasattr(self.mgfile.info, 'bits_per_sample'):
return self.mgfile.info.bits_per_sample
return 0
@property
def channels(self):
"""The number of channels in the audio (an int)."""
if isinstance(self.mgfile.info, mutagen.mp3.MPEGInfo):
return {
mutagen.mp3.STEREO: 2,
mutagen.mp3.JOINTSTEREO: 2,
mutagen.mp3.DUALCHANNEL: 2,
mutagen.mp3.MONO: 1,
}[self.mgfile.info.mode]
if hasattr(self.mgfile.info, 'channels'):
return self.mgfile.info.channels
return 0
@property
def bitrate(self):
"""The number of bits per seconds used in the audio coding (an
int). If this is provided explicitly by the compressed file
format, this is a precise reflection of the encoding. Otherwise,
it is estimated from the on-disk file size. In this case, some
imprecision is possible because the file header is incorporated
in the file size.
"""
if hasattr(self.mgfile.info, 'bitrate') and self.mgfile.info.bitrate:
# Many formats provide it explicitly.
return self.mgfile.info.bitrate
else:
# Otherwise, we calculate bitrate from the file size. (This
# is the case for all of the lossless formats.)
if not self.length:
# Avoid division by zero if length is not available.
return 0
size = os.path.getsize(self.path)
return int(size * 8 / self.length)
@property
def format(self):
"""A string describing the file format/codec."""
return TYPES[self.type]
|
zneext/mtasa-blue
|
refs/heads/master
|
utils/src/build_gettext_catalog_nsi.py
|
10
|
##############################################################################
#
# PROJECT: Multi Theft Auto v1.0
# LICENSE: See LICENSE in the top level directory
# FILE: utils/build_gettext_catalog_nsi.py
# PURPOSE: Create a template .pot file from a .NSI script
# DEVELOPERS: Dan Chowdhury <>
#
# Multi Theft Auto is available from http://www.multitheftauto.com/
#
##############################################################################
import collections
import polib
import datetime
import os
from optparse import OptionParser
parser = OptionParser()
parser.add_option("-i", "--input", dest="input",
help="nightly.nsi location", default="../Shared/installer/nightly.nsi" )
parser.add_option("-o", "--output", dest="output",
help="POT file output location", default="installer.pot")
parser.add_option("-p", "--project", dest="project",
help="Project name to write to the POT file", default="MTA San Andreas Installer" )
parser.add_option("-v", "--version", dest="version",
help="Version to write to the POT file", default="1.x" )
parser.add_option("-l", "--lang", dest="lang",
help="Default language of the NSI", default="English" )
(options, args) = parser.parse_args()
metadata = {
"Project-Id-Version" : (options.project + " " + options.version).strip(),
"Report-Msgid-Bugs-To" : "",
"POT-Creation-Date" : datetime.datetime.now().strftime('%Y-%m-%d %H:%M%z'),
"PO-Revision-Date" : "YEAR-MO-DA HO:MI+ZONE",
"Last-Translator" : "FULL NAME <EMAIL@ADDRESS>",
"Language-Team" : "LANGUAGE <LL@li.org>",
"Language" : "",
"MIME-Version" : "1.0",
"Content-Type" : "text/plain; charset=UTF-8",
"Content-Transfer-Encoding" : "8bit"
}
NSIFilePath = options.input
# Removes a trailing \ which marks a continued line
def removeEscapedNewLine(line):
    newline = line.rstrip()
    if newline.endswith("\\"):
        return newline[:-1]
    return line
# Open our source file
NSIWorkingFile = open(NSIFilePath,"r")
NSIWorkingFileDir,NSIFileName = os.path.split(NSIFilePath)
# Create our new .POT file, and give our metadata
poFile = polib.POFile()
poFile.metadata = metadata
# Create a cache of messageValues : [ [fileName1,lineNumber1], [fileName2,lineNumber2]... ] (The same message could appear on multiple lines)
LangStringCache = collections.OrderedDict()
# Create a cache of messageValues : [ label1, label2 ] (The same message could have multiple NSIS labels)
LangStringLabels = {}
# What we're doing here is looping through each line of our .nsi till we find a LangString of the default language
# Then, we try and grab the line number, the label, and the text
# The text can be multiline, so we have to sometimes continue reading till we reach the end
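# For reference, a one-line entry looks like this (label and text here
# are illustrative, not taken from the real script):
#
#   LangString WELCOME_TEXT ${LANG_ENGLISH} "Welcome to the setup wizard"
#
# and a multiline entry continues with a trailing backslash:
#
#   LangString LONG_TEXT ${LANG_ENGLISH} "A first line that \
#   continues and ends here"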
line = NSIWorkingFile.readline()
lineNo = 1
while line != '':
commands = line.split()
if len(commands) > 3:
if commands[0] == "LangString" and commands[2].upper() == ("${LANG_%s}"%options.lang).upper():
label = commands[1]
value = ""
# Let's assume it's a one-liner
start = line.find('"') + 1
if start:
end = line.find('"',start)
if end != -1:
value = line[start:end]
else: # Nope, multiline
line = removeEscapedNewLine(line)
# Keep reading till we reach the end
value = line[start:]
line = NSIWorkingFile.readline()
lineNo += 1
while line != '':
line = removeEscapedNewLine(line)
end = line.find('"')
if end != -1: #If we found the closing character, append
value += line[:end].lstrip()
break
else: #If not, append and continue
value += line.lstrip()
line = NSIWorkingFile.readline()
lineNo += 1
# Remove whitespace and new lines
value = value.strip("\t\n")
value = polib.unescape ( value )
if not value in LangStringCache:
LangStringCache[value] = []
# Note down our file and line number
LangStringCache[value].append([options.input,lineNo])
if not value in LangStringLabels:
LangStringLabels[value] = []
# Note down our label
LangStringLabels[value].append(label)
line = NSIWorkingFile.readline()
lineNo += 1
# Now, we loop through our cache and build PO entries for each
# We use PO comment field to store our NSIS labels, so we can decode it back later
for msgid, lineOccurrences in LangStringCache.iteritems():
    entry = polib.POEntry(
        msgid=msgid,
        msgstr='',
        occurrences=lineOccurrences,
        comment=" ".join(LangStringLabels[msgid])
    )
poFile.append(entry)
NSIWorkingFile.close()
# Finally, let's generate our POT file
poFile.save(options.output)
print ( "NSI POT Export Operation complete" )
|
Tomtomgo/phantomjs
|
refs/heads/master
|
src/qt/qtwebkit/Tools/Scripts/webkitpy/common/system/zipfileset_mock.py
|
167
|
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
def make_factory(ziphashes):
"""ZipFileSet factory routine that looks up zipfiles in a dict;
each zipfile should also be a dict of member names -> contents."""
class MockZipFileSet(object):
def __init__(self, url):
self._url = url
self._ziphash = ziphashes[url]
def namelist(self):
return self._ziphash.keys()
def read(self, member):
return self._ziphash[member]
def close(self):
pass
def maker(url):
# We return None because there's no tempfile to delete.
return (None, MockZipFileSet(url))
return maker
|
huddlej/pnwmoths
|
refs/heads/master
|
django/pnwmoths/species/management/commands/bulkops.py
|
1
|
"""
Bulk operations code written by Ole Laursen:
http://ole-laursen.blogspot.com/2010/11/bulk-inserting-django-objects.html
--
Bulk insert/update DB operations for the Django ORM. Useful when
inserting/updating lots of objects where the bottleneck is overhead
in talking to the database. Instead of doing this
for x in seq:
o = SomeObject()
o.foo = x
o.save()
or equivalently this
for x in seq:
SomeObject.objects.create(foo=x)
do this
l = []
for x in seq:
o = SomeObject()
o.foo = x
l.append(o)
insert_many(l)
Note that these operations are really simple. They won't work with
many-to-many relationships, and you may have to divide really big
lists into smaller chunks before sending them through.
History
2010-12-10: quote column names, reported by Beres Botond.
"""
def insert_many(objects, using="default"):
"""Insert list of Django objects in one SQL query. Objects must be
of the same Django model. Note that save is not called and signals
on the model are not raised."""
if not objects:
return
import django.db.models
from django.db import connections
con = connections[using]
model = objects[0].__class__
fields = [f for f in model._meta.fields if not isinstance(f, django.db.models.AutoField)]
parameters = []
for o in objects:
parameters.append(tuple(f.get_db_prep_save(f.pre_save(o, True), connection=con) for f in fields))
table = model._meta.db_table
column_names = ",".join(con.ops.quote_name(f.column) for f in fields)
placeholders = ",".join(("%s",) * len(fields))
con.cursor().executemany(
"insert into %s (%s) values (%s)" % (table, column_names, placeholders),
parameters)
def update_many(objects, fields=[], using="default"):
"""Update list of Django objects in one SQL query, optionally only
overwrite the given fields (as names, e.g. fields=["foo"]).
Objects must be of the same Django model. Note that save is not
called and signals on the model are not raised."""
if not objects:
return
import django.db.models
from django.db import connections
con = connections[using]
names = fields
meta = objects[0]._meta
fields = [f for f in meta.fields if not isinstance(f, django.db.models.AutoField) and (not names or f.name in names)]
if not fields:
raise ValueError("No fields to update, field names are %s." % names)
fields_with_pk = fields + [meta.pk]
parameters = []
for o in objects:
parameters.append(tuple(f.get_db_prep_save(f.pre_save(o, True), connection=con) for f in fields_with_pk))
table = meta.db_table
assignments = ",".join(("%s=%%s"% con.ops.quote_name(f.column)) for f in fields)
con.cursor().executemany(
"update %s set %s where %s=%%s" % (table, assignments, con.ops.quote_name(meta.pk.column)),
parameters)
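# Example usage (a sketch; SomeObject and compute_new_value are
# illustrative names, following the module docstring above):
#
#     objects = list(SomeObject.objects.all())
#     for o in objects:
#         o.foo = compute_new_value(o)
#     update_many(objects, fields=["foo"])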
|
JuanMatSa/PyFME
|
refs/heads/master
|
src/pyfme/aircrafts/aircraft.py
|
2
|
"""
Python Flight Mechanics Engine (PyFME).
Copyright (c) AeroPython Development Team.
Distributed under the terms of the MIT License.
Generic Aircraft
----------------
"""
from abc import abstractmethod
from copy import deepcopy
from warnings import warn
import numpy as np
from scipy.optimize import least_squares
from pyfme.environment.environment import Environment
from pyfme.models.systems import System
from pyfme.utils.anemometry import tas2cas, tas2eas, calculate_alpha_beta_TAS
from pyfme.utils.trimmer import trimming_cost_func
class Aircraft(object):
def __init__(self):
# Mass & Inertia
self.mass = 0 # kg
self.inertia = 0 # kg·m²
# Geometry
self.Sw = 0 # m2
self.chord = 0 # m
self.span = 0 # m
# Controls
self.controls = {}
self.control_limits = {}
# Coefficients
# Aero
self.CL, self.CD, self.Cm = 0, 0, 0
self.CY, self.Cl, self.Cn = 0, 0, 0
# Thrust
self.Ct = 0
# Forces & moments
self.gravity_force = np.zeros(3)
self.total_forces = np.zeros(3)
self.total_moments = np.zeros(3)
self.load_factor = 0
# Velocities
self.TAS = 0 # True Air Speed.
self.CAS = 0 # Calibrated Air Speed.
self.EAS = 0 # Equivalent Air Speed.
self.Mach = 0 # Mach number
self.q_inf = 0 # Dynamic pressure at infty (Pa)
# Angles
self.alpha = 0 # Angle of attack (AOA).
self.beta = 0 # Angle of sideslip (AOS).
# Not present in this model:
self.Dalpha_Dt = 0 # Rate of change of AOA.
self.Dbeta_Dt = 0 # Rate of change of AOS.
@property
def Ixx(self):
return self.inertia[0, 0]
@property
def Iyy(self):
return self.inertia[1, 1]
@property
def Izz(self):
return self.inertia[2, 2]
def update(self, controls, system, environment):
# If a control is not given, the previous value is assigned.
for control_name, control_value in controls.items():
limits = self.control_limits[control_name]
if limits[0] <= control_value <= limits[1]:
self.controls[control_name] = control_value
else:
# TODO: maybe raise a warning and assign max deflection
msg = "Control {} out of range ({} when max={} and min={" \
"}".format(control_name, limits[1], limits[0])
raise ValueError(msg)
# Velocity relative to air: aerodynamic velocity.
aero_vel = system.vel_body - environment.body_wind
self.alpha, self.beta, self.TAS = calculate_alpha_beta_TAS(
u=aero_vel[0], v=aero_vel[1], w=aero_vel[2])
# Setting velocities & dynamic pressure
self.CAS = tas2cas(self.TAS, environment.p, environment.rho)
self.EAS = tas2eas(self.TAS, environment.rho)
self.Mach = self.TAS / environment.a
self.q_inf = 0.5 * environment.rho * self.TAS ** 2
# Gravity force
self.gravity_force = environment.gravity_vector * self.mass
|
BrotherPhil/django
|
refs/heads/master
|
django/db/backends/base/base.py
|
147
|
import time
import warnings
from collections import deque
from contextlib import contextmanager
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db import DEFAULT_DB_ALIAS
from django.db.backends import utils
from django.db.backends.signals import connection_created
from django.db.transaction import TransactionManagementError
from django.db.utils import DatabaseError, DatabaseErrorWrapper
from django.utils import timezone
from django.utils.functional import cached_property
from django.utils.six.moves import _thread as thread
try:
import pytz
except ImportError:
pytz = None
NO_DB_ALIAS = '__no_db__'
class BaseDatabaseWrapper(object):
"""
Represents a database connection.
"""
# Mapping of Field objects to their column types.
data_types = {}
# Mapping of Field objects to their SQL suffix such as AUTOINCREMENT.
data_types_suffix = {}
# Mapping of Field objects to their SQL for CHECK constraints.
data_type_check_constraints = {}
ops = None
vendor = 'unknown'
SchemaEditorClass = None
queries_limit = 9000
def __init__(self, settings_dict, alias=DEFAULT_DB_ALIAS,
allow_thread_sharing=False):
# Connection related attributes.
# The underlying database connection.
self.connection = None
# `settings_dict` should be a dictionary containing keys such as
# NAME, USER, etc. It's called `settings_dict` instead of `settings`
# to disambiguate it from Django settings modules.
self.settings_dict = settings_dict
self.alias = alias
# Query logging in debug mode or when explicitly enabled.
self.queries_log = deque(maxlen=self.queries_limit)
self.force_debug_cursor = False
# Transaction related attributes.
# Tracks if the connection is in autocommit mode. Per PEP 249, by
# default, it isn't.
self.autocommit = False
# Tracks if the connection is in a transaction managed by 'atomic'.
self.in_atomic_block = False
# Increment to generate unique savepoint ids.
self.savepoint_state = 0
# List of savepoints created by 'atomic'.
self.savepoint_ids = []
# Tracks if the outermost 'atomic' block should commit on exit,
# ie. if autocommit was active on entry.
self.commit_on_exit = True
# Tracks if the transaction should be rolled back to the next
# available savepoint because of an exception in an inner block.
self.needs_rollback = False
# Connection termination related attributes.
self.close_at = None
self.closed_in_transaction = False
self.errors_occurred = False
# Thread-safety related attributes.
self.allow_thread_sharing = allow_thread_sharing
self._thread_ident = thread.get_ident()
# A list of no-argument functions to run when the transaction commits.
# Each entry is an (sids, func) tuple, where sids is a set of the
# active savepoint IDs when this function was registered.
self.run_on_commit = []
# Should we run the on-commit hooks the next time set_autocommit(True)
# is called?
self.run_commit_hooks_on_set_autocommit_on = False
@cached_property
def timezone(self):
"""
Time zone for datetimes stored as naive values in the database.
Returns a tzinfo object or None.
This is only needed when time zone support is enabled and the database
doesn't support time zones. (When the database supports time zones,
the adapter handles aware datetimes so Django doesn't need to.)
"""
if not settings.USE_TZ:
return None
elif self.features.supports_timezones:
return None
elif self.settings_dict['TIME_ZONE'] is None:
return timezone.utc
else:
# Only this branch requires pytz.
return pytz.timezone(self.settings_dict['TIME_ZONE'])
@cached_property
def timezone_name(self):
"""
Name of the time zone of the database connection.
"""
if not settings.USE_TZ:
return settings.TIME_ZONE
elif self.settings_dict['TIME_ZONE'] is None:
return 'UTC'
else:
return self.settings_dict['TIME_ZONE']
@property
def queries_logged(self):
return self.force_debug_cursor or settings.DEBUG
@property
def queries(self):
if len(self.queries_log) == self.queries_log.maxlen:
warnings.warn(
"Limit for query logging exceeded, only the last {} queries "
"will be returned.".format(self.queries_log.maxlen))
return list(self.queries_log)
# ##### Backend-specific methods for creating connections and cursors #####
def get_connection_params(self):
"""Returns a dict of parameters suitable for get_new_connection."""
raise NotImplementedError('subclasses of BaseDatabaseWrapper may require a get_connection_params() method')
def get_new_connection(self, conn_params):
"""Opens a connection to the database."""
raise NotImplementedError('subclasses of BaseDatabaseWrapper may require a get_new_connection() method')
def init_connection_state(self):
"""Initializes the database connection settings."""
raise NotImplementedError('subclasses of BaseDatabaseWrapper may require an init_connection_state() method')
def create_cursor(self):
"""Creates a cursor. Assumes that a connection is established."""
raise NotImplementedError('subclasses of BaseDatabaseWrapper may require a create_cursor() method')
# ##### Backend-specific methods for creating connections #####
def connect(self):
"""Connects to the database. Assumes that the connection is closed."""
# Check for invalid configurations.
self.check_settings()
# In case the previous connection was closed while in an atomic block
self.in_atomic_block = False
self.savepoint_ids = []
self.needs_rollback = False
# Reset parameters defining when to close the connection
max_age = self.settings_dict['CONN_MAX_AGE']
self.close_at = None if max_age is None else time.time() + max_age
self.closed_in_transaction = False
self.errors_occurred = False
# Establish the connection
conn_params = self.get_connection_params()
self.connection = self.get_new_connection(conn_params)
self.set_autocommit(self.settings_dict['AUTOCOMMIT'])
self.init_connection_state()
connection_created.send(sender=self.__class__, connection=self)
self.run_on_commit = []
def check_settings(self):
if self.settings_dict['TIME_ZONE'] is not None:
if not settings.USE_TZ:
raise ImproperlyConfigured(
"Connection '%s' cannot set TIME_ZONE because USE_TZ is "
"False." % self.alias)
elif self.features.supports_timezones:
raise ImproperlyConfigured(
"Connection '%s' cannot set TIME_ZONE because its engine "
"handles time zones conversions natively." % self.alias)
elif pytz is None:
raise ImproperlyConfigured(
"Connection '%s' cannot set TIME_ZONE because pytz isn't "
"installed." % self.alias)
def ensure_connection(self):
"""
Guarantees that a connection to the database is established.
"""
if self.connection is None:
with self.wrap_database_errors:
self.connect()
# ##### Backend-specific wrappers for PEP-249 connection methods #####
def _cursor(self):
self.ensure_connection()
with self.wrap_database_errors:
return self.create_cursor()
def _commit(self):
if self.connection is not None:
with self.wrap_database_errors:
return self.connection.commit()
def _rollback(self):
if self.connection is not None:
with self.wrap_database_errors:
return self.connection.rollback()
def _close(self):
if self.connection is not None:
with self.wrap_database_errors:
return self.connection.close()
# ##### Generic wrappers for PEP-249 connection methods #####
def cursor(self):
"""
Creates a cursor, opening a connection if necessary.
"""
self.validate_thread_sharing()
if self.queries_logged:
cursor = self.make_debug_cursor(self._cursor())
else:
cursor = self.make_cursor(self._cursor())
return cursor
def commit(self):
"""
Commits a transaction and resets the dirty flag.
"""
self.validate_thread_sharing()
self.validate_no_atomic_block()
self._commit()
# A successful commit means that the database connection works.
self.errors_occurred = False
self.run_commit_hooks_on_set_autocommit_on = True
def rollback(self):
"""
Rolls back a transaction and resets the dirty flag.
"""
self.validate_thread_sharing()
self.validate_no_atomic_block()
self._rollback()
# A successful rollback means that the database connection works.
self.errors_occurred = False
self.run_on_commit = []
def close(self):
"""
Closes the connection to the database.
"""
self.validate_thread_sharing()
self.run_on_commit = []
# Don't call validate_no_atomic_block() to avoid making it difficult
# to get rid of a connection in an invalid state. The next connect()
# will reset the transaction state anyway.
if self.closed_in_transaction or self.connection is None:
return
try:
self._close()
finally:
if self.in_atomic_block:
self.closed_in_transaction = True
self.needs_rollback = True
else:
self.connection = None
# ##### Backend-specific savepoint management methods #####
def _savepoint(self, sid):
with self.cursor() as cursor:
cursor.execute(self.ops.savepoint_create_sql(sid))
def _savepoint_rollback(self, sid):
with self.cursor() as cursor:
cursor.execute(self.ops.savepoint_rollback_sql(sid))
def _savepoint_commit(self, sid):
with self.cursor() as cursor:
cursor.execute(self.ops.savepoint_commit_sql(sid))
def _savepoint_allowed(self):
# Savepoints cannot be created outside a transaction
return self.features.uses_savepoints and not self.get_autocommit()
# ##### Generic savepoint management methods #####
def savepoint(self):
"""
Creates a savepoint inside the current transaction. Returns an
identifier for the savepoint that will be used for the subsequent
rollback or commit. Does nothing if savepoints are not supported.
"""
if not self._savepoint_allowed():
return
thread_ident = thread.get_ident()
tid = str(thread_ident).replace('-', '')
self.savepoint_state += 1
sid = "s%s_x%d" % (tid, self.savepoint_state)
self.validate_thread_sharing()
self._savepoint(sid)
return sid
def savepoint_rollback(self, sid):
"""
Rolls back to a savepoint. Does nothing if savepoints are not supported.
"""
if not self._savepoint_allowed():
return
self.validate_thread_sharing()
self._savepoint_rollback(sid)
# Remove any callbacks registered while this savepoint was active.
self.run_on_commit = [
(sids, func) for (sids, func) in self.run_on_commit if sid not in sids
]
def savepoint_commit(self, sid):
"""
Releases a savepoint. Does nothing if savepoints are not supported.
"""
if not self._savepoint_allowed():
return
self.validate_thread_sharing()
self._savepoint_commit(sid)
def clean_savepoints(self):
"""
Resets the counter used to generate unique savepoint ids in this thread.
"""
self.savepoint_state = 0
# ##### Backend-specific transaction management methods #####
def _set_autocommit(self, autocommit):
"""
Backend-specific implementation to enable or disable autocommit.
"""
raise NotImplementedError('subclasses of BaseDatabaseWrapper may require a _set_autocommit() method')
# ##### Generic transaction management methods #####
def get_autocommit(self):
"""
Check the autocommit state.
"""
self.ensure_connection()
return self.autocommit
def set_autocommit(self, autocommit, force_begin_transaction_with_broken_autocommit=False):
"""
Enable or disable autocommit.
The usual way to start a transaction is to turn autocommit off.
SQLite does not properly start a transaction when disabling
autocommit. To avoid this buggy behavior and to actually enter a new
transaction, an explicit BEGIN is required. Using
force_begin_transaction_with_broken_autocommit=True will issue an
explicit BEGIN with SQLite. This option will be ignored for other
backends.
"""
self.validate_no_atomic_block()
self.ensure_connection()
start_transaction_under_autocommit = (
force_begin_transaction_with_broken_autocommit
and not autocommit
and self.features.autocommits_when_autocommit_is_off
)
if start_transaction_under_autocommit:
self._start_transaction_under_autocommit()
else:
self._set_autocommit(autocommit)
self.autocommit = autocommit
if autocommit and self.run_commit_hooks_on_set_autocommit_on:
self.run_and_clear_commit_hooks()
self.run_commit_hooks_on_set_autocommit_on = False
def get_rollback(self):
"""
Get the "needs rollback" flag -- for *advanced use* only.
"""
if not self.in_atomic_block:
raise TransactionManagementError(
"The rollback flag doesn't work outside of an 'atomic' block.")
return self.needs_rollback
def set_rollback(self, rollback):
"""
Set or unset the "needs rollback" flag -- for *advanced use* only.
"""
if not self.in_atomic_block:
raise TransactionManagementError(
"The rollback flag doesn't work outside of an 'atomic' block.")
self.needs_rollback = rollback
def validate_no_atomic_block(self):
"""
Raise an error if an atomic block is active.
"""
if self.in_atomic_block:
raise TransactionManagementError(
"This is forbidden when an 'atomic' block is active.")
def validate_no_broken_transaction(self):
if self.needs_rollback:
raise TransactionManagementError(
"An error occurred in the current transaction. You can't "
"execute queries until the end of the 'atomic' block.")
# ##### Foreign key constraints checks handling #####
@contextmanager
def constraint_checks_disabled(self):
"""
Context manager that disables foreign key constraint checking.
"""
disabled = self.disable_constraint_checking()
try:
yield
finally:
if disabled:
self.enable_constraint_checking()
def disable_constraint_checking(self):
"""
Backends can implement as needed to temporarily disable foreign key
constraint checking. Should return True if the constraints were
disabled and will need to be reenabled.
"""
return False
def enable_constraint_checking(self):
"""
Backends can implement as needed to re-enable foreign key constraint
checking.
"""
pass
def check_constraints(self, table_names=None):
"""
Backends can override this method if they can apply constraint
checking (e.g. via "SET CONSTRAINTS ALL IMMEDIATE"). Should raise an
IntegrityError if any invalid foreign key references are encountered.
"""
pass
# ##### Connection termination handling #####
def is_usable(self):
"""
Tests if the database connection is usable.
This function may assume that self.connection is not None.
Actual implementations should take care not to raise exceptions
as that may prevent Django from recycling unusable connections.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseWrapper may require an is_usable() method")
def close_if_unusable_or_obsolete(self):
"""
Closes the current connection if unrecoverable errors have occurred,
or if it outlived its maximum age.
"""
if self.connection is not None:
# If the application didn't restore the original autocommit setting,
# don't take chances, drop the connection.
if self.get_autocommit() != self.settings_dict['AUTOCOMMIT']:
self.close()
return
# If an exception other than DataError or IntegrityError occurred
# since the last commit / rollback, check if the connection works.
if self.errors_occurred:
if self.is_usable():
self.errors_occurred = False
else:
self.close()
return
if self.close_at is not None and time.time() >= self.close_at:
self.close()
return
# ##### Thread safety handling #####
def validate_thread_sharing(self):
"""
Validates that the connection isn't accessed by a thread other than
the one that originally created it, unless the connection was explicitly
authorized to be shared between threads (via the `allow_thread_sharing`
property). Raises an exception if the validation fails.
"""
if not (self.allow_thread_sharing
or self._thread_ident == thread.get_ident()):
raise DatabaseError("DatabaseWrapper objects created in a "
"thread can only be used in that same thread. The object "
"with alias '%s' was created in thread id %s and this is "
"thread id %s."
% (self.alias, self._thread_ident, thread.get_ident()))
# ##### Miscellaneous #####
def prepare_database(self):
"""
Hook to do any database check or preparation, generally called before
migrating a project or an app.
"""
pass
@cached_property
def wrap_database_errors(self):
"""
Context manager and decorator that re-throws backend-specific database
exceptions using Django's common wrappers.
"""
return DatabaseErrorWrapper(self)
def make_debug_cursor(self, cursor):
"""
Creates a cursor that logs all queries in self.queries_log.
"""
return utils.CursorDebugWrapper(cursor, self)
def make_cursor(self, cursor):
"""
Creates a cursor without debug logging.
"""
return utils.CursorWrapper(cursor, self)
@contextmanager
def temporary_connection(self):
"""
Context manager that ensures that a connection is established, and
if it opened one, closes it to avoid leaving a dangling connection.
This is useful for operations outside of the request-response cycle.
Provides a cursor: with self.temporary_connection() as cursor: ...
"""
must_close = self.connection is None
cursor = self.cursor()
try:
yield cursor
finally:
cursor.close()
if must_close:
self.close()
@cached_property
def _nodb_connection(self):
"""
Alternative connection to be used when there is no need to access
the main database, specifically for test db creation/deletion.
This also prevents the production database from being exposed to
potential child threads while (or after) the test database is destroyed.
Refs #10868, #17786, #16969.
"""
settings_dict = self.settings_dict.copy()
settings_dict['NAME'] = None
nodb_connection = self.__class__(
settings_dict,
alias=NO_DB_ALIAS,
allow_thread_sharing=False)
return nodb_connection
def _start_transaction_under_autocommit(self):
"""
Only required when autocommits_when_autocommit_is_off = True.
"""
raise NotImplementedError(
'subclasses of BaseDatabaseWrapper may require a '
'_start_transaction_under_autocommit() method'
)
def schema_editor(self, *args, **kwargs):
"""
Returns a new instance of this backend's SchemaEditor.
"""
if self.SchemaEditorClass is None:
raise NotImplementedError(
'The SchemaEditorClass attribute of this database wrapper is still None')
return self.SchemaEditorClass(self, *args, **kwargs)
def on_commit(self, func):
if self.in_atomic_block:
# Transaction in progress; save for execution on commit.
self.run_on_commit.append((set(self.savepoint_ids), func))
elif not self.get_autocommit():
raise TransactionManagementError('on_commit() cannot be used in manual transaction management')
else:
# No transaction in progress and in autocommit mode; execute
# immediately.
func()
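# A small sketch of the public entry point that reaches this method
# (django.db.transaction.on_commit; Order and send_confirmation are
# illustrative names):
#
#     from django.db import transaction
#
#     with transaction.atomic():
#         order = Order.objects.create(total=10)
#         transaction.on_commit(lambda: send_confirmation(order))
#     # send_confirmation runs only after the outermost block commits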
def run_and_clear_commit_hooks(self):
self.validate_no_atomic_block()
try:
while self.run_on_commit:
sids, func = self.run_on_commit.pop(0)
func()
finally:
self.run_on_commit = []
|
cristiana214/cristianachavez214-cristianachavez
|
refs/heads/master
|
python/src/Lib/distutils/command/build_py.py
|
52
|
"""distutils.command.build_py
Implements the Distutils 'build_py' command."""
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id: build_py.py 65742 2008-08-17 04:16:04Z brett.cannon $"
import string, os
from types import *
from glob import glob
from distutils.core import Command
from distutils.errors import *
from distutils.util import convert_path
from distutils import log
class build_py (Command):
description = "\"build\" pure Python modules (copy to build directory)"
user_options = [
('build-lib=', 'd', "directory to \"build\" (copy) to"),
('compile', 'c', "compile .py to .pyc"),
('no-compile', None, "don't compile .py files [default]"),
('optimize=', 'O',
"also compile with optimization: -O1 for \"python -O\", "
"-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
('force', 'f', "forcibly build everything (ignore file timestamps)"),
]
boolean_options = ['compile', 'force']
negative_opt = {'no-compile' : 'compile'}
def initialize_options (self):
self.build_lib = None
self.py_modules = None
self.package = None
self.package_data = None
self.package_dir = None
self.compile = 0
self.optimize = 0
self.force = None
def finalize_options (self):
self.set_undefined_options('build',
('build_lib', 'build_lib'),
('force', 'force'))
# Get the distribution options that are aliases for build_py
# options -- list of packages and list of modules.
self.packages = self.distribution.packages
self.py_modules = self.distribution.py_modules
self.package_data = self.distribution.package_data
self.package_dir = {}
if self.distribution.package_dir:
for name, path in self.distribution.package_dir.items():
self.package_dir[name] = convert_path(path)
self.data_files = self.get_data_files()
# Ick, copied straight from install_lib.py (fancy_getopt needs a
# type system! Hell, *everything* needs a type system!!!)
if type(self.optimize) is not IntType:
try:
self.optimize = int(self.optimize)
assert 0 <= self.optimize <= 2
except (ValueError, AssertionError):
raise DistutilsOptionError, "optimize must be 0, 1, or 2"
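# Illustrative only: these options normally arrive from a setup() call such
# as the sketch below (project and package names are made up), or from the
# command line, e.g. "python setup.py build_py --optimize=1".
#
#     from distutils.core import setup
#     setup(name='example',
#           packages=['example', 'example.sub'],
#           package_dir={'example': 'src/example'},
#           package_data={'example': ['data/*.dat']})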
def run (self):
# XXX copy_file by default preserves atime and mtime. IMHO this is
# the right thing to do, but perhaps it should be an option -- in
# particular, a site administrator might want installed files to
# reflect the time of installation rather than the last
# modification time before the installed release.
# XXX copy_file by default preserves mode, which appears to be the
# wrong thing to do: if a file is read-only in the working
# directory, we want it to be installed read/write so that the next
# installation of the same module distribution can overwrite it
# without problems. (This might be a Unix-specific issue.) Thus
# we turn off 'preserve_mode' when copying to the build directory,
# since the build directory is supposed to be exactly what the
# installation will look like (ie. we preserve mode when
# installing).
# Two options control which modules will be installed: 'packages'
# and 'py_modules'. The former lets us work with whole packages, not
# specifying individual modules at all; the latter is for
# specifying modules one-at-a-time.
if self.py_modules:
self.build_modules()
if self.packages:
self.build_packages()
self.build_package_data()
self.byte_compile(self.get_outputs(include_bytecode=0))
# run ()
def get_data_files (self):
"""Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
data = []
if not self.packages:
return data
for package in self.packages:
# Locate package source directory
src_dir = self.get_package_dir(package)
# Compute package build directory
build_dir = os.path.join(*([self.build_lib] + package.split('.')))
# Length of path to strip from found files
plen = 0
if src_dir:
plen = len(src_dir)+1
# Strip directory from globbed filenames
filenames = [
file[plen:] for file in self.find_data_files(package, src_dir)
]
data.append((package, src_dir, build_dir, filenames))
return data
def find_data_files (self, package, src_dir):
"""Return filenames for package's data files in 'src_dir'"""
globs = (self.package_data.get('', [])
+ self.package_data.get(package, []))
files = []
for pattern in globs:
# Each pattern has to be converted to a platform-specific path
filelist = glob(os.path.join(src_dir, convert_path(pattern)))
# Files that match more than one pattern are only added once
files.extend([fn for fn in filelist if fn not in files])
return files
def build_package_data (self):
"""Copy data files into build directory"""
lastdir = None
for package, src_dir, build_dir, filenames in self.data_files:
for filename in filenames:
target = os.path.join(build_dir, filename)
self.mkpath(os.path.dirname(target))
self.copy_file(os.path.join(src_dir, filename), target,
preserve_mode=False)
def get_package_dir (self, package):
"""Return the directory, relative to the top of the source
distribution, where package 'package' should be found
(at least according to the 'package_dir' option, if any)."""
path = string.split(package, '.')
if not self.package_dir:
if path:
return apply(os.path.join, path)
else:
return ''
else:
tail = []
while path:
try:
pdir = self.package_dir[string.join(path, '.')]
except KeyError:
tail.insert(0, path[-1])
del path[-1]
else:
tail.insert(0, pdir)
return os.path.join(*tail)
else:
# Oops, got all the way through 'path' without finding a
# match in package_dir. If package_dir defines a directory
# for the root (nameless) package, then fallback on it;
# otherwise, we might as well have not consulted
# package_dir at all, as we just use the directory implied
# by 'tail' (which should be the same as the original value
# of 'path' at this point).
pdir = self.package_dir.get('')
if pdir is not None:
tail.insert(0, pdir)
if tail:
return apply(os.path.join, tail)
else:
return ''
# get_package_dir ()
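# Worked example (mapping values are made up): with
#     package_dir = {'': 'src', 'foo.bar': 'lib/bar'}
# the loop consumes the dotted name from the right, so
#     get_package_dir('foo.bar.baz')  ->  'lib/bar/baz'
#     get_package_dir('quux')         ->  'src/quux'   (root '' fallback)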
def check_package (self, package, package_dir):
# Empty dir name means current directory, which we can probably
# assume exists. Also, os.path.exists and isdir don't know about
# my "empty string means current dir" convention, so we have to
# circumvent them.
if package_dir != "":
if not os.path.exists(package_dir):
raise DistutilsFileError, \
"package directory '%s' does not exist" % package_dir
if not os.path.isdir(package_dir):
raise DistutilsFileError, \
("supposed package directory '%s' exists, " +
"but is not a directory") % package_dir
# Require __init__.py for all but the "root package"
if package:
init_py = os.path.join(package_dir, "__init__.py")
if os.path.isfile(init_py):
return init_py
else:
log.warn(("package init file '%s' not found " +
"(or not a regular file)"), init_py)
# Either not in a package at all (__init__.py not expected), or
# __init__.py doesn't exist -- so don't return the filename.
return None
# check_package ()
def check_module (self, module, module_file):
if not os.path.isfile(module_file):
log.warn("file %s (for module %s) not found", module_file, module)
return 0
else:
return 1
# check_module ()
def find_package_modules (self, package, package_dir):
self.check_package(package, package_dir)
module_files = glob(os.path.join(package_dir, "*.py"))
modules = []
setup_script = os.path.abspath(self.distribution.script_name)
for f in module_files:
abs_f = os.path.abspath(f)
if abs_f != setup_script:
module = os.path.splitext(os.path.basename(f))[0]
modules.append((package, module, f))
else:
self.debug_print("excluding %s" % setup_script)
return modules
def find_modules (self):
"""Finds individually-specified Python modules, ie. those listed by
module name in 'self.py_modules'. Returns a list of tuples (package,
module_base, filename): 'package' is a tuple of the path through
package-space to the module; 'module_base' is the bare (no
packages, no dots) module name, and 'filename' is the path to the
".py" file (relative to the distribution root) that implements the
module.
"""
# Map package names to tuples of useful info about the package:
# (package_dir, checked)
# package_dir - the directory where we'll find source files for
# this package
# checked - true if we have checked that the package directory
# is valid (exists, contains __init__.py, ... ?)
packages = {}
# List of (package, module, filename) tuples to return
modules = []
# We treat modules-in-packages almost the same as toplevel modules,
# just the "package" for a toplevel is empty (either an empty
# string or empty list, depending on context). Differences:
# - don't check for __init__.py in directory for empty package
for module in self.py_modules:
path = string.split(module, '.')
package = string.join(path[0:-1], '.')
module_base = path[-1]
try:
(package_dir, checked) = packages[package]
except KeyError:
package_dir = self.get_package_dir(package)
checked = 0
if not checked:
init_py = self.check_package(package, package_dir)
packages[package] = (package_dir, 1)
if init_py:
modules.append((package, "__init__", init_py))
# XXX perhaps we should also check for just .pyc files
# (so greedy closed-source bastards can distribute Python
# modules too)
module_file = os.path.join(package_dir, module_base + ".py")
if not self.check_module(module, module_file):
continue
modules.append((package, module_base, module_file))
return modules
# find_modules ()
def find_all_modules (self):
"""Compute the list of all modules that will be built, whether
they are specified one-module-at-a-time ('self.py_modules') or
by whole packages ('self.packages'). Return a list of tuples
(package, module, module_file), just like 'find_modules()' and
'find_package_modules()' do."""
modules = []
if self.py_modules:
modules.extend(self.find_modules())
if self.packages:
for package in self.packages:
package_dir = self.get_package_dir(package)
m = self.find_package_modules(package, package_dir)
modules.extend(m)
return modules
# find_all_modules ()
def get_source_files (self):
modules = self.find_all_modules()
filenames = []
for module in modules:
filenames.append(module[-1])
return filenames
def get_module_outfile (self, build_dir, package, module):
outfile_path = [build_dir] + list(package) + [module + ".py"]
return os.path.join(*outfile_path)
def get_outputs (self, include_bytecode=1):
modules = self.find_all_modules()
outputs = []
for (package, module, module_file) in modules:
package = string.split(package, '.')
filename = self.get_module_outfile(self.build_lib, package, module)
outputs.append(filename)
if include_bytecode:
if self.compile:
outputs.append(filename + "c")
if self.optimize > 0:
outputs.append(filename + "o")
outputs += [
os.path.join(build_dir, filename)
for package, src_dir, build_dir, filenames in self.data_files
for filename in filenames
]
return outputs
def build_module (self, module, module_file, package):
if type(package) is StringType:
package = string.split(package, '.')
elif type(package) not in (ListType, TupleType):
raise TypeError, \
"'package' must be a string (dot-separated), list, or tuple"
# Now put the module source file into the "build" area -- this is
# easy, we just copy it somewhere under self.build_lib (the build
# directory for Python source).
outfile = self.get_module_outfile(self.build_lib, package, module)
dir = os.path.dirname(outfile)
self.mkpath(dir)
return self.copy_file(module_file, outfile, preserve_mode=0)
def build_modules (self):
modules = self.find_modules()
for (package, module, module_file) in modules:
# Now "build" the module -- ie. copy the source file to
# self.build_lib (the build directory for Python source).
# (Actually, it gets copied to the directory for this package
# under self.build_lib.)
self.build_module(module, module_file, package)
# build_modules ()
def build_packages (self):
for package in self.packages:
# Get list of (package, module, module_file) tuples based on
# scanning the package directory. 'package' is only included
# in the tuple so that 'find_modules()' and
# 'find_package_tuples()' have a consistent interface; it's
# ignored here (apart from a sanity check). Also, 'module' is
# the *unqualified* module name (ie. no dots, no package -- we
# already know its package!), and 'module_file' is the path to
# the .py file, relative to the current directory
# (ie. including 'package_dir').
package_dir = self.get_package_dir(package)
modules = self.find_package_modules(package, package_dir)
# Now loop over the modules we found, "building" each one (just
# copy it to self.build_lib).
for (package_, module, module_file) in modules:
assert package == package_
self.build_module(module, module_file, package)
# build_packages ()
def byte_compile (self, files):
from distutils.util import byte_compile
prefix = self.build_lib
if prefix[-1] != os.sep:
prefix = prefix + os.sep
# XXX this code is essentially the same as the 'byte_compile()
# method of the "install_lib" command, except for the determination
# of the 'prefix' string. Hmmm.
if self.compile:
byte_compile(files, optimize=0,
force=self.force, prefix=prefix, dry_run=self.dry_run)
if self.optimize > 0:
byte_compile(files, optimize=self.optimize,
force=self.force, prefix=prefix, dry_run=self.dry_run)
# class build_py
|
readbeyond/aeneas
|
refs/heads/master
|
aeneas/tests/test_runtimeconfiguration.py
|
5
|
#!/usr/bin/env python
# coding=utf-8
# aeneas is a Python/C library and a set of tools
# to automagically synchronize audio and text (aka forced alignment)
#
# Copyright (C) 2012-2013, Alberto Pettarin (www.albertopettarin.it)
# Copyright (C) 2013-2015, ReadBeyond Srl (www.readbeyond.it)
# Copyright (C) 2015-2017, Alberto Pettarin (www.albertopettarin.it)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
from aeneas.exacttiming import TimeValue
from aeneas.runtimeconfiguration import RuntimeConfiguration
class TestRuntimeConfiguration(unittest.TestCase):
def test_config_string(self):
rconf = RuntimeConfiguration()
rconf.config_string
def test_safety_checks(self):
rconf = RuntimeConfiguration()
self.assertEqual(rconf.safety_checks, True)
def test_sample_rate(self):
rconf = RuntimeConfiguration()
self.assertEqual(rconf.sample_rate, 16000)
def test_dtw_margin(self):
rconf = RuntimeConfiguration()
self.assertEqual(rconf.dtw_margin, TimeValue("60.000"))
def test_mmn(self):
rconf = RuntimeConfiguration()
self.assertEqual(rconf.mmn, False)
def test_mws(self):
rconf = RuntimeConfiguration()
self.assertEqual(rconf.mws, TimeValue("0.040"))
def test_mwl(self):
rconf = RuntimeConfiguration()
self.assertEqual(rconf.mwl, TimeValue("0.100"))
def test_tts(self):
rconf = RuntimeConfiguration()
self.assertEqual(rconf.tts, "espeak")
def test_tts_path(self):
rconf = RuntimeConfiguration()
self.assertEqual(rconf.tts_path, None)
def test_set_granularity(self):
rconf = RuntimeConfiguration()
rconf.set_granularity(level=1)
self.assertEqual(rconf.mmn, False)
self.assertEqual(rconf.mwl, TimeValue("0.100"))
self.assertEqual(rconf.mws, TimeValue("0.040"))
rconf.set_granularity(level=2)
self.assertEqual(rconf.mmn, False)
self.assertEqual(rconf.mwl, TimeValue("0.050"))
self.assertEqual(rconf.mws, TimeValue("0.020"))
rconf.set_granularity(level=3)
self.assertEqual(rconf.mmn, False)
self.assertEqual(rconf.mwl, TimeValue("0.020"))
self.assertEqual(rconf.mws, TimeValue("0.005"))
def test_set_tts(self):
rconf = RuntimeConfiguration()
rconf.set_tts(level=1)
self.assertEqual(rconf.tts, "espeak")
self.assertEqual(rconf.tts_path, None)
rconf.set_tts(level=2)
self.assertEqual(rconf.tts, "espeak")
self.assertEqual(rconf.tts_path, None)
rconf.set_tts(level=3)
self.assertEqual(rconf.tts, "espeak")
self.assertEqual(rconf.tts_path, None)
def test_clone(self):
rconf = RuntimeConfiguration()
rconf2 = rconf.clone()
self.assertNotEqual(id(rconf), id(rconf2))
self.assertEqual(rconf.config_string, rconf2.config_string)
def test_set_rconf_string(self):
params = [
(u"aba_nonspeech_tolerance=0.040", "aba_nonspeech_tolerance", TimeValue("0.040")),
(u"aba_no_zero_duration=0.040", "aba_no_zero_duration", TimeValue("0.040")),
(u"allow_unlisted_languages=True", "allow_unlisted_languages", True),
(u"c_extensions=False", "c_extensions", False),
(u"cdtw=False", "cdtw", False),
(u"cew=False", "cew", False),
(u"cmfcc=False", "cmfcc", False),
(u"cew_subprocess_enabled=True", "cew_subprocess_enabled", True),
(u"cew_subprocess_path=/foo/bar/python", "cew_subprocess_path", "/foo/bar/python"),
(u"downloader_sleep=5.000", "downloader_sleep", TimeValue("5.000")),
(u"downloader_retry_attempts=5", "downloader_retry_attempts", 5),
(u"dtw_algorithm=exact", "dtw_algorithm", "exact"),
(u"dtw_margin=100", "dtw_margin", TimeValue("100")),
(u"ffmpeg_path=/foo/bar/ffmpeg", "ffmpeg_path", "/foo/bar/ffmpeg"),
(u"ffmpeg_sample_rate=8000", "ffmpeg_sample_rate", 8000),
(u"ffprobe_path=/foo/bar/ffprobe", "ffprobe_path", "/foo/bar/ffprobe"),
(u"job_max_tasks=10", "job_max_tasks", 10),
(u"mfcc_filters=100", "mfcc_filters", 100),
(u"mfcc_size=20", "mfcc_size", 20),
(u"mfcc_fft_order=256", "mfcc_fft_order", 256),
(u"mfcc_lower_frequency=120.0", "mfcc_lower_frequency", 120.0),
(u"mfcc_upper_frequency=5000.0", "mfcc_upper_frequency", 5000.0),
(u"mfcc_emphasis_factor=1.0", "mfcc_emphasis_factor", 1.0),
(u"mfcc_mask_nonspeech=True", "mfcc_mask_nonspeech", True),
(u"mfcc_window_length=0.360", "mfcc_window_length", TimeValue("0.360")),
(u"mfcc_window_shift=0.160", "mfcc_window_shift", TimeValue("0.160")),
(u"dtw_margin_l1=100", "dtw_margin_l1", TimeValue("100")),
(u"mfcc_mask_nonspeech_l1=True", "mfcc_mask_nonspeech_l1", True),
(u"mfcc_window_length_l1=0.360", "mfcc_window_length_l1", TimeValue("0.360")),
(u"mfcc_window_shift_l1=0.160", "mfcc_window_shift_l1", TimeValue("0.160")),
(u"dtw_margin_l2=30", "dtw_margin_l2", TimeValue("30")),
(u"mfcc_mask_nonspeech_l2=True", "mfcc_mask_nonspeech_l2", True),
(u"mfcc_window_length_l2=0.360", "mfcc_window_length_l2", TimeValue("0.360")),
(u"mfcc_window_shift_l2=0.160", "mfcc_window_shift_l2", TimeValue("0.160")),
(u"dtw_margin_l3=10", "dtw_margin_l3", TimeValue("10")),
(u"mfcc_mask_nonspeech_l3=True", "mfcc_mask_nonspeech_l3", True),
(u"mfcc_window_length_l3=0.360", "mfcc_window_length_l3", TimeValue("0.360")),
(u"mfcc_window_shift_l3=0.160", "mfcc_window_shift_l3", TimeValue("0.160")),
(u"mfcc_mask_extend_speech_after=1", "mfcc_mask_extend_speech_after", 1),
(u"mfcc_mask_extend_speech_before=1", "mfcc_mask_extend_speech_before", 1),
(u"mfcc_mask_log_energy_threshold=0.750", "mfcc_mask_log_energy_threshold", 0.750),
(u"mfcc_mask_min_nonspeech_length=5", "mfcc_mask_min_nonspeech_length", 5),
(u"nuance_tts_api_id=foo", "nuance_tts_api_id", "foo"),
(u"nuance_tts_api_key=bar", "nuance_tts_api_key", "bar"),
(u"safety_checks=False", "safety_checks", False),
(u"task_max_audio_length=1000", "task_max_audio_length", TimeValue("1000")),
(u"task_max_text_length=1000", "task_max_text_length", 1000),
(u"tmp_path=/foo/bar", "tmp_path", "/foo/bar"),
(u"tts=festival", "tts", "festival"),
(u"tts_path=/foo/bar/festival", "tts_path", "/foo/bar/festival"),
(u"tts_api_sleep=5.000", "tts_api_sleep", TimeValue("5.000")),
(u"tts_api_retry_attempts=3", "tts_api_retry_attempts", 3),
(u"tts_voice_code=ru", "tts_voice_code", "ru"),
(u"tts_cache=True", "tts_cache", True),
(u"tts_l1=festival", "tts_l1", "festival"),
(u"tts_path_l1=/foo/bar/festival", "tts_path_l1", "/foo/bar/festival"),
(u"tts_l2=festival", "tts_l2", "festival"),
(u"tts_path_l2=/foo/bar/festival", "tts_path_l2", "/foo/bar/festival"),
(u"tts_l3=festival", "tts_l3", "festival"),
(u"tts_path_l3=/foo/bar/festival", "tts_path_l3", "/foo/bar/festival"),
(u"vad_extend_speech_after=1.000", "vad_extend_speech_after", TimeValue("1.000")),
(u"vad_extend_speech_before=1.000", "vad_extend_speech_before", TimeValue("1.000")),
(u"vad_log_energy_threshold=0.750", "vad_log_energy_threshold", 0.750),
(u"vad_min_nonspeech_length=0.500", "vad_min_nonspeech_length", TimeValue("0.500")),
]
for string, key, value in params:
rconf = RuntimeConfiguration(string)
self.assertEqual(rconf[key], value)
if __name__ == "__main__":
unittest.main()
|
mathom/kombu
|
refs/heads/master
|
kombu/messaging.py
|
1
|
"""
kombu.messaging
===============
Sending and receiving messages.
"""
from __future__ import absolute_import
from itertools import count
from .compression import compress
from .connection import maybe_channel, is_connection
from .entity import Exchange, Queue, DELIVERY_MODES
from .five import int_types, text_t, values
from .serialization import encode
from .utils import ChannelPromise, maybe_list
__all__ = ['Exchange', 'Queue', 'Producer', 'Consumer']
# XXX compat attribute
entry_to_queue = Queue.from_dict
class Producer(object):
"""Message Producer.
:param channel: Connection or channel.
:keyword exchange: Optional default exchange.
:keyword routing_key: Optional default routing key.
:keyword serializer: Default serializer. Default is `"json"`.
:keyword compression: Default compression method. Default is no
compression.
:keyword auto_declare: Automatically declare the default exchange
at instantiation. Default is :const:`True`.
:keyword on_return: Callback to call for undeliverable messages,
when the `mandatory` or `immediate` arguments to
:meth:`publish` is used. This callback needs the following
signature: `(exception, exchange, routing_key, message)`.
Note that the producer needs to drain events to use this feature.
"""
#: Default exchange
exchange = None
#: Default routing key.
routing_key = ''
#: Default serializer to use. Default is JSON.
serializer = None
#: Default compression method. Disabled by default.
compression = None
#: By default the exchange is declared at instantiation.
#: If you want to declare manually then you can set this
#: to :const:`False`.
auto_declare = True
#: Basic return callback.
on_return = None
#: Set if channel argument was a Connection instance (using
#: default_channel).
__connection__ = None
def __init__(self, channel, exchange=None, routing_key=None,
serializer=None, auto_declare=None, compression=None,
on_return=None):
self._channel = channel
self.exchange = exchange
self.routing_key = routing_key or self.routing_key
self.serializer = serializer or self.serializer
self.compression = compression or self.compression
self.on_return = on_return or self.on_return
self._channel_promise = None
if self.exchange is None:
self.exchange = Exchange('')
if auto_declare is not None:
self.auto_declare = auto_declare
if self._channel:
self.revive(self._channel)
def __repr__(self):
return '<Producer: {0.channel}>'.format(self)
def __reduce__(self):
return self.__class__, self.__reduce_args__()
def __reduce_args__(self):
return (None, self.exchange, self.routing_key, self.serializer,
self.auto_declare, self.compression)
def declare(self):
"""Declare the exchange.
This happens automatically at instantiation if
:attr:`auto_declare` is enabled.
"""
if self.exchange.name:
self.exchange.declare()
def maybe_declare(self, entity, retry=False, **retry_policy):
"""Declare the exchange if it hasn't already been declared
during this session."""
if entity:
from .common import maybe_declare
return maybe_declare(entity, self.channel, retry, **retry_policy)
def publish(self, body, routing_key=None, delivery_mode=None,
mandatory=False, immediate=False, priority=0,
content_type=None, content_encoding=None, serializer=None,
headers=None, compression=None, exchange=None, retry=False,
retry_policy=None, declare=[], **properties):
"""Publish message to the specified exchange.
:param body: Message body.
:keyword routing_key: Message routing key.
:keyword delivery_mode: See :attr:`delivery_mode`.
:keyword mandatory: Currently not supported.
:keyword immediate: Currently not supported.
:keyword priority: Message priority. A number between 0 and 9.
:keyword content_type: Content type. Default is auto-detect.
:keyword content_encoding: Content encoding. Default is auto-detect.
:keyword serializer: Serializer to use. Default is auto-detect.
:keyword compression: Compression method to use. Default is none.
:keyword headers: Mapping of arbitrary headers to pass along
with the message body.
:keyword exchange: Override the exchange. Note that this exchange
must have been declared.
:keyword declare: Optional list of required entities that must
have been declared before publishing the message. The entities
will be declared using :func:`~kombu.common.maybe_declare`.
:keyword retry: Retry publishing, or declaring entities if the
connection is lost.
:keyword retry_policy: Retry configuration, this is the keywords
supported by :meth:`~kombu.Connection.ensure`.
:keyword \*\*properties: Additional message properties, see AMQP spec.
"""
headers = {} if headers is None else headers
retry_policy = {} if retry_policy is None else retry_policy
routing_key = self.routing_key if routing_key is None else routing_key
compression = self.compression if compression is None else compression
exchange = exchange or self.exchange
if isinstance(exchange, Exchange):
delivery_mode = delivery_mode or exchange.delivery_mode
exchange = exchange.name
else:
delivery_mode = delivery_mode or self.exchange.delivery_mode
if not isinstance(delivery_mode, int_types):
delivery_mode = DELIVERY_MODES[delivery_mode]
properties['delivery_mode'] = delivery_mode
body, content_type, content_encoding = self._prepare(
body, serializer, content_type, content_encoding,
compression, headers)
publish = self._publish
if retry:
publish = self.connection.ensure(self, publish, **retry_policy)
return publish(body, priority, content_type,
content_encoding, headers, properties,
routing_key, mandatory, immediate, exchange, declare)
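# A minimal publishing sketch (broker URL, exchange name and payload are
# illustrative; assumes a reachable broker):
#
#     from kombu import Connection, Exchange
#
#     with Connection('amqp://guest:guest@localhost//') as conn:
#         producer = Producer(conn, exchange=Exchange('tasks', type='direct'))
#         producer.publish({'job': 42}, routing_key='tasks',
#                          retry=True, retry_policy={'max_retries': 3})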
def _publish(self, body, priority, content_type, content_encoding,
headers, properties, routing_key, mandatory,
immediate, exchange, declare):
channel = self.channel
message = channel.prepare_message(
body, priority, content_type,
content_encoding, headers, properties,
)
if declare:
maybe_declare = self.maybe_declare
[maybe_declare(entity) for entity in declare]
return channel.basic_publish(
message,
exchange=exchange, routing_key=routing_key,
mandatory=mandatory, immediate=immediate,
)
def _get_channel(self):
channel = self._channel
if isinstance(channel, ChannelPromise):
channel = self._channel = channel()
self.exchange.revive(channel)
if self.on_return:
channel.events['basic_return'].add(self.on_return)
return channel
def _set_channel(self, channel):
self._channel = channel
channel = property(_get_channel, _set_channel)
def revive(self, channel):
"""Revive the producer after connection loss."""
if is_connection(channel):
connection = channel
self.__connection__ = connection
channel = ChannelPromise(lambda: connection.default_channel)
if isinstance(channel, ChannelPromise):
self._channel = channel
self.exchange = self.exchange(channel)
else:
# Channel already concrete
self._channel = channel
if self.on_return:
self._channel.events['basic_return'].add(self.on_return)
self.exchange = self.exchange(channel)
if self.auto_declare:
# auto_declare is not recommended as this will force
# evaluation of the channel.
self.declare()
def __enter__(self):
return self
def __exit__(self, *exc_info):
self.release()
def release(self):
pass
close = release
def _prepare(self, body, serializer=None, content_type=None,
content_encoding=None, compression=None, headers=None):
# No content_type? Then we're serializing the data internally.
if not content_type:
serializer = serializer or self.serializer
(content_type, content_encoding,
body) = encode(body, serializer=serializer)
else:
# If the programmer doesn't want us to serialize,
# make sure content_encoding is set.
if isinstance(body, text_t):
if not content_encoding:
content_encoding = 'utf-8'
body = body.encode(content_encoding)
# If they passed in a string, we can't know anything
# about it. So assume it's binary data.
elif not content_encoding:
content_encoding = 'binary'
if compression:
body, headers['compression'] = compress(body, compression)
return body, content_type, content_encoding
@property
def connection(self):
try:
return self.__connection__ or self.channel.connection.client
except AttributeError:
pass
class Consumer(object):
"""Message consumer.
:param channel: see :attr:`channel`.
:param queues: see :attr:`queues`.
:keyword no_ack: see :attr:`no_ack`.
:keyword auto_declare: see :attr:`auto_declare`
:keyword callbacks: see :attr:`callbacks`.
:keyword on_message: See :attr:`on_message`
:keyword on_decode_error: see :attr:`on_decode_error`.
"""
#: The connection/channel to use for this consumer.
channel = None
#: A single :class:`~kombu.Queue`, or a list of queues to
#: consume from.
queues = None
#: Flag for message acknowledgment disabled/enabled.
#: Enabled by default.
no_ack = None
#: By default all entities will be declared at instantiation, if you
#: want to handle this manually you can set this to :const:`False`.
auto_declare = True
#: List of callbacks called in order when a message is received.
#:
#: The signature of the callbacks must take two arguments:
#: `(body, message)`, which is the decoded message body and
#: the `Message` instance (a subclass of
#: :class:`~kombu.transport.base.Message`).
callbacks = None
#: Optional function called whenever a message is received.
#:
#: When defined this function will be called instead of the
#: :meth:`receive` method, and :attr:`callbacks` will be disabled.
#:
#: So this can be used as an alternative to :attr:`callbacks` when
#: you don't want the body to be automatically decoded.
#: Note that the message will still be decompressed if the message
#: has the ``compression`` header set.
#:
#: The signature of the callback must take a single argument,
#: which is the raw message object (a subclass of
#: :class:`~kombu.transport.base.Message`).
#:
#: Also note that the ``message.body`` attribute, which is the raw
#: contents of the message body, may in some cases be a read-only
#: :class:`buffer` object.
on_message = None
#: Callback called when a message can't be decoded.
#:
#: The signature of the callback must take two arguments: `(message,
#: exc)`, which is the message that can't be decoded and the exception
#: that occurred while trying to decode it.
on_decode_error = None
_tags = count(1) # global
def __init__(self, channel, queues=None, no_ack=None, auto_declare=None,
callbacks=None, on_decode_error=None, on_message=None):
self.channel = channel
self.queues = self.queues or [] if queues is None else queues
self.no_ack = self.no_ack if no_ack is None else no_ack
self.callbacks = (self.callbacks or [] if callbacks is None
else callbacks)
self.on_message = on_message
self._active_tags = {}
if auto_declare is not None:
self.auto_declare = auto_declare
if on_decode_error is not None:
self.on_decode_error = on_decode_error
if self.channel:
self.revive(self.channel)
def revive(self, channel):
"""Revive consumer after connection loss."""
self._active_tags.clear()
channel = self.channel = maybe_channel(channel)
self.queues = [queue(self.channel)
for queue in maybe_list(self.queues)]
for queue in self.queues:
queue.revive(channel)
if self.auto_declare:
self.declare()
def declare(self):
"""Declare queues, exchanges and bindings.
This is done automatically at instantiation if :attr:`auto_declare`
is set.
"""
for queue in self.queues:
queue.declare()
def register_callback(self, callback):
"""Register a new callback to be called when a message
is received.
The signature of the callback needs to accept two arguments:
`(body, message)`, which is the decoded message body
and the `Message` instance (a subclass of
:class:`~kombu.transport.base.Message`.
"""
self.callbacks.append(callback)
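# A minimal consuming sketch (broker URL and queue name are illustrative;
# assumes a reachable broker):
#
#     from kombu import Connection, Queue
#
#     def handle(body, message):
#         print(body)
#         message.ack()
#
#     with Connection('amqp://guest:guest@localhost//') as conn:
#         with Consumer(conn, queues=[Queue('tasks')], callbacks=[handle]):
#             conn.drain_events(timeout=2)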
def __enter__(self):
self.consume()
return self
def __exit__(self, *exc_info):
try:
self.cancel()
except Exception:
pass
def add_queue(self, queue):
queue = queue(self.channel)
if self.auto_declare:
queue.declare()
self.queues.append(queue)
return queue
def add_queue_from_dict(self, queue, **options):
return self.add_queue(Queue.from_dict(queue, **options))
def consume(self, no_ack=None):
if self.queues:
no_ack = self.no_ack if no_ack is None else no_ack
H, T = self.queues[:-1], self.queues[-1]
for queue in H:
self._basic_consume(queue, no_ack=no_ack, nowait=True)
self._basic_consume(T, no_ack=no_ack, nowait=False)
def cancel(self):
"""End all active queue consumers.
This does not affect already delivered messages, but it does
mean the server will not send any more messages for this consumer.
"""
cancel = self.channel.basic_cancel
for tag in values(self._active_tags):
cancel(tag)
self._active_tags.clear()
close = cancel
def cancel_by_queue(self, queue):
"""Cancel consumer by queue name."""
try:
tag = self._active_tags.pop(queue)
except KeyError:
pass
else:
self.queues[:] = [q for q in self.queues if q.name != queue]
self.channel.basic_cancel(tag)
def consuming_from(self, queue):
name = queue
if isinstance(queue, Queue):
name = queue.name
return any(q.name == name for q in self.queues)
def purge(self):
"""Purge messages from all queues.
.. warning::
This will *delete all ready messages*, there is no
undo operation.
"""
return sum(queue.purge() for queue in self.queues)
def flow(self, active):
"""Enable/disable flow from peer.
This is a simple flow-control mechanism that a peer can use
to avoid overflowing its queues or otherwise finding itself
receiving more messages than it can process.
The peer that receives a request to stop sending content
will finish sending the current content (if any), and then wait
until flow is reactivated.
"""
self.channel.flow(active)
def qos(self, prefetch_size=0, prefetch_count=0, apply_global=False):
"""Specify quality of service.
The client can request that messages should be sent in
advance so that when the client finishes processing a message,
the following message is already held locally, rather than needing
to be sent down the channel. Prefetching gives a performance
improvement.
The prefetch window is ignored if the :attr:`no_ack` option is set.
:param prefetch_size: Specify the prefetch window in octets.
The server will send a message in advance if it is equal to
or smaller in size than the available prefetch size (and
also falls within other prefetch limits). May be set to zero,
meaning "no specific limit", although other prefetch limits
may still apply.
:param prefetch_count: Specify the prefetch window in terms of
whole messages.
:param apply_global: Apply new settings globally on all channels.
Currently not supported by RabbitMQ.
"""
return self.channel.basic_qos(prefetch_size,
prefetch_count,
apply_global)
def recover(self, requeue=False):
"""Redeliver unacknowledged messages.
Asks the broker to redeliver all unacknowledged messages
on the specified channel.
:keyword requeue: By default the messages will be redelivered
to the original recipient. With `requeue` set to true, the
server will attempt to requeue the message, potentially then
delivering it to an alternative subscriber.
"""
return self.channel.basic_recover(requeue=requeue)
def receive(self, body, message):
"""Method called when a message is received.
This dispatches to the registered :attr:`callbacks`.
:param body: The decoded message body.
:param message: The `Message` instance.
:raises NotImplementedError: If no consumer callbacks have been
registered.
"""
callbacks = self.callbacks
if not callbacks:
raise NotImplementedError('Consumer does not have any callbacks')
[callback(body, message) for callback in callbacks]
def _basic_consume(self, queue, consumer_tag=None,
no_ack=no_ack, nowait=True):
tag = self._active_tags.get(queue.name)
if tag is None:
tag = self._add_tag(queue, consumer_tag)
queue.consume(tag, self._receive_callback,
no_ack=no_ack, nowait=nowait)
return tag
def _add_tag(self, queue, consumer_tag=None):
tag = consumer_tag or str(next(self._tags))
self._active_tags[queue.name] = tag
return tag
def _receive_callback(self, message):
on_m, channel, decoded = self.on_message, self.channel, None
try:
m2p = getattr(channel, 'message_to_python', None)
if m2p:
message = m2p(message)
decoded = None if on_m else message.decode()
except Exception as exc:
if not self.on_decode_error:
raise
self.on_decode_error(message, exc)
else:
return on_m(message) if on_m else self.receive(decoded, message)
def __repr__(self):
return '<Consumer: {0.queues}>'.format(self)
@property
def connection(self):
try:
return self.channel.connection.client
except AttributeError:
pass
|
grammy3/rewards
|
refs/heads/master
|
share/qt/extract_strings_qt.py
|
1294
|
#!/usr/bin/python
'''
Extract _("...") strings for translation and convert to Qt4 stringdefs so that
they can be picked up by Qt linguist.
'''
from subprocess import Popen, PIPE
import glob
OUT_CPP="src/qt/bitcoinstrings.cpp"
EMPTY=['""']
def parse_po(text):
"""
Parse 'po' format produced by xgettext.
Return a list of (msgid,msgstr) tuples.
"""
messages = []
msgid = []
msgstr = []
in_msgid = False
in_msgstr = False
for line in text.split('\n'):
line = line.rstrip('\r')
if line.startswith('msgid '):
if in_msgstr:
messages.append((msgid, msgstr))
in_msgstr = False
# message start
in_msgid = True
msgid = [line[6:]]
elif line.startswith('msgstr '):
in_msgid = False
in_msgstr = True
msgstr = [line[7:]]
elif line.startswith('"'):
if in_msgid:
msgid.append(line)
if in_msgstr:
msgstr.append(line)
if in_msgstr:
messages.append((msgid, msgstr))
return messages
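# Example of the xgettext 'po' fragment this parser handles, including a
# multi-line msgid (content is illustrative):
#
#     msgid "Usage:"
#     msgstr ""
#     msgid ""
#     "Warning: -paytxfee is set "
#     "very high!"
#     msgstr ""
#
# Each entry becomes a (msgid, msgstr) pair of raw quoted-string lists.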
files = glob.glob('src/*.cpp') + glob.glob('src/*.h')
# xgettext -n --keyword=_ $FILES
child = Popen(['xgettext','--output=-','-n','--keyword=_'] + files, stdout=PIPE)
(out, err) = child.communicate()
messages = parse_po(out)
f = open(OUT_CPP, 'w')
f.write("""#include <QtGlobal>
// Automatically generated by extract_strings.py
#ifdef __GNUC__
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif
""")
f.write('static const char UNUSED *bitcoin_strings[] = {')
for (msgid, msgstr) in messages:
if msgid != EMPTY:
f.write('QT_TRANSLATE_NOOP("bitcoin-core", %s),\n' % ('\n'.join(msgid)))
f.write('};')
f.close()
|
allotria/intellij-community
|
refs/heads/master
|
python/testData/quickFixes/PyMoveAttributeToInitQuickFixTest/skipDocstring_after.py
|
79
|
__author__ = 'ktisha'
class A:
"""
class docstring
"""
def __init__(self):
self.b = 1
def foo(self):
pass
|
hs29590/pi_electroplating
|
refs/heads/master
|
pydobot/message.py
|
1
|
class Message:
def __init__(self, b=None):
if b is None:
self.header = bytes([0xAA, 0xAA])
self.len = 0x00
self.ctrl = 0x00
self.params = bytes([])
self.checksum = None
else:
self.header = b[0:2]
self.len = b[2]
self.id = b[3]
self.ctrl = b[4]
self.params = b[5:-1]
self.checksum = b[-1:][0]
def __repr__(self):
return "Message()"
def __str__(self):
self.refresh()
ret = "%s:%d:%d:%d:%s:%s" % (self.header.hex(), self.len, self.id, self.ctrl, self.params.hex(), self.checksum)
return ret.upper()
def refresh(self):
if self.checksum is None:
self.checksum = self.id + self.ctrl
for i in range(len(self.params)):
self.checksum += self.params[i]
self.checksum = self.checksum % 256
self.checksum = 2 ** 8 - self.checksum
self.checksum = self.checksum % 256
self.len = 0x02 + len(self.params)
def bytes(self):
self.refresh()
if len(self.params) > 0:
command = bytearray([0xAA, 0xAA, self.len, self.id, self.ctrl])
command.extend(self.params)
command.append(self.checksum)
else:
command = bytes([0xAA, 0xAA, self.len, self.id, self.ctrl, self.checksum])
return command
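# A small usage sketch (the id/ctrl/params values are arbitrary): the
# checksum computed by refresh() is the two's complement of
# (id + ctrl + sum(params)) modulo 256.
#
#     msg = Message()
#     msg.id = 84
#     msg.ctrl = 0x03
#     msg.params = bytes([0x00, 0x2A])
#     frame = msg.bytes()   # AA AA | len | id | ctrl | params | checksum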
|
ilstreltsov/django-db-mailer
|
refs/heads/master
|
dbmail/migrations/0007_auto_20150708_2016.py
|
2
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('dbmail', '0006_auto_20150708_0714'),
]
operations = [
migrations.AlterModelOptions(
name='mailsubscription',
options={'verbose_name': 'Mail Subscription', 'verbose_name_plural': 'Mail Subscriptions'},
),
migrations.AlterField(
model_name='mailsubscription',
name='address',
field=models.CharField(help_text='Must be phone number/email/token', unique=True, max_length=60, verbose_name='Address'),
),
migrations.AlterField(
model_name='mailsubscription',
name='backend',
field=models.CharField(default=b'dbmail.backends.mail', max_length=50, verbose_name='Backend', choices=[(b'dbmail.backends.mail', 'MailBox'), (b'dbmail.backends.push', 'Push'), (b'dbmail.backends.sms', 'SMS'), (b'dbmail.backends.tts', 'TTS')]),
),
migrations.AlterField(
model_name='mailsubscription',
name='defer_at_allowed_hours',
field=models.BooleanField(default=False, verbose_name='Defer at allowed hours'),
),
migrations.AlterField(
model_name='mailsubscription',
name='end_hour',
field=models.CharField(default=b'23:59', max_length=5, verbose_name='End hour'),
),
migrations.AlterField(
model_name='mailsubscription',
name='is_checked',
field=models.BooleanField(default=False, db_index=True, verbose_name='Is checked'),
),
migrations.AlterField(
model_name='mailsubscription',
name='is_enabled',
field=models.BooleanField(default=True, db_index=True, verbose_name='Is enabled'),
),
migrations.AlterField(
model_name='mailsubscription',
name='start_hour',
field=models.CharField(default=b'00:00', max_length=5, verbose_name='Start hour'),
),
]
|
Maronato/aosalunos
|
refs/heads/master
|
misago/apps/usercp/signature/views.py
|
3
|
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from django.utils.translation import ugettext as _
from misago import messages
from misago.apps.errors import error403, error404
from misago.decorators import block_guest
from misago.markdown import signature_markdown
from misago.messages import Message
from misago.shortcuts import render_to_response
from misago.apps.usercp.template import RequestContext
from misago.apps.usercp.signature.forms import SignatureForm
@block_guest
def signature(request):
# Intercept all requests if we can't use signature
if not request.acl.usercp.can_use_signature():
return error403(request)
if request.user.signature_ban:
return render_to_response('usercp/signature_banned.html',
context_instance=RequestContext(request, {
'tab': 'signature'}))
siggy_text = ''
message = request.messages.get_message('usercp_signature')
if request.method == 'POST':
form = SignatureForm(request.POST, request=request, initial={'signature': request.user.signature})
if form.is_valid():
request.user.signature = form.cleaned_data['signature']
if request.user.signature:
request.user.signature_preparsed = signature_markdown(request.acl,
request.user.signature)
else:
request.user.signature_preparsed = None
request.user.save(force_update=True)
messages.success(request, _("Your signature has been changed."), 'usercp_signature')
return redirect(reverse('usercp_signature'))
else:
message = Message(form.non_field_errors()[0], messages.ERROR)
else:
form = SignatureForm(request=request, initial={'signature': request.user.signature})
return render_to_response('usercp/signature.html',
context_instance=RequestContext(request, {
'message': message,
'tab': 'signature',
'form': form}))
|
iphoting/healthchecks
|
refs/heads/heroku
|
hc/front/tests/test_details.py
|
2
|
from datetime import datetime, timedelta as td
from unittest.mock import patch
from django.utils import timezone
from hc.api.models import Flip, Check, Ping
from hc.test import BaseTestCase
class DetailsTestCase(BaseTestCase):
def setUp(self):
super().setUp()
self.check = Check.objects.create(project=self.project)
ping = Ping.objects.create(owner=self.check)
# Older MySQL versions don't store microseconds. This makes sure
# the ping is older than any notifications we may create later:
ping.created = "2000-01-01T00:00:00+00:00"
ping.save()
self.url = "/checks/%s/details/" % self.check.code
def test_it_works(self):
self.client.login(username="alice@example.org", password="password")
r = self.client.get(self.url)
self.assertContains(r, "How To Ping", status_code=200)
# The page should contain timezone strings
self.assertContains(r, "Europe/Riga")
def test_it_checks_ownership(self):
self.client.login(username="charlie@example.org", password="password")
r = self.client.get(self.url)
self.assertEqual(r.status_code, 404)
def test_it_shows_cron_expression(self):
self.check.kind = "cron"
self.check.save()
self.client.login(username="alice@example.org", password="password")
r = self.client.get(self.url)
self.assertContains(r, "Cron Expression", status_code=200)
def test_it_allows_cross_team_access(self):
self.client.login(username="bob@example.org", password="password")
r = self.client.get(self.url)
self.assertEqual(r.status_code, 200)
def test_it_shows_new_check_notice(self):
self.client.login(username="alice@example.org", password="password")
r = self.client.get(self.url + "?new")
self.assertContains(r, "Your new check is ready!", status_code=200)
def test_it_hides_actions_from_readonly_users(self):
self.bobs_membership.rw = False
self.bobs_membership.save()
self.client.login(username="bob@example.org", password="password")
r = self.client.get(self.url)
self.assertNotContains(r, "edit-name", status_code=200)
self.assertNotContains(r, "edit-desc")
self.assertNotContains(r, "Filtering Rules")
self.assertNotContains(r, "pause-btn")
self.assertNotContains(r, "Change Schedule")
self.assertNotContains(r, "Create a Copy…")
self.assertNotContains(r, "transfer-btn")
self.assertNotContains(r, "details-remove-check")
def test_it_hides_resume_action_from_readonly_users(self):
self.bobs_membership.rw = False
self.bobs_membership.save()
self.check.status = "paused"
self.check.manual_resume = True
self.check.save()
self.client.login(username="bob@example.org", password="password")
r = self.client.get(self.url)
self.assertNotContains(r, "resume-btn", status_code=200)
def test_crontab_example_guesses_schedules(self):
self.client.login(username="alice@example.org", password="password")
pairs = [
(td(minutes=1), "* * * * *"),
(td(minutes=12), "*/12 * * * *"),
(td(hours=1), "0 * * * *"),
(td(hours=6), "0 */6 * * *"),
(td(days=1), "0 0 * * *"),
]
for timeout, expression in pairs:
self.check.timeout = timeout
self.check.save()
r = self.client.get(self.url)
self.assertContains(r, f"{expression} /your/command.sh")
self.assertNotContains(r, 'FIXME: replace "* * * * *"')
def test_crontab_example_handles_unsupported_timeout_values(self):
self.client.login(username="alice@example.org", password="password")
self.check.timeout = td(minutes=13)
self.check.save()
r = self.client.get(self.url)
self.assertContains(r, f"* * * * * /your/command.sh")
self.assertContains(r, 'FIXME: replace "* * * * *"')
@patch("hc.lib.date.timezone.now")
def test_it_calculates_downtime_summary(self, mock_now):
mock_now.return_value = datetime(2020, 2, 1, tzinfo=timezone.utc)
self.check.created = datetime(2019, 1, 1, 0, 0, 0, tzinfo=timezone.utc)
self.check.save()
# going down on Jan 15, at 12:00
f1 = Flip(owner=self.check)
f1.created = datetime(2020, 1, 15, 12, 0, 0, tzinfo=timezone.utc)
f1.old_status = "up"
f1.new_status = "down"
f1.save()
# back up on Jan 15, at 13:00
f2 = Flip(owner=self.check)
f2.created = datetime(2020, 1, 15, 13, 0, 0, tzinfo=timezone.utc)
f2.old_status = "down"
f2.new_status = "up"
f2.save()
self.client.login(username="alice@example.org", password="password")
r = self.client.get(self.url)
self.assertContains(r, "Feb. 2020")
self.assertContains(r, "Jan. 2020")
self.assertContains(r, "Dec. 2019")
# The summary for Jan. 2020 should be "1 downtime, 1 hour total"
self.assertContains(r, "1 downtime, 1 hour total", html=True)
@patch("hc.lib.date.timezone.now")
def test_it_handles_months_when_check_did_not_exist(self, mock_now):
mock_now.return_value = datetime(2020, 2, 1, tzinfo=timezone.utc)
self.check.created = datetime(2020, 1, 10, 0, 0, 0, tzinfo=timezone.utc)
self.check.save()
self.client.login(username="alice@example.org", password="password")
r = self.client.get(self.url)
self.assertContains(r, "Feb. 2020")
self.assertContains(r, "Jan. 2020")
self.assertContains(r, "Dec. 2019")
# The summary for Dec. 2019 should be "–"
self.assertContains(r, "<td>–</td>", html=True)
|
skyfallen/Kaggle-Diabetic-Retinopathy-Detection
|
refs/heads/master
|
Code/traincaffe/kappa.py
|
1
|
import sys
import numpy as np
def main(args):
# Read options
unweighted = squared = linear = verbose = csv = False
if "-u" in args or "--unweighted" in args:
unweighted = True
elif "-s" in args or "--squared" in args:
squared = True
else:
linear = True
if "-v" in args or "--verbose" in args:
verbose = True
if "-c" in args or "--csv" in args:
csv = True
# Read ratings. Last argument is the filename
if csv:
ratings = np.genfromtxt(args[-1], delimiter=",")
else:
ratings = np.genfromtxt(args[-1])
categories = int(np.amax(ratings)) + 1
subjects = ratings.size / 2
# Build weight matrix
weighted = np.empty((categories, categories))
for i in range(categories):
for j in range(categories):
if unweighted:
weighted[i, j] = (i != j)
elif squared:
weighted[i, j] = abs(i - j) ** 2
else: #linear
weighted[i, j] = abs(i - j)
# Build observed matrix
observed = np.zeros((categories, categories))
distributions = np.zeros((categories, 2))
for k in range(subjects):
    # genfromtxt yields floats; cast to int before using them as indices
    i, j = int(ratings[k, 0]), int(ratings[k, 1])
    observed[i, j] += 1
    distributions[i, 0] += 1
    distributions[j, 1] += 1
# Normalize observed and distribution arrays
observed = observed / subjects
distributions = distributions / subjects
# Build expected array
expected = np.empty((categories, categories))
for i in range(categories):
for j in range(categories):
expected[i, j] = distributions[i, 0] * distributions[j, 1]
# Calculate kappa
kappa = 1.0 - (sum(sum(weighted * observed)) / sum(sum(weighted * expected)))
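# In formula form: kappa = 1 - (sum_ij w_ij*o_ij) / (sum_ij w_ij*e_ij),
# with w the weight matrix, o the observed proportions and e the
# chance-expected proportions built above. Sanity check: with perfect
# agreement (and more than one category in use) every off-diagonal o_ij
# is 0, so the numerator vanishes and kappa is exactly 1.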
if verbose:
print "Kappa",
if unweighted:
print "(unweighted):",
elif squared:
print "(squared):",
else:
print "(equal weights):",
print kappa
print "Categories: " + str(categories)
print "Subjects: " + str(subjects)
else:
print kappa
def usage():
print "usage: python kappa.py {[-l]|-u|-s} [-v] [-c] FILENAME"
print
print "Calculates Weighted Kappa and Cohen's Kappa for interrater agreement"
print "(two raters, any number of ordinal categories)"
print "See http://en.wikipedia.org/wiki/Cohen's_kappa for more information"
print
print "FILENAME must be a text file with a pair of integers in each line."
print "The values in each pair correspond to the rating that each of the"
print "two reviewers gave to a particular subject."
print "The pairs must be whitespaced-separated (or comma-separated, with the -c flag)."
print
print "Options:"
print "--------"
print
print "-l --linear: Linear weights for disagreements (default)"
print "-u --unweighted: Cohen's Kappa (unweighted agreement/disagreement)"
print "-s --squared: Squared weights for disagreements"
print "-v --verbose: Includes number of categories and subjects in the output"
print "-c --csv: For text files with comma-separated values"
if __name__ == "__main__":
if len(sys.argv) == 1 or "--help" in sys.argv:
usage()
else:
main(sys.argv[1:])
|
howcouldyouforgetthisusername/aries-analyzer
|
refs/heads/master
|
build/main/setuptools-27.2.0-py2.7.egg/setuptools/archive_util.py
|
36
|
"""Utilities for extracting common archive formats"""
import zipfile
import tarfile
import os
import shutil
import posixpath
import contextlib
from distutils.errors import DistutilsError
from pkg_resources import ensure_directory, ContextualZipFile
__all__ = [
"unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
"UnrecognizedFormat", "extraction_drivers", "unpack_directory",
]
class UnrecognizedFormat(DistutilsError):
"""Couldn't recognize the archive type"""
def default_filter(src, dst):
"""The default progress/filter callback; returns True for all files"""
return dst
def unpack_archive(filename, extract_dir, progress_filter=default_filter,
drivers=None):
"""Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
`progress_filter` is a function taking two arguments: a source path
internal to the archive ('/'-separated), and a filesystem path where it
will be extracted. The callback must return the desired extract path
(which may be the same as the one passed in), or else ``None`` to skip
that file or directory. The callback can thus be used to report on the
progress of the extraction, as well as to filter the items extracted or
alter their extraction paths.
`drivers`, if supplied, must be a non-empty sequence of functions with the
same signature as this function (minus the `drivers` argument), that raise
``UnrecognizedFormat`` if they do not support extracting the designated
archive type. The `drivers` are tried in sequence until one is found that
does not raise an error, or until all are exhausted (in which case
``UnrecognizedFormat`` is raised). If you do not supply a sequence of
drivers, the module's ``extraction_drivers`` constant will be used, which
means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that
order.
"""
for driver in drivers or extraction_drivers:
try:
driver(filename, extract_dir, progress_filter)
except UnrecognizedFormat:
continue
else:
return
else:
raise UnrecognizedFormat(
"Not a recognized archive type: %s" % filename
)
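# Illustrative usage sketch (not part of the original module): driving
# `unpack_archive` with a custom `progress_filter` that logs each entry and
# skips a subtree. The archive and directory names here are hypothetical.
def _example_unpack(archive='dist/pkg-1.0.tar.gz', dest='build/pkg'):
    def log_and_skip_docs(src, dst):
        if src.startswith('docs/'):
            return None  # returning None skips this entry entirely
        print('extracting %s -> %s' % (src, dst))
        return dst       # the returned path is where the entry is written
    unpack_archive(archive, dest, progress_filter=log_and_skip_docs)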
def unpack_directory(filename, extract_dir, progress_filter=default_filter):
""""Unpack" a directory, using the same interface as for archives
Raises ``UnrecognizedFormat`` if `filename` is not a directory
"""
if not os.path.isdir(filename):
raise UnrecognizedFormat("%s is not a directory" % filename)
paths = {
filename: ('', extract_dir),
}
for base, dirs, files in os.walk(filename):
src, dst = paths[base]
for d in dirs:
paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d)
for f in files:
target = os.path.join(dst, f)
target = progress_filter(src + f, target)
            if not target:
                # progress_filter returned None; skip this entry
                continue
ensure_directory(target)
f = os.path.join(base, f)
shutil.copyfile(f, target)
shutil.copystat(f, target)
def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
"""Unpack zip `filename` to `extract_dir`
Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined
by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation
of the `progress_filter` argument.
"""
if not zipfile.is_zipfile(filename):
raise UnrecognizedFormat("%s is not a zip file" % (filename,))
with ContextualZipFile(filename) as z:
for info in z.infolist():
name = info.filename
# don't extract absolute paths or ones with .. in them
if name.startswith('/') or '..' in name.split('/'):
continue
target = os.path.join(extract_dir, *name.split('/'))
target = progress_filter(name, target)
if not target:
continue
if name.endswith('/'):
# directory
ensure_directory(target)
else:
# file
ensure_directory(target)
data = z.read(info.filename)
with open(target, 'wb') as f:
f.write(data)
unix_attributes = info.external_attr >> 16
if unix_attributes:
os.chmod(target, unix_attributes)
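# Illustrative only (not part of the original module): the absolute-path /
# '..' check above is what keeps zip entries from escaping `extract_dir`.
# A hypothetical standalone version of the same test:
def _is_unsafe_member(name):
    return name.startswith('/') or '..' in name.split('/')
# _is_unsafe_member('../../etc/passwd') -> True
# _is_unsafe_member('pkg/module.py')    -> False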
def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
"""Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
by ``tarfile.open()``). See ``unpack_archive()`` for an explanation
of the `progress_filter` argument.
"""
try:
tarobj = tarfile.open(filename)
except tarfile.TarError:
raise UnrecognizedFormat(
"%s is not a compressed or uncompressed tar file" % (filename,)
)
with contextlib.closing(tarobj):
# don't do any chowning!
tarobj.chown = lambda *args: None
for member in tarobj:
name = member.name
# don't extract absolute paths or ones with .. in them
if not name.startswith('/') and '..' not in name.split('/'):
prelim_dst = os.path.join(extract_dir, *name.split('/'))
                # resolve any links and extract the link targets as normal
                # files
while member is not None and (member.islnk() or member.issym()):
linkpath = member.linkname
if member.issym():
base = posixpath.dirname(member.name)
linkpath = posixpath.join(base, linkpath)
linkpath = posixpath.normpath(linkpath)
member = tarobj._getmember(linkpath)
if member is not None and (member.isfile() or member.isdir()):
final_dst = progress_filter(name, prelim_dst)
if final_dst:
if final_dst.endswith(os.sep):
final_dst = final_dst[:-1]
try:
# XXX Ugh
tarobj._extract_member(member, final_dst)
except tarfile.ExtractError:
# chown/chmod/mkfifo/mknode/makedev failed
pass
return True
extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
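# Illustrative sketch (not part of the original module): a custom driver uses
# the same signature as the built-in ones and raises UnrecognizedFormat for
# anything it cannot handle, so it composes with `unpack_archive`. The
# '.noop' format and the helper name below are hypothetical.
def _unpack_noop(filename, extract_dir, progress_filter=default_filter):
    if not filename.endswith('.noop'):
        raise UnrecognizedFormat("%s is not a .noop archive" % (filename,))
    # a real driver would extract entries here, routing each destination
    # through progress_filter the way unpack_zipfile and unpack_tarfile do
    return
# e.g. unpack_archive('pkg.noop', 'build', drivers=(_unpack_noop,) + extraction_drivers)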
|
Beauhurst/django
|
refs/heads/master
|
tests/template_tests/templatetags/custom.py
|
36
|
import operator
from django import template
from django.template.defaultfilters import stringfilter
from django.utils.html import escape, format_html
register = template.Library()
@register.filter
@stringfilter
def trim(value, num):
return value[:num]
@register.filter
def noop(value, param=None):
"""A noop filter that always return its first argument and does nothing with
its second (optional) one.
Useful for testing out whitespace in filter arguments (see #19882)."""
return value
@register.simple_tag(takes_context=True)
def context_stack_length(context):
return len(context.dicts)
@register.simple_tag
def no_params():
"""Expected no_params __doc__"""
return "no_params - Expected result"
no_params.anything = "Expected no_params __dict__"
@register.simple_tag
def one_param(arg):
"""Expected one_param __doc__"""
return "one_param - Expected result: %s" % arg
one_param.anything = "Expected one_param __dict__"
@register.simple_tag(takes_context=False)
def explicit_no_context(arg):
"""Expected explicit_no_context __doc__"""
return "explicit_no_context - Expected result: %s" % arg
explicit_no_context.anything = "Expected explicit_no_context __dict__"
@register.simple_tag(takes_context=True)
def no_params_with_context(context):
"""Expected no_params_with_context __doc__"""
return "no_params_with_context - Expected result (context value: %s)" % context['value']
no_params_with_context.anything = "Expected no_params_with_context __dict__"
@register.simple_tag(takes_context=True)
def params_and_context(context, arg):
"""Expected params_and_context __doc__"""
return "params_and_context - Expected result (context value: %s): %s" % (context['value'], arg)
params_and_context.anything = "Expected params_and_context __dict__"
@register.simple_tag
def simple_two_params(one, two):
"""Expected simple_two_params __doc__"""
return "simple_two_params - Expected result: %s, %s" % (one, two)
simple_two_params.anything = "Expected simple_two_params __dict__"
@register.simple_tag
def simple_keyword_only_param(*, kwarg):
return "simple_keyword_only_param - Expected result: %s" % kwarg
@register.simple_tag
def simple_keyword_only_default(*, kwarg=42):
return "simple_keyword_only_default - Expected result: %s" % kwarg
@register.simple_tag
def simple_one_default(one, two='hi'):
"""Expected simple_one_default __doc__"""
return "simple_one_default - Expected result: %s, %s" % (one, two)
simple_one_default.anything = "Expected simple_one_default __dict__"
@register.simple_tag
def simple_unlimited_args(one, two='hi', *args):
"""Expected simple_unlimited_args __doc__"""
return "simple_unlimited_args - Expected result: %s" % (
', '.join(str(arg) for arg in [one, two] + list(args))
)
simple_unlimited_args.anything = "Expected simple_unlimited_args __dict__"
@register.simple_tag
def simple_only_unlimited_args(*args):
"""Expected simple_only_unlimited_args __doc__"""
return "simple_only_unlimited_args - Expected result: %s" % ', '.join(str(arg) for arg in args)
simple_only_unlimited_args.anything = "Expected simple_only_unlimited_args __dict__"
@register.simple_tag
def simple_unlimited_args_kwargs(one, two='hi', *args, **kwargs):
"""Expected simple_unlimited_args_kwargs __doc__"""
# Sort the dictionary by key to guarantee the order for testing.
sorted_kwarg = sorted(kwargs.items(), key=operator.itemgetter(0))
return "simple_unlimited_args_kwargs - Expected result: %s / %s" % (
', '.join(str(arg) for arg in [one, two] + list(args)),
', '.join('%s=%s' % (k, v) for (k, v) in sorted_kwarg)
)
simple_unlimited_args_kwargs.anything = "Expected simple_unlimited_args_kwargs __dict__"
@register.simple_tag(takes_context=True)
def simple_tag_without_context_parameter(arg):
"""Expected simple_tag_without_context_parameter __doc__"""
return "Expected result"
simple_tag_without_context_parameter.anything = "Expected simple_tag_without_context_parameter __dict__"
@register.simple_tag(takes_context=True)
def escape_naive(context):
"""A tag that doesn't even think about escaping issues"""
return "Hello {0}!".format(context['name'])
@register.simple_tag(takes_context=True)
def escape_explicit(context):
"""A tag that uses escape explicitly"""
return escape("Hello {0}!".format(context['name']))
@register.simple_tag(takes_context=True)
def escape_format_html(context):
"""A tag that uses format_html"""
return format_html("Hello {0}!", context['name'])
@register.simple_tag(takes_context=True)
def current_app(context):
return "%s" % context.current_app
@register.simple_tag(takes_context=True)
def use_l10n(context):
return "%s" % context.use_l10n
@register.simple_tag(name='minustwo')
def minustwo_overridden_name(value):
return value - 2
register.simple_tag(lambda x: x - 1, name='minusone')
@register.tag('counter')
def counter(parser, token):
return CounterNode()
class CounterNode(template.Node):
def __init__(self):
self.count = 0
def render(self, context):
count = self.count
self.count = count + 1
return count
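# Illustrative usage sketch (not part of the original test module): once this
# library is loaded, the tags and filters above can be used from a template.
# The template string and results below are hypothetical but follow from the
# definitions above:
#
#   from django.template import Context, Template
#   t = Template("{% load custom %}{{ 'abcdef'|trim:3 }} {% minusone 5 %} {% minustwo 5 %}")
#   t.render(Context())  # -> "abc 4 3"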
|